3580: More error-resilient MBE expansion r=matklad a=flodiebold

This is the beginning of an attempt to make macro-by-example expansion more resilient, so that we still get an expansion even if no rule matches exactly, with the goal of making completion work better inside macro calls.

The general idea is to make everything return `(T, Option<ExpandError>)` instead of `Result<T, ExpandError>`, then try each macro arm in turn and choose the 'best' matching rule if none matches without errors. Selecting that 'best' match isn't done yet; I'm currently counting how many tokens were consumed from the arguments before an error occurred, but the heuristic also needs to take into account whether there were further patterns left with nothing to match.
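
As a rough illustration of that shape, here is a minimal sketch (not the actual rust-analyzer code; `best_match` and its ranking tuple are hypothetical stand-ins for the selection heuristic described above):

```rust
/// A value plus an optional error, instead of `Result<T, ExpandError>`:
/// expansion always yields *some* tree, and the error rides along with it.
pub struct ExpandResult<T>(pub T, pub Option<ExpandError>);

#[derive(Debug, Clone, PartialEq)]
pub enum ExpandError {
    NoMatchingRule,
    UnexpectedToken,
}

/// Per-rule match outcome; only the fields the ranking cares about.
#[derive(Debug, Default)]
pub struct Match {
    /// Top-level token trees of the argument left unconsumed by this rule.
    pub unmatched_tts: usize,
    /// Number of matching errors hit while trying this rule.
    pub err_count: usize,
}

/// Hypothetical helper: prefer the candidate that consumed the most input
/// and produced the fewest errors (lexicographic comparison of the tuple).
fn best_match(candidates: Vec<Match>) -> Option<Match> {
    candidates
        .into_iter()
        .min_by_key(|m| (m.unmatched_tts, m.err_count))
}
```

The upside of carrying the error alongside the value is that callers such as completion can keep working with the partial expansion while still being able to surface the error.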

I'll continue this later, but I'm interested whether you think this is the right path, @matklad & @edwin0cheng.

Co-authored-by: Florian Diebold <florian.diebold@freiheit.com>
Co-authored-by: Florian Diebold <flodiebold@gmail.com>
bors[bot] 2020-03-17 09:41:30 +00:00 committed by GitHub
commit 6aa432d86b
14 changed files with 470 additions and 180 deletions

View File

@ -2,7 +2,7 @@
use std::sync::Arc;
use mbe::MacroRules;
use mbe::{ExpandResult, MacroRules};
use ra_db::{salsa, SourceDatabase};
use ra_parser::FragmentKind;
use ra_prof::profile;
@ -27,11 +27,12 @@ impl TokenExpander {
db: &dyn AstDatabase,
id: LazyMacroId,
tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
) -> mbe::ExpandResult<tt::Subtree> {
match self {
TokenExpander::MacroRules(it) => it.expand(tt),
TokenExpander::Builtin(it) => it.expand(db, id, tt),
TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
// FIXME switch these to ExpandResult as well
TokenExpander::Builtin(it) => it.expand(db, id, tt).into(),
TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(),
}
}
@ -66,7 +67,7 @@ pub trait AstDatabase: SourceDatabase {
fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>;
fn parse_macro(&self, macro_file: MacroFile)
-> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
fn macro_expand(&self, macro_call: MacroCallId) -> Result<Arc<tt::Subtree>, String>;
fn macro_expand(&self, macro_call: MacroCallId) -> (Option<Arc<tt::Subtree>>, Option<String>);
#[salsa::interned]
fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId;
@ -153,7 +154,7 @@ pub(crate) fn macro_arg(
pub(crate) fn macro_expand(
db: &dyn AstDatabase,
id: MacroCallId,
) -> Result<Arc<tt::Subtree>, String> {
) -> (Option<Arc<tt::Subtree>>, Option<String>) {
macro_expand_with_arg(db, id, None)
}
@ -174,31 +175,38 @@ fn macro_expand_with_arg(
db: &dyn AstDatabase,
id: MacroCallId,
arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
) -> Result<Arc<tt::Subtree>, String> {
) -> (Option<Arc<tt::Subtree>>, Option<String>) {
let lazy_id = match id {
MacroCallId::LazyMacro(id) => id,
MacroCallId::EagerMacro(id) => {
if arg.is_some() {
return Err(
"hypothetical macro expansion not implemented for eager macro".to_owned()
return (
None,
Some("hypothetical macro expansion not implemented for eager macro".to_owned()),
);
} else {
return Ok(db.lookup_intern_eager_expansion(id).subtree);
return (Some(db.lookup_intern_eager_expansion(id).subtree), None);
}
}
};
let loc = db.lookup_intern_macro(lazy_id);
let macro_arg = arg.or_else(|| db.macro_arg(id)).ok_or("Fail to args in to tt::TokenTree")?;
let macro_arg = match arg.or_else(|| db.macro_arg(id)) {
Some(it) => it,
None => return (None, Some("Fail to args in to tt::TokenTree".into())),
};
let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?;
let tt = macro_rules.0.expand(db, lazy_id, &macro_arg.0).map_err(|err| format!("{:?}", err))?;
let macro_rules = match db.macro_def(loc.def) {
Some(it) => it,
None => return (None, Some("Fail to find macro definition".into())),
};
let ExpandResult(tt, err) = macro_rules.0.expand(db, lazy_id, &macro_arg.0);
// Set a hard limit for the expanded tt
let count = tt.count();
if count > 65536 {
return Err(format!("Total tokens count exceed limit : count = {}", count));
return (None, Some(format!("Total tokens count exceed limit : count = {}", count)));
}
Ok(Arc::new(tt))
(Some(Arc::new(tt)), err.map(|e| format!("{:?}", e)))
}
pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
@ -225,42 +233,41 @@ pub fn parse_macro_with_arg(
let _p = profile("parse_macro_query");
let macro_call_id = macro_file.macro_call_id;
let expansion = if let Some(arg) = arg {
let (tt, err) = if let Some(arg) = arg {
macro_expand_with_arg(db, macro_call_id, Some(arg))
} else {
db.macro_expand(macro_call_id)
};
let tt = expansion
.map_err(|err| {
// Note:
// The final goal we would like to make all parse_macro success,
// such that the following log will not call anyway.
match macro_call_id {
MacroCallId::LazyMacro(id) => {
let loc: MacroCallLoc = db.lookup_intern_macro(id);
let node = loc.kind.node(db);
if let Some(err) = err {
// Note:
// The final goal we would like to make all parse_macro success,
// such that the following log will not call anyway.
match macro_call_id {
MacroCallId::LazyMacro(id) => {
let loc: MacroCallLoc = db.lookup_intern_macro(id);
let node = loc.kind.node(db);
// collect parent information for warning log
let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| {
it.file_id.call_node(db)
})
.map(|n| format!("{:#}", n.value))
.collect::<Vec<_>>()
.join("\n");
// collect parent information for warning log
let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| {
it.file_id.call_node(db)
})
.map(|n| format!("{:#}", n.value))
.collect::<Vec<_>>()
.join("\n");
log::warn!(
"fail on macro_parse: (reason: {} macro_call: {:#}) parents: {}",
err,
node.value,
parents
);
}
_ => {
log::warn!("fail on macro_parse: (reason: {})", err);
}
log::warn!(
"fail on macro_parse: (reason: {} macro_call: {:#}) parents: {}",
err,
node.value,
parents
);
}
})
.ok()?;
_ => {
log::warn!("fail on macro_parse: (reason: {})", err);
}
}
};
let tt = tt?;
let fragment_kind = to_fragment_kind(db, macro_call_id);

View File

@ -462,7 +462,7 @@ fn main() {
fn infer_builtin_macros_include() {
let (db, pos) = TestDB::with_position(
r#"
//- /main.rs
//- /main.rs
#[rustc_builtin_macro]
macro_rules! include {() => {}}
@ -483,7 +483,7 @@ fn bar() -> u32 {0}
fn infer_builtin_macros_include_concat() {
let (db, pos) = TestDB::with_position(
r#"
//- /main.rs
//- /main.rs
#[rustc_builtin_macro]
macro_rules! include {() => {}}
@ -507,7 +507,7 @@ fn bar() -> u32 {0}
fn infer_builtin_macros_include_concat_with_bad_env_should_failed() {
let (db, pos) = TestDB::with_position(
r#"
//- /main.rs
//- /main.rs
#[rustc_builtin_macro]
macro_rules! include {() => {}}
@ -534,7 +534,7 @@ fn bar() -> u32 {0}
fn infer_builtin_macros_include_itself_should_failed() {
let (db, pos) = TestDB::with_position(
r#"
//- /main.rs
//- /main.rs
#[rustc_builtin_macro]
macro_rules! include {() => {}}

View File

@ -720,7 +720,18 @@ mod tests {
}
",
),
@r###"[]"###
@r###"
[
CompletionItem {
label: "the_field",
source_range: [156; 156),
delete: [156; 156),
insert: "the_field",
kind: Field,
detail: "u32",
},
]
"###
);
}
@ -751,6 +762,43 @@ mod tests {
);
}
#[test]
fn macro_expansion_resilient() {
assert_debug_snapshot!(
do_ref_completion(
r"
macro_rules! dbg {
() => {};
($val:expr) => {
match $val { tmp => { tmp } }
};
// Trailing comma with single argument is ignored
($val:expr,) => { $crate::dbg!($val) };
($($val:expr),+ $(,)?) => {
($($crate::dbg!($val)),+,)
};
}
struct A { the_field: u32 }
fn foo(a: A) {
dbg!(a.<|>)
}
",
),
@r###"
[
CompletionItem {
label: "the_field",
source_range: [552; 552),
delete: [552; 552),
insert: "the_field",
kind: Field,
detail: "u32",
},
]
"###
);
}
#[test]
fn test_method_completion_3547() {
assert_debug_snapshot!(

View File

@ -89,7 +89,6 @@ mod tests {
#[test]
fn completes_in_simple_macro_call() {
// FIXME: doesn't work yet because of missing error recovery in macro expansion
let completions = complete(
r"
macro_rules! m { ($e:expr) => { $e } }
@ -102,6 +101,16 @@ mod tests {
}
",
);
assert_debug_snapshot!(completions, @r###"[]"###);
assert_debug_snapshot!(completions, @r###"
[
CompletionItem {
label: "E",
source_range: [151; 151),
delete: [151; 151),
insert: "E",
kind: Enum,
},
]
"###);
}
}

View File

@ -811,7 +811,44 @@ mod tests {
}
"
),
@"[]"
@r###"
[
CompletionItem {
label: "m!",
source_range: [145; 145),
delete: [145; 145),
insert: "m!($0)",
kind: Macro,
detail: "macro_rules! m",
},
CompletionItem {
label: "quux(…)",
source_range: [145; 145),
delete: [145; 145),
insert: "quux(${1:x})$0",
kind: Function,
lookup: "quux",
detail: "fn quux(x: i32)",
trigger_call_info: true,
},
CompletionItem {
label: "x",
source_range: [145; 145),
delete: [145; 145),
insert: "x",
kind: Binding,
detail: "i32",
},
CompletionItem {
label: "y",
source_range: [145; 145),
delete: [145; 145),
insert: "y",
kind: Binding,
detail: "i32",
},
]
"###
);
}
@ -868,6 +905,59 @@ mod tests {
);
}
#[test]
fn completes_in_simple_macro_without_closing_parens() {
assert_debug_snapshot!(
do_reference_completion(
r"
macro_rules! m { ($e:expr) => { $e } }
fn quux(x: i32) {
let y = 92;
m!(x<|>
}
"
),
@r###"
[
CompletionItem {
label: "m!",
source_range: [145; 146),
delete: [145; 146),
insert: "m!($0)",
kind: Macro,
detail: "macro_rules! m",
},
CompletionItem {
label: "quux(…)",
source_range: [145; 146),
delete: [145; 146),
insert: "quux(${1:x})$0",
kind: Function,
lookup: "quux",
detail: "fn quux(x: i32)",
trigger_call_info: true,
},
CompletionItem {
label: "x",
source_range: [145; 146),
delete: [145; 146),
insert: "x",
kind: Binding,
detail: "i32",
},
CompletionItem {
label: "y",
source_range: [145; 146),
delete: [145; 146),
insert: "y",
kind: Binding,
detail: "i32",
},
]
"###
);
}
#[test]
fn completes_unresolved_uses() {
assert_debug_snapshot!(

View File

@ -135,7 +135,7 @@ impl<'a> CompletionContext<'a> {
),
) {
let new_offset = hypothetical_expansion.1.text_range().start();
if new_offset >= actual_expansion.text_range().end() {
if new_offset > actual_expansion.text_range().end() {
break;
}
original_file = actual_expansion;

View File

@ -259,7 +259,7 @@ fn some_thing() -> u32 {
);
assert_eq!(res.name, "foo");
assert_snapshot!(res.expansion, @r###"bar!()"###);
assert_snapshot!(res.expansion, @r###""###);
}
#[test]

View File

@ -150,7 +150,7 @@ impl MacroRules {
Ok(MacroRules { rules, shift: Shift::new(tt) })
}
pub fn expand(&self, tt: &tt::Subtree) -> Result<tt::Subtree, ExpandError> {
pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
// apply shift
let mut tt = tt.clone();
self.shift.shift_all(&mut tt);
@ -209,5 +209,35 @@ fn validate(pattern: &tt::Subtree) -> Result<(), ParseError> {
Ok(())
}
pub struct ExpandResult<T>(pub T, pub Option<ExpandError>);
impl<T> ExpandResult<T> {
pub fn ok(t: T) -> ExpandResult<T> {
ExpandResult(t, None)
}
pub fn only_err(err: ExpandError) -> ExpandResult<T>
where
T: Default,
{
ExpandResult(Default::default(), Some(err))
}
pub fn map<U>(self, f: impl FnOnce(T) -> U) -> ExpandResult<U> {
ExpandResult(f(self.0), self.1)
}
pub fn result(self) -> Result<T, ExpandError> {
self.1.map(Err).unwrap_or(Ok(self.0))
}
}
impl<T: Default> From<Result<T, ExpandError>> for ExpandResult<T> {
fn from(result: Result<T, ExpandError>) -> ExpandResult<T> {
result
.map_or_else(|e| ExpandResult(Default::default(), Some(e)), |it| ExpandResult(it, None))
}
}
#[cfg(test)]
mod tests;

View File

@ -8,19 +8,51 @@ mod transcriber;
use ra_syntax::SmolStr;
use rustc_hash::FxHashMap;
use crate::ExpandError;
use crate::{ExpandError, ExpandResult};
pub(crate) fn expand(
rules: &crate::MacroRules,
input: &tt::Subtree,
) -> Result<tt::Subtree, ExpandError> {
rules.rules.iter().find_map(|it| expand_rule(it, input).ok()).ok_or(ExpandError::NoMatchingRule)
pub(crate) fn expand(rules: &crate::MacroRules, input: &tt::Subtree) -> ExpandResult<tt::Subtree> {
expand_rules(&rules.rules, input)
}
fn expand_rule(rule: &crate::Rule, input: &tt::Subtree) -> Result<tt::Subtree, ExpandError> {
let bindings = matcher::match_(&rule.lhs, input)?;
let res = transcriber::transcribe(&rule.rhs, &bindings)?;
Ok(res)
fn expand_rules(rules: &[crate::Rule], input: &tt::Subtree) -> ExpandResult<tt::Subtree> {
let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
for rule in rules {
let new_match = match matcher::match_(&rule.lhs, input) {
Ok(m) => m,
Err(_e) => {
// error in pattern parsing
continue;
}
};
if new_match.err.is_none() {
// If we find a rule that applies without errors, we're done.
// Unconditionally returning the transcription here makes the
// `test_repeat_bad_var` test fail.
let ExpandResult(res, transcribe_err) =
transcriber::transcribe(&rule.rhs, &new_match.bindings);
if transcribe_err.is_none() {
return ExpandResult::ok(res);
}
}
// Use the rule if we matched more tokens, or had fewer errors
if let Some((prev_match, _)) = &match_ {
if (new_match.unmatched_tts, new_match.err_count)
< (prev_match.unmatched_tts, prev_match.err_count)
{
match_ = Some((new_match, rule));
}
} else {
match_ = Some((new_match, rule));
}
}
if let Some((match_, rule)) = match_ {
// if we got here, there was no match without errors
let ExpandResult(result, transcribe_err) =
transcriber::transcribe(&rule.rhs, &match_.bindings);
ExpandResult(result, match_.err.or(transcribe_err))
} else {
ExpandResult(tt::Subtree::default(), Some(ExpandError::NoMatchingRule))
}
}
/// The actual algorithm for expansion is not too hard, but is pretty tricky.
@ -111,7 +143,7 @@ mod tests {
}
fn assert_err(macro_body: &str, invocation: &str, err: ExpandError) {
assert_eq!(expand_first(&create_rules(&format_macro(macro_body)), invocation), Err(err));
assert_eq!(expand_first(&create_rules(&format_macro(macro_body)), invocation).1, Some(err));
}
fn format_macro(macro_body: &str) -> String {
@ -135,10 +167,7 @@ mod tests {
crate::MacroRules::parse(&definition_tt).unwrap()
}
fn expand_first(
rules: &crate::MacroRules,
invocation: &str,
) -> Result<tt::Subtree, ExpandError> {
fn expand_first(rules: &crate::MacroRules, invocation: &str) -> ExpandResult<tt::Subtree> {
let source_file = ast::SourceFile::parse(invocation).ok().unwrap();
let macro_invocation =
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
@ -146,6 +175,6 @@ mod tests {
let (invocation_tt, _) =
ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
expand_rule(&rules.rules[0], &invocation_tt)
expand_rules(&rules.rules, &invocation_tt)
}
}

View File

@ -8,6 +8,7 @@ use crate::{
ExpandError,
};
use super::ExpandResult;
use ra_parser::{FragmentKind::*, TreeSink};
use ra_syntax::{SmolStr, SyntaxKind};
use tt::buffer::{Cursor, TokenBuffer};
@ -58,36 +59,61 @@ macro_rules! err {
};
}
macro_rules! bail {
($($tt:tt)*) => {
return Err(err!($($tt)*))
};
#[derive(Debug, Default)]
pub(super) struct Match {
pub bindings: Bindings,
/// We currently just keep the first error and count the rest to compare matches.
pub err: Option<ExpandError>,
pub err_count: usize,
/// How many top-level token trees were left to match.
pub unmatched_tts: usize,
}
pub(super) fn match_(pattern: &tt::Subtree, src: &tt::Subtree) -> Result<Bindings, ExpandError> {
impl Match {
pub fn add_err(&mut self, err: ExpandError) {
let prev_err = self.err.take();
self.err = prev_err.or(Some(err));
self.err_count += 1;
}
}
// General note: These functions have two channels to return errors, a `Result`
// return value and the `&mut Match`. The returned Result is for pattern parsing
// errors; if a branch of the macro definition doesn't parse, it doesn't make
// sense to try using it. Matching errors are added to the `Match`. It might
// make sense to make pattern parsing a separate step?
pub(super) fn match_(pattern: &tt::Subtree, src: &tt::Subtree) -> Result<Match, ExpandError> {
assert!(pattern.delimiter == None);
let mut res = Bindings::default();
let mut res = Match::default();
let mut src = TtIter::new(src);
match_subtree(&mut res, pattern, &mut src)?;
if src.len() > 0 {
bail!("leftover tokens");
res.unmatched_tts += src.len();
res.add_err(err!("leftover tokens"));
}
Ok(res)
}
fn match_subtree(
bindings: &mut Bindings,
res: &mut Match,
pattern: &tt::Subtree,
src: &mut TtIter,
) -> Result<(), ExpandError> {
for op in parse_pattern(pattern) {
match op? {
Op::TokenTree(tt::TokenTree::Leaf(lhs)) => {
let rhs = src.expect_leaf().map_err(|()| err!("expected leaf: `{}`", lhs))?;
let rhs = match src.expect_leaf() {
Ok(l) => l,
Err(()) => {
res.add_err(err!("expected leaf: `{}`", lhs));
continue;
}
};
match (lhs, rhs) {
(
tt::Leaf::Punct(tt::Punct { char: lhs, .. }),
@ -101,31 +127,51 @@ fn match_subtree(
tt::Leaf::Literal(tt::Literal { text: lhs, .. }),
tt::Leaf::Literal(tt::Literal { text: rhs, .. }),
) if lhs == rhs => (),
_ => return Err(ExpandError::UnexpectedToken),
_ => {
res.add_err(ExpandError::UnexpectedToken);
}
}
}
Op::TokenTree(tt::TokenTree::Subtree(lhs)) => {
let rhs = src.expect_subtree().map_err(|()| err!("expected subtree"))?;
let rhs = match src.expect_subtree() {
Ok(s) => s,
Err(()) => {
res.add_err(err!("expected subtree"));
continue;
}
};
if lhs.delimiter_kind() != rhs.delimiter_kind() {
bail!("mismatched delimiter")
res.add_err(err!("mismatched delimiter"));
continue;
}
let mut src = TtIter::new(rhs);
match_subtree(bindings, lhs, &mut src)?;
match_subtree(res, lhs, &mut src)?;
if src.len() > 0 {
bail!("leftover tokens");
res.add_err(err!("leftover tokens"));
}
}
Op::Var { name, kind } => {
let kind = kind.as_ref().ok_or(ExpandError::UnexpectedToken)?;
match match_meta_var(kind.as_str(), src)? {
Some(fragment) => {
bindings.inner.insert(name.clone(), Binding::Fragment(fragment));
let kind = match kind {
Some(k) => k,
None => {
res.add_err(ExpandError::UnexpectedToken);
continue;
}
None => bindings.push_optional(name),
};
let ExpandResult(matched, match_err) = match_meta_var(kind.as_str(), src);
match matched {
Some(fragment) => {
res.bindings.inner.insert(name.clone(), Binding::Fragment(fragment));
}
None if match_err.is_none() => res.bindings.push_optional(name),
_ => {}
}
if let Some(err) = match_err {
res.add_err(err);
}
}
Op::Repeat { subtree, kind, separator } => {
match_repeat(bindings, subtree, kind, separator, src)?
match_repeat(res, subtree, kind, separator, src)?;
}
}
}
@ -221,7 +267,7 @@ impl<'a> TtIter<'a> {
pub(crate) fn expect_fragment(
&mut self,
fragment_kind: ra_parser::FragmentKind,
) -> Result<tt::TokenTree, ()> {
) -> ExpandResult<Option<tt::TokenTree>> {
pub(crate) struct OffsetTokenSink<'a> {
pub(crate) cursor: Cursor<'a>,
pub(crate) error: bool,
@ -246,45 +292,51 @@ impl<'a> TtIter<'a> {
ra_parser::parse_fragment(&mut src, &mut sink, fragment_kind);
let mut err = None;
if !sink.cursor.is_root() || sink.error {
// FIXME better recovery in this case would help completion inside macros immensely
return Err(());
err = Some(err!("expected {:?}", fragment_kind));
}
let mut curr = buffer.begin();
let mut res = vec![];
while curr != sink.cursor {
if let Some(token) = curr.token_tree() {
res.push(token);
if sink.cursor.is_root() {
while curr != sink.cursor {
if let Some(token) = curr.token_tree() {
res.push(token);
}
curr = curr.bump();
}
curr = curr.bump();
}
self.inner = self.inner.as_slice()[res.len()..].iter();
match res.len() {
0 => Err(()),
1 => Ok(res[0].clone()),
_ => Ok(tt::TokenTree::Subtree(tt::Subtree {
if res.len() == 0 && err.is_none() {
err = Some(err!("no tokens consumed"));
}
let res = match res.len() {
1 => Some(res[0].clone()),
0 => None,
_ => Some(tt::TokenTree::Subtree(tt::Subtree {
delimiter: None,
token_trees: res.into_iter().cloned().collect(),
})),
}
};
ExpandResult(res, err)
}
pub(crate) fn eat_vis(&mut self) -> Option<tt::TokenTree> {
let mut fork = self.clone();
match fork.expect_fragment(Visibility) {
Ok(tt) => {
ExpandResult(tt, None) => {
*self = fork;
Some(tt)
tt
}
Err(()) => None,
ExpandResult(_, Some(_)) => None,
}
}
}
pub(super) fn match_repeat(
bindings: &mut Bindings,
res: &mut Match,
pattern: &tt::Subtree,
kind: RepeatKind,
separator: Option<Separator>,
@ -304,36 +356,46 @@ pub(super) fn match_repeat(
}
}
let mut nested = Bindings::default();
match match_subtree(&mut nested, pattern, &mut fork) {
Ok(()) => {
limit -= 1;
if limit == 0 {
log::warn!("match_lhs excced in repeat pattern exceed limit => {:#?}\n{:#?}\n{:#?}\n{:#?}", pattern, src, kind, separator);
let mut nested = Match::default();
match_subtree(&mut nested, pattern, &mut fork)?;
if nested.err.is_none() {
limit -= 1;
if limit == 0 {
log::warn!(
"match_lhs exceeded repeat pattern limit => {:#?}\n{:#?}\n{:#?}\n{:#?}",
pattern,
src,
kind,
separator
);
break;
}
*src = fork;
if let Err(err) = res.bindings.push_nested(counter, nested.bindings) {
res.add_err(err);
}
counter += 1;
if counter == 1 {
if let RepeatKind::ZeroOrOne = kind {
break;
}
*src = fork;
bindings.push_nested(counter, nested)?;
counter += 1;
if counter == 1 {
if let RepeatKind::ZeroOrOne = kind {
break;
}
}
}
Err(_) => break,
} else {
break;
}
}
match (kind, counter) {
(RepeatKind::OneOrMore, 0) => return Err(ExpandError::UnexpectedToken),
(RepeatKind::OneOrMore, 0) => {
res.add_err(ExpandError::UnexpectedToken);
}
(_, 0) => {
// Collect all empty variables in subtrees
let mut vars = Vec::new();
collect_vars(&mut vars, pattern)?;
for var in vars {
bindings.push_empty(&var)
res.bindings.push_empty(&var)
}
}
_ => (),
@ -341,7 +403,7 @@ pub(super) fn match_repeat(
Ok(())
}
fn match_meta_var(kind: &str, input: &mut TtIter) -> Result<Option<Fragment>, ExpandError> {
fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragment>> {
let fragment = match kind {
"path" => Path,
"expr" => Expr,
@ -352,34 +414,32 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> Result<Option<Fragment>, Ex
"meta" => MetaItem,
"item" => Item,
_ => {
let tt = match kind {
"ident" => {
let ident = input.expect_ident().map_err(|()| err!("expected ident"))?.clone();
tt::Leaf::from(ident).into()
}
"tt" => input.expect_tt().map_err(|()| err!())?.clone(),
"lifetime" => {
let ident = input.expect_lifetime().map_err(|()| err!())?;
tt::Leaf::Ident(ident.clone()).into()
}
"literal" => {
let literal = input.expect_literal().map_err(|()| err!())?.clone();
tt::Leaf::from(literal).into()
}
let tt_result = match kind {
"ident" => input
.expect_ident()
.map(|ident| Some(tt::Leaf::from(ident.clone()).into()))
.map_err(|()| err!("expected ident")),
"tt" => input.expect_tt().map(Some).map_err(|()| err!()),
"lifetime" => input
.expect_lifetime()
.map(|ident| Some(tt::Leaf::Ident(ident.clone()).into()))
.map_err(|()| err!("expected lifetime")),
"literal" => input
.expect_literal()
.map(|literal| Some(tt::Leaf::from(literal.clone()).into()))
.map_err(|()| err!()),
// `vis` is optional
"vis" => match input.eat_vis() {
Some(vis) => vis,
None => return Ok(None),
Some(vis) => Ok(Some(vis)),
None => Ok(None),
},
_ => return Err(ExpandError::UnexpectedToken),
_ => Err(ExpandError::UnexpectedToken),
};
return Ok(Some(Fragment::Tokens(tt)));
return tt_result.map(|it| it.map(Fragment::Tokens)).into();
}
};
let tt =
input.expect_fragment(fragment).map_err(|()| err!("fragment did not parse as {}", kind))?;
let fragment = if kind == "expr" { Fragment::Ast(tt) } else { Fragment::Tokens(tt) };
Ok(Some(fragment))
let result = input.expect_fragment(fragment);
result.map(|tt| if kind == "expr" { tt.map(Fragment::Ast) } else { tt.map(Fragment::Tokens) })
}
fn collect_vars(buf: &mut Vec<SmolStr>, pattern: &tt::Subtree) -> Result<(), ExpandError> {

View File

@ -3,6 +3,7 @@
use ra_syntax::SmolStr;
use super::ExpandResult;
use crate::{
mbe_expander::{Binding, Bindings, Fragment},
parser::{parse_template, Op, RepeatKind, Separator},
@ -49,10 +50,7 @@ impl Bindings {
}
}
pub(super) fn transcribe(
template: &tt::Subtree,
bindings: &Bindings,
) -> Result<tt::Subtree, ExpandError> {
pub(super) fn transcribe(template: &tt::Subtree, bindings: &Bindings) -> ExpandResult<tt::Subtree> {
assert!(template.delimiter == None);
let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new() };
expand_subtree(&mut ctx, template)
@ -75,35 +73,46 @@ struct ExpandCtx<'a> {
nesting: Vec<NestingState>,
}
fn expand_subtree(ctx: &mut ExpandCtx, template: &tt::Subtree) -> Result<tt::Subtree, ExpandError> {
fn expand_subtree(ctx: &mut ExpandCtx, template: &tt::Subtree) -> ExpandResult<tt::Subtree> {
let mut buf: Vec<tt::TokenTree> = Vec::new();
let mut err = None;
for op in parse_template(template) {
match op? {
let op = match op {
Ok(op) => op,
Err(e) => {
err = Some(e);
break;
}
};
match op {
Op::TokenTree(tt @ tt::TokenTree::Leaf(..)) => buf.push(tt.clone()),
Op::TokenTree(tt::TokenTree::Subtree(tt)) => {
let tt = expand_subtree(ctx, tt)?;
let ExpandResult(tt, e) = expand_subtree(ctx, tt);
err = err.or(e);
buf.push(tt.into());
}
Op::Var { name, kind: _ } => {
let fragment = expand_var(ctx, name)?;
let ExpandResult(fragment, e) = expand_var(ctx, name);
err = err.or(e);
push_fragment(&mut buf, fragment);
}
Op::Repeat { subtree, kind, separator } => {
let fragment = expand_repeat(ctx, subtree, kind, separator)?;
let ExpandResult(fragment, e) = expand_repeat(ctx, subtree, kind, separator);
err = err.or(e);
push_fragment(&mut buf, fragment)
}
}
}
Ok(tt::Subtree { delimiter: template.delimiter, token_trees: buf })
ExpandResult(tt::Subtree { delimiter: template.delimiter, token_trees: buf }, err)
}
fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> Result<Fragment, ExpandError> {
let res = if v == "crate" {
fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> ExpandResult<Fragment> {
if v == "crate" {
// We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path.
let tt =
tt::Leaf::from(tt::Ident { text: "$crate".into(), id: tt::TokenId::unspecified() })
.into();
Fragment::Tokens(tt)
ExpandResult::ok(Fragment::Tokens(tt))
} else if !ctx.bindings.contains(v) {
// Note that it is possible to have a `$var` inside a macro which is not bound.
// For example:
@ -132,11 +141,13 @@ fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> Result<Fragment, ExpandError>
],
}
.into();
Fragment::Tokens(tt)
ExpandResult::ok(Fragment::Tokens(tt))
} else {
ctx.bindings.get(&v, &mut ctx.nesting)?.clone()
};
Ok(res)
ctx.bindings.get(&v, &mut ctx.nesting).map_or_else(
|e| ExpandResult(Fragment::Tokens(tt::TokenTree::empty()), Some(e)),
|b| ExpandResult::ok(b.clone()),
)
}
}
fn expand_repeat(
@ -144,17 +155,17 @@ fn expand_repeat(
template: &tt::Subtree,
kind: RepeatKind,
separator: Option<Separator>,
) -> Result<Fragment, ExpandError> {
) -> ExpandResult<Fragment> {
let mut buf: Vec<tt::TokenTree> = Vec::new();
ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false });
// Dirty hack to make macro-expansion terminate.
// This should be replaced by a propper macro-by-example implementation
// This should be replaced by a proper macro-by-example implementation
let limit = 65536;
let mut has_seps = 0;
let mut counter = 0;
loop {
let res = expand_subtree(ctx, template);
let ExpandResult(mut t, e) = expand_subtree(ctx, template);
let nesting_state = ctx.nesting.last_mut().unwrap();
if nesting_state.at_end || !nesting_state.hit {
break;
@ -172,10 +183,10 @@ fn expand_repeat(
break;
}
let mut t = match res {
Ok(t) => t,
Err(_) => continue,
};
if e.is_some() {
continue;
}
t.delimiter = None;
push_subtree(&mut buf, t);
@ -209,14 +220,14 @@ fn expand_repeat(
buf.pop();
}
if RepeatKind::OneOrMore == kind && counter == 0 {
return Err(ExpandError::UnexpectedToken);
}
// Check if it is a single token subtree without any delimiter
// e.g {Delimiter:None> ['>'] /Delimiter:None>}
let tt = tt::Subtree { delimiter: None, token_trees: buf }.into();
Ok(Fragment::Tokens(tt))
if RepeatKind::OneOrMore == kind && counter == 0 {
return ExpandResult(Fragment::Tokens(tt), Some(ExpandError::UnexpectedToken));
}
ExpandResult::ok(Fragment::Tokens(tt))
}
fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {

View File

@ -1430,7 +1430,7 @@ impl MacroFixture {
let (invocation_tt, _) =
ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
self.rules.expand(&invocation_tt)
self.rules.expand(&invocation_tt).result()
}
fn assert_expand_err(&self, invocation: &str, err: &ExpandError) {
@ -1662,5 +1662,5 @@ fn test_expand_bad_literal() {
macro_rules! foo { ($i:literal) => {}; }
"#,
)
.assert_expand_err(r#"foo!(&k");"#, &ExpandError::NoMatchingRule);
.assert_expand_err(r#"foo!(&k");"#, &ExpandError::BindingError("".to_string()));
}

View File

@ -61,7 +61,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
LIFETIME,
]);
const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW];
const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW, R_DOLLAR];
pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
if let Some(m) = literal(p) {

View File

@ -40,6 +40,12 @@ pub enum TokenTree {
}
impl_froms!(TokenTree: Leaf, Subtree);
impl TokenTree {
pub fn empty() -> Self {
TokenTree::Subtree(Subtree::default())
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Leaf {
Literal(Literal),