2197: Remove typed macro parsing API r=matklad a=matklad



Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
bors[bot] 2019-11-09 10:16:48 +00:00 committed by GitHub
commit defc7ad772
7 changed files with 92 additions and 78 deletions

Cargo.lock generated

@@ -1060,6 +1060,7 @@ dependencies = [
  "ra_arena 0.1.0",
  "ra_db 0.1.0",
  "ra_mbe 0.1.0",
+ "ra_parser 0.1.0",
  "ra_prof 0.1.0",
  "ra_syntax 0.1.0",
  "ra_tt 0.1.0",


@@ -10,6 +10,7 @@ log = "0.4.5"
 ra_arena = { path = "../ra_arena" }
 ra_db = { path = "../ra_db" }
 ra_syntax = { path = "../ra_syntax" }
+ra_parser = { path = "../ra_parser" }
 ra_prof = { path = "../ra_prof" }
 tt = { path = "../ra_tt", package = "ra_tt" }
 mbe = { path = "../ra_mbe", package = "ra_mbe" }


@@ -4,6 +4,7 @@
 use mbe::MacroRules;
 use ra_db::{salsa, SourceDatabase};
+use ra_parser::FragmentKind;
 use ra_prof::profile;
 use ra_syntax::{AstNode, Parse, SyntaxNode};
@@ -108,12 +109,10 @@ pub(crate) fn parse_macro(
         })
         .ok()?;
-    match macro_file.macro_file_kind {
-        MacroFileKind::Items => {
-            mbe::token_tree_to_items(&tt).ok().map(|(p, map)| (p.to_syntax(), Arc::new(map)))
-        }
-        MacroFileKind::Expr => {
-            mbe::token_tree_to_expr(&tt).ok().map(|(p, map)| (p.to_syntax(), Arc::new(map)))
-        }
-    }
+    let fragment_kind = match macro_file.macro_file_kind {
+        MacroFileKind::Items => FragmentKind::Items,
+        MacroFileKind::Expr => FragmentKind::Expr,
+    };
+    let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(&tt, fragment_kind).ok()?;
+    Some((parse, Arc::new(rev_token_map)))
 }
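
For readability, here is the new parse_macro flow from the hunk above written out as a standalone sketch. It is not the literal function body: `expand_to_syntax` is a made-up name, and the sketch assumes the surrounding workspace types shown in this diff (`MacroFileKind` from ra_hir_expand, `tt::Subtree`, and the `mbe` and `ra_parser` crates).

use std::sync::Arc;

use ra_parser::FragmentKind;
use ra_syntax::{Parse, SyntaxNode};

fn expand_to_syntax(
    tt: &tt::Subtree,
    kind: MacroFileKind,
) -> Option<(Parse<SyntaxNode>, Arc<mbe::RevTokenMap>)> {
    // Map the HIR-level notion of what the macro expands to onto the parser's
    // FragmentKind, then go through the single untyped entry point.
    let fragment_kind = match kind {
        MacroFileKind::Items => FragmentKind::Items,
        MacroFileKind::Expr => FragmentKind::Expr,
    };
    let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(tt, fragment_kind).ok()?;
    Some((parse, Arc::new(rev_token_map)))
}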


@@ -151,19 +151,21 @@ pub struct ExpansionInfo {
 impl ExpansionInfo {
     pub fn find_range(&self, from: TextRange) -> Option<(HirFileId, TextRange)> {
-        fn look_in_rev_map(exp_map: &mbe::RevTokenMap, from: TextRange) -> Option<tt::TokenId> {
-            exp_map.ranges.iter().find(|&it| it.0.is_subrange(&from)).map(|it| it.1)
-        }
         let token_id = look_in_rev_map(&self.exp_map, from)?;
-        let (token_map, file_offset, token_id) = if token_id.0 >= self.shift {
+        let (token_map, (file_id, start_offset), token_id) = if token_id.0 >= self.shift {
             (&self.macro_arg.1, self.arg_start, tt::TokenId(token_id.0 - self.shift).into())
         } else {
             (&self.macro_def.1, self.def_start, token_id)
         };
         let range = token_map.relative_range_of(token_id)?;
-        Some((file_offset.0, TextRange::offset_len(range.start() + file_offset.1, range.len())))
+        return Some((file_id, range + start_offset));
+
+        fn look_in_rev_map(exp_map: &mbe::RevTokenMap, from: TextRange) -> Option<tt::TokenId> {
+            exp_map.ranges.iter().find(|&it| it.0.is_subrange(&from)).map(|it| it.1)
+        }
     }
 }
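
Besides destructuring `arg_start`/`def_start` into `(file_id, start_offset)`, the hunk also simplifies the return value: instead of rebuilding the range with `TextRange::offset_len(range.start() + file_offset.1, range.len())`, the new code shifts the whole relative range with `range + start_offset`. A tiny illustration of why the two are equivalent, using the `TextRange`/`TextUnit` re-exports from ra_syntax (the assert is mine, not part of the diff):

use ra_syntax::{TextRange, TextUnit};

fn main() {
    // A token's range relative to the macro arg/def, e.g. [3; 7).
    let range = TextRange::offset_len(TextUnit::from(3), TextUnit::from(4));
    // Offset of that arg/def inside the containing file.
    let start_offset = TextUnit::from(10);

    // Shifting the range moves both endpoints by the offset...
    let shifted = range + start_offset;
    // ...which is exactly what the old offset_len construction spelled out.
    assert_eq!(shifted, TextRange::offset_len(range.start() + start_offset, range.len()));
}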


@@ -31,8 +31,7 @@ pub enum ExpandError {
 }
 pub use crate::syntax_bridge::{
-    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_expr, token_tree_to_items,
-    token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty, RevTokenMap, TokenMap,
+    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, RevTokenMap, TokenMap,
 };
 /// This struct contains AST for a single `macro_rules` definition. What might
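
With the typed wrappers (`token_tree_to_expr`, `token_tree_to_items`, and friends) gone from the re-export list, a caller that still wants a typed AST node goes through `token_tree_to_syntax_node` and casts the result itself. A minimal migration sketch: the helper name is made up, while `Parse::cast` and `tree()` are the same calls the removed macro-generated wrappers used internally.

use ra_parser::FragmentKind;
use ra_syntax::ast;

// Hypothetical helper showing the migration path for a former
// `token_tree_to_items` caller.
fn expansion_to_items(tt: &tt::Subtree) -> Option<ast::MacroItems> {
    // Old: mbe::token_tree_to_items(tt).ok().map(|(parse, _map)| parse.tree())
    // New: one untyped entry point, with the fragment kind made explicit...
    let (parse, _rev_token_map) = mbe::token_tree_to_syntax_node(tt, FragmentKind::Items).ok()?;
    // ...followed by an explicit cast when a typed node is actually needed.
    parse.cast::<ast::MacroItems>().map(|it| it.tree())
}

This mirrors what the deleted impl_token_tree_conversions! macro generated, just at the call site instead of behind five separate functions.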


@@ -1,9 +1,6 @@
 //! FIXME: write short doc here
-use ra_parser::{
-    FragmentKind::{self, *},
-    ParseError, TreeSink,
-};
+use ra_parser::{FragmentKind, ParseError, TreeSink};
 use ra_syntax::{
     ast, AstNode, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
     SyntaxTreeBuilder, TextRange, TextUnit, T,
@@ -55,7 +52,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, Toke
 // * ImplItems(SmallVec<[ast::ImplItem; 1]>)
 // * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
-fn fragment_to_syntax_node(
+pub fn token_tree_to_syntax_node(
     tt: &tt::Subtree,
     fragment_kind: FragmentKind,
 ) -> Result<(Parse<SyntaxNode>, RevTokenMap), ExpandError> {
@@ -79,31 +76,6 @@ fn fragment_to_syntax_node(
     Ok((parse, range_map))
 }
-macro_rules! impl_token_tree_conversions {
-    ($($(#[$attr:meta])* $name:ident => ($kind:ident, $t:ty) ),*) => {
-        $(
-            $(#[$attr])*
-            pub fn $name(tt: &tt::Subtree) -> Result<(Parse<$t>, RevTokenMap), ExpandError> {
-                let (parse, map) = fragment_to_syntax_node(tt, $kind)?;
-                parse.cast().ok_or_else(|| crate::ExpandError::ConversionError).map(|p| (p, map))
-            }
-        )*
-    }
-}
-impl_token_tree_conversions! {
-    /// Parses the token tree (result of macro expansion) to an expression
-    token_tree_to_expr => (Expr, ast::Expr),
-    /// Parses the token tree (result of macro expansion) to a Pattern
-    token_tree_to_pat => (Pattern, ast::Pat),
-    /// Parses the token tree (result of macro expansion) to a Type
-    token_tree_to_ty => (Type, ast::TypeRef),
-    /// Parses the token tree (result of macro expansion) as a sequence of stmts
-    token_tree_to_macro_stmts => (Statements, ast::MacroStmts),
-    /// Parses the token tree (result of macro expansion) as a sequence of items
-    token_tree_to_items => (Items, ast::MacroItems)
-}
 impl TokenMap {
     pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
         let idx = tt.0 as usize;
@@ -446,6 +418,6 @@ macro_rules! stmts {
 "#,
     );
     let expansion = expand(&rules, "stmts!();");
-    assert!(token_tree_to_expr(&expansion).is_err());
+    assert!(token_tree_to_syntax_node(&expansion, FragmentKind::Expr).is_err());
 }
 }


@@ -1,3 +1,4 @@
+use ra_parser::FragmentKind;
 use ra_syntax::{ast, AstNode, NodeOrToken, WalkEvent};
 use test_utils::assert_eq_text;
@@ -126,9 +127,9 @@ macro_rules! foo {
 "#,
     );
     let expanded = expand(&rules, "foo! { 1 + 1}");
-    let tree = token_tree_to_items(&expanded).unwrap().0.tree();
+    let tree = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap().0.syntax_node();
-    let dump = format!("{:#?}", tree.syntax());
+    let dump = format!("{:#?}", tree);
     assert_eq_text!(
         dump.trim(),
         r#"MACRO_ITEMS@[0; 15)
@@ -383,9 +384,9 @@ macro_rules! structs {
 ",
     );
     let expansion = expand(&rules, "structs!(Foo, Bar);");
-    let tree = token_tree_to_items(&expansion).unwrap().0.tree();
+    let tree = token_tree_to_syntax_node(&expansion, FragmentKind::Items).unwrap().0.syntax_node();
     assert_eq!(
-        format!("{:#?}", tree.syntax()).trim(),
+        format!("{:#?}", tree).trim(),
         r#"
 MACRO_ITEMS@[0; 40)
   STRUCT_DEF@[0; 20)
@@ -501,10 +502,11 @@ macro_rules! foo {
     );
     let expanded = expand(&rules, "foo!{}");
-    let stmts = token_tree_to_macro_stmts(&expanded).unwrap().0.tree();
+    let stmts =
+        token_tree_to_syntax_node(&expanded, FragmentKind::Statements).unwrap().0.syntax_node();
     assert_eq!(
-        format!("{:#?}", stmts.syntax()).trim(),
+        format!("{:#?}", stmts).trim(),
         r#"MACRO_STMTS@[0; 15)
   LET_STMT@[0; 7)
     LET_KW@[0; 3) "let"
@@ -754,7 +756,10 @@ macro_rules! foo {
 }
 "#,
     );
-    assert_expansion(MacroKind::Items, &rules, r#"
+    assert_expansion(
+        MacroKind::Items,
+        &rules,
+        r#"
 foo! {
     extern crate a;
     mod b;
@@ -770,7 +775,9 @@ fn h() {}
     extern {}
     type T = u8;
 }
-"#, r#"extern crate a ; mod b ; mod c {} use d ; const E : i32 = 0 ; static F : i32 = 0 ; impl G {} struct H ; enum I {Foo} trait J {} fn h () {} extern {} type T = u8 ;"#);
+"#,
+        r#"extern crate a ; mod b ; mod c {} use d ; const E : i32 = 0 ; static F : i32 = 0 ; impl G {} struct H ; enum I {Foo} trait J {} fn h () {} extern {} type T = u8 ;"#,
+    );
 }
 #[test]
@@ -946,10 +953,10 @@ macro_rules! vec {
     );
     let expansion = expand(&rules, r#"vec![1u32,2];"#);
-    let tree = token_tree_to_expr(&expansion).unwrap().0.tree();
+    let tree = token_tree_to_syntax_node(&expansion, FragmentKind::Expr).unwrap().0.syntax_node();
     assert_eq!(
-        format!("{:#?}", tree.syntax()).trim(),
+        format!("{:#?}", tree).trim(),
         r#"BLOCK_EXPR@[0; 45)
   BLOCK@[0; 45)
     L_CURLY@[0; 1) "{"
@@ -1088,8 +1095,12 @@ fn foo(){}
 "#,
     );
-    assert_expansion(MacroKind::Items, &rules, r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );"#,
-    "fn foo () {}");
+    assert_expansion(
+        MacroKind::Items,
+        &rules,
+        r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );"#,
+        "fn foo () {}",
+    );
 }
 #[test]
@@ -1171,8 +1182,12 @@ fn foo () {}
 "#,
     );
-    assert_expansion(MacroKind::Items, &rules, r#"impl_nonzero_fmt! { # [stable(feature= "nonzero",since="1.28.0")] (Debug,Display,Binary,Octal,LowerHex,UpperHex) for NonZeroU8}"#,
-    "fn foo () {}");
+    assert_expansion(
+        MacroKind::Items,
+        &rules,
+        r#"impl_nonzero_fmt! { # [stable(feature= "nonzero",since="1.28.0")] (Debug,Display,Binary,Octal,LowerHex,UpperHex) for NonZeroU8}"#,
+        "fn foo () {}",
+    );
 }
 #[test]
@@ -1189,8 +1204,12 @@ macro_rules! __cfg_if_items {
 "#,
     );
-    assert_expansion(MacroKind::Items, &rules, r#"__cfg_if_items ! { ( rustdoc , ) ; ( ( ) ( # [ cfg ( any ( target_os = "redox" , unix ) ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as unix ; # [ cfg ( windows ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as windows ; # [ cfg ( any ( target_os = "linux" , target_os = "l4re" ) ) ] pub mod linux ; ) ) , }"#,
-    "__cfg_if_items ! {(rustdoc ,) ;}");
+    assert_expansion(
+        MacroKind::Items,
+        &rules,
+        r#"__cfg_if_items ! { ( rustdoc , ) ; ( ( ) ( # [ cfg ( any ( target_os = "redox" , unix ) ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as unix ; # [ cfg ( windows ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as windows ; # [ cfg ( any ( target_os = "linux" , target_os = "l4re" ) ) ] pub mod linux ; ) ) , }"#,
+        "__cfg_if_items ! {(rustdoc ,) ;}",
+    );
 }
 #[test]
@@ -1233,10 +1252,13 @@ macro_rules! cfg_if {
 "#,
         "__cfg_if_items ! {() ; ((target_env = \"msvc\") ()) , ((all (target_arch = \"wasm32\" , not (target_os = \"emscripten\"))) ()) , (() (mod libunwind ; pub use libunwind :: * ;)) ,}");
-    assert_expansion(MacroKind::Items, &rules, r#"
+    assert_expansion(
+        MacroKind::Items,
+        &rules,
+        r#"
 cfg_if ! { @ __apply cfg ( all ( not ( any ( not ( any ( target_os = "solaris" , target_os = "illumos" ) ) ) ) ) ) , }
 "#,
-        ""
+        "",
     );
 }
@@ -1291,10 +1313,13 @@ impl $interface {
 }"#,
     );
-    let expanded = expand(&rules, r#"
+    let expanded = expand(
+        &rules,
+        r#"
 RIDL!{interface ID3D11Asynchronous(ID3D11AsynchronousVtbl): ID3D11DeviceChild(ID3D11DeviceChildVtbl) {
     fn GetDataSize(&mut self) -> UINT
-}}"#);
+}}"#,
+    );
     assert_eq!(expanded.to_string(), "impl ID3D11Asynchronous {pub unsafe fn GetDataSize (& mut self) -> UINT {((* self . lpVtbl) .GetDataSize) (self)}}");
 }
@@ -1340,7 +1365,8 @@ macro_rules! quick_error {
 #[test]
 fn test_empty_repeat_vars_in_empty_repeat_vars() {
-    let rules = create_rules(r#"
+    let rules = create_rules(
+        r#"
 macro_rules! delegate_impl {
     ([$self_type:ident, $self_wrap:ty, $self_map:ident]
     pub trait $name:ident $(: $sup:ident)* $(+ $more_sup:ident)* {
@@ -1385,9 +1411,15 @@ fn $method_name(self $(: $self_selftype)* $(,$marg: $marg_ty)*) -> $mret {
         }
     }
 }
-"#);
+"#,
+    );
-    assert_expansion(MacroKind::Items, &rules, r#"delegate_impl ! {[G , & 'a mut G , deref] pub trait Data : GraphBase {@ section type type NodeWeight ;}}"#, "impl <> Data for & \'a mut G where G : Data {}");
+    assert_expansion(
+        MacroKind::Items,
+        &rules,
+        r#"delegate_impl ! {[G , & 'a mut G , deref] pub trait Data : GraphBase {@ section type type NodeWeight ;}}"#,
+        "impl <> Data for & \'a mut G where G : Data {}",
+    );
 }
 pub(crate) fn create_rules(macro_definition: &str) -> MacroRules {
@@ -1436,22 +1468,30 @@ pub(crate) fn assert_expansion(
     };
     let (expanded_tree, expected_tree) = match kind {
         MacroKind::Items => {
-            let expanded_tree = token_tree_to_items(&expanded).unwrap().0.tree();
-            let expected_tree = token_tree_to_items(&expected).unwrap().0.tree();
+            let expanded_tree =
+                token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap().0.syntax_node();
+            let expected_tree =
+                token_tree_to_syntax_node(&expected, FragmentKind::Items).unwrap().0.syntax_node();
             (
-                debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),
-                debug_dump_ignore_spaces(expected_tree.syntax()).trim().to_string(),
+                debug_dump_ignore_spaces(&expanded_tree).trim().to_string(),
+                debug_dump_ignore_spaces(&expected_tree).trim().to_string(),
             )
         }
         MacroKind::Stmts => {
-            let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().0.tree();
-            let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().0.tree();
+            let expanded_tree = token_tree_to_syntax_node(&expanded, FragmentKind::Statements)
+                .unwrap()
+                .0
+                .syntax_node();
+            let expected_tree = token_tree_to_syntax_node(&expected, FragmentKind::Statements)
+                .unwrap()
+                .0
+                .syntax_node();
             (
-                debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),
-                debug_dump_ignore_spaces(expected_tree.syntax()).trim().to_string(),
+                debug_dump_ignore_spaces(&expanded_tree).trim().to_string(),
+                debug_dump_ignore_spaces(&expected_tree).trim().to_string(),
             )
         }
     };