//! This module generates the AST datatypes used by rust-analyzer.
//!
//! Specifically, it generates the `SyntaxKind` enum and a number of newtype
//! wrappers around `SyntaxNode` which implement `ra_syntax::AstNode`.
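//!
//! As a rough sketch (based on the `quote!` templates in `generate_nodes`
//! below; the actual node names come from `AST_SRC`), a node such as
//! `StructDef` ends up generated as:
//!
//! ```ignore
//! pub struct StructDef { pub(crate) syntax: SyntaxNode }
//! impl AstNode for StructDef { /* can_cast / cast / syntax */ }
//! impl StructDef { /* one accessor per field declared in the AST source */ }
//! ```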
use std::collections::HashSet;

use proc_macro2::{Punct, Spacing};
use quote::{format_ident, quote};

use crate::{
    ast_src::{AstSrc, Field, FieldSrc, KindsSrc, AST_SRC, KINDS_SRC},
    codegen::{self, update, Mode},
    project_root, Result,
};
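/// Entry point for syntax codegen: regenerates the `SyntaxKind` definitions as
/// well as the AST token and node wrappers, and hands each result to
/// `codegen::update`, which applies it according to `mode`.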
pub fn generate_syntax(mode: Mode) -> Result<()> {
    let syntax_kinds_file = project_root().join(codegen::SYNTAX_KINDS);
    let syntax_kinds = generate_syntax_kinds(KINDS_SRC)?;
    update(syntax_kinds_file.as_path(), &syntax_kinds, mode)?;

    let ast_tokens_file = project_root().join(codegen::AST_TOKENS);
    let contents = generate_tokens(AST_SRC)?;
    update(ast_tokens_file.as_path(), &contents, mode)?;

    let ast_nodes_file = project_root().join(codegen::AST_NODES);
    let contents = generate_nodes(KINDS_SRC, AST_SRC)?;
    update(ast_nodes_file.as_path(), &contents, mode)?;

    Ok(())
}
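/// Generates the contents of the AST tokens file: one newtype wrapper around
/// `SyntaxToken` per token listed in the grammar, each implementing `AstToken`
/// and `Display`.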
fn generate_tokens(grammar: AstSrc<'_>) -> Result<String> {
    let tokens = grammar.tokens.iter().map(|token| {
        let name = format_ident!("{}", token);
        let kind = format_ident!("{}", to_upper_snake_case(token));
        quote! {
            #[derive(Debug, Clone, PartialEq, Eq, Hash)]
            pub struct #name {
                pub(crate) syntax: SyntaxToken,
            }
            impl std::fmt::Display for #name {
                fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
                    std::fmt::Display::fmt(&self.syntax, f)
                }
            }
            impl AstToken for #name {
                fn can_cast(kind: SyntaxKind) -> bool { kind == #kind }
                fn cast(syntax: SyntaxToken) -> Option<Self> {
                    if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
                }
                fn syntax(&self) -> &SyntaxToken { &self.syntax }
            }
        }
    });

    let pretty = crate::reformat(quote! {
        use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken};
        #(#tokens)*
    })?
    .replace("#[derive", "\n#[derive");
    Ok(pretty)
}
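/// Generates the contents of the AST nodes file: a `SyntaxNode` newtype with an
/// `AstNode` impl and field accessors for every node in the grammar, an enum
/// (with `From` conversions) for every node group, and `Display` impls for both.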
fn generate_nodes(kinds: KindsSrc<'_>, grammar: AstSrc<'_>) -> Result<String> {
    let nodes = grammar.nodes.iter().map(|node| {
        let name = format_ident!("{}", node.name);
        let kind = format_ident!("{}", to_upper_snake_case(&name.to_string()));
        let traits = node.traits.iter().map(|trait_name| {
            let trait_name = format_ident!("{}", trait_name);
            quote!(impl ast::#trait_name for #name {})
        });

        let methods = node.fields.iter().map(|field| {
            let method_name = field.method_name();
            let ty = field.ty();

            if field.is_many() {
                quote! {
                    pub fn #method_name(&self) -> AstChildren<#ty> {
                        support::children(&self.syntax)
                    }
                }
            } else if let Some(token_kind) = field.token_kind() {
                quote! {
                    pub fn #method_name(&self) -> Option<#ty> {
                        support::token(&self.syntax, #token_kind)
                    }
                }
            } else {
                quote! {
                    pub fn #method_name(&self) -> Option<#ty> {
                        support::child(&self.syntax)
                    }
                }
            }
        });

        quote! {
            #[derive(Debug, Clone, PartialEq, Eq, Hash)]
            pub struct #name {
                pub(crate) syntax: SyntaxNode,
            }

            impl AstNode for #name {
                fn can_cast(kind: SyntaxKind) -> bool {
                    kind == #kind
                }
                fn cast(syntax: SyntaxNode) -> Option<Self> {
                    if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
                }
                fn syntax(&self) -> &SyntaxNode { &self.syntax }
            }

            #(#traits)*

            impl #name {
                #(#methods)*
            }
        }
    });

    let enums = grammar.enums.iter().map(|en| {
        let variants = en.variants.iter().map(|var| format_ident!("{}", var)).collect::<Vec<_>>();
        let name = format_ident!("{}", en.name);
        let kinds = variants
            .iter()
            .map(|name| format_ident!("{}", to_upper_snake_case(&name.to_string())))
            .collect::<Vec<_>>();
        let traits = en.traits.iter().map(|trait_name| {
            let trait_name = format_ident!("{}", trait_name);
            quote!(impl ast::#trait_name for #name {})
        });

        quote! {
            #[derive(Debug, Clone, PartialEq, Eq, Hash)]
            pub enum #name {
                #(#variants(#variants),)*
            }

            #(
                impl From<#variants> for #name {
                    fn from(node: #variants) -> #name {
                        #name::#variants(node)
                    }
                }
            )*

            impl AstNode for #name {
                fn can_cast(kind: SyntaxKind) -> bool {
                    match kind {
                        #(#kinds)|* => true,
                        _ => false,
                    }
                }
                fn cast(syntax: SyntaxNode) -> Option<Self> {
                    let res = match syntax.kind() {
                        #(
                            #kinds => #name::#variants(#variants { syntax }),
                        )*
                        _ => return None,
                    };
                    Some(res)
                }
                fn syntax(&self) -> &SyntaxNode {
                    match self {
                        #(
                            #name::#variants(it) => &it.syntax,
                        )*
                    }
                }
            }

            #(#traits)*
        }
    });

    let displays = grammar
        .enums
        .iter()
        .map(|it| format_ident!("{}", it.name))
        .chain(grammar.nodes.iter().map(|it| format_ident!("{}", it.name)))
        .map(|name| {
            quote! {
                impl std::fmt::Display for #name {
                    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
                        std::fmt::Display::fmt(self.syntax(), f)
                    }
                }
            }
        });

    let defined_nodes: HashSet<_> = grammar.nodes.iter().map(|node| node.name).collect();
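    // Warn (but do not fail) about node kinds from KINDS_SRC that have no
    // corresponding definition in the AST source.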
    for node in kinds
        .nodes
        .iter()
        .map(|kind| to_pascal_case(*kind))
        .filter(|name| !defined_nodes.contains(&**name))
    {
        eprintln!("Warning: node {} not defined in ast source", node);
    }

    let ast = quote! {
        use crate::{
            SyntaxNode, SyntaxToken, SyntaxKind::{self, *},
            ast::{self, AstNode, AstChildren, support},
            T,
        };

        #(#nodes)*
        #(#enums)*
        #(#displays)*
    };

    let ast = ast.to_string().replace("T ! [ ", "T![").replace(" ] )", "])");

    let pretty = crate::reformat(ast)?.replace("#[derive", "\n#[derive");
    Ok(pretty)
}
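/// Generates the `SyntaxKind` enum itself, its helper methods (`is_keyword`,
/// `is_punct`, `is_literal`, `from_keyword`, `from_char`) and the `T!` macro
/// that maps punctuation and keyword tokens to their `SyntaxKind`.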
fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> Result<String> {
    let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar
        .punct
        .iter()
        .filter(|(token, _name)| token.len() == 1)
        .map(|(token, name)| (token.chars().next().unwrap(), format_ident!("{}", name)))
        .unzip();

    let punctuation_values = grammar.punct.iter().map(|(token, _name)| {
        if "{}[]()".contains(token) {
            let c = token.chars().next().unwrap();
            quote! { #c }
        } else {
            let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));
            quote! { #(#cs)* }
        }
    });
    let punctuation =
        grammar.punct.iter().map(|(_token, name)| format_ident!("{}", name)).collect::<Vec<_>>();

    let full_keywords_values = &grammar.keywords;
    let full_keywords =
        full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(&kw)));

    let all_keywords_values =
        grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
    let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
    let all_keywords = all_keywords_values
        .iter()
        .map(|name| format_ident!("{}_KW", to_upper_snake_case(&name)))
        .collect::<Vec<_>>();

    let literals =
        grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();

    let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();

    let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();

    let ast = quote! {
        #![allow(bad_style, missing_docs, unreachable_pub)]
        /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT_DEF`.
        #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
        #[repr(u16)]
        pub enum SyntaxKind {
            // Technical SyntaxKinds: they appear temporarily during parsing,
            // but never end up in the final tree
            #[doc(hidden)]
            TOMBSTONE,
            #[doc(hidden)]
            EOF,
            #(#punctuation,)*
            #(#all_keywords,)*
            #(#literals,)*
            #(#tokens,)*
            #(#nodes,)*

            // Technical kind so that we can cast from u16 safely
            #[doc(hidden)]
            __LAST,
        }
        use self::SyntaxKind::*;

        impl SyntaxKind {
            pub fn is_keyword(self) -> bool {
                match self {
                    #(#all_keywords)|* => true,
                    _ => false,
                }
            }

            pub fn is_punct(self) -> bool {
                match self {
                    #(#punctuation)|* => true,
                    _ => false,
                }
            }

            pub fn is_literal(self) -> bool {
                match self {
                    #(#literals)|* => true,
                    _ => false,
                }
            }

            pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
                let kw = match ident {
                    #(#full_keywords_values => #full_keywords,)*
                    _ => return None,
                };
                Some(kw)
            }

            pub fn from_char(c: char) -> Option<SyntaxKind> {
                let tok = match c {
                    #(#single_byte_tokens_values => #single_byte_tokens,)*
                    _ => return None,
                };
                Some(tok)
            }
        }

        #[macro_export]
        macro_rules! T {
            #([#punctuation_values] => { $crate::SyntaxKind::#punctuation };)*
            #([#all_keywords_idents] => { $crate::SyntaxKind::#all_keywords };)*
            [lifetime] => { $crate::SyntaxKind::LIFETIME };
            [ident] => { $crate::SyntaxKind::IDENT };
        }
    };

    crate::reformat(ast)
}
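/// Converts a camel-case name to SCREAMING_SNAKE_CASE, e.g. `TypeParam` becomes
/// `TYPE_PARAM`.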
fn to_upper_snake_case(s: &str) -> String {
    let mut buf = String::with_capacity(s.len());
    let mut prev = false;
    for c in s.chars() {
        if c.is_ascii_uppercase() && prev {
            buf.push('_')
        }
        prev = true;

        buf.push(c.to_ascii_uppercase());
    }
    buf
}
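/// Converts a camel-case name to lower snake_case, e.g. `TypeParam` becomes
/// `type_param`.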
fn to_lower_snake_case(s: &str) -> String {
    let mut buf = String::with_capacity(s.len());
    let mut prev = false;
    for c in s.chars() {
        if c.is_ascii_uppercase() && prev {
            buf.push('_')
        }
        prev = true;

        buf.push(c.to_ascii_lowercase());
    }
    buf
}
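/// Converts an UPPER_SNAKE_CASE name to PascalCase, e.g. `TYPE_PARAM` becomes
/// `TypeParam`.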
fn to_pascal_case(s: &str) -> String {
    let mut buf = String::with_capacity(s.len());
    let mut prev_is_underscore = true;
    for c in s.chars() {
        if c == '_' {
            prev_is_underscore = true;
        } else if prev_is_underscore {
            buf.push(c.to_ascii_uppercase());
            prev_is_underscore = false;
        } else {
            buf.push(c.to_ascii_lowercase());
        }
    }
    buf
}
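// Helpers that turn a `Field` description from the AST source into the pieces
// needed by the `quote!` templates above: cardinality, the token kind to match,
// the accessor name, and the accessor's return type.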
impl Field<'_> {
    fn is_many(&self) -> bool {
        match self {
            Field::Node { src: FieldSrc::Many(_), .. } => true,
            _ => false,
        }
    }
    fn token_kind(&self) -> Option<proc_macro2::TokenStream> {
        let res = match self {
            Field::Token(token) => {
                let token: proc_macro2::TokenStream = token.parse().unwrap();
                quote! { T![#token] }
            }
            _ => return None,
        };
        Some(res)
    }
    fn method_name(&self) -> proc_macro2::Ident {
        match self {
            Field::Token(name) => {
                let name = match *name {
                    ";" => "semicolon",
                    "->" => "thin_arrow",
                    "'{'" => "l_curly",
                    "'}'" => "r_curly",
                    "'('" => "l_paren",
                    "')'" => "r_paren",
                    "'['" => "l_brack",
                    "']'" => "r_brack",
                    "<" => "l_angle",
                    ">" => "r_angle",
                    "=" => "eq",
                    "!" => "excl",
                    "*" => "star",
                    "&" => "amp",
                    "_" => "underscore",
                    "." => "dot",
                    ".." => "dotdot",
                    "..." => "dotdotdot",
                    "=>" => "fat_arrow",
                    "@" => "at",
                    ":" => "colon",
                    "::" => "coloncolon",
                    "#" => "pound",
                    _ => name,
                };
                format_ident!("{}_token", name)
            }
            Field::Node { name, src } => match src {
                FieldSrc::Shorthand => format_ident!("{}", to_lower_snake_case(name)),
                _ => format_ident!("{}", name),
            },
        }
    }
    fn ty(&self) -> proc_macro2::Ident {
        match self {
            Field::Token(_) => format_ident!("SyntaxToken"),
            Field::Node { name, src } => match src {
                FieldSrc::Optional(ty) | FieldSrc::Many(ty) => format_ident!("{}", ty),
                FieldSrc::Shorthand => format_ident!("{}", name),
            },
        }
    }
}