Auto merge of #45791 - eddyb:quote-unquote, r=jseyfried
Prefer libproc_macro APIs to libsyntax ones in the quasi-quoter. The shift to using `proc_macro`'s own APIs in `proc_macro::quote`, both in the implementation of the quasi-quoter and in the Rust code it generates to build `TokenStream`s at runtime, greatly reduces the dependency on `libsyntax`; the generated runtime code is now completely free of it. This is a prerequisite for introducing more abstraction/indirection between `proc_macro` and compiler implementation details (mainly those from `libsyntax`), which I want to attempt.
cc @alexcrichton @jseyfried @nrc
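For context, here is a rough, hypothetical sketch (not taken from this PR; the function name is invented, and the feature gate and types reflect the unstable `proc_macro` API of that period) of the flavor of runtime code the reworked quasi-quoter can now emit: a `proc_macro::TokenStream` built purely from `proc_macro`'s own types such as `TokenNode`, `Term`, `Spacing`, and `Literal`, with no `libsyntax` involved. The `collect::<TokenStream>()` pattern mirrors the generated code in the diff below.

    // Hypothetical illustration only: roughly what `quote!(answer = 42)` could
    // lower to under the new scheme. The proc_macro types can only be used
    // during a procedural macro expansion, e.g. from a #[proc_macro] function.
    #![feature(proc_macro)]
    extern crate proc_macro;
    use proc_macro::{Literal, Spacing, Term, TokenNode, TokenStream};

    fn generated_tokens() -> TokenStream {
        [
            TokenNode::Term(Term::intern("answer")),  // the identifier `answer`
            TokenNode::Op('=', Spacing::Alone),       // the `=` token
            TokenNode::Literal(Literal::integer(42)), // the literal `42`
        ].iter().cloned().collect::<TokenStream>()
    }

Previously, the equivalent generated code went through the `__rt` re-exports of `libsyntax` items such as `TokenStreamBuilder` and `token::Token`, which is exactly the dependency this PR removes.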
This commit is contained in:
commit 563dc5171f
@@ -191,7 +191,7 @@ fn default() -> Span {
/// This is needed to implement a custom quoter.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn quote_span(span: Span) -> TokenStream {
    TokenStream(quote::Quote::quote(&span.0))
    quote::Quote::quote(span)
}

macro_rules! diagnostic_method {
@@ -728,7 +728,7 @@ fn to_internal(self) -> tokenstream::TokenStream {
#[unstable(feature = "proc_macro_internals", issue = "27812")]
#[doc(hidden)]
pub mod __internal {
    pub use quote::{Quoter, __rt};
    pub use quote::{LiteralKind, Quoter, unquote};

    use std::cell::Cell;

@@ -11,253 +11,255 @@
//! # Quasiquoter
//! This file contains the implementation internals of the quasiquoter provided by `quote!`.

//! This quasiquoter uses macros 2.0 hygiene to reliably use items from `__rt`,
//! including re-exported API `libsyntax`, to build a `syntax::tokenstream::TokenStream`
//! and wrap it into a `proc_macro::TokenStream`.
//! This quasiquoter uses macros 2.0 hygiene to reliably access
//! items from `proc_macro`, to build a `proc_macro::TokenStream`.

use {Delimiter, Literal, Spacing, Span, Term, TokenNode, TokenStream, TokenTree};

use syntax::ast::Ident;
use syntax::ext::base::{ExtCtxt, ProcMacro};
use syntax::parse::token::{self, Token, Lit};
use syntax::symbol::Symbol;
use syntax::tokenstream::{Delimited, TokenTree, TokenStream, TokenStreamBuilder};
use syntax_pos::{DUMMY_SP, Span};
use syntax_pos::hygiene::SyntaxContext;
use syntax::parse::token;
use syntax::tokenstream;

pub struct Quoter;

pub mod __rt {
    pub use syntax::ast::Ident;
    pub use syntax::parse::token;
    pub use syntax::symbol::Symbol;
    pub use syntax::tokenstream::{TokenStream, TokenStreamBuilder, TokenTree, Delimited};
    pub use super::{ctxt, span};

    pub fn unquote<T: Into<::TokenStream> + Clone>(tokens: &T) -> TokenStream {
        T::into(tokens.clone()).0
    }
}

pub fn ctxt() -> SyntaxContext {
    ::__internal::with_sess(|(_, mark)| SyntaxContext::empty().apply_mark(mark))
}

pub fn span() -> Span {
    ::Span::default().0
pub fn unquote<T: Into<TokenStream> + Clone>(tokens: &T) -> TokenStream {
    T::into(tokens.clone())
}

pub trait Quote {
    fn quote(&self) -> TokenStream;
    fn quote(self) -> TokenStream;
}

macro_rules! quote_tok {
    (,) => { Token::Comma };
    (.) => { Token::Dot };
    (:) => { Token::Colon };
    (::) => { Token::ModSep };
    (!) => { Token::Not };
    (<) => { Token::Lt };
    (>) => { Token::Gt };
    (_) => { Token::Underscore };
    (0) => { Token::Literal(token::Lit::Integer(Symbol::intern("0")), None) };
    (&) => { Token::BinOp(token::And) };
    ($i:ident) => { Token::Ident(Ident { name: Symbol::intern(stringify!($i)), ctxt: ctxt() }) };
    (,) => { TokenNode::Op(',', Spacing::Alone) };
    (.) => { TokenNode::Op('.', Spacing::Alone) };
    (:) => { TokenNode::Op(':', Spacing::Alone) };
    (::) => {
        [
            TokenNode::Op(':', Spacing::Joint),
            TokenNode::Op(':', Spacing::Alone)
        ].iter().cloned().collect::<TokenStream>()
    };
    (!) => { TokenNode::Op('!', Spacing::Alone) };
    (<) => { TokenNode::Op('<', Spacing::Alone) };
    (>) => { TokenNode::Op('>', Spacing::Alone) };
    (_) => { TokenNode::Op('_', Spacing::Alone) };
    (0) => { TokenNode::Literal(::Literal::integer(0)) };
    (&) => { TokenNode::Op('&', Spacing::Alone) };
    ($i:ident) => { TokenNode::Term(Term::intern(stringify!($i))) };
}

macro_rules! quote_tree {
    ((unquote $($t:tt)*)) => { TokenStream::from($($t)*) };
    ((unquote $($t:tt)*)) => { $($t)* };
    ((quote $($t:tt)*)) => { ($($t)*).quote() };
    (($($t:tt)*)) => { delimit(token::Paren, quote!($($t)*)) };
    ([$($t:tt)*]) => { delimit(token::Bracket, quote!($($t)*)) };
    ({$($t:tt)*}) => { delimit(token::Brace, quote!($($t)*)) };
    (rt) => { quote!(::__internal::__rt) };
    ($t:tt) => { TokenStream::from(TokenTree::Token(span(), quote_tok!($t))) };
}

fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream {
    TokenTree::Delimited(span(), Delimited { delim: delim, tts: stream.into() }).into()
    (($($t:tt)*)) => { TokenNode::Group(Delimiter::Parenthesis, quote!($($t)*)) };
    ([$($t:tt)*]) => { TokenNode::Group(Delimiter::Bracket, quote!($($t)*)) };
    ({$($t:tt)*}) => { TokenNode::Group(Delimiter::Brace, quote!($($t)*)) };
    ($t:tt) => { quote_tok!($t) };
}

macro_rules! quote {
    () => { TokenStream::empty() };
    ($($t:tt)*) => { [ $( quote_tree!($t), )* ].iter().cloned().collect::<TokenStream>() };
    ($($t:tt)*) => {
        [
            $(TokenStream::from(quote_tree!($t)),)*
        ].iter().cloned().collect::<TokenStream>()
    };
}

impl ProcMacro for Quoter {
    fn expand<'cx>(&self, cx: &'cx mut ExtCtxt, _: Span, stream: TokenStream) -> TokenStream {
    fn expand<'cx>(&self, cx: &'cx mut ExtCtxt,
                   _: ::syntax_pos::Span,
                   stream: tokenstream::TokenStream)
                   -> tokenstream::TokenStream {
        let mut info = cx.current_expansion.mark.expn_info().unwrap();
        info.callee.allow_internal_unstable = true;
        cx.current_expansion.mark.set_expn_info(info);
        ::__internal::set_sess(cx, || quote!(::TokenStream { 0: (quote stream) }))
        ::__internal::set_sess(cx, || TokenStream(stream).quote().0)
    }
}

impl<T: Quote> Quote for Option<T> {
    fn quote(&self) -> TokenStream {
        match *self {
            Some(ref t) => quote!(Some((quote t))),
    fn quote(self) -> TokenStream {
        match self {
            Some(t) => quote!(Some((quote t))),
            None => quote!(None),
        }
    }
}

impl Quote for TokenStream {
    fn quote(&self) -> TokenStream {
        let mut builder = TokenStreamBuilder::new();
        builder.push(quote!(rt::TokenStreamBuilder::new()));

        let mut trees = self.trees();
        loop {
            let (mut tree, mut is_joint) = match trees.next_as_stream() {
                Some(next) => next.as_tree(),
                None => return builder.add(quote!(.build())).build(),
            };
            if let TokenTree::Token(_, Token::Dollar) = tree {
                let (next_tree, next_is_joint) = match trees.next_as_stream() {
                    Some(next) => next.as_tree(),
                    None => panic!("unexpected trailing `$` in `quote!`"),
                };
                match next_tree {
                    TokenTree::Token(_, Token::Ident(..)) => {
                        builder.push(quote!(.add(rt::unquote(&(unquote next_tree)))));
                        continue
                    }
                    TokenTree::Token(_, Token::Dollar) => {
                        tree = next_tree;
                        is_joint = next_is_joint;
    fn quote(self) -> TokenStream {
        if self.is_empty() {
            return quote!(::TokenStream::empty());
        }
        let mut after_dollar = false;
        let tokens = self.into_iter().filter_map(|tree| {
            if after_dollar {
                after_dollar = false;
                match tree.kind {
                    TokenNode::Term(_) => {
                        return Some(quote!(::__internal::unquote(&(unquote tree)),));
                    }
                    TokenNode::Op('$', _) => {}
                    _ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
                }
            } else if let TokenNode::Op('$', _) = tree.kind {
                after_dollar = true;
                return None;
            }

            builder.push(match is_joint {
                true => quote!(.add((quote tree).joint())),
                false => quote!(.add(rt::TokenStream::from((quote tree)))),
            });
            Some(quote!(::TokenStream::from((quote tree)),))
        }).collect::<TokenStream>();

        if after_dollar {
            panic!("unexpected trailing `$` in `quote!`");
        }

        quote!([(unquote tokens)].iter().cloned().collect::<::TokenStream>())
    }
}

impl Quote for TokenTree {
    fn quote(&self) -> TokenStream {
        match *self {
            TokenTree::Token(span, ref token) => quote! {
                rt::TokenTree::Token((quote span), (quote token))
            },
            TokenTree::Delimited(span, ref delimited) => quote! {
                rt::TokenTree::Delimited((quote span), (quote delimited))
            },
        }
    fn quote(self) -> TokenStream {
        quote!(::TokenTree { span: (quote self.span), kind: (quote self.kind) })
    }
}

impl Quote for Delimited {
    fn quote(&self) -> TokenStream {
        quote!(rt::Delimited { delim: (quote self.delim), tts: (quote self.stream()).into() })
impl Quote for TokenNode {
    fn quote(self) -> TokenStream {
        macro_rules! gen_match {
            ($($i:ident($($arg:ident),+)),*) => {
                match self {
                    $(TokenNode::$i($($arg),+) => quote! {
                        ::TokenNode::$i($((quote $arg)),+)
                    },)*
                }
            }
        }

        gen_match! { Op(op, kind), Group(delim, tokens), Term(term), Literal(lit) }
    }
}

impl Quote for char {
    fn quote(self) -> TokenStream {
        TokenNode::Literal(Literal::character(self)).into()
    }
}

impl<'a> Quote for &'a str {
    fn quote(&self) -> TokenStream {
        TokenTree::Token(span(), Token::Literal(token::Lit::Str_(Symbol::intern(self)), None))
            .into()
    fn quote(self) -> TokenStream {
        TokenNode::Literal(Literal::string(self)).into()
    }
}

impl Quote for usize {
    fn quote(&self) -> TokenStream {
        let integer_symbol = Symbol::intern(&self.to_string());
        TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Integer(integer_symbol), None))
            .into()
    fn quote(self) -> TokenStream {
        TokenNode::Literal(Literal::integer(self as i128)).into()
    }
}

impl Quote for Ident {
    fn quote(&self) -> TokenStream {
        quote!(rt::Ident { name: (quote self.name), ctxt: rt::ctxt() })
    }
}

impl Quote for Symbol {
    fn quote(&self) -> TokenStream {
        quote!(rt::Symbol::intern((quote &*self.as_str())))
impl Quote for Term {
    fn quote(self) -> TokenStream {
        quote!(::Term::intern((quote self.as_str())))
    }
}

impl Quote for Span {
    fn quote(&self) -> TokenStream {
        quote!(rt::span())
    fn quote(self) -> TokenStream {
        quote!(::Span::default())
    }
}

impl Quote for Token {
    fn quote(&self) -> TokenStream {
        macro_rules! gen_match {
            ($($i:ident),*; $($t:tt)*) => {
                match *self {
                    $( Token::$i => quote!(rt::token::$i), )*
                    $( $t )*
                }
            }
macro_rules! literals {
    ($($i:ident),*; $($raw:ident),*) => {
        pub enum LiteralKind {
            $($i,)*
            $($raw(usize),)*
        }

        gen_match! {
            Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, At, Dot, DotDot, DotDotDot,
            DotDotEq, Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar,
            Question, Underscore;

            Token::OpenDelim(delim) => quote!(rt::token::OpenDelim((quote delim))),
            Token::CloseDelim(delim) => quote!(rt::token::CloseDelim((quote delim))),
            Token::BinOp(tok) => quote!(rt::token::BinOp((quote tok))),
            Token::BinOpEq(tok) => quote!(rt::token::BinOpEq((quote tok))),
            Token::Ident(ident) => quote!(rt::token::Ident((quote ident))),
            Token::Lifetime(ident) => quote!(rt::token::Lifetime((quote ident))),
            Token::Literal(lit, sfx) => quote!(rt::token::Literal((quote lit), (quote sfx))),
            _ => panic!("Unhandled case!"),
        }
    }
}

impl Quote for token::BinOpToken {
    fn quote(&self) -> TokenStream {
        macro_rules! gen_match {
            ($($i:ident),*) => {
                match *self {
                    $( token::BinOpToken::$i => quote!(rt::token::BinOpToken::$i), )*
                }
            }
        }

        gen_match!(Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr)
    }
}

impl Quote for Lit {
    fn quote(&self) -> TokenStream {
        macro_rules! gen_match {
            ($($i:ident),*; $($raw:ident),*) => {
                match *self {
                    $( Lit::$i(lit) => quote!(rt::token::Lit::$i((quote lit))), )*
                    $( Lit::$raw(lit, n) => {
                        quote!(::syntax::parse::token::Lit::$raw((quote lit), (quote n)))
        impl LiteralKind {
            pub fn with_contents_and_suffix(self, contents: Term, suffix: Option<Term>)
                -> Literal {
                let contents = contents.0;
                let suffix = suffix.map(|t| t.0);
                match self {
                    $(LiteralKind::$i => {
                        Literal(token::Literal(token::Lit::$i(contents), suffix))
                    })*
                    $(LiteralKind::$raw(n) => {
                        Literal(token::Literal(token::Lit::$raw(contents, n), suffix))
                    })*
                }
            }
        }

        gen_match!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw)
    }
}
        impl Literal {
            fn kind_contents_and_suffix(self) -> (LiteralKind, Term, Option<Term>) {
                let (lit, suffix) = match self.0 {
                    token::Literal(lit, suffix) => (lit, suffix),
                    _ => panic!("unsupported literal {:?}", self.0),
                };

impl Quote for token::DelimToken {
    fn quote(&self) -> TokenStream {
        macro_rules! gen_match {
            ($($i:ident),*) => {
                match *self {
                    $(token::DelimToken::$i => { quote!(rt::token::DelimToken::$i) })*
                let (kind, contents) = match lit {
                    $(token::Lit::$i(contents) => (LiteralKind::$i, contents),)*
                    $(token::Lit::$raw(contents, n) => (LiteralKind::$raw(n), contents),)*
                };
                (kind, Term(contents), suffix.map(Term))
            }
        }

        impl Quote for LiteralKind {
            fn quote(self) -> TokenStream {
                match self {
                    $(LiteralKind::$i => quote! {
                        ::__internal::LiteralKind::$i
                    },)*
                    $(LiteralKind::$raw(n) => quote! {
                        ::__internal::LiteralKind::$raw((quote n))
                    },)*
                }
            }
        }

        gen_match!(Paren, Bracket, Brace, NoDelim)
        impl Quote for Literal {
            fn quote(self) -> TokenStream {
                let (kind, contents, suffix) = self.kind_contents_and_suffix();
                quote! {
                    (quote kind).with_contents_and_suffix((quote contents), (quote suffix))
                }
            }
        }
    }
}

literals!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw);

impl Quote for Delimiter {
    fn quote(self) -> TokenStream {
        macro_rules! gen_match {
            ($($i:ident),*) => {
                match self {
                    $(Delimiter::$i => { quote!(::Delimiter::$i) })*
                }
            }
        }

        gen_match!(Parenthesis, Brace, Bracket, None)
    }
}

impl Quote for Spacing {
    fn quote(self) -> TokenStream {
        macro_rules! gen_match {
            ($($i:ident),*) => {
                match self {
                    $(Spacing::$i => { quote!(::Spacing::$i) })*
                }
            }
        }

        gen_match!(Alone, Joint)
    }
}