2016-08-04 14:20:01 -05:00
|
|
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
|
|
|
|
// file at the top-level directory of this distribution and at
|
|
|
|
// http://rust-lang.org/COPYRIGHT.
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
|
|
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
|
|
|
// option. This file may not be copied, modified, or distributed
|
|
|
|
// except according to those terms.
|
|
|
|
|
2016-10-03 11:49:39 -05:00
|
|
|
//! A support library for macro authors when defining new macros.
|
2016-08-04 14:20:01 -05:00
|
|
|
//!
|
2016-10-03 11:49:39 -05:00
|
|
|
//! This library, provided by the standard distribution, provides the types
|
|
|
|
//! consumed in the interfaces of procedurally defined macro definitions.
|
|
|
|
//! Currently the primary use of this crate is to provide the ability to define
|
|
|
|
//! new custom derive modes through `#[proc_macro_derive]`.
|
2016-08-04 14:20:01 -05:00
|
|
|
//!
|
2016-10-03 11:49:39 -05:00
|
|
|
//! Note that this crate is intentionally very bare-bones currently. The main
|
|
|
|
//! type, `TokenStream`, only supports `fmt::Display` and `FromStr`
|
|
|
|
//! implementations, indicating that it can only go to and come from a string.
|
|
|
|
//! This functionality is intended to be expanded over time as more surface
|
|
|
|
//! area for macro authors is stabilized.
|
2017-01-01 19:33:37 -06:00
|
|
|
//!
|
2017-06-12 11:30:08 -05:00
|
|
|
//! See [the book](../book/first-edition/procedural-macros.html) for more.
|
2016-08-04 14:20:01 -05:00
|
|
|
|
|
|
|
#![crate_name = "proc_macro"]
|
2017-01-01 18:14:35 -06:00
|
|
|
#![stable(feature = "proc_macro_lib", since = "1.15.0")]
|
2016-08-04 14:20:01 -05:00
|
|
|
#![crate_type = "rlib"]
|
2016-10-03 11:49:39 -05:00
|
|
|
#![crate_type = "dylib"]
|
2016-12-29 11:47:34 -06:00
|
|
|
#![deny(warnings)]
|
2016-10-03 11:49:39 -05:00
|
|
|
#![deny(missing_docs)]
|
2017-03-11 20:54:43 -06:00
|
|
|
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
|
|
|
|
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
|
|
|
|
html_root_url = "https://doc.rust-lang.org/nightly/",
|
|
|
|
html_playground_url = "https://play.rust-lang.org/",
|
|
|
|
issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
|
|
|
|
test(no_crate_inject, attr(deny(warnings))),
|
|
|
|
test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))]
|
2016-08-04 14:20:01 -05:00
|
|
|
|
2017-03-17 18:41:09 -05:00
|
|
|
#![feature(i128_type)]
|
2016-08-04 14:20:01 -05:00
|
|
|
#![feature(rustc_private)]
|
2016-10-03 11:49:39 -05:00
|
|
|
#![feature(staged_api)]
|
|
|
|
#![feature(lang_items)]
|
2016-08-04 14:20:01 -05:00
|
|
|
|
2017-03-28 20:55:01 -05:00
|
|
|
#[macro_use]
|
2016-08-04 14:20:01 -05:00
|
|
|
extern crate syntax;
|
2017-03-17 18:41:09 -05:00
|
|
|
extern crate syntax_pos;
|
2016-08-04 14:20:01 -05:00
|
|
|
|
2017-06-04 20:41:33 -05:00
|
|
|
use std::{ascii, fmt, iter};
|
2016-10-03 11:49:39 -05:00
|
|
|
use std::str::FromStr;
|
|
|
|
|
2017-03-17 18:41:09 -05:00
|
|
|
use syntax::ast;
|
2017-01-07 23:04:49 -06:00
|
|
|
use syntax::errors::DiagnosticBuilder;
|
2017-03-28 20:55:01 -05:00
|
|
|
use syntax::parse::{self, token, parse_stream_from_source_str};
|
|
|
|
use syntax::print::pprust;
|
2017-06-04 20:41:33 -05:00
|
|
|
use syntax::symbol::Symbol;
|
2017-03-17 18:23:12 -05:00
|
|
|
use syntax::tokenstream;
|
2017-03-17 18:41:09 -05:00
|
|
|
use syntax_pos::DUMMY_SP;
|
|
|
|
use syntax_pos::SyntaxContext;
|
2017-07-11 02:52:50 -05:00
|
|
|
use syntax_pos::hygiene::Mark;
|
2016-10-03 11:49:39 -05:00
|
|
|
|
|
|
|
/// The main type provided by this crate, representing an abstract stream of
/// tokens.
///
/// This is both the input and output of `#[proc_macro_derive]` definitions.
/// Currently it's required to be a list of valid Rust items, but this
/// restriction may be lifted in the future.
///
/// The API of this type is intentionally bare-bones, but it'll be expanded over
/// time!
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
#[derive(Clone, Debug)]
// Newtype over the compiler-internal token stream; the field stays private so
// external code can only go through the stable surface (`Display`/`FromStr`).
pub struct TokenStream(tokenstream::TokenStream);
|
2016-10-03 11:49:39 -05:00
|
|
|
|
|
|
|
/// Error returned from `TokenStream::from_str`.
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
#[derive(Debug)]
pub struct LexError {
    // Private zero-sized field: carries no information but prevents
    // constructing a `LexError` outside this crate.
    _inner: (),
}
|
|
|
|
|
2017-03-17 18:41:09 -05:00
|
|
|
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl FromStr for TokenStream {
    type Err = LexError;

    /// Lexes and parses `src` into a `TokenStream` using the parse session of
    /// the macro expansion currently in progress (installed by
    /// `__internal::set_sess`).
    fn from_str(src: &str) -> Result<TokenStream, LexError> {
        __internal::with_sess(|(sess, mark)| {
            let src = src.to_string();
            // Synthetic file name that shows up in diagnostics for this text.
            let name = "<proc-macro source code>".to_string();
            let expn_info = mark.expn_info().unwrap();
            let call_site = expn_info.call_site;
            // notify the expansion info that it is unhygienic
            let mark = Mark::fresh(mark);
            mark.set_expn_info(expn_info);
            // Tokens get the invocation's call-site location, but a fresh
            // hygiene context carrying the new mark.
            let span = syntax_pos::Span {
                ctxt: SyntaxContext::empty().apply_mark(mark),
                ..call_site
            };
            let stream = parse::parse_stream_from_source_str(name, src, sess, Some(span));
            // NOTE(review): this always returns `Ok`; lexing problems are
            // presumably reported through the session's diagnostics rather
            // than as `LexError` -- confirm.
            Ok(__internal::token_stream_wrap(stream))
        })
    }
}
|
|
|
|
|
|
|
|
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
|
|
|
|
impl fmt::Display for TokenStream {
|
|
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
|
|
self.0.fmt(f)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input.
/// For example, `quote!(a + b)` will produce an expression that, when evaluated,
/// constructs the `TokenStream` `[Word("a"), Op('+', Alone), Word("b")]`.
///
/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
/// To quote `$` itself, use `$$`.
#[unstable(feature = "proc_macro", issue = "38356")]
#[macro_export]
// NOTE(review): the expansion here is intentionally empty; the real expansion
// appears to be performed by the `Quoter` expander re-exported from
// `mod quote` via `__internal` -- confirm.
macro_rules! quote { () => {} }
|
|
|
|
|
2017-06-04 20:41:33 -05:00
|
|
|
#[unstable(feature = "proc_macro_internals", issue = "27812")]
|
|
|
|
#[doc(hidden)]
|
|
|
|
mod quote;
|
|
|
|
|
2017-03-17 18:41:09 -05:00
|
|
|
#[unstable(feature = "proc_macro", issue = "38356")]
|
|
|
|
impl From<TokenTree> for TokenStream {
|
|
|
|
fn from(tree: TokenTree) -> TokenStream {
|
2017-06-04 20:41:33 -05:00
|
|
|
TokenStream(tree.to_internal())
|
2017-03-17 18:41:09 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
#[unstable(feature = "proc_macro", issue = "38356")]
|
2017-06-04 20:41:33 -05:00
|
|
|
impl From<TokenNode> for TokenStream {
|
|
|
|
fn from(kind: TokenNode) -> TokenStream {
|
2017-03-17 18:41:09 -05:00
|
|
|
TokenTree::from(kind).into()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
#[unstable(feature = "proc_macro", issue = "38356")]
|
|
|
|
impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
|
|
|
|
fn from_iter<I: IntoIterator<Item = T>>(streams: I) -> Self {
|
2017-06-04 20:41:33 -05:00
|
|
|
let mut builder = tokenstream::TokenStreamBuilder::new();
|
2017-03-17 18:41:09 -05:00
|
|
|
for stream in streams {
|
|
|
|
builder.push(stream.into().0);
|
|
|
|
}
|
|
|
|
TokenStream(builder.build())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
#[unstable(feature = "proc_macro", issue = "38356")]
impl IntoIterator for TokenStream {
    type Item = TokenTree;
    type IntoIter = TokenTreeIter;

    /// Iterates over the stream's top-level token trees.
    fn into_iter(self) -> TokenTreeIter {
        // `next: None` -- there is no buffered remainder of a split
        // multi-character token yet (see `TokenTree::from_internal`).
        TokenTreeIter { cursor: self.0.trees(), next: None }
    }
}
|
|
|
|
|
|
|
|
impl TokenStream {
    /// Returns an empty `TokenStream`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn empty() -> TokenStream {
        TokenStream(tokenstream::TokenStream::empty())
    }

    /// Checks if this `TokenStream` is empty.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}
|
|
|
|
|
|
|
|
/// A region of source code, along with macro expansion information.
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Copy, Clone, Debug)]
// Newtype over the compiler-internal span; keeps the internal type out of the
// public API.
pub struct Span(syntax_pos::Span);
|
|
|
|
|
|
|
|
#[unstable(feature = "proc_macro", issue = "38356")]
impl Default for Span {
    /// Returns a span located at the current expansion's call site, but
    /// carrying the hygiene context of the current expansion mark.
    fn default() -> Span {
        ::__internal::with_sess(|(_, mark)| Span(syntax_pos::Span {
            // Fresh hygiene context tagged with this expansion's mark...
            ctxt: SyntaxContext::empty().apply_mark(mark),
            // ...at the location of the macro invocation.
            ..mark.expn_info().unwrap().call_site
        }))
    }
}
|
|
|
|
|
2017-06-04 20:41:33 -05:00
|
|
|
/// Quote a `Span` into a `TokenStream`.
/// This is needed to implement a custom quoter.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn quote_span(span: Span) -> TokenStream {
    // Delegates to the `Quote` impl for the internal span type.
    TokenStream(quote::Quote::quote(&span.0))
}
|
|
|
|
|
2017-03-17 18:41:09 -05:00
|
|
|
impl Span {
    /// The span of the invocation of the current procedural macro.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn call_site() -> Span {
        // Requires an expansion to be in progress (set up by
        // `__internal::set_sess`); `with_sess` panics otherwise.
        ::__internal::with_sess(|(_, mark)| Span(mark.expn_info().unwrap().call_site))
    }
}
|
|
|
|
|
|
|
|
/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Clone, Debug)]
pub struct TokenTree {
    /// The `TokenTree`'s span
    pub span: Span,
    /// Description of the `TokenTree`
    pub kind: TokenNode,
}
|
|
|
|
|
|
|
|
#[unstable(feature = "proc_macro", issue = "38356")]
|
2017-06-04 20:41:33 -05:00
|
|
|
impl From<TokenNode> for TokenTree {
|
|
|
|
fn from(kind: TokenNode) -> TokenTree {
|
2017-03-17 18:41:09 -05:00
|
|
|
TokenTree { span: Span::default(), kind: kind }
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
#[unstable(feature = "proc_macro", issue = "38356")]
|
|
|
|
impl fmt::Display for TokenTree {
|
|
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
|
|
TokenStream::from(self.clone()).fmt(f)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Description of a `TokenTree`
#[derive(Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum TokenNode {
    /// A delimited tokenstream.
    Group(Delimiter, TokenStream),
    /// A unicode identifier.
    ///
    /// Lifetimes (e.g. `'a`) are also represented as `Term`s; see the
    /// `Ident(..) | Lifetime(..)` handling in `TokenTree::from_internal`.
    Term(Term),
    /// A punctuation character (`+`, `,`, `$`, etc.).
    Op(char, Spacing),
    /// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
    Literal(Literal),
}
|
|
|
|
|
|
|
|
/// Describes how a sequence of token trees is delimited.
#[derive(Copy, Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum Delimiter {
    /// `( ... )`
    Parenthesis,
    // Fixed swapped doc comments: `Brace` maps to `token::Brace` (curly
    // braces) and `Bracket` to `token::Bracket` (square brackets); see
    // `Delimiter::from_internal`.
    /// `{ ... }`
    Brace,
    /// `[ ... ]`
    Bracket,
    /// An implicit delimiter, e.g. `$var`, where $var is `...`.
    None,
}
|
|
|
|
|
|
|
|
/// An interned string.
#[derive(Copy, Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
// Newtype over the compiler's interned `Symbol`; `Copy` because a symbol is
// just an index into the interner.
pub struct Term(Symbol);
|
2017-03-17 18:41:09 -05:00
|
|
|
|
2017-06-04 20:41:33 -05:00
|
|
|
impl Term {
    /// Intern a string into a `Term`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn intern(string: &str) -> Term {
        Term(Symbol::intern(string))
    }

    /// Get a reference to the interned string.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn as_str(&self) -> &str {
        // Casts away the lifetime of the string borrowed from the interner
        // so it can be returned tied to `&self`.
        // NOTE(review): this is only sound if interned symbol strings are
        // never deallocated for the life of the session -- confirm against
        // the `Symbol` interner's guarantees.
        unsafe { &*(&*self.0.as_str() as *const str) }
    }
}
|
|
|
|
|
|
|
|
/// Whether an `Op` is either followed immediately by another `Op` or followed by whitespace.
#[derive(Copy, Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum Spacing {
    /// e.g. `+` is `Alone` in `+ =`.
    Alone,
    /// e.g. `+` is `Joint` in `+=`.
    Joint,
}
|
|
|
|
|
|
|
|
/// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
#[derive(Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
// Newtype over the compiler-internal literal token; constructed via the
// associated functions on `impl Literal` below.
pub struct Literal(token::Token);
|
|
|
|
|
|
|
|
#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Display for Literal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Reuse `TokenTree`'s Display by wrapping the literal in a tree with
        // a dummy span.
        TokenTree { kind: TokenNode::Literal(self.clone()), span: Span(DUMMY_SP) }.fmt(f)
    }
}
|
|
|
|
|
|
|
|
// Generates one suffixed-integer-literal constructor (`Literal::u8`, ...) per
// named primitive integer type; each delegates to `Literal::typed_integer`
// with the type name as the literal suffix.
macro_rules! int_literals {
    ($($int_kind:ident),*) => {$(
        /// Integer literal.
        #[unstable(feature = "proc_macro", issue = "38356")]
        pub fn $int_kind(n: $int_kind) -> Literal {
            Literal::typed_integer(n as i128, stringify!($int_kind))
        }
    )*}
}
|
|
|
|
|
|
|
|
impl Literal {
    /// Integer literal
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn integer(n: i128) -> Literal {
        // Unsuffixed integer: stored as its decimal string form with no
        // type suffix.
        Literal(token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), None))
    }

    // Expands to `pub fn u8(..)`, `pub fn i8(..)`, etc.
    int_literals!(u8, i8, u16, i16, u32, i32, u64, i64, usize, isize);
    // Shared implementation for the generated constructors above; `kind` is
    // the suffix string (e.g. "u8") attached to the literal token.
    fn typed_integer(n: i128, kind: &'static str) -> Literal {
        Literal(token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())),
                               Some(Symbol::intern(kind))))
    }

    /// Floating point literal.
    ///
    /// Panics if `n` is NaN or infinite, since those values have no literal
    /// source form.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn float(n: f64) -> Literal {
        if !n.is_finite() {
            panic!("Invalid float literal {}", n);
        }
        Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())), None))
    }

    /// Floating point literal.
    ///
    /// Panics if `n` is NaN or infinite.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn f32(n: f32) -> Literal {
        if !n.is_finite() {
            panic!("Invalid f32 literal {}", n);
        }
        Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())),
                               Some(Symbol::intern("f32"))))
    }

    /// Floating point literal.
    ///
    /// Panics if `n` is NaN or infinite.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn f64(n: f64) -> Literal {
        if !n.is_finite() {
            panic!("Invalid f64 literal {}", n);
        }
        Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())),
                               Some(Symbol::intern("f64"))))
    }

    /// String literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn string(string: &str) -> Literal {
        let mut escaped = String::new();
        for ch in string.chars() {
            // Escape every character as `\u{..}` so the stored literal text
            // is unambiguous regardless of the input's contents.
            escaped.extend(ch.escape_unicode());
        }
        Literal(token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None))
    }

    /// Character literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn character(ch: char) -> Literal {
        // Same `\u{..}` escaping strategy as `string` above.
        let mut escaped = String::new();
        escaped.extend(ch.escape_unicode());
        Literal(token::Literal(token::Lit::Char(Symbol::intern(&escaped)), None))
    }

    /// Byte string literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn byte_string(bytes: &[u8]) -> Literal {
        // Escape each byte via `ascii::escape_default` (e.g. `\n`, `\xNN`)
        // and store the escaped text.
        let string = bytes.iter().cloned().flat_map(ascii::escape_default)
                          .map(Into::<char>::into).collect::<String>();
        Literal(token::Literal(token::Lit::ByteStr(Symbol::intern(&string)), None))
    }
}
|
|
|
|
|
|
|
|
/// An iterator over `TokenTree`s.
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct TokenTreeIter {
    // Cursor over the underlying compiler-internal token stream.
    cursor: tokenstream::Cursor,
    // Buffered remainder of a multi-character token that
    // `TokenTree::from_internal` split into several `Op`s; consumed before
    // the cursor is advanced again.
    next: Option<tokenstream::TokenStream>,
}
|
|
|
|
|
|
|
|
#[unstable(feature = "proc_macro", issue = "38356")]
impl Iterator for TokenTreeIter {
    type Item = TokenTree;

    fn next(&mut self) -> Option<TokenTree> {
        loop {
            // Prefer the buffered remainder of a previously split token;
            // otherwise pull the next stream off the cursor.
            let next =
                unwrap_or!(self.next.take().or_else(|| self.cursor.next_as_stream()), return None);
            let tree = TokenTree::from_internal(next, &mut self.next);
            if tree.span.0 == DUMMY_SP {
                // Flatten span-less implicit groups (e.g. interpolated
                // fragments) inline instead of yielding them as one tree.
                if let TokenNode::Group(Delimiter::None, stream) = tree.kind {
                    self.cursor.insert(stream.0);
                    continue
                }
            }
            return Some(tree);
        }
    }
}
|
|
|
|
|
|
|
|
impl Delimiter {
|
2017-06-04 20:41:33 -05:00
|
|
|
fn from_internal(delim: token::DelimToken) -> Delimiter {
|
2017-03-17 18:41:09 -05:00
|
|
|
match delim {
|
|
|
|
token::Paren => Delimiter::Parenthesis,
|
|
|
|
token::Brace => Delimiter::Brace,
|
|
|
|
token::Bracket => Delimiter::Bracket,
|
|
|
|
token::NoDelim => Delimiter::None,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-06-04 20:41:33 -05:00
|
|
|
fn to_internal(self) -> token::DelimToken {
|
2017-03-17 18:41:09 -05:00
|
|
|
match self {
|
|
|
|
Delimiter::Parenthesis => token::Paren,
|
|
|
|
Delimiter::Brace => token::Brace,
|
|
|
|
Delimiter::Bracket => token::Bracket,
|
|
|
|
Delimiter::None => token::NoDelim,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl TokenTree {
    // Converts the next tree of a compiler-internal `TokenStream` into the
    // public representation. Multi-character operator tokens (e.g. `+=`) are
    // split into single-character `Op`s: the first character is returned now
    // and the remainder is stashed in `next` for the iterator's next call.
    fn from_internal(stream: tokenstream::TokenStream, next: &mut Option<tokenstream::TokenStream>)
                     -> TokenTree {
        use syntax::parse::token::*;

        let (tree, is_joint) = stream.as_tree();
        let (mut span, token) = match tree {
            tokenstream::TokenTree::Token(span, token) => (span, token),
            // Delimited groups map directly to `TokenNode::Group`.
            tokenstream::TokenTree::Delimited(span, delimed) => {
                let delimiter = Delimiter::from_internal(delimed.delim);
                return TokenTree {
                    span: Span(span),
                    kind: TokenNode::Group(delimiter, TokenStream(delimed.tts.into())),
                };
            }
        };

        let op_kind = if is_joint { Spacing::Joint } else { Spacing::Alone };
        // Single-character operator with the stream's own spacing.
        macro_rules! op {
            ($op:expr) => { TokenNode::Op($op, op_kind) }
        }

        // Split a multi-character token: yield `$first` now (Joint), buffer
        // `$rest` in `next`.
        macro_rules! joint {
            ($first:expr, $rest:expr) => { joint($first, $rest, is_joint, &mut span, next) }
        }

        fn joint(first: char, rest: Token, is_joint: bool, span: &mut syntax_pos::Span,
                 next: &mut Option<tokenstream::TokenStream>)
                 -> TokenNode {
            let (first_span, rest_span) = (*span, *span);
            *span = first_span;
            let tree = tokenstream::TokenTree::Token(rest_span, rest);
            // Preserve the original token's trailing spacing on the buffered
            // remainder; the emitted first char is always `Joint`.
            *next = Some(if is_joint { tree.joint() } else { tree.into() });
            TokenNode::Op(first, Spacing::Joint)
        }

        let kind = match token {
            Eq => op!('='),
            Lt => op!('<'),
            Le => joint!('<', Eq),
            EqEq => joint!('=', Eq),
            Ne => joint!('!', Eq),
            Ge => joint!('>', Eq),
            Gt => op!('>'),
            AndAnd => joint!('&', BinOp(And)),
            OrOr => joint!('|', BinOp(Or)),
            Not => op!('!'),
            Tilde => op!('~'),
            BinOp(Plus) => op!('+'),
            BinOp(Minus) => op!('-'),
            BinOp(Star) => op!('*'),
            BinOp(Slash) => op!('/'),
            BinOp(Percent) => op!('%'),
            BinOp(Caret) => op!('^'),
            BinOp(And) => op!('&'),
            BinOp(Or) => op!('|'),
            BinOp(Shl) => joint!('<', Lt),
            BinOp(Shr) => joint!('>', Gt),
            BinOpEq(Plus) => joint!('+', Eq),
            BinOpEq(Minus) => joint!('-', Eq),
            BinOpEq(Star) => joint!('*', Eq),
            BinOpEq(Slash) => joint!('/', Eq),
            BinOpEq(Percent) => joint!('%', Eq),
            BinOpEq(Caret) => joint!('^', Eq),
            BinOpEq(And) => joint!('&', Eq),
            BinOpEq(Or) => joint!('|', Eq),
            BinOpEq(Shl) => joint!('<', Le),
            BinOpEq(Shr) => joint!('>', Ge),
            At => op!('@'),
            Dot => op!('.'),
            DotDot => joint!('.', Dot),
            DotDotDot => joint!('.', DotDot),
            Comma => op!(','),
            Semi => op!(';'),
            Colon => op!(':'),
            ModSep => joint!(':', Colon),
            RArrow => joint!('-', Gt),
            LArrow => joint!('<', BinOp(Minus)),
            FatArrow => joint!('=', Gt),
            Pound => op!('#'),
            Dollar => op!('$'),
            Question => op!('?'),
            Underscore => op!('_'),

            // Identifiers and lifetimes both become `Term`s.
            Ident(ident) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
            Literal(..) | DocComment(..) => TokenNode::Literal(self::Literal(token)),

            // Interpolated (macro-expanded) fragments become implicit groups,
            // lazily materialized by re-lexing their pretty-printed text.
            Interpolated(ref nt) => __internal::with_sess(|(sess, _)| {
                TokenNode::Group(Delimiter::None, TokenStream(nt.1.force(|| {
                    // FIXME(jseyfried): Avoid this pretty-print + reparse hack
                    let name = "<macro expansion>".to_owned();
                    let source = pprust::token_to_string(&token);
                    parse_stream_from_source_str(name, source, sess, Some(span))
                })))
            }),

            // Delimiters were handled by the `Delimited` arm above; the
            // remaining variants never appear in a parsed token stream.
            OpenDelim(..) | CloseDelim(..) => unreachable!(),
            Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
        };

        TokenTree { span: Span(span), kind: kind }
    }

    // Converts the public tree back into a compiler-internal `TokenStream`;
    // the inverse of `from_internal` for single-character `Op`s.
    fn to_internal(self) -> tokenstream::TokenStream {
        use syntax::parse::token::*;
        use syntax::tokenstream::{TokenTree, Delimited};

        let (op, kind) = match self.kind {
            TokenNode::Op(op, kind) => (op, kind),
            TokenNode::Group(delimiter, tokens) => {
                return TokenTree::Delimited(self.span.0, Delimited {
                    delim: delimiter.to_internal(),
                    tts: tokens.0.into(),
                }).into();
            },
            TokenNode::Term(symbol) => {
                let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt };
                // A leading quote distinguishes a lifetime from an identifier.
                let token =
                    if symbol.0.as_str().starts_with("'") { Lifetime(ident) } else { Ident(ident) };
                return TokenTree::Token(self.span.0, token).into();
            }
            TokenNode::Literal(token) => return TokenTree::Token(self.span.0, token.0).into(),
        };

        let token = match op {
            '=' => Eq,
            '<' => Lt,
            '>' => Gt,
            '!' => Not,
            '~' => Tilde,
            '+' => BinOp(Plus),
            '-' => BinOp(Minus),
            '*' => BinOp(Star),
            '/' => BinOp(Slash),
            '%' => BinOp(Percent),
            '^' => BinOp(Caret),
            '&' => BinOp(And),
            '|' => BinOp(Or),
            '@' => At,
            '.' => Dot,
            ',' => Comma,
            ';' => Semi,
            ':' => Colon,
            '#' => Pound,
            '$' => Dollar,
            '?' => Question,
            '_' => Underscore,
            _ => panic!("unsupported character {}", op),
        };

        let tree = TokenTree::Token(self.span.0, token);
        match kind {
            Spacing::Alone => tree.into(),
            Spacing::Joint => tree.joint(),
        }
    }
}
|
|
|
|
|
2016-10-03 11:49:39 -05:00
|
|
|
/// Permanently unstable internal implementation details of this crate. This
|
|
|
|
/// should not be used.
|
|
|
|
///
|
|
|
|
/// These methods are used by the rest of the compiler to generate instances of
|
|
|
|
/// `TokenStream` to hand to macro definitions, as well as consume the output.
|
|
|
|
///
|
|
|
|
/// Note that this module is also intentionally separate from the rest of the
|
|
|
|
/// crate. This allows the `#[unstable]` directive below to naturally apply to
|
|
|
|
/// all of the contents.
|
|
|
|
#[unstable(feature = "proc_macro_internals", issue = "27812")]
|
|
|
|
#[doc(hidden)]
|
|
|
|
pub mod __internal {
|
2017-06-04 20:41:33 -05:00
|
|
|
pub use quote::{Quoter, __rt};
|
2017-03-17 18:41:09 -05:00
|
|
|
|
2016-10-03 11:49:39 -05:00
|
|
|
use std::cell::Cell;
|
|
|
|
|
|
|
|
use syntax::ast;
|
2017-03-28 00:32:43 -05:00
|
|
|
use syntax::ext::base::ExtCtxt;
|
|
|
|
use syntax::ext::hygiene::Mark;
|
2016-10-03 11:49:39 -05:00
|
|
|
use syntax::ptr::P;
|
2017-03-28 20:55:01 -05:00
|
|
|
use syntax::parse::{self, ParseSess};
|
|
|
|
use syntax::parse::token::{self, Token};
|
2017-03-17 18:23:12 -05:00
|
|
|
use syntax::tokenstream;
|
2017-03-28 20:55:01 -05:00
|
|
|
use syntax_pos::DUMMY_SP;
|
2017-01-07 23:04:49 -06:00
|
|
|
|
|
|
|
use super::{TokenStream, LexError};
|
2016-10-03 11:49:39 -05:00
|
|
|
|
|
|
|
pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
|
2017-03-28 20:55:01 -05:00
|
|
|
let token = Token::interpolated(token::NtItem(item));
|
|
|
|
TokenStream(tokenstream::TokenTree::Token(DUMMY_SP, token).into())
|
2017-01-07 23:04:49 -06:00
|
|
|
}
|
|
|
|
|
2017-03-17 18:23:12 -05:00
|
|
|
pub fn token_stream_wrap(inner: tokenstream::TokenStream) -> TokenStream {
|
|
|
|
TokenStream(inner)
|
2017-01-07 23:04:49 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
|
2017-03-28 00:32:43 -05:00
|
|
|
with_sess(move |(sess, _)| {
|
2017-03-17 18:23:12 -05:00
|
|
|
let mut parser = parse::stream_to_parser(sess, stream.0);
|
2017-01-07 23:04:49 -06:00
|
|
|
let mut items = Vec::new();
|
|
|
|
|
|
|
|
while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
|
|
|
|
items.push(item)
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(items)
|
|
|
|
})
|
2016-10-03 11:49:39 -05:00
|
|
|
}
|
2016-08-04 14:20:01 -05:00
|
|
|
|
2017-03-17 18:23:12 -05:00
|
|
|
pub fn token_stream_inner(stream: TokenStream) -> tokenstream::TokenStream {
|
|
|
|
stream.0
|
2016-10-03 11:49:39 -05:00
|
|
|
}
|
2016-08-04 14:20:01 -05:00
|
|
|
|
2016-10-03 11:49:39 -05:00
|
|
|
pub trait Registry {
|
|
|
|
fn register_custom_derive(&mut self,
|
|
|
|
trait_name: &str,
|
2016-11-08 05:15:02 -06:00
|
|
|
expand: fn(TokenStream) -> TokenStream,
|
|
|
|
attributes: &[&'static str]);
|
2017-01-07 23:04:49 -06:00
|
|
|
|
|
|
|
fn register_attr_proc_macro(&mut self,
|
|
|
|
name: &str,
|
|
|
|
expand: fn(TokenStream, TokenStream) -> TokenStream);
|
2017-02-27 14:03:19 -06:00
|
|
|
|
|
|
|
fn register_bang_proc_macro(&mut self,
|
|
|
|
name: &str,
|
|
|
|
expand: fn(TokenStream) -> TokenStream);
|
2016-10-03 11:49:39 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
// Emulate scoped_thread_local!() here essentially
|
|
|
|
thread_local! {
|
2017-03-28 00:32:43 -05:00
|
|
|
static CURRENT_SESS: Cell<(*const ParseSess, Mark)> =
|
|
|
|
Cell::new((0 as *const _, Mark::root()));
|
2016-10-03 11:49:39 -05:00
|
|
|
}
|
|
|
|
|
2017-03-28 00:32:43 -05:00
|
|
|
pub fn set_sess<F, R>(cx: &ExtCtxt, f: F) -> R
|
2016-10-03 11:49:39 -05:00
|
|
|
where F: FnOnce() -> R
|
|
|
|
{
|
2017-03-28 00:32:43 -05:00
|
|
|
struct Reset { prev: (*const ParseSess, Mark) }
|
2016-10-03 11:49:39 -05:00
|
|
|
|
|
|
|
impl Drop for Reset {
|
|
|
|
fn drop(&mut self) {
|
|
|
|
CURRENT_SESS.with(|p| p.set(self.prev));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
CURRENT_SESS.with(|p| {
|
|
|
|
let _reset = Reset { prev: p.get() };
|
2017-03-28 00:32:43 -05:00
|
|
|
p.set((cx.parse_sess, cx.current_expansion.mark));
|
2016-10-03 11:49:39 -05:00
|
|
|
f()
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2017-03-28 00:32:43 -05:00
|
|
|
pub fn with_sess<F, R>(f: F) -> R
|
|
|
|
where F: FnOnce((&ParseSess, Mark)) -> R
|
2016-10-03 11:49:39 -05:00
|
|
|
{
|
|
|
|
let p = CURRENT_SESS.with(|p| p.get());
|
2017-03-28 00:32:43 -05:00
|
|
|
assert!(!p.0.is_null(), "proc_macro::__internal::with_sess() called \
|
|
|
|
before set_parse_sess()!");
|
|
|
|
f(unsafe { (&*p.0, p.1) })
|
2016-10-03 11:49:39 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-01-07 23:04:49 -06:00
|
|
|
// Converts a parser diagnostic into an opaque `LexError`, cancelling the
// diagnostic so it is not emitted to the user.
fn parse_to_lex_err(mut err: DiagnosticBuilder) -> LexError {
    err.cancel();
    LexError { _inner: () }
}
|