// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A support library for macro authors when defining new macros.
//!
//! This library, provided by the standard distribution, provides the types
//! consumed in the interfaces of procedurally defined macro definitions.
//! Currently the primary use of this crate is to provide the ability to define
//! new custom derive modes through `#[proc_macro_derive]`.
//!
//! Note that this crate is intentionally very bare-bones currently. The main
//! type, `TokenStream`, only supports `fmt::Display` and `FromStr`
//! implementations, indicating that it can only go to and come from a string.
//! This functionality is intended to be expanded over time as more surface
//! area for macro authors is stabilized.
//!
//! See [the book](../book/first-edition/procedural-macros.html) for more.
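//!
//! A minimal sketch of a custom derive built on this interface (assuming a
//! crate compiled with `proc-macro = true`; the derive name `AnswerFn` and
//! the function below are hypothetical):
//!
//! ```ignore
//! extern crate proc_macro;
//! use proc_macro::TokenStream;
//!
//! #[proc_macro_derive(AnswerFn)]
//! pub fn derive_answer_fn(_input: TokenStream) -> TokenStream {
//!     // `TokenStream` currently round-trips only through strings, so the
//!     // output tokens are produced by parsing Rust source text.
//!     "fn answer() -> u32 { 42 }".parse().unwrap()
//! }
//! ```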
#![stable(feature = "proc_macro_lib", since = "1.15.0")]
#![deny(warnings)]
#![deny(missing_docs)]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/",
html_playground_url = "https://play.rust-lang.org/",
issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
test(no_crate_inject, attr(deny(warnings))),
test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))]
#![cfg_attr(stage0, feature(i128_type))]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(lang_items)]
#![feature(optin_builtin_traits)]
extern crate syntax;
extern crate syntax_pos;
extern crate rustc_errors;
extern crate rustc_data_structures;
mod diagnostic;
#[unstable(feature = "proc_macro", issue = "38356")]
pub use diagnostic::{Diagnostic, Level};
use std::{ascii, fmt, iter};
use rustc_data_structures::sync::Lrc;
use std::str::FromStr;
use syntax::ast;
use syntax::errors::DiagnosticBuilder;
use syntax::parse::{self, token};
use syntax::symbol::Symbol;
use syntax::tokenstream;
use syntax_pos::DUMMY_SP;
use syntax_pos::{FileMap, Pos, SyntaxContext, FileName};
use syntax_pos::hygiene::Mark;
/// The main type provided by this crate, representing an abstract stream of
/// tokens.
///
/// This is both the input and output of `#[proc_macro_derive]` definitions.
/// Currently it's required to be a list of valid Rust items, but this
/// restriction may be lifted in the future.
///
/// The API of this type is intentionally bare-bones, but it'll be expanded over
/// time!
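///
/// A sketch of the string round trip described above (`parse` may fail with
/// a `LexError` if the input does not lex as Rust tokens):
///
/// ```ignore
/// let stream: TokenStream = "fn answer() -> u32 { 42 }".parse().unwrap();
/// // `Display` renders the stream back out as Rust source text.
/// println!("{}", stream);
/// ```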
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
#[derive(Clone, Debug)]
pub struct TokenStream(tokenstream::TokenStream);
/// Error returned from `TokenStream::from_str`.
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
#[derive(Debug)]
pub struct LexError {
_inner: (),
}
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl FromStr for TokenStream {
type Err = LexError;
fn from_str(src: &str) -> Result<TokenStream, LexError> {
__internal::with_sess(|(sess, mark)| {
let src = src.to_string();
let name = FileName::ProcMacroSourceCode;
let expn_info = mark.expn_info().unwrap();
let call_site = expn_info.call_site;
// notify the expansion info that it is unhygienic
let mark = Mark::fresh(mark);
mark.set_expn_info(expn_info);
let span = call_site.with_ctxt(SyntaxContext::empty().apply_mark(mark));
let stream = parse::parse_stream_from_source_str(name, src, sess, Some(span));
Ok(__internal::token_stream_wrap(stream))
})
}
}
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input.
/// For example, `quote!(a + b)` will produce an expression that, when evaluated, constructs
/// the `TokenStream` `[Word("a"), Op('+', Alone), Word("b")]`.
///
/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
/// To quote `$` itself, use `$$`.
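///
/// A sketch (assuming a proc-macro context; `ident` is a hypothetical local
/// variable holding a `TokenStream` to splice in):
///
/// ```ignore
/// let ident: TokenStream = "answer".parse().unwrap();
/// // Produces the tokens `fn answer() {}`, splicing `ident` in via `$`.
/// let tokens = quote!(fn $ident() {});
/// ```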
#[unstable(feature = "proc_macro", issue = "38356")]
#[macro_export]
macro_rules! quote { () => {} }
#[unstable(feature = "proc_macro_internals", issue = "27812")]
#[doc(hidden)]
mod quote;
#[unstable(feature = "proc_macro", issue = "38356")]
impl From<TokenTree> for TokenStream {
fn from(tree: TokenTree) -> TokenStream {
TokenStream(tree.to_internal())
}
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl From<TokenNode> for TokenStream {
fn from(kind: TokenNode) -> TokenStream {
TokenTree::from(kind).into()
}
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
fn from_iter<I: IntoIterator<Item = T>>(streams: I) -> Self {
let mut builder = tokenstream::TokenStreamBuilder::new();
for stream in streams {
builder.push(stream.into().0);
}
TokenStream(builder.build())
}
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl IntoIterator for TokenStream {
type Item = TokenTree;
type IntoIter = TokenTreeIter;
fn into_iter(self) -> TokenTreeIter {
TokenTreeIter { cursor: self.0.trees(), stack: Vec::new() }
}
}
impl TokenStream {
/// Returns an empty `TokenStream`.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn empty() -> TokenStream {
TokenStream(tokenstream::TokenStream::empty())
}
/// Checks if this `TokenStream` is empty.
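    ///
    /// A small sketch:
    ///
    /// ```ignore
    /// assert!(TokenStream::empty().is_empty());
    /// assert!(!"0".parse::<TokenStream>().unwrap().is_empty());
    /// ```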
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
/// A region of source code, along with macro expansion information.
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct Span(syntax_pos::Span);
impl Span {
/// A span that resolves at the macro definition site.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn def_site() -> Span {
::__internal::with_sess(|(_, mark)| {
let call_site = mark.expn_info().unwrap().call_site;
Span(call_site.with_ctxt(SyntaxContext::empty().apply_mark(mark)))
})
}
}
/// Quote a `Span` into a `TokenStream`.
/// This is needed to implement a custom quoter.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn quote_span(span: Span) -> TokenStream {
quote::Quote::quote(span)
}
macro_rules! diagnostic_method {
($name:ident, $level:expr) => (
/// Create a new `Diagnostic` with the given `message` at the span
/// `self`.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
Diagnostic::spanned(self, $level, message)
}
)
}
impl Span {
/// The span of the invocation of the current procedural macro.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn call_site() -> Span {
::__internal::with_sess(|(_, mark)| Span(mark.expn_info().unwrap().call_site))
}
/// The original source file into which this span points.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn source_file(&self) -> SourceFile {
SourceFile {
filemap: __internal::lookup_char_pos(self.0.lo()).file,
}
}
/// The `Span` for the tokens in the previous macro expansion from which
/// `self` was generated, if any.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn parent(&self) -> Option<Span> {
self.0.ctxt().outer().expn_info().map(|i| Span(i.call_site))
}
/// The span of the original source code that `self` was generated from. If
/// this `Span` wasn't generated from other macro expansions, then the return
/// value is the same as `*self`.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn source(&self) -> Span {
Span(self.0.source_callsite())
}
/// Get the starting line/column in the source file for this span.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn start(&self) -> LineColumn {
let loc = __internal::lookup_char_pos(self.0.lo());
LineColumn {
line: loc.line,
column: loc.col.to_usize()
}
}
/// Get the ending line/column in the source file for this span.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn end(&self) -> LineColumn {
let loc = __internal::lookup_char_pos(self.0.hi());
LineColumn {
line: loc.line,
column: loc.col.to_usize()
}
}
/// Create a new span encompassing `self` and `other`.
///
/// Returns `None` if `self` and `other` are from different files.
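    ///
    /// A sketch (`first` and `second` are hypothetical `TokenTree`s taken
    /// from the same macro input):
    ///
    /// ```ignore
    /// // Covers everything from the start of `first` to the end of `second`.
    /// let joined = first.span.join(second.span).unwrap();
    /// ```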
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn join(&self, other: Span) -> Option<Span> {
let self_loc = __internal::lookup_char_pos(self.0.lo());
let other_loc = __internal::lookup_char_pos(other.0.lo());
if self_loc.file.name != other_loc.file.name { return None }
Some(Span(self.0.to(other.0)))
}
/// Creates a new span with the same line/column information as `self` but
/// that resolves symbols as though it were at `other`.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn resolved_at(&self, other: Span) -> Span {
Span(self.0.with_ctxt(other.0.ctxt()))
}
/// Creates a new span with the same name resolution behavior as `self` but
/// with the line/column information of `other`.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn located_at(&self, other: Span) -> Span {
other.resolved_at(*self)
}
diagnostic_method!(error, Level::Error);
diagnostic_method!(warning, Level::Warning);
diagnostic_method!(note, Level::Note);
diagnostic_method!(help, Level::Help);
}
/// A line-column pair representing the start or end of a `Span`.
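///
/// A sketch (hypothetical values for a span starting at the very beginning
/// of a file; note the 1-indexed line and 0-indexed column):
///
/// ```ignore
/// let start = span.start();
/// assert_eq!(start, LineColumn { line: 1, column: 0 });
/// ```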
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct LineColumn {
/// The 1-indexed line in the source file on which the span starts or ends (inclusive).
#[unstable(feature = "proc_macro", issue = "38356")]
pub line: usize,
/// The 0-indexed column (in UTF-8 characters) in the source file on which
/// the span starts or ends (inclusive).
#[unstable(feature = "proc_macro", issue = "38356")]
pub column: usize
}
/// The source file of a given `Span`.
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Clone)]
pub struct SourceFile {
filemap: Lrc<FileMap>,
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for SourceFile {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for SourceFile {}
impl SourceFile {
/// Get the path to this source file.
///
/// ### Note
/// If the code span associated with this `SourceFile` was generated by an external macro, this
/// may not be an actual path on the filesystem. Use [`is_real`] to check.
///
/// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on
/// the command line, the path as given may not actually be valid.
///
/// [`is_real`]: #method.is_real
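    ///
    /// A sketch (`span` is a hypothetical `Span` taken from the macro input):
    ///
    /// ```ignore
    /// let file = span.source_file();
    /// if file.is_real() {
    ///     println!("macro input comes from {}", file.path());
    /// }
    /// ```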
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn path(&self) -> &FileName {
&self.filemap.name
}
/// Returns `true` if this source file is a real source file, and not generated by an external
/// macro's expansion.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn is_real(&self) -> bool {
// This is a hack until intercrate spans are implemented and we can have real source files
// for spans generated in external macros.
// https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368
self.filemap.is_real_file()
}
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl AsRef<FileName> for SourceFile {
fn as_ref(&self) -> &FileName {
self.path()
}
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Debug for SourceFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("SourceFile")
.field("path", self.path())
.field("is_real", &self.is_real())
.finish()
}
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl PartialEq for SourceFile {
fn eq(&self, other: &Self) -> bool {
Lrc::ptr_eq(&self.filemap, &other.filemap)
}
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl Eq for SourceFile {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl PartialEq<FileName> for SourceFile {
fn eq(&self, other: &FileName) -> bool {
self.as_ref() == other
}
}
/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Clone, Debug)]
pub struct TokenTree {
/// The `TokenTree`'s span.
pub span: Span,
/// Description of the `TokenTree`.
pub kind: TokenNode,
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl From<TokenNode> for TokenTree {
fn from(kind: TokenNode) -> TokenTree {
TokenTree { span: Span::def_site(), kind: kind }
}
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Display for TokenTree {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
TokenStream::from(self.clone()).fmt(f)
}
}
/// Description of a `TokenTree`.
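///
/// A sketch of dispatching on each kind (`tree` is a hypothetical
/// `TokenTree` pulled out of a `TokenStream`):
///
/// ```ignore
/// match tree.kind {
///     TokenNode::Group(_, stream) => { /* recurse into `stream` */ }
///     TokenNode::Term(term) => println!("ident: {}", term.as_str()),
///     TokenNode::Op(op, _) => println!("punct: {}", op),
///     TokenNode::Literal(lit) => println!("literal: {}", lit),
/// }
/// ```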
#[derive(Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum TokenNode {
/// A delimited tokenstream.
Group(Delimiter, TokenStream),
/// A Unicode identifier.
Term(Term),
/// A punctuation character (`+`, `,`, `$`, etc.).
Op(char, Spacing),
/// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
Literal(Literal),
}
/// Describes how a sequence of token trees is delimited.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum Delimiter {
/// `( ... )`
Parenthesis,
/// `{ ... }`
Brace,
/// `[ ... ]`
Bracket,
/// An implicit delimiter, e.g. `$var`, where `$var` is `...`.
None,
}
/// An interned string.
#[derive(Copy, Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct Term(Symbol);
impl Term {
/// Intern a string into a `Term`.
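    ///
    /// A small sketch: a `Term` hands the interned string back out via
    /// `as_str`.
    ///
    /// ```ignore
    /// let term = Term::intern("foo");
    /// assert_eq!(term.as_str(), "foo");
    /// ```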
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn intern(string: &str) -> Term {
Term(Symbol::intern(string))
}
/// Get a reference to the interned string.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn as_str(&self) -> &str {
unsafe { &*(&*self.0.as_str() as *const str) }
}
}
/// Whether an `Op` is followed immediately by another `Op` or by whitespace.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum Spacing {
/// e.g. `+` is `Alone` in `+ =`.
Alone,
/// e.g. `+` is `Joint` in `+=`.
Joint,
}
/// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
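///
/// A sketch of the constructors defined below on `Literal`:
///
/// ```ignore
/// let i = Literal::integer(42);
/// let b = Literal::u8(255);
/// let s = Literal::string("hello");
/// // `Display` renders each as it would appear in source, e.g. `"hello"`.
/// println!("{} {} {}", i, b, s);
/// ```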
#[derive(Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct Literal(token::Token);
#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Display for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
TokenTree { kind: TokenNode::Literal(self.clone()), span: Span(DUMMY_SP) }.fmt(f)
}
}
macro_rules! int_literals {
($($int_kind:ident),*) => {$(
/// Integer literal.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn $int_kind(n: $int_kind) -> Literal {
Literal::typed_integer(n as i128, stringify!($int_kind))
}
)*}
}
impl Literal {
/// Integer literal.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn integer(n: i128) -> Literal {
Literal(token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), None))
}
int_literals!(u8, i8, u16, i16, u32, i32, u64, i64, usize, isize);
fn typed_integer(n: i128, kind: &'static str) -> Literal {
Literal(token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())),
Some(Symbol::intern(kind))))
}
/// Floating point literal.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn float(n: f64) -> Literal {
if !n.is_finite() {
panic!("Invalid float literal {}", n);
}
Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())), None))
}
/// Floating point literal.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn f32(n: f32) -> Literal {
if !n.is_finite() {
panic!("Invalid f32 literal {}", n);
}
Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())),
Some(Symbol::intern("f32"))))
}
/// Floating point literal.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn f64(n: f64) -> Literal {
if !n.is_finite() {
panic!("Invalid f64 literal {}", n);
}
Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())),
Some(Symbol::intern("f64"))))
}
/// String literal.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn string(string: &str) -> Literal {
let mut escaped = String::new();
for ch in string.chars() {
escaped.extend(ch.escape_debug());
}
Literal(token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None))
}
/// Character literal.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn character(ch: char) -> Literal {
let mut escaped = String::new();
escaped.extend(ch.escape_unicode());
Literal(token::Literal(token::Lit::Char(Symbol::intern(&escaped)), None))
}
/// Byte string literal.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn byte_string(bytes: &[u8]) -> Literal {
let string = bytes.iter().cloned().flat_map(ascii::escape_default)
.map(Into::<char>::into).collect::<String>();
Literal(token::Literal(token::Lit::ByteStr(Symbol::intern(&string)), None))
}
}
/// An iterator over `TokenTree`s.
#[derive(Clone)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct TokenTreeIter {
cursor: tokenstream::Cursor,
stack: Vec<TokenTree>,
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl Iterator for TokenTreeIter {
type Item = TokenTree;
fn next(&mut self) -> Option<TokenTree> {
loop {
let tree = self.stack.pop().or_else(|| {
let next = self.cursor.next_as_stream()?;
Some(TokenTree::from_internal(next, &mut self.stack))
})?;
if tree.span.0 == DUMMY_SP {
if let TokenNode::Group(Delimiter::None, stream) = tree.kind {
self.cursor.insert(stream.0);
continue
}
}
return Some(tree);
}
}
}
impl Delimiter {
fn from_internal(delim: token::DelimToken) -> Delimiter {
match delim {
token::Paren => Delimiter::Parenthesis,
token::Brace => Delimiter::Brace,
token::Bracket => Delimiter::Bracket,
token::NoDelim => Delimiter::None,
}
}
fn to_internal(self) -> token::DelimToken {
match self {
Delimiter::Parenthesis => token::Paren,
Delimiter::Brace => token::Brace,
Delimiter::Bracket => token::Bracket,
Delimiter::None => token::NoDelim,
}
}
}
impl TokenTree {
fn from_internal(stream: tokenstream::TokenStream, stack: &mut Vec<TokenTree>)
-> TokenTree {
use syntax::parse::token::*;
let (tree, is_joint) = stream.as_tree();
let (span, token) = match tree {
tokenstream::TokenTree::Token(span, token) => (span, token),
tokenstream::TokenTree::Delimited(span, delimed) => {
let delimiter = Delimiter::from_internal(delimed.delim);
return TokenTree {
span: Span(span),
kind: TokenNode::Group(delimiter, TokenStream(delimed.tts.into())),
};
}
};
let op_kind = if is_joint { Spacing::Joint } else { Spacing::Alone };
macro_rules! tt {
($e:expr) => (TokenTree { span: Span(span), kind: $e })
}
macro_rules! op {
($a:expr) => (TokenNode::Op($a, op_kind));
($a:expr, $b:expr) => ({
stack.push(tt!(TokenNode::Op($b, op_kind).into()));
TokenNode::Op($a, Spacing::Joint)
});
($a:expr, $b:expr, $c:expr) => ({
stack.push(tt!(TokenNode::Op($c, op_kind)));
stack.push(tt!(TokenNode::Op($b, Spacing::Joint)));
TokenNode::Op($a, Spacing::Joint)
})
}
let kind = match token {
Eq => op!('='),
Lt => op!('<'),
Le => op!('<', '='),
EqEq => op!('=', '='),
Ne => op!('!', '='),
Ge => op!('>', '='),
Gt => op!('>'),
AndAnd => op!('&', '&'),
OrOr => op!('|', '|'),
Not => op!('!'),
Tilde => op!('~'),
BinOp(Plus) => op!('+'),
BinOp(Minus) => op!('-'),
BinOp(Star) => op!('*'),
BinOp(Slash) => op!('/'),
BinOp(Percent) => op!('%'),
BinOp(Caret) => op!('^'),
BinOp(And) => op!('&'),
BinOp(Or) => op!('|'),
BinOp(Shl) => op!('<', '<'),
BinOp(Shr) => op!('>', '>'),
BinOpEq(Plus) => op!('+', '='),
BinOpEq(Minus) => op!('-', '='),
BinOpEq(Star) => op!('*', '='),
BinOpEq(Slash) => op!('/', '='),
BinOpEq(Percent) => op!('%', '='),
BinOpEq(Caret) => op!('^', '='),
BinOpEq(And) => op!('&', '='),
BinOpEq(Or) => op!('|', '='),
BinOpEq(Shl) => op!('<', '<', '='),
BinOpEq(Shr) => op!('>', '>', '='),
At => op!('@'),
Dot => op!('.'),
DotDot => op!('.', '.'),
DotDotDot => op!('.', '.', '.'),
DotDotEq => op!('.', '.', '='),
Comma => op!(','),
Semi => op!(';'),
Colon => op!(':'),
ModSep => op!(':', ':'),
RArrow => op!('-', '>'),
LArrow => op!('<', '-'),
FatArrow => op!('=', '>'),
Pound => op!('#'),
Dollar => op!('$'),
Question => op!('?'),
Ident(ident, false) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
Ident(ident, true) => TokenNode::Term(Term(Symbol::intern(&format!("r#{}", ident)))),
Literal(..) => TokenNode::Literal(self::Literal(token)),
DocComment(c) => {
let stream = vec![
tt!(TokenNode::Term(Term::intern("doc"))),
tt!(op!('=')),
tt!(TokenNode::Literal(self::Literal(Literal(Lit::Str_(c), None)))),
].into_iter().collect();
stack.push(tt!(TokenNode::Group(Delimiter::Bracket, stream)));
op!('#')
}
Interpolated(_) => {
__internal::with_sess(|(sess, _)| {
let tts = token.interpolated_to_tokenstream(sess, span);
TokenNode::Group(Delimiter::None, TokenStream(tts))
})
}
DotEq => op!('.', '='),
OpenDelim(..) | CloseDelim(..) => unreachable!(),
Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
};
TokenTree { span: Span(span), kind: kind }
}
fn to_internal(self) -> tokenstream::TokenStream {
use syntax::parse::token::*;
use syntax::tokenstream::{TokenTree, Delimited};
let (op, kind) = match self.kind {
TokenNode::Op(op, kind) => (op, kind),
TokenNode::Group(delimiter, tokens) => {
return TokenTree::Delimited(self.span.0, Delimited {
delim: delimiter.to_internal(),
tts: tokens.0.into(),
}).into();
},
TokenNode::Term(symbol) => {
let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt() };
let sym_str = symbol.0.as_str();
let token =
if sym_str.starts_with("'") { Lifetime(ident) }
else if sym_str.starts_with("r#") {
let name = Symbol::intern(&sym_str[2..]);
let ident = ast::Ident { name, ctxt: self.span.0.ctxt() };
Ident(ident, true)
} else { Ident(ident, false) };
return TokenTree::Token(self.span.0, token).into();
}
TokenNode::Literal(self::Literal(Literal(Lit::Integer(ref a), b)))
if a.as_str().starts_with("-") =>
{
let minus = BinOp(BinOpToken::Minus);
let integer = Symbol::intern(&a.as_str()[1..]);
let integer = Literal(Lit::Integer(integer), b);
let a = TokenTree::Token(self.span.0, minus);
let b = TokenTree::Token(self.span.0, integer);
return vec![a, b].into_iter().collect()
}
TokenNode::Literal(self::Literal(Literal(Lit::Float(ref a), b)))
if a.as_str().starts_with("-") =>
{
let minus = BinOp(BinOpToken::Minus);
let float = Symbol::intern(&a.as_str()[1..]);
let float = Literal(Lit::Float(float), b);
let a = TokenTree::Token(self.span.0, minus);
let b = TokenTree::Token(self.span.0, float);
return vec![a, b].into_iter().collect()
}
TokenNode::Literal(token) => {
return TokenTree::Token(self.span.0, token.0).into()
}
};
let token = match op {
'=' => Eq,
'<' => Lt,
'>' => Gt,
'!' => Not,
'~' => Tilde,
'+' => BinOp(Plus),
'-' => BinOp(Minus),
'*' => BinOp(Star),
'/' => BinOp(Slash),
'%' => BinOp(Percent),
'^' => BinOp(Caret),
'&' => BinOp(And),
'|' => BinOp(Or),
'@' => At,
'.' => Dot,
',' => Comma,
';' => Semi,
':' => Colon,
'#' => Pound,
'$' => Dollar,
'?' => Question,
_ => panic!("unsupported character {}", op),
};
let tree = TokenTree::Token(self.span.0, token);
match kind {
Spacing::Alone => tree.into(),
Spacing::Joint => tree.joint(),
}
}
}
/// Permanently unstable internal implementation details of this crate. This
/// should not be used.
///
/// These methods are used by the rest of the compiler to generate instances of
/// `TokenStream` to hand to macro definitions, as well as consume the output.
///
/// Note that this module is also intentionally separate from the rest of the
/// crate. This allows the `#[unstable]` directive below to naturally apply to
/// all of the contents.
#[unstable(feature = "proc_macro_internals", issue = "27812")]
#[doc(hidden)]
pub mod __internal {
pub use quote::{LiteralKind, Quoter, unquote};
use std::cell::Cell;
use syntax::ast;
use syntax::ext::base::ExtCtxt;
use syntax::ext::hygiene::Mark;
use syntax::ptr::P;
use syntax::parse::{self, ParseSess};
use syntax::parse::token::{self, Token};
use syntax::tokenstream;
use syntax_pos::{BytePos, Loc, DUMMY_SP};
use super::{TokenStream, LexError};
pub fn lookup_char_pos(pos: BytePos) -> Loc {
with_sess(|(sess, _)| sess.codemap().lookup_char_pos(pos))
}
pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
let token = Token::interpolated(token::NtItem(item));
TokenStream(tokenstream::TokenTree::Token(DUMMY_SP, token).into())
}
pub fn token_stream_wrap(inner: tokenstream::TokenStream) -> TokenStream {
TokenStream(inner)
}
pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
with_sess(move |(sess, _)| {
let mut parser = parse::stream_to_parser(sess, stream.0);
let mut items = Vec::new();
while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
items.push(item)
}
Ok(items)
})
}
pub fn token_stream_inner(stream: TokenStream) -> tokenstream::TokenStream {
stream.0
}
pub trait Registry {
fn register_custom_derive(&mut self,
trait_name: &str,
expand: fn(TokenStream) -> TokenStream,
attributes: &[&'static str]);
fn register_attr_proc_macro(&mut self,
name: &str,
expand: fn(TokenStream, TokenStream) -> TokenStream);
fn register_bang_proc_macro(&mut self,
name: &str,
expand: fn(TokenStream) -> TokenStream);
}
// Emulate scoped_thread_local!() here essentially
thread_local! {
static CURRENT_SESS: Cell<(*const ParseSess, Mark)> =
Cell::new((0 as *const _, Mark::root()));
}
pub fn set_sess<F, R>(cx: &ExtCtxt, f: F) -> R
where F: FnOnce() -> R
{
struct Reset { prev: (*const ParseSess, Mark) }
impl Drop for Reset {
fn drop(&mut self) {
CURRENT_SESS.with(|p| p.set(self.prev));
}
}
CURRENT_SESS.with(|p| {
let _reset = Reset { prev: p.get() };
p.set((cx.parse_sess, cx.current_expansion.mark));
f()
})
}
pub fn in_sess() -> bool {
let p = CURRENT_SESS.with(|p| p.get());
!p.0.is_null()
}
pub fn with_sess<F, R>(f: F) -> R
where F: FnOnce((&ParseSess, Mark)) -> R
{
let p = CURRENT_SESS.with(|p| p.get());
assert!(!p.0.is_null(), "proc_macro::__internal::with_sess() called \
before set_sess()!");
f(unsafe { (&*p.0, p.1) })
}
}
fn parse_to_lex_err(mut err: DiagnosticBuilder) -> LexError {
err.cancel();
LexError { _inner: () }
}