// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! A support library for macro authors when defining new macros.
//!
//! This library, provided by the standard distribution, provides the types
//! consumed in the interfaces of procedurally defined macro definitions.
//! Currently the primary use of this crate is to provide the ability to define
//! new custom derive modes through `#[proc_macro_derive]`.
//!
//! Note that this crate is intentionally very bare-bones currently. The main
//! type, `TokenStream`, only supports `fmt::Display` and `FromStr`
//! implementations, indicating that it can only go to and come from a string.
//! This functionality is intended to be expanded over time as more surface
//! area for macro authors is stabilized.
//!
//! See [the book](../book/first-edition/procedural-macros.html) for more.

#![stable(feature = "proc_macro_lib", since = "1.15.0")]
#![deny(missing_docs)]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "https://doc.rust-lang.org/nightly/",
       html_playground_url = "https://play.rust-lang.org/",
       issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
       test(no_crate_inject, attr(deny(warnings))),
       test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))]

#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(lang_items)]
#![feature(optin_builtin_traits)]

extern crate syntax;
extern crate syntax_pos;
extern crate rustc_errors;
extern crate rustc_data_structures;

mod diagnostic;

#[unstable(feature = "proc_macro", issue = "38356")]
pub use diagnostic::{Diagnostic, Level};

use std::{ascii, fmt, iter};
use rustc_data_structures::sync::Lrc;
use std::str::FromStr;

use syntax::ast;
use syntax::errors::DiagnosticBuilder;
use syntax::parse::{self, token};
use syntax::symbol::Symbol;
use syntax::tokenstream;
use syntax::parse::lexer::comments;
use syntax_pos::{FileMap, Pos, SyntaxContext, FileName};
use syntax_pos::hygiene::Mark;

/// The main type provided by this crate, representing an abstract stream of
/// tokens, or, more specifically, a sequence of token trees.
/// The type provides interfaces for iterating over those token trees and, conversely,
/// collecting a number of token trees into one stream.
///
/// This is both the input and output of `#[proc_macro]`, `#[proc_macro_attribute]`
/// and `#[proc_macro_derive]` definitions.
///
/// The API of this type is intentionally bare-bones, but it'll be expanded over
/// time!
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
#[derive(Clone)]
pub struct TokenStream(tokenstream::TokenStream);

#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for TokenStream {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for TokenStream {}

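// Review illustration (not part of this crate): from a macro author's perspective, a
// `TokenStream` is what a procedural macro receives and returns. A minimal sketch, assuming a
// separate proc-macro crate built with a nightly compiler and `#![feature(proc_macro)]`:
//
// ```
// extern crate proc_macro;
// use proc_macro::TokenStream;
//
// #[proc_macro]
// pub fn identity(input: TokenStream) -> TokenStream {
//     // Return the caller's tokens unchanged.
//     input
// }
// ```
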
/// Error returned from `TokenStream::from_str`.
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
#[derive(Debug)]
pub struct LexError {
    _inner: (),
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for LexError {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for LexError {}

impl TokenStream {
    /// Returns an empty `TokenStream` containing no token trees.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn empty() -> TokenStream {
        TokenStream(tokenstream::TokenStream::empty())
    }

    /// Checks if this `TokenStream` is empty.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}

/// Attempts to break the string into tokens and parse those tokens into a token stream.
/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
/// or characters not existing in the language.
///
/// REVIEW The function actually panics on any error and never returns `LexError`.
/// REVIEW Should the panics be documented?
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl FromStr for TokenStream {
    type Err = LexError;

    fn from_str(src: &str) -> Result<TokenStream, LexError> {
        __internal::with_sess(|(sess, mark)| {
            let src = src.to_string();
            let name = FileName::ProcMacroSourceCode;
            let expn_info = mark.expn_info().unwrap();
            let call_site = expn_info.call_site;
            // notify the expansion info that it is unhygienic
            let mark = Mark::fresh(mark);
            mark.set_expn_info(expn_info);
            let span = call_site.with_ctxt(SyntaxContext::empty().apply_mark(mark));
            let stream = parse::parse_stream_from_source_str(name, src, sess, Some(span));
            Ok(__internal::token_stream_wrap(stream))
        })
    }
}

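// Review illustration (not part of this crate): `FromStr` lets a macro author build tokens from
// an ordinary string via `str::parse`. A sketch, assuming it runs inside a macro expansion
// (there is no session/expansion context outside of one):
//
// ```
// use proc_macro::TokenStream;
//
// fn answer_fn() -> TokenStream {
//     // As the review note above says, invalid input currently panics
//     // instead of surfacing `LexError`.
//     "fn answer() -> u32 { 42 }".parse::<TokenStream>().unwrap()
// }
// ```
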
/// Prints the token stream as a string that should be losslessly convertible back
/// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s
/// with `Delimiter::None` delimiters.
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl fmt::Display for TokenStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

/// Prints the token stream in a form convenient for debugging.
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl fmt::Debug for TokenStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("TokenStream ")?;
        f.debug_list().entries(self.clone()).finish()
    }
}

/// Creates a token stream containing a single token tree.
///
/// REVIEW We don't generally have impls `From<T> for Collection<T>`, but I see why this exists
/// REVIEW from a practical point of view.
#[unstable(feature = "proc_macro", issue = "38356")]
impl From<TokenTree> for TokenStream {
    fn from(tree: TokenTree) -> TokenStream {
        TokenStream(tree.to_internal())
    }
}

/// Collects a number of token trees into a single stream.
#[unstable(feature = "proc_macro", issue = "38356")]
impl iter::FromIterator<TokenTree> for TokenStream {
    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
        trees.into_iter().map(TokenStream::from).collect()
    }
}

/// A "flattening" operation on token streams, collects token trees
/// from multiple token streams into a single stream.
#[unstable(feature = "proc_macro", issue = "38356")]
impl iter::FromIterator<TokenStream> for TokenStream {
    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
        let mut builder = tokenstream::TokenStreamBuilder::new();
        for stream in streams {
            builder.push(stream.0);
        }
        TokenStream(builder.build())
    }
}

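// Review illustration (not part of this crate): the `From` and `FromIterator` impls above are
// how small streams get assembled from individual trees. A sketch:
//
// ```
// use proc_macro::{Ident, Punct, Spacing, Span, TokenStream, TokenTree};
//
// fn a_plus_b() -> TokenStream {
//     let trees = vec![
//         TokenTree::from(Ident::new("a", Span::call_site())),
//         TokenTree::from(Punct::new('+', Spacing::Alone)),
//         TokenTree::from(Ident::new("b", Span::call_site())),
//     ];
//     // `FromIterator<TokenTree>` turns each tree into a stream and concatenates them.
//     trees.into_iter().collect()
// }
// ```
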
/// Public implementation details for the `TokenStream` type, such as iterators.
#[unstable(feature = "proc_macro", issue = "38356")]
pub mod token_stream {
    use syntax::tokenstream;
    use syntax_pos::DUMMY_SP;

    use {TokenTree, TokenStream, Delimiter};

    /// An iterator over `TokenStream`'s `TokenTree`s.
    /// The iteration is "shallow", i.e. the iterator doesn't recurse into delimited groups,
    /// and returns whole groups as token trees.
    #[derive(Clone)]
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub struct IntoIter {
        cursor: tokenstream::Cursor,
        stack: Vec<TokenTree>,
    }

    #[unstable(feature = "proc_macro", issue = "38356")]
    impl Iterator for IntoIter {
        type Item = TokenTree;

        fn next(&mut self) -> Option<TokenTree> {
            loop {
                let tree = self.stack.pop().or_else(|| {
                    let next = self.cursor.next_as_stream()?;
                    Some(TokenTree::from_internal(next, &mut self.stack))
                })?;
                // HACK: The condition "dummy span + group with empty delimiter" represents an AST
                // fragment approximately converted into a token stream. This may happen, for
                // example, with inputs to proc macro attributes, including derives. Such "groups"
                // need to be flattened during iteration over the stream's token trees.
                // Eventually this needs to be removed in favor of keeping original token trees
                // and not doing the roundtrip through AST.
                //
                // REVIEW This may actually be observable if we can create a dummy span via
                // proc macro API, but it looks like we can't do it with 1.2 yet.
                if tree.span().0 == DUMMY_SP {
                    if let TokenTree::Group(ref group) = tree {
                        if group.delimiter() == Delimiter::None {
                            self.cursor.insert(group.stream.clone().0);
                            continue
                        }
                    }
                }
                return Some(tree);
            }
        }
    }

    #[unstable(feature = "proc_macro", issue = "38356")]
    impl IntoIterator for TokenStream {
        type Item = TokenTree;
        type IntoIter = IntoIter;

        fn into_iter(self) -> IntoIter {
            IntoIter { cursor: self.0.trees(), stack: Vec::new() }
        }
    }
}

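// Review illustration (not part of this crate): iteration is shallow, so a delimited group
// counts as a single tree and its contents are reached through `Group::stream`. A sketch:
//
// ```
// use proc_macro::{TokenStream, TokenTree};
//
// fn count_trees(stream: TokenStream) -> usize {
//     stream.into_iter().map(|tree| match tree {
//         // A `(...)`, `[...]` or `{...}` group is one tree; recurse to count its contents too.
//         TokenTree::Group(group) => 1 + count_trees(group.stream()),
//         _ => 1,
//     }).sum()
// }
// ```
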
/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input.
/// For example, `quote!(a + b)` will produce an expression that, when evaluated, constructs
/// the `TokenStream` `[Word("a"), Punct('+', Alone), Word("b")]`.
///
/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
/// To quote `$` itself, use `$$`.
#[unstable(feature = "proc_macro", issue = "38356")]
#[macro_export]
macro_rules! quote { () => {} }

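// Review illustration (not part of this crate): a sketch of the intended `quote!` usage,
// assuming the unstable macro is in scope in a nightly proc-macro crate:
//
// ```
// // Builds the token stream `a + b`.
// let sum: TokenStream = quote!(a + b);
// // `$` splices an existing value; here the previously built stream is reused.
// let doubled: TokenStream = quote!(($sum) * 2);
// ```
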
#[unstable(feature = "proc_macro_internals", issue = "27812")]
#[doc(hidden)]
mod quote;

/// Quote a `Span` into a `TokenStream`.
/// This is needed to implement a custom quoter.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn quote_span(span: Span) -> TokenStream {
    quote::Quote::quote(span)
}

/// A region of source code, along with macro expansion information.
///
/// REVIEW ATTENTION: `Copy` impl on a struct with private fields.
/// REVIEW Do we want to guarantee `Span` to be `Copy`? Yes.
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Copy, Clone)]
pub struct Span(syntax_pos::Span);

#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for Span {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for Span {}

macro_rules! diagnostic_method {
    ($name:ident, $level:expr) => (
        /// Create a new `Diagnostic` with the given `message` at the span
        /// `self`.
        #[unstable(feature = "proc_macro", issue = "38356")]
        pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
            Diagnostic::spanned(self, $level, message)
        }
    )
}

impl Span {
    /// A span that resolves at the macro definition site.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn def_site() -> Span {
        ::__internal::with_sess(|(_, mark)| {
            let call_site = mark.expn_info().unwrap().call_site;
            Span(call_site.with_ctxt(SyntaxContext::empty().apply_mark(mark)))
        })
    }

    /// The span of the invocation of the current procedural macro.
    /// Identifiers created with this span will be resolved as if they were written
    /// directly at the macro call location (call-site hygiene) and other code
    /// at the macro call site will be able to refer to them as well.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn call_site() -> Span {
        ::__internal::with_sess(|(_, mark)| Span(mark.expn_info().unwrap().call_site))
    }

    /// The original source file into which this span points.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn source_file(&self) -> SourceFile {
        SourceFile {
            filemap: __internal::lookup_char_pos(self.0.lo()).file,
        }
    }

    /// The `Span` for the tokens in the previous macro expansion from which
    /// `self` was generated, if any.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn parent(&self) -> Option<Span> {
        self.0.parent().map(Span)
    }

    /// The span for the origin source code that `self` was generated from. If
    /// this `Span` wasn't generated from other macro expansions then the return
    /// value is the same as `*self`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn source(&self) -> Span {
        Span(self.0.source_callsite())
    }

    /// Get the starting line/column in the source file for this span.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn start(&self) -> LineColumn {
        let loc = __internal::lookup_char_pos(self.0.lo());
        LineColumn {
            line: loc.line,
            column: loc.col.to_usize()
        }
    }

    /// Get the ending line/column in the source file for this span.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn end(&self) -> LineColumn {
        let loc = __internal::lookup_char_pos(self.0.hi());
        LineColumn {
            line: loc.line,
            column: loc.col.to_usize()
        }
    }

    /// Create a new span encompassing `self` and `other`.
    ///
    /// Returns `None` if `self` and `other` are from different files.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn join(&self, other: Span) -> Option<Span> {
        let self_loc = __internal::lookup_char_pos(self.0.lo());
        let other_loc = __internal::lookup_char_pos(other.0.lo());

        if self_loc.file.name != other_loc.file.name { return None }

        Some(Span(self.0.to(other.0)))
    }

    /// Creates a new span with the same line/column information as `self` but
    /// that resolves symbols as though it were at `other`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn resolved_at(&self, other: Span) -> Span {
        Span(self.0.with_ctxt(other.0.ctxt()))
    }

    /// Creates a new span with the same name resolution behavior as `self` but
    /// with the line/column information of `other`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn located_at(&self, other: Span) -> Span {
        other.resolved_at(*self)
    }

    /// Compares two spans to see if they're equal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn eq(&self, other: &Span) -> bool {
        self.0 == other.0
    }

    diagnostic_method!(error, Level::Error);
    diagnostic_method!(warning, Level::Warning);
    diagnostic_method!(note, Level::Note);
    diagnostic_method!(help, Level::Help);
}

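// Review illustration (not part of this crate): a sketch of how the span constructors and the
// diagnostic methods above are meant to be used inside an expansion:
//
// ```
// use proc_macro::Span;
//
// let call = Span::call_site();
// let def = Span::def_site();
// // A span that points where the macro was invoked but resolves names at the definition site.
// let mixed = def.located_at(call);
// // Report a warning attached to the invocation (assumes `Diagnostic::emit` from `mod diagnostic`).
// call.warning("this macro is only a demonstration").emit();
// ```
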
/// Prints a span in a form convenient for debugging.
#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Debug for Span {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?} bytes({}..{})",
               self.0.ctxt(),
               self.0.lo().0,
               self.0.hi().0)
    }
}

/// A line-column pair representing the start or end of a `Span`.
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct LineColumn {
    /// The 1-indexed line in the source file on which the span starts or ends (inclusive).
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub line: usize,
    /// The 0-indexed column (in UTF-8 characters) in the source file on which
    /// the span starts or ends (inclusive).
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub column: usize
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for LineColumn {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for LineColumn {}

/// The source file of a given `Span`.
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Clone)]
pub struct SourceFile {
    filemap: Lrc<FileMap>,
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for SourceFile {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for SourceFile {}

impl SourceFile {
    /// Get the path to this source file.
    ///
    /// ### Note
    /// If the code span associated with this `SourceFile` was generated by an external macro, this
    /// may not be an actual path on the filesystem. Use [`is_real`] to check.
    ///
    /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on
    /// the command line, the path as given may not actually be valid.
    ///
    /// [`is_real`]: #method.is_real
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn path(&self) -> &FileName {
        &self.filemap.name
    }

    /// Returns `true` if this source file is a real source file, and not generated by an external
    /// macro's expansion.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn is_real(&self) -> bool {
        // This is a hack until intercrate spans are implemented and we can have real source files
        // for spans generated in external macros.
        // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368
        self.filemap.is_real_file()
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl AsRef<FileName> for SourceFile {
    fn as_ref(&self) -> &FileName {
        self.path()
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Debug for SourceFile {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("SourceFile")
            .field("path", self.path())
            .field("is_real", &self.is_real())
            .finish()
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl PartialEq for SourceFile {
    fn eq(&self, other: &Self) -> bool {
        Lrc::ptr_eq(&self.filemap, &other.filemap)
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl Eq for SourceFile {}

#[unstable(feature = "proc_macro", issue = "38356")]
impl PartialEq<FileName> for SourceFile {
    fn eq(&self, other: &FileName) -> bool {
        self.as_ref() == other
    }
}

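// Review illustration (not part of this crate): a sketch of inspecting the file behind a span:
//
// ```
// use proc_macro::Span;
//
// let file = Span::call_site().source_file();
// if file.is_real() {
//     // For real files `path()` names a path on disk (possibly remapped by
//     // `--remap-path-prefix`); otherwise it is a synthetic name.
//     println!("expanding code from {:?}", file.path());
// }
// ```
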
/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Clone)]
pub enum TokenTree {
    /// A token stream surrounded by bracket delimiters.
    Group(Group),
    /// An identifier or lifetime identifier.
    Ident(Ident),
    /// A single punctuation character (`+`, `,`, `$`, etc.).
    Punct(Punct),
    /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
    Literal(Literal),
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for TokenTree {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for TokenTree {}

impl TokenTree {
    /// Returns the span of this tree, delegating to the `span` method of
    /// the contained token or a delimited stream.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn span(&self) -> Span {
        match *self {
            TokenTree::Group(ref t) => t.span(),
            TokenTree::Ident(ref t) => t.span(),
            TokenTree::Punct(ref t) => t.span(),
            TokenTree::Literal(ref t) => t.span(),
        }
    }

    /// Configures the span for *only this token*.
    ///
    /// Note that if this token is a `Group` then this method will not configure
    /// the span of each of the internal tokens, this will simply delegate to
    /// the `set_span` method of each variant.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn set_span(&mut self, span: Span) {
        match *self {
            TokenTree::Group(ref mut t) => t.set_span(span),
            TokenTree::Ident(ref mut t) => t.set_span(span),
            TokenTree::Punct(ref mut t) => t.set_span(span),
            TokenTree::Literal(ref mut t) => t.set_span(span),
        }
    }
}

/// Prints the token tree in a form convenient for debugging.
#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Debug for TokenTree {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Each of these has the name in the struct type in the derived debug,
        // so don't bother with an extra layer of indirection
        match *self {
            TokenTree::Group(ref tt) => tt.fmt(f),
            TokenTree::Ident(ref tt) => tt.fmt(f),
            TokenTree::Punct(ref tt) => tt.fmt(f),
            TokenTree::Literal(ref tt) => tt.fmt(f),
        }
    }
}

/// REVIEW the impls below are kind of `From<T> for Option<T>`, not strictly necessary,
/// REVIEW but convenient. No harm, I guess. I'd actually like to see impls
/// REVIEW `From<Group/Ident/Punct/Literal> for TokenStream` to avoid stuttering like
/// REVIEW `TokenTree::Literal(Literal::string("lalala")).into()`.
#[unstable(feature = "proc_macro", issue = "38356")]
impl From<Group> for TokenTree {
    fn from(g: Group) -> TokenTree {
        TokenTree::Group(g)
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl From<Ident> for TokenTree {
    fn from(g: Ident) -> TokenTree {
        TokenTree::Ident(g)
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl From<Punct> for TokenTree {
    fn from(g: Punct) -> TokenTree {
        TokenTree::Punct(g)
    }
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl From<Literal> for TokenTree {
    fn from(g: Literal) -> TokenTree {
        TokenTree::Literal(g)
    }
}

/// Prints the token tree as a string that should be losslessly convertible back
/// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s
/// with `Delimiter::None` delimiters.
#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Display for TokenTree {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            TokenTree::Group(ref t) => t.fmt(f),
            TokenTree::Ident(ref t) => t.fmt(f),
            TokenTree::Punct(ref t) => t.fmt(f),
            TokenTree::Literal(ref t) => t.fmt(f),
        }
    }
}

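// Review illustration (not part of this crate): with only the impls above, a leaf token reaches
// a `TokenStream` by way of `TokenTree`, which is the "stuttering" the review note points out:
//
// ```
// use proc_macro::{Literal, TokenStream, TokenTree};
//
// let tokens: TokenStream = TokenTree::from(Literal::string("lalala")).into();
// // Equivalent, spelling out the variant as in the review note:
// let tokens2: TokenStream = TokenTree::Literal(Literal::string("lalala")).into();
// ```
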
/// A delimited token stream.
///
/// A `Group` internally contains a `TokenStream` which is surrounded by `Delimiter`s.
#[derive(Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct Group {
    delimiter: Delimiter,
    stream: TokenStream,
    span: Span,
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for Group {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for Group {}

/// Describes how a sequence of token trees is delimited.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum Delimiter {
    /// `( ... )`
    Parenthesis,
    /// `{ ... }`
    Brace,
    /// `[ ... ]`
    Bracket,
    /// `Ø ... Ø`
    /// An implicit delimiter, that may, for example, appear around tokens coming from a
    /// "macro variable" `$var`. It is important to preserve operator priorities in cases like
    /// `$var * 3` where `$var` is `1 + 2`.
    /// Implicit delimiters may not survive roundtrip of a token stream through a string.
    None,
}

impl Group {
    /// Creates a new `Group` with the given delimiter and token stream.
    ///
    /// This constructor will set the span for this group to
    /// `Span::call_site()`. To change the span you can use the `set_span`
    /// method below.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
        Group {
            delimiter: delimiter,
            stream: stream,
            span: Span::call_site(),
        }
    }

    /// Returns the delimiter of this `Group`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn delimiter(&self) -> Delimiter {
        self.delimiter
    }

    /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
    ///
    /// Note that the returned token stream does not include the delimiter
    /// returned above.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn stream(&self) -> TokenStream {
        self.stream.clone()
    }

    /// Returns the span for the delimiters of this token stream, spanning the
    /// entire `Group`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn span(&self) -> Span {
        self.span
    }

    /// Configures the span for this `Group`'s delimiters, but not its internal
    /// tokens.
    ///
    /// This method will **not** set the span of all the internal tokens spanned
    /// by this group, but rather it will only set the span of the delimiter
    /// tokens at the level of the `Group`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn set_span(&mut self, span: Span) {
        self.span = span;
    }
}

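// Review illustration (not part of this crate): a sketch of wrapping an existing stream in
// parentheses with `Group`:
//
// ```
// use proc_macro::{Delimiter, Group, Span, TokenStream, TokenTree};
//
// let inner: TokenStream = "a + b".parse().unwrap();
// let mut group = Group::new(Delimiter::Parenthesis, inner);   // `(a + b)`
// group.set_span(Span::call_site());
// assert_eq!(group.delimiter(), Delimiter::Parenthesis);
// let wrapped: TokenStream = TokenTree::from(group).into();
// ```
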
/// Prints the group as a string that should be losslessly convertible back
/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
/// with `Delimiter::None` delimiters.
#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Display for Group {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        TokenStream::from(TokenTree::from(self.clone())).fmt(f)
    }
}

/// A `Punct` is a single punctuation character like `+`, `-` or `#`.
///
/// Multicharacter operators like `+=` are represented as two instances of `Punct` with different
/// forms of `Spacing` returned.
///
/// REVIEW We should guarantee that `Punct` contains a valid punctuation character permitted by
/// REVIEW the language and not a random unicode code point. The check is already performed in
/// REVIEW `TokenTree::to_internal`, but we should do it on construction.
/// REVIEW `Punct` can also avoid using `char` internally and keep a u8-like enum.
///
/// REVIEW ATTENTION: `Copy` impl on a struct with private fields.
/// REVIEW Do we want to guarantee `Punct` to be `Copy`?
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Copy, Clone, Debug)]
pub struct Punct {
    ch: char,
    spacing: Spacing,
    span: Span,
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for Punct {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for Punct {}

/// Whether a `Punct` is followed immediately by another `Punct` or
/// followed by another token or whitespace.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub enum Spacing {
    /// e.g. `+` is `Alone` in `+ =`, `+ident` or `+()`.
    Alone,
    /// e.g. `+` is `Joint` in `+=` or `+#`.
    Joint,
}

impl Punct {
    /// Creates a new `Punct` from the given character and spacing.
    ///
    /// The returned `Punct` will have the default span of `Span::call_site()`
    /// which can be further configured with the `set_span` method below.
    ///
    /// REVIEW Why do we even use `char` here? There's no reason to use unicode here.
    /// REVIEW I guess because it's more convenient to write `new('+')` than `new(b'+')`, that's ok.
    ///
    /// REVIEW TO_DO Do input validation on construction, the argument should be a valid punctuation
    /// REVIEW character permitted by the language.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn new(ch: char, spacing: Spacing) -> Punct {
        Punct {
            ch: ch,
            spacing: spacing,
            span: Span::call_site(),
        }
    }

    /// Returns the value of this punctuation character as `char`.
    ///
    /// REVIEW Again, there's no need for unicode here,
    /// REVIEW except for maybe future compatibility in case Rust turns into APL,
    /// REVIEW but if it's more convenient to use `char` then that's okay.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn as_char(&self) -> char {
        self.ch
    }

    /// Returns the spacing of this punctuation character, indicating whether it's immediately
    /// followed by another `Punct` in the token stream, so they can potentially be combined into
    /// a multicharacter operator (`Joint`), or it's followed by some other token or whitespace
    /// (`Alone`) so the operator has certainly ended.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn spacing(&self) -> Spacing {
        self.spacing
    }

    /// Returns the span for this punctuation character.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn span(&self) -> Span {
        self.span
    }

    /// Configure the span for this punctuation character.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn set_span(&mut self, span: Span) {
        self.span = span;
    }
}

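// Review illustration (not part of this crate): multi-character operators arrive as a run of
// `Punct`s, and `Spacing` tells whether the run continues. A sketch recognising `+=`:
//
// ```
// use proc_macro::{Punct, Spacing};
//
// fn is_plus_eq(first: &Punct, second: &Punct) -> bool {
//     first.as_char() == '+'
//         && first.spacing() == Spacing::Joint
//         && second.as_char() == '='
// }
// ```
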
/// Prints the punctuation character as a string that should be losslessly convertible
/// back into the same character.
#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Display for Punct {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        TokenStream::from(TokenTree::from(self.clone())).fmt(f)
    }
}

/// An identifier (`ident`) or lifetime identifier (`'ident`).
///
/// REVIEW We should guarantee that `Ident` contains a valid identifier permitted by
/// REVIEW the language and not a random unicode string, at least for a start.
///
/// REVIEW We need to support raw identifiers here (`r#ident`) or at least be future compatible
/// REVIEW with them. Currently they are supported using "string typing" - if the string "r#ident"
/// REVIEW is passed to `Ident::new` it will be interpreted as a raw identifier later on. We should
/// REVIEW add a field `is_raw` and a separate constructor for it (`Ident::new_raw` or something)
/// REVIEW and keep it unstable until raw identifiers are stabilized.
///
/// REVIEW ATTENTION: `Copy` impl on a struct with private fields.
/// REVIEW Do we want to guarantee `Ident` to be `Copy`?
#[derive(Copy, Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct Ident {
    // REVIEW(INTERNAL) Symbol + Span is actually `ast::Ident`! We can use it here.
    sym: Symbol,
    span: Span,
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for Ident {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for Ident {}

impl Ident {
    /// Creates a new `Ident` with the given `string` as well as the specified
    /// `span`.
    ///
    /// Note that `span`, currently in rustc, configures the hygiene information
    /// for this identifier.
    ///
    /// As of this time `Span::call_site()` explicitly opts-in to "call-site" hygiene
    /// meaning that identifiers created with this span will be resolved as if they were written
    /// directly at the location of the macro call, and other code at the macro call site will be
    /// able to refer to them as well.
    ///
    /// Later spans like `Span::def_site()` will allow opting in to "definition-site" hygiene
    /// meaning that identifiers created with this span will be resolved at the location of the
    /// macro definition and other code at the macro call site will not be able to refer to them.
    ///
    /// Due to the current importance of hygiene this constructor, unlike other
    /// tokens, requires a `Span` to be specified at construction.
    ///
    /// REVIEW TO_DO Do input validation, the argument should be a valid identifier or
    /// REVIEW lifetime identifier.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn new(string: &str, span: Span) -> Ident {
        Ident {
            sym: Symbol::intern(string),
            span,
        }
    }

    // FIXME: Remove this, do not stabilize
    /// Get a reference to the interned string.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn as_str(&self) -> &str {
        unsafe { &*(&*self.sym.as_str() as *const str) }
    }

    /// Returns the span of this `Ident`, encompassing the entire string returned
    /// by `as_str`.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn span(&self) -> Span {
        self.span
    }

    /// Configures the span of this `Ident`, possibly changing its hygiene context.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn set_span(&mut self, span: Span) {
        self.span = span;
    }
}

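// Review illustration (not part of this crate): a sketch of creating identifiers with explicit
// hygiene:
//
// ```
// use proc_macro::{Ident, Span};
//
// // Resolved as if written by the caller, so the caller's code can use it.
// let public = Ident::new("generated_fn", Span::call_site());
// // Resolved at the macro definition, so it cannot collide with the caller's names.
// let private = Ident::new("__internal_state", Span::def_site());
// assert_eq!(public.as_str(), "generated_fn");
// ```
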
/// Prints the identifier as a string that should be losslessly convertible
/// back into the same identifier.
#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Display for Ident {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.sym.as_str().fmt(f)
    }
}

/// A literal string (`"hello"`), byte string (`b"hello"`),
/// character (`'a'`), byte character (`b'a'`), an integer or floating point number
/// with or without a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
/// Boolean literals like `true` and `false` do not belong here, they are `Ident`s.
#[derive(Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
pub struct Literal {
    lit: token::Lit,
    suffix: Option<ast::Name>,
    span: Span,
}

#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for Literal {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for Literal {}

macro_rules! suffixed_int_literals {
    ($($name:ident => $kind:ident,)*) => ($(
        /// Creates a new suffixed integer literal with the specified value.
        ///
        /// This function will create an integer like `1u32` where the integer
        /// value specified is the first part of the token and the integral is
        /// also suffixed at the end.
        ///
        /// Literals created through this method have the `Span::call_site()`
        /// span by default, which can be configured with the `set_span` method
        /// below.
        #[unstable(feature = "proc_macro", issue = "38356")]
        pub fn $name(n: $kind) -> Literal {
            Literal {
                lit: token::Lit::Integer(Symbol::intern(&n.to_string())),
                suffix: Some(Symbol::intern(stringify!($kind))),
                span: Span::call_site(),
            }
        }
    )*)
}

macro_rules! unsuffixed_int_literals {
    ($($name:ident => $kind:ident,)*) => ($(
        /// Creates a new unsuffixed integer literal with the specified value.
        ///
        /// This function will create an integer like `1` where the integer
        /// value specified is the first part of the token. No suffix is
        /// specified on this token, meaning that invocations like
        /// `Literal::i8_unsuffixed(1)` are equivalent to
        /// `Literal::u32_unsuffixed(1)`.
        ///
        /// Literals created through this method have the `Span::call_site()`
        /// span by default, which can be configured with the `set_span` method
        /// below.
        #[unstable(feature = "proc_macro", issue = "38356")]
        pub fn $name(n: $kind) -> Literal {
            Literal {
                lit: token::Lit::Integer(Symbol::intern(&n.to_string())),
                suffix: None,
                span: Span::call_site(),
            }
        }
    )*)
}

impl Literal {
    suffixed_int_literals! {
        u8_suffixed => u8,
        u16_suffixed => u16,
        u32_suffixed => u32,
        u64_suffixed => u64,
        u128_suffixed => u128,
        usize_suffixed => usize,
        i8_suffixed => i8,
        i16_suffixed => i16,
        i32_suffixed => i32,
        i64_suffixed => i64,
        i128_suffixed => i128,
        isize_suffixed => isize,
    }

    unsuffixed_int_literals! {
        u8_unsuffixed => u8,
        u16_unsuffixed => u16,
        u32_unsuffixed => u32,
        u64_unsuffixed => u64,
        u128_unsuffixed => u128,
        usize_unsuffixed => usize,
        i8_unsuffixed => i8,
        i16_unsuffixed => i16,
        i32_unsuffixed => i32,
        i64_unsuffixed => i64,
        i128_unsuffixed => i128,
        isize_unsuffixed => isize,
    }

    /// Creates a new unsuffixed floating-point literal.
    ///
    /// This constructor is similar to those like `Literal::i8_unsuffixed` where
    /// the float's value is emitted directly into the token but no suffix is
    /// used, so it may be inferred to be a `f64` later in the compiler.
    ///
    /// # Panics
    ///
    /// This function requires that the specified float is finite, for
    /// example if it is infinity or NaN this function will panic.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn f32_unsuffixed(n: f32) -> Literal {
        if !n.is_finite() {
            panic!("Invalid float literal {}", n);
        }
        Literal {
            lit: token::Lit::Float(Symbol::intern(&n.to_string())),
            suffix: None,
            span: Span::call_site(),
        }
    }

    /// Creates a new suffixed floating-point literal.
    ///
    /// This constructor will create a literal like `1.0f32` where the value
    /// specified is the preceding part of the token and `f32` is the suffix of
    /// the token. This token will always be inferred to be an `f32` in the
    /// compiler.
    ///
    /// # Panics
    ///
    /// This function requires that the specified float is finite, for
    /// example if it is infinity or NaN this function will panic.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn f32_suffixed(n: f32) -> Literal {
        if !n.is_finite() {
            panic!("Invalid float literal {}", n);
        }
        Literal {
            lit: token::Lit::Float(Symbol::intern(&n.to_string())),
            suffix: Some(Symbol::intern("f32")),
            span: Span::call_site(),
        }
    }

    /// Creates a new unsuffixed floating-point literal.
    ///
    /// This constructor is similar to those like `Literal::i8_unsuffixed` where
    /// the float's value is emitted directly into the token but no suffix is
    /// used, so it may be inferred to be a `f64` later in the compiler.
    ///
    /// # Panics
    ///
    /// This function requires that the specified float is finite, for
    /// example if it is infinity or NaN this function will panic.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn f64_unsuffixed(n: f64) -> Literal {
        if !n.is_finite() {
            panic!("Invalid float literal {}", n);
        }
        Literal {
            lit: token::Lit::Float(Symbol::intern(&n.to_string())),
            suffix: None,
            span: Span::call_site(),
        }
    }

    /// Creates a new suffixed floating-point literal.
    ///
    /// This constructor will create a literal like `1.0f64` where the value
    /// specified is the preceding part of the token and `f64` is the suffix of
    /// the token. This token will always be inferred to be an `f64` in the
    /// compiler.
    ///
    /// # Panics
    ///
    /// This function requires that the specified float is finite, for
    /// example if it is infinity or NaN this function will panic.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn f64_suffixed(n: f64) -> Literal {
        if !n.is_finite() {
            panic!("Invalid float literal {}", n);
        }
        Literal {
            lit: token::Lit::Float(Symbol::intern(&n.to_string())),
            suffix: Some(Symbol::intern("f64")),
            span: Span::call_site(),
        }
    }

    /// String literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn string(string: &str) -> Literal {
        let mut escaped = String::new();
        for ch in string.chars() {
            escaped.extend(ch.escape_debug());
        }
        Literal {
            lit: token::Lit::Str_(Symbol::intern(&escaped)),
            suffix: None,
            span: Span::call_site(),
        }
    }

    /// Character literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn character(ch: char) -> Literal {
        let mut escaped = String::new();
        escaped.extend(ch.escape_unicode());
        Literal {
            lit: token::Lit::Char(Symbol::intern(&escaped)),
            suffix: None,
            span: Span::call_site(),
        }
    }

    /// Byte string literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn byte_string(bytes: &[u8]) -> Literal {
        let string = bytes.iter().cloned().flat_map(ascii::escape_default)
            .map(Into::<char>::into).collect::<String>();
        Literal {
            lit: token::Lit::ByteStr(Symbol::intern(&string)),
            suffix: None,
            span: Span::call_site(),
        }
    }

    /// Returns the span encompassing this literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn span(&self) -> Span {
        self.span
    }

    /// Configures the span associated with this literal.
    #[unstable(feature = "proc_macro", issue = "38356")]
    pub fn set_span(&mut self, span: Span) {
        self.span = span;
    }
}

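// Review illustration (not part of this crate): a sketch of the `Literal` constructors above:
//
// ```
// use proc_macro::{Literal, Span};
//
// let a = Literal::u8_suffixed(10);      // prints as `10u8`
// let b = Literal::u32_unsuffixed(10);   // prints as `10`
// let c = Literal::f32_unsuffixed(1.5);  // prints as `1.5`; panics on NaN or infinity
// let d = Literal::string("hello");      // prints as `"hello"`
// let mut e = Literal::character('x');   // prints as `'\u{78}'` after escaping
// e.set_span(Span::call_site());
// ```
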
/// Prints the literal as a string that should be losslessly convertible
/// back into the same literal (except for possible rounding for floating point literals).
#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Display for Literal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        TokenStream::from(TokenTree::from(self.clone())).fmt(f)
    }
}

impl Delimiter {
    fn from_internal(delim: token::DelimToken) -> Delimiter {
        match delim {
            token::Paren => Delimiter::Parenthesis,
            token::Brace => Delimiter::Brace,
            token::Bracket => Delimiter::Bracket,
            token::NoDelim => Delimiter::None,
        }
    }

    fn to_internal(self) -> token::DelimToken {
        match self {
            Delimiter::Parenthesis => token::Paren,
            Delimiter::Brace => token::Brace,
            Delimiter::Bracket => token::Bracket,
            Delimiter::None => token::NoDelim,
        }
    }
}

impl TokenTree {
|
2018-03-30 04:55:54 -05:00
|
|
|
fn from_internal(stream: tokenstream::TokenStream, stack: &mut Vec<TokenTree>)
|
2017-03-17 18:41:09 -05:00
|
|
|
-> TokenTree {
|
|
|
|
use syntax::parse::token::*;
|
|
|
|
|
|
|
|
let (tree, is_joint) = stream.as_tree();
|
2018-03-30 04:55:54 -05:00
|
|
|
let (span, token) = match tree {
|
2017-03-17 18:41:09 -05:00
|
|
|
tokenstream::TokenTree::Token(span, token) => (span, token),
|
|
|
|
tokenstream::TokenTree::Delimited(span, delimed) => {
|
2017-06-04 20:41:33 -05:00
|
|
|
let delimiter = Delimiter::from_internal(delimed.delim);
|
2018-04-02 10:19:32 -05:00
|
|
|
let mut g = Group::new(delimiter, TokenStream(delimed.tts.into()));
|
|
|
|
g.set_span(Span(span));
|
|
|
|
return g.into()
|
2017-03-17 18:41:09 -05:00
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2017-06-04 20:41:33 -05:00
|
|
|
let op_kind = if is_joint { Spacing::Joint } else { Spacing::Alone };
|
2018-03-30 04:55:54 -05:00
|
|
|
macro_rules! tt {
|
2018-04-02 10:19:32 -05:00
|
|
|
($e:expr) => ({
|
|
|
|
let mut x = TokenTree::from($e);
|
|
|
|
x.set_span(Span(span));
|
|
|
|
x
|
|
|
|
})
|
2017-03-17 18:41:09 -05:00
|
|
|
}
|
2018-03-30 04:55:54 -05:00
|
|
|
macro_rules! op {
|
2018-05-05 13:09:41 -05:00
|
|
|
($a:expr) => (tt!(Punct::new($a, op_kind)));
|
2018-03-30 04:55:54 -05:00
|
|
|
($a:expr, $b:expr) => ({
|
2018-05-05 13:09:41 -05:00
|
|
|
stack.push(tt!(Punct::new($b, op_kind)));
|
|
|
|
tt!(Punct::new($a, Spacing::Joint))
|
2018-03-30 04:55:54 -05:00
|
|
|
});
|
|
|
|
($a:expr, $b:expr, $c:expr) => ({
|
2018-05-05 13:09:41 -05:00
|
|
|
stack.push(tt!(Punct::new($c, op_kind)));
|
|
|
|
stack.push(tt!(Punct::new($b, Spacing::Joint)));
|
|
|
|
tt!(Punct::new($a, Spacing::Joint))
|
2018-03-30 04:55:54 -05:00
|
|
|
})
|
2017-03-17 18:41:09 -05:00
|
|
|
}
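
        // Editorial note (not part of the original source): for a
        // multi-character operator `op!` returns the first `Punct` and pushes
        // the remaining ones onto `stack` in reverse, so e.g. `<<=` is
        // surfaced as Punct('<', Joint), Punct('<', Joint), Punct('=', op_kind).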

        match token {
            Eq => op!('='),
            Lt => op!('<'),
            Le => op!('<', '='),
            EqEq => op!('=', '='),
            Ne => op!('!', '='),
            Ge => op!('>', '='),
            Gt => op!('>'),
            AndAnd => op!('&', '&'),
            OrOr => op!('|', '|'),
            Not => op!('!'),
            Tilde => op!('~'),
            BinOp(Plus) => op!('+'),
            BinOp(Minus) => op!('-'),
            BinOp(Star) => op!('*'),
            BinOp(Slash) => op!('/'),
            BinOp(Percent) => op!('%'),
            BinOp(Caret) => op!('^'),
            BinOp(And) => op!('&'),
            BinOp(Or) => op!('|'),
            BinOp(Shl) => op!('<', '<'),
            BinOp(Shr) => op!('>', '>'),
            BinOpEq(Plus) => op!('+', '='),
            BinOpEq(Minus) => op!('-', '='),
            BinOpEq(Star) => op!('*', '='),
            BinOpEq(Slash) => op!('/', '='),
            BinOpEq(Percent) => op!('%', '='),
            BinOpEq(Caret) => op!('^', '='),
            BinOpEq(And) => op!('&', '='),
            BinOpEq(Or) => op!('|', '='),
            BinOpEq(Shl) => op!('<', '<', '='),
            BinOpEq(Shr) => op!('>', '>', '='),
            At => op!('@'),
            Dot => op!('.'),
            DotDot => op!('.', '.'),
            DotDotDot => op!('.', '.', '.'),
            DotDotEq => op!('.', '.', '='),
            Comma => op!(','),
            Semi => op!(';'),
            Colon => op!(':'),
            ModSep => op!(':', ':'),
            RArrow => op!('-', '>'),
            LArrow => op!('<', '-'),
            FatArrow => op!('=', '>'),
            Pound => op!('#'),
            Dollar => op!('$'),
            Question => op!('?'),

            Ident(ident, false) | Lifetime(ident) => {
                tt!(self::Ident::new(&ident.name.as_str(), Span(span)))
            }
            Ident(ident, true) => {
                tt!(self::Ident::new(&format!("r#{}", ident), Span(span)))
            }
            Literal(lit, suffix) => tt!(self::Literal { lit, suffix, span: Span(span) }),
            DocComment(c) => {
                let style = comments::doc_comment_style(&c.as_str());
                let stripped = comments::strip_doc_comment_decoration(&c.as_str());
                let stream = vec![
                    tt!(self::Ident::new("doc", Span(span))),
                    tt!(Punct::new('=', Spacing::Alone)),
                    tt!(self::Literal::string(&stripped)),
                ].into_iter().collect();
                stack.push(tt!(Group::new(Delimiter::Bracket, stream)));
                if style == ast::AttrStyle::Inner {
                    stack.push(tt!(Punct::new('!', Spacing::Alone)));
                }
                tt!(Punct::new('#', Spacing::Alone))
            }
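
            // Editorial note (not part of the original source): a doc comment
            // such as `/// text` therefore reaches the macro as the attribute
            // tokens `#` followed by a bracketed group `[doc = "text"]`, with
            // an extra `!` pushed for inner `//!` comments.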

            Interpolated(_) => {
                __internal::with_sess(|(sess, _)| {
                    let tts = token.interpolated_to_tokenstream(sess, span);
                    tt!(Group::new(Delimiter::None, TokenStream(tts)))
                })
            }

            DotEq => op!('.', '='),
            OpenDelim(..) | CloseDelim(..) => unreachable!(),
            Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
        }
    }
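
    // Illustrative sketch (editorial note, not part of the original source):
    // `from_internal` is the read path of the bridge, so the `=>` in a macro's
    // input arrives roughly as
    //
    //     Punct { ch: '=', spacing: Spacing::Joint }
    //     Punct { ch: '>', spacing: Spacing::Alone }
    //
    // (field names are illustrative; the public API exposes `as_char()` and
    // `spacing()`).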

    fn to_internal(self) -> tokenstream::TokenStream {
        use syntax::parse::token::*;
        use syntax::tokenstream::{TokenTree, Delimited};

        let (ch, kind, span) = match self {
            self::TokenTree::Punct(tt) => (tt.as_char(), tt.spacing(), tt.span()),
            self::TokenTree::Group(tt) => {
                return TokenTree::Delimited(tt.span.0, Delimited {
                    delim: tt.delimiter.to_internal(),
                    tts: tt.stream.0.into(),
                }).into();
            },
            self::TokenTree::Ident(tt) => {
                let ident = ast::Ident::new(tt.sym, tt.span.0);
                let sym_str = tt.sym.to_string();
                let token = if sym_str.starts_with("'") {
                    Lifetime(ident)
                } else if sym_str.starts_with("r#") {
                    let name = Symbol::intern(&sym_str[2..]);
                    let ident = ast::Ident::new(name, ident.span);
                    Ident(ident, true)
                } else {
                    Ident(ident, false)
                };
                return TokenTree::Token(tt.span.0, token).into();
            }
            self::TokenTree::Literal(self::Literal {
                lit: Lit::Integer(ref a),
                suffix,
                span,
            })
                if a.as_str().starts_with("-") =>
            {
                let minus = BinOp(BinOpToken::Minus);
                let integer = Symbol::intern(&a.as_str()[1..]);
                let integer = Literal(Lit::Integer(integer), suffix);
                let a = TokenTree::Token(span.0, minus);
                let b = TokenTree::Token(span.0, integer);
                return vec![a, b].into_iter().collect()
            }
            self::TokenTree::Literal(self::Literal {
                lit: Lit::Float(ref a),
                suffix,
                span,
            })
                if a.as_str().starts_with("-") =>
            {
                let minus = BinOp(BinOpToken::Minus);
                let float = Symbol::intern(&a.as_str()[1..]);
                let float = Literal(Lit::Float(float), suffix);
                let a = TokenTree::Token(span.0, minus);
                let b = TokenTree::Token(span.0, float);
                return vec![a, b].into_iter().collect()
            }
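
            // Editorial note (not part of the original source): the two arms
            // above exist because libsyntax has no negative literal tokens, so
            // a `Literal` holding e.g. `-1` or `-1.5` is re-emitted as a `-`
            // punctuation token followed by the positive literal.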
            self::TokenTree::Literal(tt) => {
                let token = Literal(tt.lit, tt.suffix);
                return TokenTree::Token(tt.span.0, token).into()
            }
        };

        let token = match ch {
            '=' => Eq,
            '<' => Lt,
            '>' => Gt,
            '!' => Not,
            '~' => Tilde,
            '+' => BinOp(Plus),
            '-' => BinOp(Minus),
            '*' => BinOp(Star),
            '/' => BinOp(Slash),
            '%' => BinOp(Percent),
            '^' => BinOp(Caret),
            '&' => BinOp(And),
            '|' => BinOp(Or),
            '@' => At,
            '.' => Dot,
            ',' => Comma,
            ';' => Semi,
            ':' => Colon,
            '#' => Pound,
            '$' => Dollar,
            '?' => Question,
            _ => panic!("unsupported character {}", ch),
        };

        let tree = TokenTree::Token(span.0, token);
        match kind {
            Spacing::Alone => tree.into(),
            Spacing::Joint => tree.joint(),
        }
    }
}
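
// Illustrative sketch (editorial note, not part of the original source): on
// the write path each `Punct` goes back through the char-to-token table in
// `to_internal`, and `Spacing::Joint` produces a joint tree so adjacent puncts
// can be glued into one operator again. Roughly, assuming a proc-macro
// expansion context:
//
//     let and_and: TokenStream = vec![
//         TokenTree::Punct(Punct::new('&', Spacing::Joint)),
//         TokenTree::Punct(Punct::new('&', Spacing::Alone)),
//     ].into_iter().collect();
//     // `and_and` corresponds to the single internal `&&` operator.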

/// Permanently unstable internal implementation details of this crate. This
/// should not be used.
///
/// These methods are used by the rest of the compiler to generate instances of
/// `TokenStream` to hand to macro definitions, as well as to consume the output.
///
/// Note that this module is also intentionally separate from the rest of the
/// crate. This allows the `#[unstable]` directive below to naturally apply to
/// all of the contents.
#[unstable(feature = "proc_macro_internals", issue = "27812")]
#[doc(hidden)]
pub mod __internal {
    pub use quote::{LiteralKind, Quoter, unquote};

    use std::cell::Cell;

    use syntax::ast;
    use syntax::ext::base::ExtCtxt;
    use syntax::ext::hygiene::Mark;
    use syntax::ptr::P;
    use syntax::parse::{self, ParseSess};
    use syntax::parse::token::{self, Token};
    use syntax::tokenstream;
    use syntax_pos::{BytePos, Loc, DUMMY_SP};

    use super::{TokenStream, LexError};

    pub fn lookup_char_pos(pos: BytePos) -> Loc {
        with_sess(|(sess, _)| sess.codemap().lookup_char_pos(pos))
    }

    pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
        let token = Token::interpolated(token::NtItem(item));
        TokenStream(tokenstream::TokenTree::Token(DUMMY_SP, token).into())
    }

    pub fn token_stream_wrap(inner: tokenstream::TokenStream) -> TokenStream {
        TokenStream(inner)
    }

    pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
        with_sess(move |(sess, _)| {
            let mut parser = parse::stream_to_parser(sess, stream.0);
            let mut items = Vec::new();

            while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
                items.push(item)
            }

            Ok(items)
        })
    }
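
    // Editorial note (not part of the original source): this is the path by
    // which the compiler consumes the output of a `#[proc_macro_derive]`
    // function: the returned `TokenStream` is parsed back into items, with
    // parse failures surfaced as a `LexError`.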

    pub fn token_stream_inner(stream: TokenStream) -> tokenstream::TokenStream {
        stream.0
    }

    pub trait Registry {
        fn register_custom_derive(&mut self,
                                  trait_name: &str,
                                  expand: fn(TokenStream) -> TokenStream,
                                  attributes: &[&'static str]);

        fn register_attr_proc_macro(&mut self,
                                    name: &str,
                                    expand: fn(TokenStream, TokenStream) -> TokenStream);

        fn register_bang_proc_macro(&mut self,
                                    name: &str,
                                    expand: fn(TokenStream) -> TokenStream);
    }
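
    // Illustrative sketch (editorial note, not part of the original source):
    // the compiler-generated registrar for a proc-macro crate calls into a
    // `Registry` roughly like this (names below are hypothetical):
    //
    //     fn registrar(registry: &mut Registry) {
    //         registry.register_custom_derive("Foo", foo_derive_expand, &[]);
    //     }
    //
    // where `foo_derive_expand: fn(TokenStream) -> TokenStream` is the user's
    // `#[proc_macro_derive(Foo)]` function.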

    // Emulate scoped_thread_local!() here essentially
    thread_local! {
        static CURRENT_SESS: Cell<(*const ParseSess, Mark)> =
            Cell::new((0 as *const _, Mark::root()));
    }

    pub fn set_sess<F, R>(cx: &ExtCtxt, f: F) -> R
        where F: FnOnce() -> R
    {
        struct Reset { prev: (*const ParseSess, Mark) }

        impl Drop for Reset {
            fn drop(&mut self) {
                CURRENT_SESS.with(|p| p.set(self.prev));
            }
        }

        CURRENT_SESS.with(|p| {
            let _reset = Reset { prev: p.get() };
            p.set((cx.parse_sess, cx.current_expansion.mark));
            f()
        })
    }
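
    // Editorial note (not part of the original source): `set_sess` acts as an
    // RAII-style scope guard: `Reset` restores the previous (session, mark)
    // pair on drop, even if `f` panics, which lets expansions nest safely.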

    pub fn in_sess() -> bool {
        let p = CURRENT_SESS.with(|p| p.get());
        !p.0.is_null()
    }

    pub fn with_sess<F, R>(f: F) -> R
        where F: FnOnce((&ParseSess, Mark)) -> R
    {
        let p = CURRENT_SESS.with(|p| p.get());
        assert!(!p.0.is_null(), "proc_macro::__internal::with_sess() called \
                                 before set_sess()!");
        f(unsafe { (&*p.0, p.1) })
    }
}

fn parse_to_lex_err(mut err: DiagnosticBuilder) -> LexError {
    err.cancel();
    LexError { _inner: () }
}