use errors::{Diagnostic, DiagnosticBuilder};

use std::panic;

use proc_macro::bridge::{server, TokenTree};
use proc_macro::{Delimiter, Level, LineColumn, Spacing};

use rustc_data_structures::sync::Lrc;

use std::ascii;
use std::ops::Bound;

use syntax::ast;
use syntax::ext::base::ExtCtxt;
use syntax::parse::lexer::comments;
use syntax::parse::{self, token, ParseSess};
use syntax::parse::parser::emit_unclosed_delims;
use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
use syntax_pos::hygiene::{SyntaxContext, Transparency};
use syntax_pos::symbol::{keywords, Symbol};
use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
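// Conversions between the compiler's internal token/span types and the types
// exposed over the `proc_macro::bridge` interface.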
trait FromInternal<T> {
    fn from_internal(x: T) -> Self;
}

trait ToInternal<T> {
    fn to_internal(self) -> T;
}

impl FromInternal<token::DelimToken> for Delimiter {
    fn from_internal(delim: token::DelimToken) -> Delimiter {
        match delim {
            token::Paren => Delimiter::Parenthesis,
            token::Brace => Delimiter::Brace,
            token::Bracket => Delimiter::Bracket,
            token::NoDelim => Delimiter::None,
        }
    }
}

impl ToInternal<token::DelimToken> for Delimiter {
    fn to_internal(self) -> token::DelimToken {
        match self {
            Delimiter::Parenthesis => token::Paren,
            Delimiter::Brace => token::Brace,
            Delimiter::Bracket => token::Bracket,
            Delimiter::None => token::NoDelim,
        }
    }
}
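// Lowers a compiler `TokenTree` (plus its jointness flag) into the bridge's
// `TokenTree`. Compiler tokens that map to several proc-macro tokens (e.g. `<<=`,
// lifetimes, doc comments) push the extra trees onto `stack` so the iterator can
// yield them on subsequent calls.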
impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
    for TokenTree<Group, Punct, Ident, Literal>
{
    fn from_internal(((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>))
                     -> Self {
        use syntax::parse::token::*;

        let joint = is_joint == Joint;
        let (span, token) = match tree {
            tokenstream::TokenTree::Delimited(span, delim, tts) => {
                let delimiter = Delimiter::from_internal(delim);
                return TokenTree::Group(Group {
                    delimiter,
                    stream: tts.into(),
                    span,
                });
            }
            tokenstream::TokenTree::Token(span, token) => (span, token),
        };

        macro_rules! tt {
            ($ty:ident { $($field:ident $(: $value:expr)*),+ $(,)* }) => (
                TokenTree::$ty(self::$ty {
                    $($field $(: $value)*,)*
                    span,
                })
            );
            ($ty:ident::$method:ident($($value:expr),*)) => (
                TokenTree::$ty(self::$ty::$method($($value,)* span))
            );
        }
        macro_rules! op {
            ($a:expr) => {
                tt!(Punct::new($a, joint))
            };
            ($a:expr, $b:expr) => {{
                stack.push(tt!(Punct::new($b, joint)));
                tt!(Punct::new($a, true))
            }};
            ($a:expr, $b:expr, $c:expr) => {{
                stack.push(tt!(Punct::new($c, joint)));
                stack.push(tt!(Punct::new($b, true)));
                tt!(Punct::new($a, true))
            }};
        }

        match token {
            Eq => op!('='),
            Lt => op!('<'),
            Le => op!('<', '='),
            EqEq => op!('=', '='),
            Ne => op!('!', '='),
            Ge => op!('>', '='),
            Gt => op!('>'),
            AndAnd => op!('&', '&'),
            OrOr => op!('|', '|'),
            Not => op!('!'),
            Tilde => op!('~'),
            BinOp(Plus) => op!('+'),
            BinOp(Minus) => op!('-'),
            BinOp(Star) => op!('*'),
            BinOp(Slash) => op!('/'),
            BinOp(Percent) => op!('%'),
            BinOp(Caret) => op!('^'),
            BinOp(And) => op!('&'),
            BinOp(Or) => op!('|'),
            BinOp(Shl) => op!('<', '<'),
            BinOp(Shr) => op!('>', '>'),
            BinOpEq(Plus) => op!('+', '='),
            BinOpEq(Minus) => op!('-', '='),
            BinOpEq(Star) => op!('*', '='),
            BinOpEq(Slash) => op!('/', '='),
            BinOpEq(Percent) => op!('%', '='),
            BinOpEq(Caret) => op!('^', '='),
            BinOpEq(And) => op!('&', '='),
            BinOpEq(Or) => op!('|', '='),
            BinOpEq(Shl) => op!('<', '<', '='),
            BinOpEq(Shr) => op!('>', '>', '='),
            At => op!('@'),
            Dot => op!('.'),
            DotDot => op!('.', '.'),
            DotDotDot => op!('.', '.', '.'),
            DotDotEq => op!('.', '.', '='),
            Comma => op!(','),
            Semi => op!(';'),
            Colon => op!(':'),
            ModSep => op!(':', ':'),
            RArrow => op!('-', '>'),
            LArrow => op!('<', '-'),
            FatArrow => op!('=', '>'),
            Pound => op!('#'),
            Dollar => op!('$'),
            Question => op!('?'),
            SingleQuote => op!('\''),

            Ident(ident, false) if ident.name == keywords::DollarCrate.name() =>
                tt!(Ident::dollar_crate()),
            Ident(ident, is_raw) => tt!(Ident::new(ident.name, is_raw)),
            Lifetime(ident) => {
                let ident = ident.without_first_quote();
                stack.push(tt!(Ident::new(ident.name, false)));
                tt!(Punct::new('\'', true))
            }
            Literal(lit, suffix) => tt!(Literal { lit, suffix }),
            DocComment(c) => {
                let style = comments::doc_comment_style(&c.as_str());
                let stripped = comments::strip_doc_comment_decoration(&c.as_str());
                let mut escaped = String::new();
                for ch in stripped.chars() {
                    escaped.extend(ch.escape_debug());
                }
                let stream = vec![
                    Ident(ast::Ident::new(Symbol::intern("doc"), span), false),
                    Eq,
                    Literal(Lit::Str_(Symbol::intern(&escaped)), None),
                ]
                .into_iter()
                .map(|token| tokenstream::TokenTree::Token(span, token))
                .collect();
                stack.push(TokenTree::Group(Group {
                    delimiter: Delimiter::Bracket,
                    stream,
                    span: DelimSpan::from_single(span),
                }));
                if style == ast::AttrStyle::Inner {
                    stack.push(tt!(Punct::new('!', false)));
                }
                tt!(Punct::new('#', false))
            }

            Interpolated(nt) => {
                let stream = nt.to_tokenstream(sess, span);
                TokenTree::Group(Group {
                    delimiter: Delimiter::None,
                    stream,
                    span: DelimSpan::from_single(span),
                })
            }

            OpenDelim(..) | CloseDelim(..) => unreachable!(),
            Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
        }
    }
}
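// The reverse direction: turns a bridge `TokenTree` back into a compiler
// `TokenStream`. A single bridge tree can expand to more than one compiler
// token (e.g. a negative literal becomes `-` followed by the literal).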
impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
    fn to_internal(self) -> TokenStream {
        use syntax::parse::token::*;

        let (ch, joint, span) = match self {
            TokenTree::Punct(Punct { ch, joint, span }) => (ch, joint, span),
            TokenTree::Group(Group {
                delimiter,
                stream,
                span,
            }) => {
                return tokenstream::TokenTree::Delimited(
                    span,
                    delimiter.to_internal(),
                    stream.into(),
                )
                .into();
            }
            TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
                let token = Ident(ast::Ident::new(sym, span), is_raw);
                return tokenstream::TokenTree::Token(span, token).into();
            }
            TokenTree::Literal(self::Literal {
                lit: Lit::Integer(ref a),
                suffix,
                span,
            }) if a.as_str().starts_with("-") => {
                let minus = BinOp(BinOpToken::Minus);
                let integer = Symbol::intern(&a.as_str()[1..]);
                let integer = Literal(Lit::Integer(integer), suffix);
                let a = tokenstream::TokenTree::Token(span, minus);
                let b = tokenstream::TokenTree::Token(span, integer);
                return vec![a, b].into_iter().collect();
            }
            TokenTree::Literal(self::Literal {
                lit: Lit::Float(ref a),
                suffix,
                span,
            }) if a.as_str().starts_with("-") => {
                let minus = BinOp(BinOpToken::Minus);
                let float = Symbol::intern(&a.as_str()[1..]);
                let float = Literal(Lit::Float(float), suffix);
                let a = tokenstream::TokenTree::Token(span, minus);
                let b = tokenstream::TokenTree::Token(span, float);
                return vec![a, b].into_iter().collect();
            }
            TokenTree::Literal(self::Literal { lit, suffix, span }) => {
                return tokenstream::TokenTree::Token(span, Literal(lit, suffix)).into()
            }
        };

        let token = match ch {
            '=' => Eq,
            '<' => Lt,
            '>' => Gt,
            '!' => Not,
            '~' => Tilde,
            '+' => BinOp(Plus),
            '-' => BinOp(Minus),
            '*' => BinOp(Star),
            '/' => BinOp(Slash),
            '%' => BinOp(Percent),
            '^' => BinOp(Caret),
            '&' => BinOp(And),
            '|' => BinOp(Or),
            '@' => At,
            '.' => Dot,
            ',' => Comma,
            ';' => Semi,
            ':' => Colon,
            '#' => Pound,
            '$' => Dollar,
            '?' => Question,
            '\'' => SingleQuote,
            _ => unreachable!(),
        };

        let tree = tokenstream::TokenTree::Token(span, token);
        TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
    }
}

impl ToInternal<errors::Level> for Level {
    fn to_internal(self) -> errors::Level {
        match self {
            Level::Error => errors::Level::Error,
            Level::Warning => errors::Level::Warning,
            Level::Note => errors::Level::Note,
            Level::Help => errors::Level::Help,
            _ => unreachable!("unknown proc_macro::Level variant: {:?}", self),
        }
    }
}
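// Iterator state for `server::TokenStreamIter`: a cursor over the underlying
// stream plus a stack of extra trees produced by the multi-token conversions above.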
#[derive(Clone)]
pub struct TokenStreamIter {
    cursor: tokenstream::Cursor,
    stack: Vec<TokenTree<Group, Punct, Ident, Literal>>,
}

#[derive(Clone)]
pub struct Group {
    delimiter: Delimiter,
    stream: TokenStream,
    span: DelimSpan,
}

#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct Punct {
    ch: char,
    // NB. not using `Spacing` here because it doesn't implement `Hash`.
    joint: bool,
    span: Span,
}
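// Constructs a `Punct`, panicking on characters that are not valid proc-macro
// punctuation (see `LEGAL_CHARS` below).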
impl Punct {
    fn new(ch: char, joint: bool, span: Span) -> Punct {
        const LEGAL_CHARS: &[char] = &['=', '<', '>', '!', '~', '+', '-', '*', '/', '%', '^',
                                       '&', '|', '@', '.', ',', ';', ':', '#', '$', '?', '\''];
        if !LEGAL_CHARS.contains(&ch) {
            panic!("unsupported character `{:?}`", ch)
        }
        Punct { ch, joint, span }
    }
}

#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct Ident {
    sym: Symbol,
    is_raw: bool,
    span: Span,
}
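// Identifier validation: `new` panics on strings that are not valid (possibly raw)
// identifiers, while `dollar_crate` is reserved for `$crate` produced by the
// compiler itself.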
impl Ident {
    fn is_valid(string: &str) -> bool {
        let mut chars = string.chars();
        if let Some(start) = chars.next() {
            (start == '_' || start.is_xid_start())
                && chars.all(|cont| cont == '_' || cont.is_xid_continue())
        } else {
            false
        }
    }
    fn new(sym: Symbol, is_raw: bool, span: Span) -> Ident {
        let string = sym.as_str().get();
        if !Self::is_valid(string) {
            panic!("`{:?}` is not a valid identifier", string)
        }
        if is_raw {
            let normalized_sym = Symbol::intern(string);
            if normalized_sym == keywords::Underscore.name() ||
               ast::Ident::with_empty_ctxt(normalized_sym).is_path_segment_keyword() {
                panic!("`{:?}` is not a valid raw identifier", string)
            }
        }
        Ident { sym, is_raw, span }
    }
    fn dollar_crate(span: Span) -> Ident {
        // `$crate` is accepted as an ident only if it comes from the compiler.
        Ident { sym: keywords::DollarCrate.name(), is_raw: false, span }
    }
}

// FIXME(eddyb) `Literal` should not expose internal `Debug` impls.
#[derive(Clone, Debug)]
pub struct Literal {
    lit: token::Lit,
    suffix: Option<Symbol>,
    span: Span,
}
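// Server-side state for a single proc macro expansion: the parse session plus
// the precomputed def-site and call-site spans handed out to the client.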
pub(crate) struct Rustc<'a> {
    sess: &'a ParseSess,
    def_site: Span,
    call_site: Span,
}

impl<'a> Rustc<'a> {
    pub fn new(cx: &'a ExtCtxt<'_>) -> Self {
        // No way to determine def location for a proc macro right now, so use call location.
        let location = cx.current_expansion.mark.expn_info().unwrap().call_site;
        let to_span = |transparency| {
            location.with_ctxt(
                SyntaxContext::empty()
                    .apply_mark_with_transparency(cx.current_expansion.mark, transparency),
            )
        };
        Rustc {
            sess: cx.parse_sess,
            def_site: to_span(Transparency::Opaque),
            call_site: to_span(Transparency::Transparent),
        }
    }
}
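// Implementations of the `proc_macro::bridge::server` traits, backed by the
// compiler types above.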
impl server::Types for Rustc<'_> {
    type TokenStream = TokenStream;
    type TokenStreamBuilder = tokenstream::TokenStreamBuilder;
    type TokenStreamIter = TokenStreamIter;
    type Group = Group;
    type Punct = Punct;
    type Ident = Ident;
    type Literal = Literal;
    type SourceFile = Lrc<SourceFile>;
    type MultiSpan = Vec<Span>;
    type Diagnostic = Diagnostic;
    type Span = Span;
}

impl server::TokenStream for Rustc<'_> {
    fn new(&mut self) -> Self::TokenStream {
        TokenStream::empty()
    }
    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
        stream.is_empty()
    }
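    // Parses `src` as a token stream attributed to this macro's call site and
    // eagerly reports any unclosed-delimiter errors encountered during parsing.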
    fn from_str(&mut self, src: &str) -> Self::TokenStream {
        let (tokens, errors) = parse::parse_stream_from_source_str(
            FileName::proc_macro_source_code(src.clone()),
            src.to_string(),
            self.sess,
            Some(self.call_site),
        );
        emit_unclosed_delims(&errors, &self.sess.span_diagnostic);
        tokens
    }
    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
        stream.to_string()
    }
    fn from_token_tree(
        &mut self,
        tree: TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
    ) -> Self::TokenStream {
        tree.to_internal()
    }
    fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
        TokenStreamIter {
            cursor: stream.trees(),
            stack: vec![],
        }
    }
}

impl server::TokenStreamBuilder for Rustc<'_> {
    fn new(&mut self) -> Self::TokenStreamBuilder {
        tokenstream::TokenStreamBuilder::new()
    }
    fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
        builder.push(stream);
    }
    fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
        builder.build()
    }
}

impl server::TokenStreamIter for Rustc<'_> {
    fn next(
        &mut self,
        iter: &mut Self::TokenStreamIter,
    ) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
        loop {
            let tree = iter.stack.pop().or_else(|| {
                let next = iter.cursor.next_with_joint()?;
                Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
            })?;
            // HACK: The condition "dummy span + group with empty delimiter" represents an AST
            // fragment approximately converted into a token stream. This may happen, for
            // example, with inputs to proc macro attributes, including derives. Such "groups"
            // need to be flattened during iteration over the stream's token trees.
            // Eventually this needs to be removed in favor of keeping original token trees
            // and not doing the roundtrip through AST.
            if let TokenTree::Group(ref group) = tree {
                if group.delimiter == Delimiter::None && group.span.entire().is_dummy() {
                    iter.cursor.append(group.stream.clone());
                    continue;
                }
            }
            return Some(tree);
        }
    }
}

impl server::Group for Rustc<'_> {
    fn new(&mut self, delimiter: Delimiter, stream: Self::TokenStream) -> Self::Group {
        Group {
            delimiter,
            stream,
            span: DelimSpan::from_single(server::Span::call_site(self)),
        }
    }
    fn delimiter(&mut self, group: &Self::Group) -> Delimiter {
        group.delimiter
    }
    fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
        group.stream.clone()
    }
    fn span(&mut self, group: &Self::Group) -> Self::Span {
        group.span.entire()
    }
    fn span_open(&mut self, group: &Self::Group) -> Self::Span {
        group.span.open
    }
    fn span_close(&mut self, group: &Self::Group) -> Self::Span {
        group.span.close
    }
    fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
        group.span = DelimSpan::from_single(span);
    }
}

impl server::Punct for Rustc<'_> {
    fn new(&mut self, ch: char, spacing: Spacing) -> Self::Punct {
        Punct::new(ch, spacing == Spacing::Joint, server::Span::call_site(self))
    }
    fn as_char(&mut self, punct: Self::Punct) -> char {
        punct.ch
    }
    fn spacing(&mut self, punct: Self::Punct) -> Spacing {
        if punct.joint {
            Spacing::Joint
        } else {
            Spacing::Alone
        }
    }
    fn span(&mut self, punct: Self::Punct) -> Self::Span {
        punct.span
    }
    fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
        Punct { span, ..punct }
    }
}

impl server::Ident for Rustc<'_> {
    fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident {
        Ident::new(Symbol::intern(string), is_raw, span)
    }
    fn span(&mut self, ident: Self::Ident) -> Self::Span {
        ident.span
    }
    fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
        Ident { span, ..ident }
    }
}

impl server::Literal for Rustc<'_> {
    // FIXME(eddyb) `Literal` should not expose internal `Debug` impls.
    fn debug(&mut self, literal: &Self::Literal) -> String {
        format!("{:?}", literal)
    }
    fn integer(&mut self, n: &str) -> Self::Literal {
        Literal {
            lit: token::Lit::Integer(Symbol::intern(n)),
            suffix: None,
            span: server::Span::call_site(self),
        }
    }
    fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
        Literal {
            lit: token::Lit::Integer(Symbol::intern(n)),
            suffix: Some(Symbol::intern(kind)),
            span: server::Span::call_site(self),
        }
    }
    fn float(&mut self, n: &str) -> Self::Literal {
        Literal {
            lit: token::Lit::Float(Symbol::intern(n)),
            suffix: None,
            span: server::Span::call_site(self),
        }
    }
    fn f32(&mut self, n: &str) -> Self::Literal {
        Literal {
            lit: token::Lit::Float(Symbol::intern(n)),
            suffix: Some(Symbol::intern("f32")),
            span: server::Span::call_site(self),
        }
    }
    fn f64(&mut self, n: &str) -> Self::Literal {
        Literal {
            lit: token::Lit::Float(Symbol::intern(n)),
            suffix: Some(Symbol::intern("f64")),
            span: server::Span::call_site(self),
        }
    }
    fn string(&mut self, string: &str) -> Self::Literal {
        let mut escaped = String::new();
        for ch in string.chars() {
            escaped.extend(ch.escape_debug());
        }
        Literal {
            lit: token::Lit::Str_(Symbol::intern(&escaped)),
            suffix: None,
            span: server::Span::call_site(self),
        }
    }
    fn character(&mut self, ch: char) -> Self::Literal {
        let mut escaped = String::new();
        escaped.extend(ch.escape_unicode());
        Literal {
            lit: token::Lit::Char(Symbol::intern(&escaped)),
            suffix: None,
            span: server::Span::call_site(self),
        }
    }
    fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
        let string = bytes
            .iter()
            .cloned()
            .flat_map(ascii::escape_default)
            .map(Into::<char>::into)
            .collect::<String>();
        Literal {
            lit: token::Lit::ByteStr(Symbol::intern(&string)),
            suffix: None,
            span: server::Span::call_site(self),
        }
    }
    fn span(&mut self, literal: &Self::Literal) -> Self::Span {
        literal.span
    }
    fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
        literal.span = span;
    }
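    // Computes a sub-span of the literal's span from the given byte bounds,
    // returning `None` if the requested range is out of bounds or would overflow.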
    fn subspan(
        &mut self,
        literal: &Self::Literal,
        start: Bound<usize>,
        end: Bound<usize>,
    ) -> Option<Self::Span> {
        let span = literal.span;
        let length = span.hi().to_usize() - span.lo().to_usize();

        let start = match start {
            Bound::Included(lo) => lo,
            Bound::Excluded(lo) => lo + 1,
            Bound::Unbounded => 0,
        };

        let end = match end {
            Bound::Included(hi) => hi + 1,
            Bound::Excluded(hi) => hi,
            Bound::Unbounded => length,
        };

        // Bounds check the values, preventing addition overflow and OOB spans.
        if start > u32::max_value() as usize
            || end > u32::max_value() as usize
            || (u32::max_value() - start as u32) < span.lo().to_u32()
            || (u32::max_value() - end as u32) < span.lo().to_u32()
            || start >= end
            || end > length
        {
            return None;
        }

        let new_lo = span.lo() + BytePos::from_usize(start);
        let new_hi = span.lo() + BytePos::from_usize(end);
        Some(span.with_lo(new_lo).with_hi(new_hi))
    }
}

impl server::SourceFile for Rustc<'_> {
    fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool {
        Lrc::ptr_eq(file1, file2)
    }
    fn path(&mut self, file: &Self::SourceFile) -> String {
        match file.name {
            FileName::Real(ref path) => path
                .to_str()
                .expect("non-UTF8 file path in `proc_macro::SourceFile::path`")
                .to_string(),
            _ => file.name.to_string(),
        }
    }
    fn is_real(&mut self, file: &Self::SourceFile) -> bool {
        file.is_real_file()
    }
}

impl server::MultiSpan for Rustc<'_> {
    fn new(&mut self) -> Self::MultiSpan {
        vec![]
    }
    fn push(&mut self, spans: &mut Self::MultiSpan, span: Self::Span) {
        spans.push(span)
    }
}

impl server::Diagnostic for Rustc<'_> {
    fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
        let mut diag = Diagnostic::new(level.to_internal(), msg);
        diag.set_span(MultiSpan::from_spans(spans));
        diag
    }
    fn sub(
        &mut self,
        diag: &mut Self::Diagnostic,
        level: Level,
        msg: &str,
        spans: Self::MultiSpan,
    ) {
        diag.sub(level.to_internal(), msg, MultiSpan::from_spans(spans), None);
    }
    fn emit(&mut self, diag: Self::Diagnostic) {
        DiagnosticBuilder::new_diagnostic(&self.sess.span_diagnostic, diag).emit()
    }
}

impl server::Span for Rustc<'_> {
    fn debug(&mut self, span: Self::Span) -> String {
        format!("{:?} bytes({}..{})", span.ctxt(), span.lo().0, span.hi().0)
    }
    fn def_site(&mut self) -> Self::Span {
        self.def_site
    }
    fn call_site(&mut self) -> Self::Span {
        self.call_site
    }
    fn source_file(&mut self, span: Self::Span) -> Self::SourceFile {
        self.sess.source_map().lookup_char_pos(span.lo()).file
    }
    fn parent(&mut self, span: Self::Span) -> Option<Self::Span> {
        span.ctxt().outer().expn_info().map(|i| i.call_site)
    }
    fn source(&mut self, span: Self::Span) -> Self::Span {
        span.source_callsite()
    }
    fn start(&mut self, span: Self::Span) -> LineColumn {
        let loc = self.sess.source_map().lookup_char_pos(span.lo());
        LineColumn {
            line: loc.line,
            column: loc.col.to_usize(),
        }
    }
    fn end(&mut self, span: Self::Span) -> LineColumn {
        let loc = self.sess.source_map().lookup_char_pos(span.hi());
        LineColumn {
            line: loc.line,
            column: loc.col.to_usize(),
        }
    }
    fn join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> {
        let self_loc = self.sess.source_map().lookup_char_pos(first.lo());
        let other_loc = self.sess.source_map().lookup_char_pos(second.lo());

        if self_loc.file.name != other_loc.file.name {
            return None;
        }

        Some(first.to(second))
    }
    fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span {
        span.with_ctxt(at.ctxt())
    }
}