Rollup merge of #57486 - nnethercote:simplify-TokenStream-more, r=petrochenkov

Simplify `TokenStream` some more

These commits simplify `TokenStream`, remove `ThinTokenStream`, and avoid some clones. The end result is simpler code and a slight perf win on some benchmarks.

r? @petrochenkov

commit 349c9eeb35
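For orientation before the per-file hunks: the core change is in the `tokenstream` module, where the three-variant `TokenStream` enum becomes a single-field struct. The sketch below is a standalone illustration of that change, not compiler code; `Lrc` is modeled with `std::sync::Arc`, and `TokenTree`/`IsJoint` are reduced to placeholders so the two layouts can be compared outside rustc.

```rust
// Standalone sketch, not rustc code: `Lrc` is modeled as `std::sync::Arc`, and
// `TokenTree` is a zero-sized placeholder.
use std::mem::size_of;
use std::sync::Arc as Lrc;

struct TokenTree; // stand-in for the real syntax::tokenstream::TokenTree
#[derive(Clone, Copy)]
enum IsJoint { Joint, NonJoint }
type TreeAndJoint = (TokenTree, IsJoint);

// Before: three variants; rustc's static_assert pinned this at 32 bytes on x86_64.
enum OldTokenStream {
    Empty,
    Tree(TokenTree, IsJoint),
    Stream(Lrc<Vec<TreeAndJoint>>),
}

// After: one nullable pointer; the static_assert drops to 8 bytes, because
// `Option<Lrc<_>>` uses the null-pointer niche.
struct NewTokenStream(Option<Lrc<Vec<TreeAndJoint>>>);

fn main() {
    // With a zero-sized placeholder TokenTree the old enum is smaller here than
    // the real 32 bytes, but the new struct is pointer-sized regardless.
    println!("old: {} bytes", size_of::<OldTokenStream>());
    println!("new: {} bytes", size_of::<NewTokenStream>());
    assert_eq!(size_of::<NewTokenStream>(), size_of::<usize>());
}
```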
@@ -258,7 +258,7 @@ for tokenstream::TokenTree {
             tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
                 span.hash_stable(hcx, hasher);
                 std_hash::Hash::hash(&delim, hasher);
-                for sub_tt in tts.stream().trees() {
+                for sub_tt in tts.trees() {
                     sub_tt.hash_stable(hcx, hasher);
                 }
             }
@@ -1474,7 +1474,7 @@ impl KeywordIdents {
                 _ => {},
             }
             TokenTree::Delimited(_, _, tts) => {
-                self.check_tokens(cx, tts.stream())
+                self.check_tokens(cx, tts)
             },
         }
     }
@@ -15,7 +15,7 @@ use rustc_target::spec::abi::Abi;
 use source_map::{dummy_spanned, respan, Spanned};
 use symbol::{keywords, Symbol};
 use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::{ThinTokenStream, TokenStream};
+use tokenstream::TokenStream;
 use ThinVec;
 
 use rustc_data_structures::fx::FxHashSet;
@@ -1216,7 +1216,7 @@ pub type Mac = Spanned<Mac_>;
 pub struct Mac_ {
     pub path: Path,
     pub delim: MacDelimiter,
-    pub tts: ThinTokenStream,
+    pub tts: TokenStream,
 }
 
 #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)]
@@ -1228,13 +1228,13 @@ pub enum MacDelimiter {
 
 impl Mac_ {
     pub fn stream(&self) -> TokenStream {
-        self.tts.stream()
+        self.tts.clone()
     }
 }
 
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct MacroDef {
-    pub tokens: ThinTokenStream,
+    pub tokens: TokenStream,
     pub legacy: bool,
 }
@@ -570,7 +570,7 @@ impl MetaItemKind {
             }
             Some(TokenTree::Delimited(_, delim, ref tts)) if delim == token::Paren => {
                 tokens.next();
-                tts.stream()
+                tts.clone()
             }
             _ => return Some(MetaItemKind::Word),
         };
@@ -748,7 +748,7 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt
         },
         TokenTree::Delimited(span, delim, ref tts) => {
             let mut stmts = statements_mk_tt(cx, &TokenTree::open_tt(span.open, delim), false);
-            stmts.extend(statements_mk_tts(cx, tts.stream()));
+            stmts.extend(statements_mk_tts(cx, tts.clone()));
             stmts.extend(statements_mk_tt(cx, &TokenTree::close_tt(span.close, delim), false));
             stmts
         }
@@ -598,7 +598,7 @@ pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
         TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
             DelimSpan::from_pair(fld.new_span(span.open), fld.new_span(span.close)),
             delim,
-            fld.fold_tts(tts.stream()).into(),
+            fld.fold_tts(tts).into(),
         ),
     }
 }
@@ -817,7 +817,7 @@ mod tests {
                 )
                 if name_macro_rules.name == "macro_rules"
                 && name_zip.name == "zip" => {
-                    let tts = &macro_tts.stream().trees().collect::<Vec<_>>();
+                    let tts = &macro_tts.trees().collect::<Vec<_>>();
                     match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                         (
                             3,
@@ -826,7 +826,7 @@ mod tests {
                             Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
                         )
                         if macro_delim == token::Paren => {
-                            let tts = &first_tts.stream().trees().collect::<Vec<_>>();
+                            let tts = &first_tts.trees().collect::<Vec<_>>();
                             match (tts.len(), tts.get(0), tts.get(1)) {
                                 (
                                     2,
@@ -836,7 +836,7 @@ mod tests {
                                 if first_delim == token::Paren && ident.name == "a" => {},
                                 _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                             }
-                            let tts = &second_tts.stream().trees().collect::<Vec<_>>();
+                            let tts = &second_tts.trees().collect::<Vec<_>>();
                             match (tts.len(), tts.get(0), tts.get(1)) {
                                 (
                                     2,
@@ -46,7 +46,7 @@ use print::pprust;
 use ptr::P;
 use parse::PResult;
 use ThinVec;
-use tokenstream::{self, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
+use tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
 use symbol::{Symbol, keywords};
 
 use std::borrow::Cow;
@@ -280,17 +280,17 @@ struct TokenCursorFrame {
 /// on the parser.
 #[derive(Clone)]
 enum LastToken {
-    Collecting(Vec<TokenStream>),
-    Was(Option<TokenStream>),
+    Collecting(Vec<TreeAndJoint>),
+    Was(Option<TreeAndJoint>),
 }
 
 impl TokenCursorFrame {
-    fn new(sp: DelimSpan, delim: DelimToken, tts: &ThinTokenStream) -> Self {
+    fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
         TokenCursorFrame {
             delim: delim,
             span: sp,
             open_delim: delim == token::NoDelim,
-            tree_cursor: tts.stream().into_trees(),
+            tree_cursor: tts.clone().into_trees(),
             close_delim: delim == token::NoDelim,
             last_token: LastToken::Was(None),
         }
@@ -2330,7 +2330,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, ThinTokenStream)> {
+    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
         let delim = match self.token {
             token::OpenDelim(delim) => delim,
             _ => {
@@ -2350,7 +2350,7 @@ impl<'a> Parser<'a> {
             token::Brace => MacDelimiter::Brace,
             token::NoDelim => self.bug("unexpected no delimiter"),
         };
-        Ok((delim, tts.stream().into()))
+        Ok((delim, tts.into()))
     }
 
     /// At the bottom (top?) of the precedence hierarchy,
@@ -4641,7 +4641,7 @@ impl<'a> Parser<'a> {
         let ident = self.parse_ident()?;
         let tokens = if self.check(&token::OpenDelim(token::Brace)) {
             match self.parse_token_tree() {
-                TokenTree::Delimited(_, _, tts) => tts.stream(),
+                TokenTree::Delimited(_, _, tts) => tts,
                 _ => unreachable!(),
             }
         } else if self.check(&token::OpenDelim(token::Paren)) {
@@ -7757,7 +7757,7 @@ impl<'a> Parser<'a> {
             &mut self.token_cursor.stack[prev].last_token
         };
 
-        // Pull our the toekns that we've collected from the call to `f` above
+        // Pull out the tokens that we've collected from the call to `f` above.
         let mut collected_tokens = match *last_token {
             LastToken::Collecting(ref mut v) => mem::replace(v, Vec::new()),
             LastToken::Was(_) => panic!("our vector went away?"),
@@ -7776,10 +7776,9 @@ impl<'a> Parser<'a> {
         // call. In that case we need to record all the tokens we collected in
         // our parent list as well. To do that we push a clone of our stream
        // onto the previous list.
-        let stream = collected_tokens.into_iter().collect::<TokenStream>();
         match prev_collecting {
             Some(mut list) => {
-                list.push(stream.clone());
+                list.extend(collected_tokens.iter().cloned());
                 list.extend(extra_token);
                 *last_token = LastToken::Collecting(list);
             }
@@ -7788,7 +7787,7 @@ impl<'a> Parser<'a> {
             }
         }
 
-        Ok((ret?, stream))
+        Ok((ret?, TokenStream::new(collected_tokens)))
     }
 
     pub fn parse_item(&mut self) -> PResult<'a, Option<P<Item>>> {
@@ -807,7 +807,7 @@ pub trait PrintState<'a> {
             TokenTree::Delimited(_, delim, tts) => {
                 self.writer().word(token_to_string(&token::OpenDelim(delim)))?;
                 self.writer().space()?;
-                self.print_tts(tts.stream())?;
+                self.print_tts(tts)?;
                 self.writer().space()?;
                 self.writer().word(token_to_string(&token::CloseDelim(delim)))
             },
@@ -41,7 +41,7 @@ pub enum TokenTree {
     /// A single token
     Token(Span, token::Token),
     /// A delimited sequence of token trees
-    Delimited(DelimSpan, DelimToken, ThinTokenStream),
+    Delimited(DelimSpan, DelimToken, TokenStream),
 }
 
 impl TokenTree {
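A note on why dropping `ThinTokenStream` is sound here: the old `TokenStream::Tree(TokenTree, IsJoint)` variant stored a `TokenTree` inline, so `Delimited` could not hold a `TokenStream` directly without making the two types infinitely sized (the removed `ThinTokenStream` doc comment, further down in this diff, says exactly that). With the new single-field `TokenStream`, the recursion always goes through an `Lrc`, i.e. a pointer. The sketch below uses simplified, non-rustc types to show that the mutually recursive pair now sizes fine.

```rust
// Simplified, non-rustc types: the recursion TokenTree -> TokenStream -> TokenTree
// is broken by the reference-counted pointer inside TokenStream, so both types
// have a finite size and no separate "thin" wrapper is needed.
use std::sync::Arc;

enum TokenTree {
    Token(String),                // stand-in for Token(Span, token::Token)
    Delimited(char, TokenStream), // stand-in for Delimited(DelimSpan, DelimToken, TokenStream)
}

struct TokenStream(Option<Arc<Vec<TokenTree>>>);

fn main() {
    let inner = TokenStream(Some(Arc::new(vec![TokenTree::Token("a".to_string())])));
    let tt = TokenTree::Delimited('(', inner);
    if let TokenTree::Delimited(_, TokenStream(Some(trees))) = &tt {
        assert_eq!(trees.len(), 1);
    }
}
```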
@@ -62,8 +62,7 @@ impl TokenTree {
             (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
             (&TokenTree::Delimited(_, delim, ref tts),
              &TokenTree::Delimited(_, delim2, ref tts2)) => {
-                delim == delim2 &&
-                    tts.stream().eq_unspanned(&tts2.stream())
+                delim == delim2 && tts.eq_unspanned(&tts2)
             }
             (_, _) => false,
         }
@@ -81,8 +80,7 @@ impl TokenTree {
             }
             (&TokenTree::Delimited(_, delim, ref tts),
              &TokenTree::Delimited(_, delim2, ref tts2)) => {
-                delim == delim2 &&
-                    tts.stream().probably_equal_for_proc_macro(&tts2.stream())
+                delim == delim2 && tts.probably_equal_for_proc_macro(&tts2)
             }
             (_, _) => false,
         }
@@ -113,7 +111,7 @@ impl TokenTree {
     }
 
     pub fn joint(self) -> TokenStream {
-        TokenStream::Tree(self, Joint)
+        TokenStream::new(vec![(self, Joint)])
     }
 
     /// Returns the opening delimiter as a token tree.
@@ -143,18 +141,19 @@ impl TokenTree {
 /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
 /// instead of a representation of the abstract syntax tree.
 /// Today's `TokenTree`s can still contain AST via `Token::Interpolated` for back-compat.
+///
+/// The use of `Option` is an optimization that avoids the need for an
+/// allocation when the stream is empty. However, it is not guaranteed that an
+/// empty stream is represented with `None`; it may be represented as a `Some`
+/// around an empty `Vec`.
 #[derive(Clone, Debug)]
-pub enum TokenStream {
-    Empty,
-    Tree(TokenTree, IsJoint),
-    Stream(Lrc<Vec<TreeAndJoint>>),
-}
+pub struct TokenStream(Option<Lrc<Vec<TreeAndJoint>>>);
 
 pub type TreeAndJoint = (TokenTree, IsJoint);
 
 // `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
 #[cfg(target_arch = "x86_64")]
-static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::<TokenStream>() == 32);
+static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::<TokenStream>() == 8);
 
 #[derive(Clone, Copy, Debug, PartialEq)]
 pub enum IsJoint {
@@ -169,7 +168,7 @@ impl TokenStream {
     /// separating the two arguments with a comma for diagnostic suggestions.
     pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
         // Used to suggest if a user writes `foo!(a b);`
-        if let TokenStream::Stream(ref stream) = self {
+        if let Some(ref stream) = self.0 {
             let mut suggestion = None;
             let mut iter = stream.iter().enumerate().peekable();
             while let Some((pos, ts)) = iter.next() {
@@ -201,7 +200,7 @@ impl TokenStream {
 
 impl From<TokenTree> for TokenStream {
     fn from(tree: TokenTree) -> TokenStream {
-        TokenStream::Tree(tree, NonJoint)
+        TokenStream::new(vec![(tree, NonJoint)])
     }
 }
@@ -233,7 +232,7 @@ impl PartialEq<TokenStream> for TokenStream {
 
 impl TokenStream {
     pub fn len(&self) -> usize {
-        if let TokenStream::Stream(ref slice) = self {
+        if let Some(ref slice) = self.0 {
             slice.len()
         } else {
             0
@@ -241,13 +240,13 @@ impl TokenStream {
     }
 
     pub fn empty() -> TokenStream {
-        TokenStream::Empty
+        TokenStream(None)
     }
 
     pub fn is_empty(&self) -> bool {
-        match self {
-            TokenStream::Empty => true,
-            _ => false,
+        match self.0 {
+            None => true,
+            Some(ref stream) => stream.is_empty(),
         }
     }
@@ -258,10 +257,9 @@ impl TokenStream {
             _ => {
                 let mut vec = vec![];
                 for stream in streams {
-                    match stream {
-                        TokenStream::Empty => {},
-                        TokenStream::Tree(tree, is_joint) => vec.push((tree, is_joint)),
-                        TokenStream::Stream(stream2) => vec.extend(stream2.iter().cloned()),
+                    match stream.0 {
+                        None => {},
+                        Some(stream2) => vec.extend(stream2.iter().cloned()),
                     }
                 }
                 TokenStream::new(vec)
@@ -269,22 +267,16 @@ impl TokenStream {
         }
     }
 
-    pub fn new(mut streams: Vec<TreeAndJoint>) -> TokenStream {
+    pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
         match streams.len() {
-            0 => TokenStream::empty(),
-            1 => {
-                let (tree, is_joint) = streams.pop().unwrap();
-                TokenStream::Tree(tree, is_joint)
-            }
-            _ => TokenStream::Stream(Lrc::new(streams)),
+            0 => TokenStream(None),
+            _ => TokenStream(Some(Lrc::new(streams))),
         }
     }
 
     pub fn append_to_tree_and_joint_vec(self, vec: &mut Vec<TreeAndJoint>) {
-        match self {
-            TokenStream::Empty => {}
-            TokenStream::Tree(tree, is_joint) => vec.push((tree, is_joint)),
-            TokenStream::Stream(stream) => vec.extend(stream.iter().cloned()),
+        if let Some(stream) = self.0 {
+            vec.extend(stream.iter().cloned());
         }
     }
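The constructor above is where the trade-off noted in the removed `ThinTokenStream` docs shows up: a single-tree stream, which the old `Tree` variant held inline, now takes an `Lrc` allocation, while the empty case stays allocation-free. A minimal sketch of the new `new` semantics, using `Arc` for `Lrc` and a toy element type:

```rust
// Minimal sketch of the new constructor's behaviour: empty input becomes the
// no-allocation None form; any non-empty input is reference-counted.
use std::sync::Arc;

struct TreeAndJoint(&'static str); // toy stand-in for (TokenTree, IsJoint)

struct TokenStream(Option<Arc<Vec<TreeAndJoint>>>);

impl TokenStream {
    fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
        match streams.len() {
            0 => TokenStream(None),
            _ => TokenStream(Some(Arc::new(streams))),
        }
    }
}

fn main() {
    assert!(TokenStream::new(vec![]).0.is_none());                  // empty: no allocation
    assert!(TokenStream::new(vec![TreeAndJoint("a")]).0.is_some()); // one tree: now allocates
}
```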
@@ -349,51 +341,36 @@ impl TokenStream {
     }
 
     pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        match self {
-            TokenStream::Empty => TokenStream::Empty,
-            TokenStream::Tree(tree, is_joint) => TokenStream::Tree(f(0, tree), is_joint),
-            TokenStream::Stream(stream) => TokenStream::Stream(Lrc::new(
+        TokenStream(self.0.map(|stream| {
+            Lrc::new(
                 stream
                     .iter()
                     .enumerate()
                     .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
-                    .collect()
-            )),
-        }
+                    .collect())
+        }))
     }
 
     pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        match self {
-            TokenStream::Empty => TokenStream::Empty,
-            TokenStream::Tree(tree, is_joint) => TokenStream::Tree(f(tree), is_joint),
-            TokenStream::Stream(stream) => TokenStream::Stream(Lrc::new(
+        TokenStream(self.0.map(|stream| {
+            Lrc::new(
                 stream
                     .iter()
                     .map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
-                    .collect()
-            )),
-        }
+                    .collect())
+        }))
     }
 
-    fn first_tree_and_joint(&self) -> Option<(TokenTree, IsJoint)> {
-        match self {
-            TokenStream::Empty => None,
-            TokenStream::Tree(ref tree, is_joint) => Some((tree.clone(), *is_joint)),
-            TokenStream::Stream(ref stream) => Some(stream.first().unwrap().clone())
-        }
+    fn first_tree_and_joint(&self) -> Option<TreeAndJoint> {
+        self.0.as_ref().map(|stream| {
+            stream.first().unwrap().clone()
+        })
     }
 
     fn last_tree_if_joint(&self) -> Option<TokenTree> {
-        match self {
-            TokenStream::Empty => None,
-            TokenStream::Tree(ref tree, is_joint) => {
-                if *is_joint == Joint {
-                    Some(tree.clone())
-                } else {
-                    None
-                }
-            }
-            TokenStream::Stream(ref stream) => {
+        match self.0 {
+            None => None,
+            Some(ref stream) => {
                 if let (tree, Joint) = stream.last().unwrap() {
                     Some(tree.clone())
                 } else {
|
||||
self.push_all_but_last_tree(&last_stream);
|
||||
let glued_span = last_span.to(span);
|
||||
let glued_tt = TokenTree::Token(glued_span, glued_tok);
|
||||
let glued_tokenstream = TokenStream::Tree(glued_tt, is_joint);
|
||||
let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]);
|
||||
self.0.push(glued_tokenstream);
|
||||
self.push_all_but_first_tree(&stream);
|
||||
return
|
||||
@@ -437,23 +414,21 @@ impl TokenStreamBuilder {
     }
 
     fn push_all_but_last_tree(&mut self, stream: &TokenStream) {
-        if let TokenStream::Stream(ref streams) = stream {
+        if let Some(ref streams) = stream.0 {
             let len = streams.len();
             match len {
                 1 => {}
-                2 => self.0.push(TokenStream::Tree(streams[0].0.clone(), streams[0].1)),
-                _ => self.0.push(TokenStream::Stream(Lrc::new(streams[0 .. len - 1].to_vec()))),
+                _ => self.0.push(TokenStream(Some(Lrc::new(streams[0 .. len - 1].to_vec())))),
             }
         }
     }
 
     fn push_all_but_first_tree(&mut self, stream: &TokenStream) {
-        if let TokenStream::Stream(ref streams) = stream {
+        if let Some(ref streams) = stream.0 {
             let len = streams.len();
             match len {
                 1 => {}
-                2 => self.0.push(TokenStream::Tree(streams[1].0.clone(), streams[1].1)),
-                _ => self.0.push(TokenStream::Stream(Lrc::new(streams[1 .. len].to_vec()))),
+                _ => self.0.push(TokenStream(Some(Lrc::new(streams[1 .. len].to_vec())))),
             }
         }
     }
@@ -479,17 +454,9 @@ impl Cursor {
     }
 
     pub fn next_with_joint(&mut self) -> Option<TreeAndJoint> {
-        match self.stream {
-            TokenStream::Empty => None,
-            TokenStream::Tree(ref tree, ref is_joint) => {
-                if self.index == 0 {
-                    self.index = 1;
-                    Some((tree.clone(), *is_joint))
-                } else {
-                    None
-                }
-            }
-            TokenStream::Stream(ref stream) => {
+        match self.stream.0 {
+            None => None,
+            Some(ref stream) => {
                 if self.index < stream.len() {
                     self.index += 1;
                     Some(stream[self.index - 1].clone())
@@ -505,63 +472,19 @@ impl Cursor {
             return;
         }
         let index = self.index;
-        let stream = mem::replace(&mut self.stream, TokenStream::Empty);
+        let stream = mem::replace(&mut self.stream, TokenStream(None));
         *self = TokenStream::from_streams(vec![stream, new_stream]).into_trees();
         self.index = index;
     }
 
     pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
-        match self.stream {
-            TokenStream::Empty => None,
-            TokenStream::Tree(ref tree, _) => {
-                if n == 0 && self.index == 0 {
-                    Some(tree.clone())
-                } else {
-                    None
-                }
-            }
-            TokenStream::Stream(ref stream) =>
-                stream[self.index ..].get(n).map(|(tree, _)| tree.clone()),
+        match self.stream.0 {
+            None => None,
+            Some(ref stream) => stream[self.index ..].get(n).map(|(tree, _)| tree.clone()),
         }
     }
 }
 
-/// The `TokenStream` type is large enough to represent a single `TokenTree` without allocation.
-/// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`.
-/// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion.
-#[derive(Debug, Clone)]
-pub struct ThinTokenStream(Option<Lrc<Vec<TreeAndJoint>>>);
-
-impl ThinTokenStream {
-    pub fn stream(&self) -> TokenStream {
-        self.clone().into()
-    }
-}
-
-impl From<TokenStream> for ThinTokenStream {
-    fn from(stream: TokenStream) -> ThinTokenStream {
-        ThinTokenStream(match stream {
-            TokenStream::Empty => None,
-            TokenStream::Tree(tree, is_joint) => Some(Lrc::new(vec![(tree, is_joint)])),
-            TokenStream::Stream(stream) => Some(stream),
-        })
-    }
-}
-
-impl From<ThinTokenStream> for TokenStream {
-    fn from(stream: ThinTokenStream) -> TokenStream {
-        stream.0.map(TokenStream::Stream).unwrap_or_else(TokenStream::empty)
-    }
-}
-
-impl Eq for ThinTokenStream {}
-
-impl PartialEq<ThinTokenStream> for ThinTokenStream {
-    fn eq(&self, other: &ThinTokenStream) -> bool {
-        TokenStream::from(self.clone()) == TokenStream::from(other.clone())
-    }
-}
-
 impl fmt::Display for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.write_str(&pprust::tokens_to_string(self.clone()))
@@ -580,18 +503,6 @@ impl Decodable for TokenStream {
     }
 }
 
-impl Encodable for ThinTokenStream {
-    fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), E::Error> {
-        TokenStream::from(self.clone()).encode(encoder)
-    }
-}
-
-impl Decodable for ThinTokenStream {
-    fn decode<D: Decoder>(decoder: &mut D) -> Result<ThinTokenStream, D::Error> {
-        TokenStream::decode(decoder).map(Into::into)
-    }
-}
-
 #[derive(Debug, Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub struct DelimSpan {
     pub open: Span,
@@ -832,7 +832,7 @@ pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute)
 pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) {
     match tt {
         TokenTree::Token(_, tok) => visitor.visit_token(tok),
-        TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts.stream()),
+        TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts),
     }
 }
@@ -269,7 +269,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
         };
 
         let tree = tokenstream::TokenTree::Token(span, token);
-        TokenStream::Tree(tree, if joint { Joint } else { NonJoint })
+        TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
     }
 }