Rename IsJoint -> Spacing
To match the better naming used by proc-macro
parent 4231fbc0a8
commit ccf41dd5eb
@@ -8,7 +8,7 @@ use crate::ast::{Path, PathSegment};
 use crate::mut_visit::visit_clobber;
 use crate::ptr::P;
 use crate::token::{self, CommentKind, Token};
-use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
+use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing};
 
 use rustc_index::bit_set::GrowableBitSet;
 use rustc_span::source_map::{BytePos, Spanned};
@@ -361,7 +361,7 @@ pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool {
 }
 
 impl MetaItem {
-    fn token_trees_and_joints(&self) -> Vec<TreeAndJoint> {
+    fn token_trees_and_spacings(&self) -> Vec<TreeAndSpacing> {
         let mut idents = vec![];
         let mut last_pos = BytePos(0 as u32);
         for (i, segment) in self.path.segments.iter().enumerate() {
@@ -374,7 +374,7 @@ impl MetaItem {
             idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident)).into());
             last_pos = segment.ident.span.hi();
         }
-        idents.extend(self.kind.token_trees_and_joints(self.span));
+        idents.extend(self.kind.token_trees_and_spacings(self.span));
         idents
     }
 
@@ -447,7 +447,7 @@ impl MetaItemKind {
                     if i > 0 {
                         tts.push(TokenTree::token(token::Comma, span).into());
                     }
-                    tts.extend(item.token_trees_and_joints())
+                    tts.extend(item.token_trees_and_spacings())
                 }
                 MacArgs::Delimited(
                     DelimSpan::from_single(span),
@@ -458,7 +458,7 @@ impl MetaItemKind {
         }
     }
 
-    fn token_trees_and_joints(&self, span: Span) -> Vec<TreeAndJoint> {
+    fn token_trees_and_spacings(&self, span: Span) -> Vec<TreeAndSpacing> {
         match *self {
             MetaItemKind::Word => vec![],
             MetaItemKind::NameValue(ref lit) => {
@@ -470,7 +470,7 @@ impl MetaItemKind {
                     if i > 0 {
                         tokens.push(TokenTree::token(token::Comma, span).into());
                     }
-                    tokens.extend(item.token_trees_and_joints())
+                    tokens.extend(item.token_trees_and_spacings())
                 }
                 vec![
                     TokenTree::Delimited(
@@ -553,9 +553,9 @@ impl NestedMetaItem {
         }
     }
 
-    fn token_trees_and_joints(&self) -> Vec<TreeAndJoint> {
+    fn token_trees_and_spacings(&self) -> Vec<TreeAndSpacing> {
         match *self {
-            NestedMetaItem::MetaItem(ref item) => item.token_trees_and_joints(),
+            NestedMetaItem::MetaItem(ref item) => item.token_trees_and_spacings(),
             NestedMetaItem::Literal(ref lit) => vec![lit.token_tree().into()],
         }
     }
@@ -83,7 +83,7 @@ impl TokenTree {
     }
 
     pub fn joint(self) -> TokenStream {
-        TokenStream::new(vec![(self, Joint)])
+        TokenStream::new(vec![(self, Spacing::Joint)])
     }
 
     pub fn token(kind: TokenKind, span: Span) -> TokenTree {
@@ -125,22 +125,20 @@ where
 /// instead of a representation of the abstract syntax tree.
 /// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat.
 #[derive(Clone, Debug, Default, Encodable, Decodable)]
-pub struct TokenStream(pub Lrc<Vec<TreeAndJoint>>);
+pub struct TokenStream(pub Lrc<Vec<TreeAndSpacing>>);
 
-pub type TreeAndJoint = (TokenTree, IsJoint);
+pub type TreeAndSpacing = (TokenTree, Spacing);
 
 // `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
 #[cfg(target_arch = "x86_64")]
 rustc_data_structures::static_assert_size!(TokenStream, 8);
 
 #[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable)]
-pub enum IsJoint {
+pub enum Spacing {
+    Alone,
     Joint,
-    NonJoint,
 }
 
-use IsJoint::*;
-
 impl TokenStream {
     /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
     /// separating the two arguments with a comma for diagnostic suggestions.
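The hunk above is the core of the rename: the old `IsJoint { Joint, NonJoint }` becomes `Spacing { Alone, Joint }`, and every token tree in a `TokenStream` is paired with one of these values through the `TreeAndSpacing` alias. A minimal standalone sketch of what that pairing encodes, using `String` tokens in place of the real `TokenTree` (illustrative names, not the rustc-internal API):

```rust
/// Stripped-down model of the renamed types; the real definitions live in
/// `rustc_ast::tokenstream` and carry full `TokenTree`s rather than strings.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Spacing {
    /// Not immediately followed by the next token: `+ =` stays two tokens.
    Alone,
    /// Immediately followed by the next token: `+` then `=` can read back as `+=`.
    Joint,
}

type TreeAndSpacing = (String, Spacing);

/// Renders a stream, inserting a space only after tokens marked `Alone`.
fn render(stream: &[TreeAndSpacing]) -> String {
    let mut out = String::new();
    for (tok, spacing) in stream {
        out.push_str(tok);
        if *spacing == Spacing::Alone {
            out.push(' ');
        }
    }
    out.trim_end().to_string()
}

fn main() {
    let glued = [("+".to_string(), Spacing::Joint), ("=".to_string(), Spacing::Alone)];
    let split = [("+".to_string(), Spacing::Alone), ("=".to_string(), Spacing::Alone)];
    assert_eq!(render(&glued), "+=");
    assert_eq!(render(&split), "+ =");
}
```

The variant rename is the readability win: `Alone`/`Joint` says how a token relates to its successor, whereas `NonJoint` only said what it was not.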
@@ -153,7 +151,7 @@ impl TokenStream {
                 let sp = match (&ts, &next) {
                     (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
                     (
-                        (TokenTree::Token(token_left), NonJoint),
+                        (TokenTree::Token(token_left), Spacing::Alone),
                         (TokenTree::Token(token_right), _),
                     ) if ((token_left.is_ident() && !token_left.is_reserved_ident())
                         || token_left.is_lit())
@@ -162,11 +160,11 @@ impl TokenStream {
                     {
                         token_left.span
                     }
-                    ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
+                    ((TokenTree::Delimited(sp, ..), Spacing::Alone), _) => sp.entire(),
                     _ => continue,
                 };
                 let sp = sp.shrink_to_hi();
-                let comma = (TokenTree::token(token::Comma, sp), NonJoint);
+                let comma = (TokenTree::token(token::Comma, sp), Spacing::Alone);
                 suggestion = Some((pos, comma, sp));
             }
         }
@@ -184,19 +182,19 @@ impl TokenStream {
 
 impl From<TokenTree> for TokenStream {
     fn from(tree: TokenTree) -> TokenStream {
-        TokenStream::new(vec![(tree, NonJoint)])
+        TokenStream::new(vec![(tree, Spacing::Alone)])
     }
 }
 
-impl From<TokenTree> for TreeAndJoint {
-    fn from(tree: TokenTree) -> TreeAndJoint {
-        (tree, NonJoint)
+impl From<TokenTree> for TreeAndSpacing {
+    fn from(tree: TokenTree) -> TreeAndSpacing {
+        (tree, Spacing::Alone)
     }
 }
 
 impl iter::FromIterator<TokenTree> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
-        TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<TreeAndJoint>>())
+        TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<TreeAndSpacing>>())
     }
 }
 
@@ -209,7 +207,7 @@ impl PartialEq<TokenStream> for TokenStream {
 }
 
 impl TokenStream {
-    pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
+    pub fn new(streams: Vec<TreeAndSpacing>) -> TokenStream {
         TokenStream(Lrc::new(streams))
     }
 
@@ -320,11 +318,11 @@ impl TokenStreamBuilder {
         // If `self` is not empty and the last tree within the last stream is a
         // token tree marked with `Joint`...
         if let Some(TokenStream(ref mut last_stream_lrc)) = self.0.last_mut() {
-            if let Some((TokenTree::Token(last_token), Joint)) = last_stream_lrc.last() {
+            if let Some((TokenTree::Token(last_token), Spacing::Joint)) = last_stream_lrc.last() {
                 // ...and `stream` is not empty and the first tree within it is
                 // a token tree...
                 let TokenStream(ref mut stream_lrc) = stream;
-                if let Some((TokenTree::Token(token), is_joint)) = stream_lrc.first() {
+                if let Some((TokenTree::Token(token), spacing)) = stream_lrc.first() {
                     // ...and the two tokens can be glued together...
                     if let Some(glued_tok) = last_token.glue(&token) {
                         // ...then do so, by overwriting the last token
@@ -337,8 +335,7 @@ impl TokenStreamBuilder {
                         // Overwrite the last token tree with the merged
                         // token.
                         let last_vec_mut = Lrc::make_mut(last_stream_lrc);
-                        *last_vec_mut.last_mut().unwrap() =
-                            (TokenTree::Token(glued_tok), *is_joint);
+                        *last_vec_mut.last_mut().unwrap() = (TokenTree::Token(glued_tok), *spacing);
 
                         // Remove the first token tree from `stream`. (This
                         // is almost always the only tree in `stream`.)
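The `TokenStreamBuilder` hunks above are the main consumer of `Spacing::Joint`: a trailing `Joint` marker is what licenses gluing the last held token onto the next one pushed. A rough sketch of that rule under toy assumptions (a hand-written `glue` table and `String` tokens instead of rustc's `Token::glue`; the function names are illustrative):

```rust
#[derive(Clone, Copy)]
enum Spacing {
    Alone,
    Joint,
}

/// Toy stand-in for `Token::glue`: it only knows that `+` followed
/// immediately by `=` forms the single token `+=`.
fn glue(left: &str, right: &str) -> Option<String> {
    match (left, right) {
        ("+", "=") => Some("+=".to_string()),
        _ => None,
    }
}

/// Mirrors the shape of `TokenStreamBuilder::push`: the incoming token is merged
/// into the previous one only when that previous token was marked `Joint`.
fn push(buf: &mut Vec<(String, Spacing)>, tok: String, spacing: Spacing) {
    let glued = match buf.last() {
        Some((prev, Spacing::Joint)) => glue(prev, &tok),
        _ => None,
    };
    match glued {
        Some(merged) => *buf.last_mut().unwrap() = (merged, spacing),
        None => buf.push((tok, spacing)),
    }
}

fn main() {
    let mut buf = Vec::new();
    push(&mut buf, "+".to_string(), Spacing::Joint);
    push(&mut buf, "=".to_string(), Spacing::Alone);
    // The two pushes collapse into a single glued `+=` token.
    assert_eq!(buf.len(), 1);
    assert_eq!(buf[0].0, "+=");
}
```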
@@ -375,7 +372,7 @@ impl Iterator for Cursor {
     type Item = TokenTree;
 
     fn next(&mut self) -> Option<TokenTree> {
-        self.next_with_joint().map(|(tree, _)| tree)
+        self.next_with_spacing().map(|(tree, _)| tree)
     }
 }
 
@@ -384,7 +381,7 @@ impl Cursor {
         Cursor { stream, index: 0 }
     }
 
-    pub fn next_with_joint(&mut self) -> Option<TreeAndJoint> {
+    pub fn next_with_spacing(&mut self) -> Option<TreeAndSpacing> {
         if self.index < self.stream.len() {
             self.index += 1;
             Some(self.stream.0[self.index - 1].clone())
@@ -4,7 +4,7 @@ use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
 
 use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::token::{self, NtTT, Token};
-use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
+use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing};
 use rustc_ast::MacCall;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
@@ -111,7 +111,7 @@ pub(super) fn transcribe<'a>(
     //
     // Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
     // again, and we are done transcribing.
-    let mut result: Vec<TreeAndJoint> = Vec::new();
+    let mut result: Vec<TreeAndSpacing> = Vec::new();
     let mut result_stack = Vec::new();
     let mut marker = Marker(cx.current_expansion.id, transparency);
 
@@ -2,7 +2,7 @@ use crate::base::ExtCtxt;
 
 use rustc_ast as ast;
 use rustc_ast::token;
-use rustc_ast::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
+use rustc_ast::tokenstream::{self, DelimSpan, Spacing::*, TokenStream, TreeAndSpacing};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::Diagnostic;
@@ -47,15 +47,15 @@ impl ToInternal<token::DelimToken> for Delimiter {
     }
 }
 
-impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
+impl FromInternal<(TreeAndSpacing, &'_ ParseSess, &'_ mut Vec<Self>)>
     for TokenTree<Group, Punct, Ident, Literal>
 {
     fn from_internal(
-        ((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>),
+        ((tree, spacing), sess, stack): (TreeAndSpacing, &ParseSess, &mut Vec<Self>),
     ) -> Self {
         use rustc_ast::token::*;
 
-        let joint = is_joint == Joint;
+        let joint = spacing == Joint;
         let Token { kind, span } = match tree {
             tokenstream::TokenTree::Delimited(span, delim, tts) => {
                 let delimiter = Delimiter::from_internal(delim);
@@ -261,7 +261,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
         };
 
         let tree = tokenstream::TokenTree::token(kind, span);
-        TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
+        TokenStream::new(vec![(tree, if joint { Joint } else { Alone })])
     }
 }
 
@@ -444,7 +444,7 @@ impl server::TokenStreamIter for Rustc<'_> {
     ) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
         loop {
            let tree = iter.stack.pop().or_else(|| {
-                let next = iter.cursor.next_with_joint()?;
+                let next = iter.cursor.next_with_spacing()?;
                 Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
             })?;
             // A hack used to pass AST fragments to attribute and derive macros
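The commit message's reference to proc-macro naming points at the stable `proc_macro::Spacing` enum, whose `Alone` and `Joint` variants the internal enum now mirrors; the `from_internal`/`to_internal` code above is where the two representations are translated into each other. A small example of the public-facing side, written against the `proc-macro2` crate (which mirrors the standard `proc_macro` API and also runs outside proc-macro crates); the `plus_eq` helper is illustrative:

```rust
// Assumes `proc-macro2` is listed as a dependency in Cargo.toml.
use proc_macro2::{Punct, Spacing, TokenStream, TokenTree};

/// Builds `+=` as two `Punct`s: the `+` is marked `Spacing::Joint`, i.e. glued
/// to the `=` that follows, which is the distinction the renamed enum encodes.
fn plus_eq() -> TokenStream {
    let plus = TokenTree::Punct(Punct::new('+', Spacing::Joint));
    let eq = TokenTree::Punct(Punct::new('=', Spacing::Alone));
    vec![plus, eq].into_iter().collect()
}

fn main() {
    // Prints "+ -> Joint" and "= -> Alone".
    for tt in plus_eq() {
        if let TokenTree::Punct(p) = tt {
            println!("{} -> {:?}", p.as_char(), p.spacing());
        }
    }
}
```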
@@ -1,6 +1,6 @@
 use rustc_ast::ast::AttrStyle;
 use rustc_ast::token::{self, CommentKind, Token, TokenKind};
-use rustc_ast::tokenstream::{IsJoint, TokenStream};
+use rustc_ast::tokenstream::{Spacing, TokenStream};
 use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError, PResult};
 use rustc_lexer::unescape::{self, Mode};
 use rustc_lexer::{Base, DocStyle, RawStrError};
@@ -54,8 +54,8 @@ impl<'a> StringReader<'a> {
     }
 
     /// Returns the next token, and info about preceding whitespace, if any.
-    fn next_token(&mut self) -> (IsJoint, Token) {
-        let mut is_joint = IsJoint::Joint;
+    fn next_token(&mut self) -> (Spacing, Token) {
+        let mut spacing = Spacing::Joint;
 
         // Skip `#!` at the start of the file
         let start_src_index = self.src_index(self.pos);
@@ -64,7 +64,7 @@ impl<'a> StringReader<'a> {
         if is_beginning_of_file {
             if let Some(shebang_len) = rustc_lexer::strip_shebang(text) {
                 self.pos = self.pos + BytePos::from_usize(shebang_len);
-                is_joint = IsJoint::NonJoint;
+                spacing = Spacing::Alone;
             }
         }
 
@@ -75,7 +75,7 @@ impl<'a> StringReader<'a> {
 
             if text.is_empty() {
                 let span = self.mk_sp(self.pos, self.pos);
-                return (is_joint, Token::new(token::Eof, span));
+                return (spacing, Token::new(token::Eof, span));
             }
 
             let token = rustc_lexer::first_token(text);
@@ -88,9 +88,9 @@ impl<'a> StringReader<'a> {
             match self.cook_lexer_token(token.kind, start) {
                 Some(kind) => {
                     let span = self.mk_sp(start, self.pos);
-                    return (is_joint, Token::new(kind, span));
+                    return (spacing, Token::new(kind, span));
                 }
-                None => is_joint = IsJoint::NonJoint,
+                None => spacing = Spacing::Alone,
             }
         }
     }
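In this lexer, `Spacing` records whether the token just produced was preceded by skipped material: it starts out `Joint` and flips to `Alone` whenever whitespace, a shebang, or a comment that does not become a token is stripped before it. A minimal standalone sketch of that bookkeeping over a plain string, assuming one-character tokens (the `tokenize` name is illustrative):

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Spacing {
    Alone,
    Joint,
}

/// Splits `src` into one-character tokens, tagging each with its spacing
/// relative to the previous token: `Joint` if nothing was skipped in between,
/// `Alone` if whitespace separated them. This mirrors the bookkeeping
/// `StringReader::next_token` does one token at a time.
fn tokenize(src: &str) -> Vec<(char, Spacing)> {
    let mut out = Vec::new();
    let mut spacing = Spacing::Joint;
    for c in src.chars() {
        if c.is_whitespace() {
            // Skipped material breaks the "glued to the previous token" chain.
            spacing = Spacing::Alone;
        } else {
            out.push((c, spacing));
            spacing = Spacing::Joint;
        }
    }
    out
}

fn main() {
    // In `a +=b`, `+` is `Alone` (a space precedes it) while `=` and `b` are `Joint`.
    let toks = tokenize("a +=b");
    assert_eq!(
        toks,
        vec![
            ('a', Spacing::Joint),
            ('+', Spacing::Alone),
            ('=', Spacing::Joint),
            ('b', Spacing::Joint),
        ]
    );
}
```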
@@ -3,8 +3,8 @@ use super::{StringReader, UnmatchedBrace};
 use rustc_ast::token::{self, DelimToken, Token};
 use rustc_ast::tokenstream::{
     DelimSpan,
-    IsJoint::{self, *},
-    TokenStream, TokenTree, TreeAndJoint,
+    Spacing::{self, *},
+    TokenStream, TokenTree, TreeAndSpacing,
 };
 use rustc_ast_pretty::pprust::token_to_string;
 use rustc_data_structures::fx::FxHashMap;
@@ -77,7 +77,7 @@ impl<'a> TokenTreesReader<'a> {
         }
     }
 
-    fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
+    fn parse_token_tree(&mut self) -> PResult<'a, TreeAndSpacing> {
         let sm = self.string_reader.sess.source_map();
 
         match self.token.kind {
@@ -262,29 +262,29 @@ impl<'a> TokenTreesReader<'a> {
             }
             _ => {
                 let tt = TokenTree::Token(self.token.take());
-                let mut is_joint = self.bump();
+                let mut spacing = self.bump();
                 if !self.token.is_op() {
-                    is_joint = NonJoint;
+                    spacing = Alone;
                 }
-                Ok((tt, is_joint))
+                Ok((tt, spacing))
             }
         }
     }
 
-    fn bump(&mut self) -> IsJoint {
-        let (joint_to_prev, token) = self.string_reader.next_token();
+    fn bump(&mut self) -> Spacing {
+        let (spacing, token) = self.string_reader.next_token();
         self.token = token;
-        joint_to_prev
+        spacing
     }
 }
 
 #[derive(Default)]
 struct TokenStreamBuilder {
-    buf: Vec<TreeAndJoint>,
+    buf: Vec<TreeAndSpacing>,
 }
 
 impl TokenStreamBuilder {
-    fn push(&mut self, (tree, joint): TreeAndJoint) {
+    fn push(&mut self, (tree, joint): TreeAndSpacing) {
         if let Some((TokenTree::Token(prev_token), Joint)) = self.buf.last() {
             if let TokenTree::Token(token) = &tree {
                 if let Some(glued) = prev_token.glue(token) {
@@ -8,7 +8,7 @@
 
 use rustc_ast as ast;
 use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{self, IsJoint, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{self, Spacing, TokenStream, TokenTree};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Diagnostic, FatalError, Level, PResult};
@@ -437,7 +437,7 @@ pub fn tokenstream_probably_equal_for_proc_macro(
             // issue #75734 tracks resolving this.
             nt_to_tokenstream(nt, sess, *span).into_trees()
         } else {
-            TokenStream::new(vec![(tree, IsJoint::NonJoint)]).into_trees()
+            TokenStream::new(vec![(tree, Spacing::Alone)]).into_trees()
        }
     };
 
@@ -15,7 +15,7 @@ pub use path::PathStyle;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, DelimToken, Token, TokenKind};
-use rustc_ast::tokenstream::{self, DelimSpan, TokenStream, TokenTree, TreeAndJoint};
+use rustc_ast::tokenstream::{self, DelimSpan, TokenStream, TokenTree, TreeAndSpacing};
 use rustc_ast::DUMMY_NODE_ID;
 use rustc_ast::{self as ast, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe};
 use rustc_ast::{Async, MacArgs, MacDelimiter, Mutability, StrLit, Visibility, VisibilityKind};
@@ -118,7 +118,7 @@ impl<'a> Drop for Parser<'a> {
 struct TokenCursor {
     frame: TokenCursorFrame,
     stack: Vec<TokenCursorFrame>,
-    cur_token: Option<TreeAndJoint>,
+    cur_token: Option<TreeAndSpacing>,
     collecting: Option<Collecting>,
 }
 
@@ -136,7 +136,7 @@ struct TokenCursorFrame {
 struct Collecting {
     /// Holds the current tokens captured during the most
     /// recent call to `collect_tokens`
-    buf: Vec<TreeAndJoint>,
+    buf: Vec<TreeAndSpacing>,
     /// The depth of the `TokenCursor` stack at the time
     /// collection was started. When we encounter a `TokenTree::Delimited`,
     /// we want to record the `TokenTree::Delimited` itself,
@@ -167,7 +167,7 @@ impl TokenCursor {
         let tree = if !self.frame.open_delim {
             self.frame.open_delim = true;
             TokenTree::open_tt(self.frame.span, self.frame.delim).into()
-        } else if let Some(tree) = self.frame.tree_cursor.next_with_joint() {
+        } else if let Some(tree) = self.frame.tree_cursor.next_with_spacing() {
             tree
         } else if !self.frame.close_delim {
             self.frame.close_delim = true;
@@ -1154,7 +1154,7 @@ impl<'a> Parser<'a> {
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
     ) -> PResult<'a, (R, TokenStream)> {
         // Record all tokens we parse when parsing this item.
-        let tokens: Vec<TreeAndJoint> = self.token_cursor.cur_token.clone().into_iter().collect();
+        let tokens: Vec<TreeAndSpacing> = self.token_cursor.cur_token.clone().into_iter().collect();
         debug!("collect_tokens: starting with {:?}", tokens);
 
         // We need special handling for the case where `collect_tokens` is called
@@ -1 +1 @@
-{"module":{"inner":{"lo":0,"hi":0},"items":[{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"NonJoint"]]}]}}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]}
+{"module":{"inner":{"lo":0,"hi":0},"items":[{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]}}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]}
@@ -1 +1 @@
-{"module":{"inner":{"lo":0,"hi":0},"items":[{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"prelude_import","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":"Empty"}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"","span":{"lo":0,"hi":0}},"kind":{"variant":"Use","fields":[{"prefix":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"{{root}}","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"std","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"prelude","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"v1","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"kind":"Glob","span":{"lo":0,"hi":0}}]},"tokens":null},{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"macro_use","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":"Empty"}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"std","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null},{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"NonJoint"]]}]}}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]}
+{"module":{"inner":{"lo":0,"hi":0},"items":[{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"prelude_import","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":"Empty"}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"","span":{"lo":0,"hi":0}},"kind":{"variant":"Use","fields":[{"prefix":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"{{root}}","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"std","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"prelude","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"v1","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"kind":"Glob","span":{"lo":0,"hi":0}}]},"tokens":null},{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"macro_use","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":"Empty"}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"std","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null},{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]}}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]}