Rollup merge of #123462 - fmease:rn-mod-sep-to-path-sep, r=nnethercote

Cleanup: Rename `ModSep` to `PathSep`

`::` is usually referred to as the *path separator* (citation needed).

The existing name `ModSep` for *module separator* is a bit misleading since it in fact separates the segments of arbitrary paths, not only ones resolving to modules. Let me just give a shout-out to associated items (`T::Assoc`, `<Ty as Trait>::function`) and enum variants (`Option::None`).

Motivation: Reduce friction for new contributors, prevent potential confusion.

cc `@petrochenkov`
r? nnethercote or compiler
This commit is contained in:
León Orell Valerian Liehr 2024-04-16 01:12:37 +02:00 committed by GitHub
commit c5665990c5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
13 changed files with 44 additions and 42 deletions

View File

@ -308,11 +308,11 @@ fn from_tokens<'a, I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
// FIXME: Share code with `parse_path`. // FIXME: Share code with `parse_path`.
let path = match tokens.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref() { let path = match tokens.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref() {
Some(&TokenTree::Token( Some(&TokenTree::Token(
Token { kind: ref kind @ (token::Ident(..) | token::ModSep), span }, Token { kind: ref kind @ (token::Ident(..) | token::PathSep), span },
_, _,
)) => 'arm: { )) => 'arm: {
let mut segments = if let &token::Ident(name, _) = kind { let mut segments = if let &token::Ident(name, _) = kind {
if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }, _)) = if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
tokens.peek() tokens.peek()
{ {
tokens.next(); tokens.next();
@ -331,7 +331,7 @@ fn from_tokens<'a, I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
} else { } else {
return None; return None;
} }
if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }, _)) = if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
tokens.peek() tokens.peek()
{ {
tokens.next(); tokens.next();

View File

@ -290,7 +290,7 @@ pub enum TokenKind {
/// `:` /// `:`
Colon, Colon,
/// `::` /// `::`
ModSep, PathSep,
/// `->` /// `->`
RArrow, RArrow,
/// `<-` /// `<-`
@ -393,7 +393,7 @@ pub fn break_two_token_op(&self) -> Option<(TokenKind, TokenKind)> {
BinOpEq(Shr) => (Gt, Ge), BinOpEq(Shr) => (Gt, Ge),
DotDot => (Dot, Dot), DotDot => (Dot, Dot),
DotDotDot => (Dot, DotDot), DotDotDot => (Dot, DotDot),
ModSep => (Colon, Colon), PathSep => (Colon, Colon),
RArrow => (BinOp(Minus), Gt), RArrow => (BinOp(Minus), Gt),
LArrow => (Lt, BinOp(Minus)), LArrow => (Lt, BinOp(Minus)),
FatArrow => (Eq, Gt), FatArrow => (Eq, Gt),
@ -454,7 +454,9 @@ pub fn is_punct(&self) -> bool {
match self.kind { match self.kind {
Eq | Lt | Le | EqEq | Ne | Ge | Gt | AndAnd | OrOr | Not | Tilde | BinOp(_) Eq | Lt | Le | EqEq | Ne | Ge | Gt | AndAnd | OrOr | Not | Tilde | BinOp(_)
| BinOpEq(_) | At | Dot | DotDot | DotDotDot | DotDotEq | Comma | Semi | Colon | BinOpEq(_) | At | Dot | DotDot | DotDotDot | DotDotEq | Comma | Semi | Colon
| ModSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question | SingleQuote => true, | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question | SingleQuote => {
true
}
OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | Ident(..) OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | Ident(..)
| Lifetime(..) | Interpolated(..) | Eof => false, | Lifetime(..) | Interpolated(..) | Eof => false,
@ -481,7 +483,7 @@ pub fn can_begin_expr(&self) -> bool {
// DotDotDot is no longer supported, but we need some way to display the error // DotDotDot is no longer supported, but we need some way to display the error
DotDot | DotDotDot | DotDotEq | // range notation DotDot | DotDotDot | DotDotEq | // range notation
Lt | BinOp(Shl) | // associated path Lt | BinOp(Shl) | // associated path
ModSep | // global path PathSep | // global path
Lifetime(..) | // labeled loop Lifetime(..) | // labeled loop
Pound => true, // expression attributes Pound => true, // expression attributes
Interpolated(ref nt) => matches!(&nt.0, NtLiteral(..) | Interpolated(ref nt) => matches!(&nt.0, NtLiteral(..) |
@ -507,7 +509,7 @@ pub fn can_begin_pattern(&self) -> bool {
// DotDotDot is no longer supported // DotDotDot is no longer supported
| DotDot | DotDotDot | DotDotEq // ranges | DotDot | DotDotDot | DotDotEq // ranges
| Lt | BinOp(Shl) // associated path | Lt | BinOp(Shl) // associated path
| ModSep => true, // global path | PathSep => true, // global path
Interpolated(ref nt) => matches!(&nt.0, NtLiteral(..) | Interpolated(ref nt) => matches!(&nt.0, NtLiteral(..) |
NtPat(..) | NtPat(..) |
NtBlock(..) | NtBlock(..) |
@ -530,7 +532,7 @@ pub fn can_begin_type(&self) -> bool {
Question | // maybe bound in trait object Question | // maybe bound in trait object
Lifetime(..) | // lifetime bound in trait object Lifetime(..) | // lifetime bound in trait object
Lt | BinOp(Shl) | // associated path Lt | BinOp(Shl) | // associated path
ModSep => true, // global path PathSep => true, // global path
Interpolated(ref nt) => matches!(&nt.0, NtTy(..) | NtPath(..)), Interpolated(ref nt) => matches!(&nt.0, NtTy(..) | NtPath(..)),
// For anonymous structs or unions, which only appear in specific positions // For anonymous structs or unions, which only appear in specific positions
// (type of struct fields or union fields), we don't consider them as regular types // (type of struct fields or union fields), we don't consider them as regular types
@ -708,7 +710,7 @@ pub fn is_qpath_start(&self) -> bool {
} }
pub fn is_path_start(&self) -> bool { pub fn is_path_start(&self) -> bool {
self == &ModSep self == &PathSep
|| self.is_qpath_start() || self.is_qpath_start()
|| self.is_whole_path() || self.is_whole_path()
|| self.is_path_segment_keyword() || self.is_path_segment_keyword()
@ -821,7 +823,7 @@ pub fn glue(&self, joint: &Token) -> Option<Token> {
_ => return None, _ => return None,
}, },
Colon => match joint.kind { Colon => match joint.kind {
Colon => ModSep, Colon => PathSep,
_ => return None, _ => return None,
}, },
SingleQuote => match joint.kind { SingleQuote => match joint.kind {
@ -830,7 +832,7 @@ pub fn glue(&self, joint: &Token) -> Option<Token> {
}, },
Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot
| DotDotEq | Comma | Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar | DotDotEq | Comma | Semi | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar
| Question | OpenDelim(..) | CloseDelim(..) | Literal(..) | Ident(..) | Question | OpenDelim(..) | CloseDelim(..) | Literal(..) | Ident(..)
| Lifetime(..) | Interpolated(..) | DocComment(..) | Eof => return None, | Lifetime(..) | Interpolated(..) | DocComment(..) | Eof => return None,
}; };

View File

@ -893,7 +893,7 @@ fn token_kind_to_string_ext(
token::Comma => ",".into(), token::Comma => ",".into(),
token::Semi => ";".into(), token::Semi => ";".into(),
token::Colon => ":".into(), token::Colon => ":".into(),
token::ModSep => "::".into(), token::PathSep => "::".into(),
token::RArrow => "->".into(), token::RArrow => "->".into(),
token::LArrow => "<-".into(), token::LArrow => "<-".into(),
token::FatArrow => "=>".into(), token::FatArrow => "=>".into(),

View File

@ -208,7 +208,7 @@ fn from_internal((stream, rustc): (TokenStream, &mut Rustc<'_, '_>)) -> Self {
Comma => op(","), Comma => op(","),
Semi => op(";"), Semi => op(";"),
Colon => op(":"), Colon => op(":"),
ModSep => op("::"), PathSep => op("::"),
RArrow => op("->"), RArrow => op("->"),
LArrow => op("<-"), LArrow => op("<-"),
FatArrow => op("=>"), FatArrow => op("=>"),

View File

@ -279,7 +279,7 @@ pub(super) fn expected_ident_found(
TokenKind::Colon, TokenKind::Colon,
TokenKind::Comma, TokenKind::Comma,
TokenKind::Semi, TokenKind::Semi,
TokenKind::ModSep, TokenKind::PathSep,
TokenKind::OpenDelim(Delimiter::Brace), TokenKind::OpenDelim(Delimiter::Brace),
TokenKind::OpenDelim(Delimiter::Parenthesis), TokenKind::OpenDelim(Delimiter::Parenthesis),
TokenKind::CloseDelim(Delimiter::Brace), TokenKind::CloseDelim(Delimiter::Brace),
@ -1169,7 +1169,7 @@ pub(super) fn check_turbofish_missing_angle_brackets(&mut self, segment: &mut Pa
return; return;
} }
if token::ModSep == self.token.kind && segment.args.is_none() { if token::PathSep == self.token.kind && segment.args.is_none() {
let snapshot = self.create_snapshot_for_diagnostic(); let snapshot = self.create_snapshot_for_diagnostic();
self.bump(); self.bump();
let lo = self.token.span; let lo = self.token.span;
@ -1420,7 +1420,7 @@ pub(super) fn check_no_chained_comparison(
[(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)]; [(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)];
self.consume_tts(1, &modifiers); self.consume_tts(1, &modifiers);
if !&[token::OpenDelim(Delimiter::Parenthesis), token::ModSep] if !&[token::OpenDelim(Delimiter::Parenthesis), token::PathSep]
.contains(&self.token.kind) .contains(&self.token.kind)
{ {
// We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
@ -1428,7 +1428,7 @@ pub(super) fn check_no_chained_comparison(
self.restore_snapshot(snapshot); self.restore_snapshot(snapshot);
} }
} }
return if token::ModSep == self.token.kind { return if token::PathSep == self.token.kind {
// We have some certainty that this was a bad turbofish at this point. // We have some certainty that this was a bad turbofish at this point.
// `foo< bar >::` // `foo< bar >::`
if let ExprKind::Binary(o, ..) = inner_op.kind if let ExprKind::Binary(o, ..) = inner_op.kind
@ -1784,7 +1784,7 @@ pub(super) fn maybe_recover_from_bad_qpath<T: RecoverQPath>(
} }
// Do not add `::` to expected tokens. // Do not add `::` to expected tokens.
if self.token == token::ModSep { if self.token == token::PathSep {
if let Some(ty) = base.to_ty() { if let Some(ty) = base.to_ty() {
return self.maybe_recover_from_bad_qpath_stage_2(ty.span, ty); return self.maybe_recover_from_bad_qpath_stage_2(ty.span, ty);
} }
@ -1799,7 +1799,7 @@ pub(super) fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>(
ty_span: Span, ty_span: Span,
ty: P<Ty>, ty: P<Ty>,
) -> PResult<'a, P<T>> { ) -> PResult<'a, P<T>> {
self.expect(&token::ModSep)?; self.expect(&token::PathSep)?;
let mut path = ast::Path { segments: ThinVec::new(), span: DUMMY_SP, tokens: None }; let mut path = ast::Path { segments: ThinVec::new(), span: DUMMY_SP, tokens: None };
self.parse_path_segments(&mut path.segments, T::PATH_STYLE, None)?; self.parse_path_segments(&mut path.segments, T::PATH_STYLE, None)?;

View File

@ -358,12 +358,12 @@ pub(super) fn is_path_start_item(&mut self) -> bool {
fn is_reuse_path_item(&mut self) -> bool { fn is_reuse_path_item(&mut self) -> bool {
// no: `reuse ::path` for compatibility reasons with macro invocations // no: `reuse ::path` for compatibility reasons with macro invocations
self.token.is_keyword(kw::Reuse) self.token.is_keyword(kw::Reuse)
&& self.look_ahead(1, |t| t.is_path_start() && t.kind != token::ModSep) && self.look_ahead(1, |t| t.is_path_start() && t.kind != token::PathSep)
} }
/// Are we sure this could not possibly be a macro invocation? /// Are we sure this could not possibly be a macro invocation?
fn isnt_macro_invocation(&mut self) -> bool { fn isnt_macro_invocation(&mut self) -> bool {
self.check_ident() && self.look_ahead(1, |t| *t != token::Not && *t != token::ModSep) self.check_ident() && self.look_ahead(1, |t| *t != token::Not && *t != token::PathSep)
} }
/// Recover on encountering a struct or method definition where the user /// Recover on encountering a struct or method definition where the user
@ -1020,7 +1020,7 @@ fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
{ {
// `use *;` or `use ::*;` or `use {...};` or `use ::{...};` // `use *;` or `use ::*;` or `use {...};` or `use ::{...};`
let mod_sep_ctxt = self.token.span.ctxt(); let mod_sep_ctxt = self.token.span.ctxt();
if self.eat(&token::ModSep) { if self.eat(&token::PathSep) {
prefix prefix
.segments .segments
.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt))); .push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
@ -1031,7 +1031,7 @@ fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
// `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;` // `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
prefix = self.parse_path(PathStyle::Mod)?; prefix = self.parse_path(PathStyle::Mod)?;
if self.eat(&token::ModSep) { if self.eat(&token::PathSep) {
self.parse_use_tree_glob_or_nested()? self.parse_use_tree_glob_or_nested()?
} else { } else {
// Recover from using a colon as path separator. // Recover from using a colon as path separator.
@ -2752,7 +2752,7 @@ fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> {
// Is `self` `n` tokens ahead? // Is `self` `n` tokens ahead?
let is_isolated_self = |this: &Self, n| { let is_isolated_self = |this: &Self, n| {
this.is_keyword_ahead(n, &[kw::SelfLower]) this.is_keyword_ahead(n, &[kw::SelfLower])
&& this.look_ahead(n + 1, |t| t != &token::ModSep) && this.look_ahead(n + 1, |t| t != &token::PathSep)
}; };
// Is `mut self` `n` tokens ahead? // Is `mut self` `n` tokens ahead?
let is_isolated_mut_self = let is_isolated_mut_self =

View File

@ -109,7 +109,7 @@ macro_rules! maybe_recover_from_interpolated_ty_qpath {
($self: expr, $allow_qpath_recovery: expr) => { ($self: expr, $allow_qpath_recovery: expr) => {
if $allow_qpath_recovery if $allow_qpath_recovery
&& $self.may_recover() && $self.may_recover()
&& $self.look_ahead(1, |t| t == &token::ModSep) && $self.look_ahead(1, |t| t == &token::PathSep)
&& let token::Interpolated(nt) = &$self.token.kind && let token::Interpolated(nt) = &$self.token.kind
&& let token::NtTy(ty) = &nt.0 && let token::NtTy(ty) = &nt.0
{ {
@ -1532,7 +1532,7 @@ pub fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
/// `::{` or `::*` /// `::{` or `::*`
fn is_import_coupler(&mut self) -> bool { fn is_import_coupler(&mut self) -> bool {
self.check(&token::ModSep) self.check(&token::PathSep)
&& self.look_ahead(1, |t| { && self.look_ahead(1, |t| {
*t == token::OpenDelim(Delimiter::Brace) || *t == token::BinOp(token::Star) *t == token::OpenDelim(Delimiter::Brace) || *t == token::BinOp(token::Star)
}) })

View File

@ -62,7 +62,7 @@ fn may_be_ident(nt: &token::Nonterminal) -> bool {
_ => false, _ => false,
}, },
NonterminalKind::Path | NonterminalKind::Meta => match &token.kind { NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
token::ModSep | token::Ident(..) => true, token::PathSep | token::Ident(..) => true,
token::Interpolated(nt) => may_be_ident(&nt.0), token::Interpolated(nt) => may_be_ident(&nt.0),
_ => false, _ => false,
}, },
@ -76,7 +76,7 @@ fn may_be_ident(nt: &token::Nonterminal) -> bool {
token::Literal(_) | // literal token::Literal(_) | // literal
token::DotDot | // range pattern (future compat) token::DotDot | // range pattern (future compat)
token::DotDotDot | // range pattern (future compat) token::DotDotDot | // range pattern (future compat)
token::ModSep | // path token::PathSep | // path
token::Lt | // path (UFCS constant) token::Lt | // path (UFCS constant)
token::BinOp(token::Shl) => true, // path (double UFCS) token::BinOp(token::Shl) => true, // path (double UFCS)
// leading vert `|` or-pattern // leading vert `|` or-pattern

View File

@ -1016,7 +1016,7 @@ fn can_be_ident_pat(&mut self) -> bool {
&& self.look_ahead(1, |t| !matches!(t.kind, token::OpenDelim(Delimiter::Parenthesis) // A tuple struct pattern. && self.look_ahead(1, |t| !matches!(t.kind, token::OpenDelim(Delimiter::Parenthesis) // A tuple struct pattern.
| token::OpenDelim(Delimiter::Brace) // A struct pattern. | token::OpenDelim(Delimiter::Brace) // A struct pattern.
| token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern. | token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern.
| token::ModSep // A tuple / struct variant pattern. | token::PathSep // A tuple / struct variant pattern.
| token::Not)) // A macro expanding to a pattern. | token::Not)) // A macro expanding to a pattern.
} }

View File

@ -96,7 +96,7 @@ pub(super) fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (P<QSelf>,
} }
if !self.recover_colon_before_qpath_proj() { if !self.recover_colon_before_qpath_proj() {
self.expect(&token::ModSep)?; self.expect(&token::PathSep)?;
} }
let qself = P(QSelf { ty, path_span, position: path.segments.len() }); let qself = P(QSelf { ty, path_span, position: path.segments.len() });
@ -200,7 +200,7 @@ pub(super) fn parse_path_inner(
let lo = self.token.span; let lo = self.token.span;
let mut segments = ThinVec::new(); let mut segments = ThinVec::new();
let mod_sep_ctxt = self.token.span.ctxt(); let mod_sep_ctxt = self.token.span.ctxt();
if self.eat(&token::ModSep) { if self.eat(&token::PathSep) {
segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt))); segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
} }
self.parse_path_segments(&mut segments, style, ty_generics)?; self.parse_path_segments(&mut segments, style, ty_generics)?;
@ -232,11 +232,11 @@ pub(super) fn parse_path_segments(
// `PathStyle::Expr` is only provided at the root invocation and never in // `PathStyle::Expr` is only provided at the root invocation and never in
// `parse_path_segment` to recurse and therefore can be checked to maintain // `parse_path_segment` to recurse and therefore can be checked to maintain
// this invariant. // this invariant.
self.check_trailing_angle_brackets(&segment, &[&token::ModSep]); self.check_trailing_angle_brackets(&segment, &[&token::PathSep]);
} }
segments.push(segment); segments.push(segment);
if self.is_import_coupler() || !self.eat(&token::ModSep) { if self.is_import_coupler() || !self.eat(&token::PathSep) {
if style == PathStyle::Expr if style == PathStyle::Expr
&& self.may_recover() && self.may_recover()
&& self.token == token::Colon && self.token == token::Colon
@ -291,7 +291,7 @@ pub(super) fn parse_path_segment(
Ok( Ok(
if style == PathStyle::Type && check_args_start(self) if style == PathStyle::Type && check_args_start(self)
|| style != PathStyle::Mod || style != PathStyle::Mod
&& self.check(&token::ModSep) && self.check(&token::PathSep)
&& self.look_ahead(1, |t| is_args_start(t)) && self.look_ahead(1, |t| is_args_start(t))
{ {
// We use `style == PathStyle::Expr` to check if this is in a recursion or not. If // We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
@ -303,7 +303,7 @@ pub(super) fn parse_path_segment(
} }
// Generic arguments are found - `<`, `(`, `::<` or `::(`. // Generic arguments are found - `<`, `(`, `::<` or `::(`.
self.eat(&token::ModSep); self.eat(&token::PathSep);
let lo = self.token.span; let lo = self.token.span;
let args = if self.eat_lt() { let args = if self.eat_lt() {
// `<'a, T, A = U>` // `<'a, T, A = U>`
@ -379,7 +379,7 @@ pub(super) fn parse_path_segment(
let token_before_parsing = self.token.clone(); let token_before_parsing = self.token.clone();
let mut snapshot = None; let mut snapshot = None;
if self.may_recover() if self.may_recover()
&& prev_token_before_parsing.kind == token::ModSep && prev_token_before_parsing.kind == token::PathSep
&& (style == PathStyle::Expr && self.token.can_begin_expr() && (style == PathStyle::Expr && self.token.can_begin_expr()
|| style == PathStyle::Pat && self.token.can_begin_pattern()) || style == PathStyle::Pat && self.token.can_begin_pattern())
{ {
@ -388,7 +388,7 @@ pub(super) fn parse_path_segment(
let (inputs, _) = match self.parse_paren_comma_seq(|p| p.parse_ty()) { let (inputs, _) = match self.parse_paren_comma_seq(|p| p.parse_ty()) {
Ok(output) => output, Ok(output) => output,
Err(mut error) if prev_token_before_parsing.kind == token::ModSep => { Err(mut error) if prev_token_before_parsing.kind == token::PathSep => {
error.span_label( error.span_label(
prev_token_before_parsing.span.to(token_before_parsing.span), prev_token_before_parsing.span.to(token_before_parsing.span),
"while parsing this parenthesized list of type arguments starting here", "while parsing this parenthesized list of type arguments starting here",
@ -470,7 +470,7 @@ fn recover_fn_call_leading_path_sep(
} }
} }
if let token::ModSep | token::RArrow = self.token.kind { if let token::PathSep | token::RArrow = self.token.kind {
return; return;
} }

View File

@ -82,7 +82,7 @@ enum AllowCVariadic {
/// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
/// that `IDENT` is not the ident of a fn trait. /// that `IDENT` is not the ident of a fn trait.
fn can_continue_type_after_non_fn_ident(t: &Token) -> bool { fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
t == &token::ModSep || t == &token::Lt || t == &token::BinOp(token::Shl) t == &token::PathSep || t == &token::Lt || t == &token::BinOp(token::Shl)
} }
fn can_begin_dyn_bound_in_edition_2015(t: &Token) -> bool { fn can_begin_dyn_bound_in_edition_2015(t: &Token) -> bool {

View File

@ -88,7 +88,7 @@ fn contains_unhygienic_crate_reference(tts: &TokenStream) -> Option<Span> {
if !prev_is_dollar if !prev_is_dollar
&& let Some(span) = is_crate_keyword(curr) && let Some(span) = is_crate_keyword(curr)
&& let Some(next) = cursor.look_ahead(0) && let Some(next) = cursor.look_ahead(0)
&& is_token(next, &TokenKind::ModSep) && is_token(next, &TokenKind::PathSep)
{ {
return Some(span); return Some(span);
} }

View File

@ -1091,7 +1091,7 @@ fn next_space(tok: &TokenKind) -> SpaceState {
| TokenKind::DotDotEq | TokenKind::DotDotEq
| TokenKind::Question => SpaceState::Punctuation, | TokenKind::Question => SpaceState::Punctuation,
TokenKind::ModSep TokenKind::PathSep
| TokenKind::Pound | TokenKind::Pound
| TokenKind::Dollar | TokenKind::Dollar
| TokenKind::OpenDelim(_) | TokenKind::OpenDelim(_)