From 8739668438a40712a0bc617bc587d415c8cb42f0 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Fri, 10 May 2019 03:00:51 +0300 Subject: [PATCH] Simplify conversions between tokens and semantic literals --- src/librustc/hir/print.rs | 56 +---- src/librustdoc/clean/cfg.rs | 24 +- src/libsyntax/attr/mod.rs | 104 +++++---- src/libsyntax/ext/build.rs | 11 +- src/libsyntax/parse/mod.rs | 208 +++++++++++------- src/libsyntax/parse/parser.rs | 159 ++++--------- src/libsyntax/parse/token.rs | 7 + src/libsyntax/print/pprust.rs | 2 +- .../ui/malformed/malformed-interpolated.rs | 5 +- .../malformed/malformed-interpolated.stderr | 9 +- 10 files changed, 260 insertions(+), 325 deletions(-) diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index 6d48ad94a66..475bf8d8372 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -5,7 +5,7 @@ use syntax::parse::lexer::comments; use syntax::print::pp::{self, Breaks}; use syntax::print::pp::Breaks::{Consistent, Inconsistent}; -use syntax::print::pprust::PrintState; +use syntax::print::pprust::{self, PrintState}; use syntax::ptr::P; use syntax::symbol::keywords; use syntax::util::parser::{self, AssocOp, Fixity}; @@ -15,7 +15,6 @@ use crate::hir::{PatKind, GenericBound, TraitBoundModifier, RangeEnd}; use crate::hir::{GenericParam, GenericParamKind, GenericArg}; -use std::ascii; use std::borrow::Cow; use std::cell::Cell; use std::io::{self, Write, Read}; @@ -1251,57 +1250,8 @@ fn print_expr_addr_of(&mut self, fn print_literal(&mut self, lit: &hir::Lit) -> io::Result<()> { self.maybe_print_comment(lit.span.lo())?; - match lit.node { - hir::LitKind::Str(st, style) => self.print_string(&st.as_str(), style), - hir::LitKind::Err(st) => { - let st = st.as_str().escape_debug().to_string(); - let mut res = String::with_capacity(st.len() + 2); - res.push('\''); - res.push_str(&st); - res.push('\''); - self.writer().word(res) - } - hir::LitKind::Byte(byte) => { - let mut res = String::from("b'"); - res.extend(ascii::escape_default(byte).map(|c| c as char)); - res.push('\''); - self.writer().word(res) - } - hir::LitKind::Char(ch) => { - let mut res = String::from("'"); - res.extend(ch.escape_default()); - res.push('\''); - self.writer().word(res) - } - hir::LitKind::Int(i, t) => { - match t { - ast::LitIntType::Signed(st) => { - self.writer().word(st.val_to_string(i as i128)) - } - ast::LitIntType::Unsigned(ut) => { - self.writer().word(ut.val_to_string(i)) - } - ast::LitIntType::Unsuffixed => { - self.writer().word(i.to_string()) - } - } - } - hir::LitKind::Float(ref f, t) => { - self.writer().word(format!("{}{}", &f, t.ty_to_string())) - } - hir::LitKind::FloatUnsuffixed(ref f) => self.writer().word(f.as_str().to_string()), - hir::LitKind::Bool(val) => { - if val { self.writer().word("true") } else { self.writer().word("false") } - } - hir::LitKind::ByteStr(ref v) => { - let mut escaped: String = String::new(); - for &ch in v.iter() { - escaped.extend(ascii::escape_default(ch) - .map(|c| c as char)); - } - self.writer().word(format!("b\"{}\"", escaped)) - } - } + let (token, suffix) = lit.node.to_lit_token(); + self.writer().word(pprust::literal_to_string(token, suffix)) } pub fn print_expr(&mut self, expr: &hir::Expr) -> io::Result<()> { diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs index 257f02af4cd..51fe26b3743 100644 --- a/src/librustdoc/clean/cfg.rs +++ b/src/librustdoc/clean/cfg.rs @@ -591,13 +591,10 @@ fn test_parse_ok() { let mi = dummy_meta_item_word("all"); assert_eq!(Cfg::parse(&mi), 
Ok(word_cfg("all"))); - let node = LitKind::Str(Symbol::intern("done"), StrStyle::Cooked); - let (token, suffix) = node.lit_token(); - let mi = MetaItem { - path: Path::from_ident(Ident::from_str("all")), - node: MetaItemKind::NameValue(Lit { node, token, suffix, span: DUMMY_SP }), - span: DUMMY_SP, - }; + let mi = attr::mk_name_value_item_str( + Ident::from_str("all"), + dummy_spanned(Symbol::intern("done")) + ); assert_eq!(Cfg::parse(&mi), Ok(name_value_cfg("all", "done"))); let mi = dummy_meta_item_list!(all, [a, b]); @@ -625,13 +622,12 @@ fn test_parse_ok() { #[test] fn test_parse_err() { with_globals(|| { - let node = LitKind::Bool(false); - let (token, suffix) = node.lit_token(); - let mi = MetaItem { - path: Path::from_ident(Ident::from_str("foo")), - node: MetaItemKind::NameValue(Lit { node, token, suffix, span: DUMMY_SP }), - span: DUMMY_SP, - }; + let mi = attr::mk_name_value_item( + DUMMY_SP, + Ident::from_str("foo"), + LitKind::Bool(false), + DUMMY_SP, + ); assert!(Cfg::parse(&mi).is_err()); let mi = dummy_meta_item_list!(not, [a, b]); diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index e23c1826651..c122e1994e7 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -14,7 +14,7 @@ use crate::ast; use crate::ast::{AttrId, Attribute, AttrStyle, Name, Ident, Path, PathSegment}; use crate::ast::{MetaItem, MetaItemKind, NestedMetaItem}; -use crate::ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind, GenericParam}; +use crate::ast::{Lit, LitKind, Expr, Item, Local, Stmt, StmtKind, GenericParam}; use crate::mut_visit::visit_clobber; use crate::source_map::{BytePos, Spanned, dummy_spanned}; use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; @@ -27,9 +27,11 @@ use crate::tokenstream::{TokenStream, TokenTree, DelimSpan}; use crate::GLOBALS; +use errors::Handler; use log::debug; use syntax_pos::{FileName, Span}; +use std::ascii; use std::iter; use std::ops::DerefMut; @@ -350,14 +352,13 @@ pub fn with_desugared_doc(&self, f: F) -> T where /* Constructors */ pub fn mk_name_value_item_str(ident: Ident, value: Spanned) -> MetaItem { - let node = LitKind::Str(value.node, ast::StrStyle::Cooked); - let (token, suffix) = node.lit_token(); - let value = Lit { node, token, suffix, span: value.span }; - mk_name_value_item(ident.span.to(value.span), ident, value) + let lit_kind = LitKind::Str(value.node, ast::StrStyle::Cooked); + mk_name_value_item(ident.span.to(value.span), ident, lit_kind, value.span) } -pub fn mk_name_value_item(span: Span, ident: Ident, value: Lit) -> MetaItem { - MetaItem { path: Path::from_ident(ident), span, node: MetaItemKind::NameValue(value) } +pub fn mk_name_value_item(span: Span, ident: Ident, lit_kind: LitKind, lit_span: Span) -> MetaItem { + let lit = Lit::from_lit_kind(lit_kind, lit_span); + MetaItem { path: Path::from_ident(ident), span, node: MetaItemKind::NameValue(lit) } } pub fn mk_list_item(span: Span, ident: Ident, items: Vec) -> MetaItem { @@ -419,9 +420,8 @@ pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, span: Span) -> Attribute { let style = doc_comment_style(&text.as_str()); - let node = LitKind::Str(text, ast::StrStyle::Cooked); - let (token, suffix) = node.lit_token(); - let lit = Lit { node, token, suffix, span }; + let lit_kind = LitKind::Str(text, ast::StrStyle::Cooked); + let lit = Lit::from_lit_kind(lit_kind, span); Attribute { id, style, @@ -565,9 +565,7 @@ fn from_tokens(tokens: 
&mut iter::Peekable) -> Option
         Some(TokenTree::Token(_, token::Eq)) => {
             tokens.next();
             return if let Some(TokenTree::Token(span, token)) = tokens.next() {
-                LitKind::from_token(token).map(|(node, token, suffix)| {
-                    MetaItemKind::NameValue(Lit { node, token, suffix, span })
-                })
+                Lit::from_token(&token, span, None).map(MetaItemKind::NameValue)
             } else {
                 None
             };
@@ -612,9 +610,9 @@ fn from_tokens(tokens: &mut iter::Peekable) -> Option
     where I: Iterator,
 {
     if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
-        if let Some((node, token, suffix)) = LitKind::from_token(token) {
+        if let Some(lit) = Lit::from_token(&token, span, None) {
             tokens.next();
-            return Some(NestedMetaItem::Literal(Lit { node, token, suffix, span }));
+            return Some(NestedMetaItem::Literal(lit));
         }
     }
@@ -624,21 +622,19 @@ fn from_tokens(tokens: &mut iter::Peekable) -> Option
 impl Lit {
     crate fn tokens(&self) -> TokenStream {
-        TokenTree::Token(self.span, self.node.token()).into()
+        let token = match self.token {
+            token::Bool(symbol) => Token::Ident(Ident::with_empty_ctxt(symbol), false),
+            token => Token::Literal(token, self.suffix),
+        };
+        TokenTree::Token(self.span, token).into()
     }
 }

 impl LitKind {
-    fn token(&self) -> Token {
-        match self.lit_token() {
-            (token::Bool(symbol), _) => Token::Ident(Ident::with_empty_ctxt(symbol), false),
-            (lit, suffix) => Token::Literal(lit, suffix),
-        }
-    }
-
-    pub fn lit_token(&self) -> (token::Lit, Option<Symbol>) {
-        use std::ascii;
-
+    /// Attempts to recover a token from a semantic literal.
+    /// This function is used when the original token doesn't exist (e.g. the literal is created
+    /// by an AST-based macro) or is unavailable (e.g. from HIR pretty-printing).
+    pub fn to_lit_token(&self) -> (token::Lit, Option<Symbol>) {
         match *self {
             LitKind::Str(string, ast::StrStyle::Cooked) => {
                 let escaped = string.as_str().escape_default().to_string();
@@ -679,29 +675,45 @@ pub fn lit_token(&self) -> (token::Lit, Option) {
             LitKind::Err(val) => (token::Lit::Err(val), None),
         }
     }
+}
-    fn from_token(token: Token) -> Option<(LitKind, token::Lit, Option<Symbol>)> {
-        match token {
-            Token::Ident(ident, false) if ident.name == keywords::True.name() =>
-                Some((LitKind::Bool(true), token::Bool(ident.name), None)),
-            Token::Ident(ident, false) if ident.name == keywords::False.name() =>
-                Some((LitKind::Bool(false), token::Bool(ident.name), None)),
-            Token::Interpolated(nt) => match *nt {
-                token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
-                    ExprKind::Lit(ref lit) => Some((lit.node.clone(), lit.token, lit.suffix)),
-                    _ => None,
-                },
-                _ => None,
-            },
-            Token::Literal(lit, suf) => {
-                let (suffix_illegal, result) = parse::lit_token(lit, suf, None);
-                if result.is_none() || suffix_illegal && suf.is_some() {
-                    return None;
+impl Lit {
+    /// Converts a literal token with a suffix into an AST literal.
+    /// Works speculatively and may return `None` if no diagnostic handler is passed.
+    /// If a diagnostic handler is passed, it may return `Some`,
+    /// possibly after reporting non-fatal errors and recovery, or `None` for irrecoverable errors.
+    crate fn from_token(
+        token: &token::Token,
+        span: Span,
+        diag: Option<(Span, &Handler)>,
+    ) -> Option<Lit> {
+        let (token, suffix) = match *token {
+            token::Ident(ident, false) if ident.name == keywords::True.name() ||
+                                          ident.name == keywords::False.name() =>
+                (token::Bool(ident.name), None),
+            token::Literal(token, suffix) =>
+                (token, suffix),
+            token::Interpolated(ref nt) => {
+                if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt {
+                    if let ast::ExprKind::Lit(lit) = &expr.node {
+                        return Some(lit.clone());
+                    }
+                }
                 }
-                Some((result.unwrap(), lit, suf))
+                return None;
             }
-            _ => None,
-        }
+            _ => return None,
+        };
+
+        let node = LitKind::from_lit_token(token, suffix, diag)?;
+        Some(Lit { node, token, suffix, span })
+    }
+
+    /// Attempts to recover an AST literal from a semantic literal.
+    /// This function is used when the original token doesn't exist (e.g. the literal is created
+    /// by an AST-based macro) or is unavailable (e.g. from HIR pretty-printing).
+    pub fn from_lit_kind(node: LitKind, span: Span) -> Lit {
+        let (token, suffix) = node.to_lit_token();
+        Lit { node, token, suffix, span }
+    }
 }
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 0f9977e8503..d24106f697e 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -697,9 +697,9 @@ fn expr_struct_ident(&self, span: Span,
         self.expr_struct(span, self.path_ident(span, id), fields)
     }

-    fn expr_lit(&self, span: Span, node: ast::LitKind) -> P<ast::Expr> {
-        let (token, suffix) = node.lit_token();
-        self.expr(span, ast::ExprKind::Lit(ast::Lit { node, token, suffix, span }))
+    fn expr_lit(&self, span: Span, lit_kind: ast::LitKind) -> P<ast::Expr> {
+        let lit = ast::Lit::from_lit_kind(lit_kind, span);
+        self.expr(span, ast::ExprKind::Lit(lit))
     }
     fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
         self.expr_lit(span, ast::LitKind::Int(i as u128,
@@ -1165,11 +1165,10 @@ fn meta_list(&self, sp: Span, name: ast::Name, mis: Vec)
         attr::mk_list_item(sp, Ident::with_empty_ctxt(name).with_span_pos(sp), mis)
     }

-    fn meta_name_value(&self, span: Span, name: ast::Name, node: ast::LitKind)
+    fn meta_name_value(&self, span: Span, name: ast::Name, lit_kind: ast::LitKind)
        -> ast::MetaItem {
-        let (token, suffix) = node.lit_token();
         attr::mk_name_value_item(span, Ident::with_empty_ctxt(name).with_span_pos(span),
-                                 ast::Lit { node, token, suffix, span })
+                                 lit_kind, span)
     }

     fn item_use(&self, sp: Span,
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 4d4e99009a9..868b344c065 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -1,11 +1,11 @@
 //! The main parser interface.

-use crate::ast::{self, CrateConfig, NodeId};
+use crate::ast::{self, CrateConfig, LitKind, NodeId};
 use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
 use crate::source_map::{SourceMap, FilePathMapping};
 use crate::feature_gate::UnstableFeatures;
 use crate::parse::parser::Parser;
-use crate::symbol::Symbol;
+use crate::symbol::{keywords, Symbol};
 use crate::syntax::parse::parser::emit_unclosed_delims;
 use crate::tokenstream::{TokenStream, TokenTree};
 use crate::diagnostics::plugin::ErrorMap;
@@ -371,97 +371,151 @@ macro_rules! err {
     }
 }

-crate fn lit_token(lit: token::Lit, suf: Option<Symbol>, diag: Option<(Span, &Handler)>)
-                   -> (bool /* suffix illegal?
*/, Option) { - use ast::LitKind; - - match lit { - token::Bool(_) => panic!("literal token contains `Lit::Bool`"), - token::Byte(i) => { - let lit_kind = match unescape_byte(&i.as_str()) { - Ok(c) => LitKind::Byte(c), - Err(_) => LitKind::Err(i), +crate fn expect_no_suffix(sp: Span, diag: &Handler, kind: &str, suffix: Option) { + match suffix { + None => {/* everything ok */} + Some(suf) => { + let text = suf.as_str(); + if text.is_empty() { + diag.span_bug(sp, "found empty literal suffix in Some") + } + let mut err = if kind == "a tuple index" && + ["i32", "u32", "isize", "usize"].contains(&text.to_string().as_str()) + { + // #59553: warn instead of reject out of hand to allow the fix to percolate + // through the ecosystem when people fix their macros + let mut err = diag.struct_span_warn( + sp, + &format!("suffixes on {} are invalid", kind), + ); + err.note(&format!( + "`{}` is *temporarily* accepted on tuple index fields as it was \ + incorrectly accepted on stable for a few releases", + text, + )); + err.help( + "on proc macros, you'll want to use `syn::Index::from` or \ + `proc_macro::Literal::*_unsuffixed` for code that will desugar \ + to tuple field access", + ); + err.note( + "for more context, see https://github.com/rust-lang/rust/issues/60210", + ); + err + } else { + diag.struct_span_err(sp, &format!("suffixes on {} are invalid", kind)) }; - (true, Some(lit_kind)) - }, - token::Char(i) => { - let lit_kind = match unescape_char(&i.as_str()) { - Ok(c) => LitKind::Char(c), - Err(_) => LitKind::Err(i), - }; - (true, Some(lit_kind)) - }, - token::Err(i) => (true, Some(LitKind::Err(i))), + err.span_label(sp, format!("invalid suffix `{}`", text)); + err.emit(); + } + } +} - // There are some valid suffixes for integer and float literals, - // so all the handling is done internally. - token::Integer(s) => (false, integer_lit(&s.as_str(), suf, diag)), - token::Float(s) => (false, float_lit(&s.as_str(), suf, diag)), +impl LitKind { + /// Converts literal token with a suffix into a semantic literal. + /// Works speculatively and may return `None` is diagnostic handler is not passed. + /// If diagnostic handler is passed, always returns `Some`, + /// possibly after reporting non-fatal errors and recovery. + crate fn from_lit_token( + lit: token::Lit, + suf: Option, + diag: Option<(Span, &Handler)> + ) -> Option { + if suf.is_some() && !lit.may_have_suffix() { + err!(diag, |span, diag| { + expect_no_suffix(span, diag, &format!("a {}", lit.literal_name()), suf) + }); + } - token::Str_(mut sym) => { - // If there are no characters requiring special treatment we can - // reuse the symbol from the Token. Otherwise, we must generate a - // new symbol because the string in the LitKind is different to the - // string in the Token. 
- let mut has_error = false; - let s = &sym.as_str(); - if s.as_bytes().iter().any(|&c| c == b'\\' || c == b'\r') { - let mut buf = String::with_capacity(s.len()); - unescape_str(s, &mut |_, unescaped_char| { - match unescaped_char { + Some(match lit { + token::Bool(i) => { + assert!(i == keywords::True.name() || i == keywords::False.name()); + LitKind::Bool(i == keywords::True.name()) + } + token::Byte(i) => { + match unescape_byte(&i.as_str()) { + Ok(c) => LitKind::Byte(c), + Err(_) => LitKind::Err(i), + } + }, + token::Char(i) => { + match unescape_char(&i.as_str()) { + Ok(c) => LitKind::Char(c), + Err(_) => LitKind::Err(i), + } + }, + token::Err(i) => LitKind::Err(i), + + // There are some valid suffixes for integer and float literals, + // so all the handling is done internally. + token::Integer(s) => return integer_lit(&s.as_str(), suf, diag), + token::Float(s) => return float_lit(&s.as_str(), suf, diag), + + token::Str_(mut sym) => { + // If there are no characters requiring special treatment we can + // reuse the symbol from the Token. Otherwise, we must generate a + // new symbol because the string in the LitKind is different to the + // string in the Token. + let mut has_error = false; + let s = &sym.as_str(); + if s.as_bytes().iter().any(|&c| c == b'\\' || c == b'\r') { + let mut buf = String::with_capacity(s.len()); + unescape_str(s, &mut |_, unescaped_char| { + match unescaped_char { + Ok(c) => buf.push(c), + Err(_) => has_error = true, + } + }); + if has_error { + return Some(LitKind::Err(sym)); + } + sym = Symbol::intern(&buf) + } + + LitKind::Str(sym, ast::StrStyle::Cooked) + } + token::StrRaw(mut sym, n) => { + // Ditto. + let s = &sym.as_str(); + if s.contains('\r') { + sym = Symbol::intern(&raw_str_lit(s)); + } + LitKind::Str(sym, ast::StrStyle::Raw(n)) + } + token::ByteStr(i) => { + let s = &i.as_str(); + let mut buf = Vec::with_capacity(s.len()); + let mut has_error = false; + unescape_byte_str(s, &mut |_, unescaped_byte| { + match unescaped_byte { Ok(c) => buf.push(c), Err(_) => has_error = true, } }); if has_error { - return (true, Some(LitKind::Err(sym))); + return Some(LitKind::Err(i)); } - sym = Symbol::intern(&buf) + buf.shrink_to_fit(); + LitKind::ByteStr(Lrc::new(buf)) } - - (true, Some(LitKind::Str(sym, ast::StrStyle::Cooked))) - } - token::StrRaw(mut sym, n) => { - // Ditto. 
- let s = &sym.as_str(); - if s.contains('\r') { - sym = Symbol::intern(&raw_str_lit(s)); + token::ByteStrRaw(i, _) => { + LitKind::ByteStr(Lrc::new(i.to_string().into_bytes())) } - (true, Some(LitKind::Str(sym, ast::StrStyle::Raw(n)))) - } - token::ByteStr(i) => { - let s = &i.as_str(); - let mut buf = Vec::with_capacity(s.len()); - let mut has_error = false; - unescape_byte_str(s, &mut |_, unescaped_byte| { - match unescaped_byte { - Ok(c) => buf.push(c), - Err(_) => has_error = true, - } - }); - if has_error { - return (true, Some(LitKind::Err(i))); - } - buf.shrink_to_fit(); - (true, Some(LitKind::ByteStr(Lrc::new(buf)))) - } - token::ByteStrRaw(i, _) => { - (true, Some(LitKind::ByteStr(Lrc::new(i.to_string().into_bytes())))) - } + }) } } fn filtered_float_lit(data: Symbol, suffix: Option, diag: Option<(Span, &Handler)>) - -> Option { + -> Option { debug!("filtered_float_lit: {}, {:?}", data, suffix); let suffix = match suffix { Some(suffix) => suffix, - None => return Some(ast::LitKind::FloatUnsuffixed(data)), + None => return Some(LitKind::FloatUnsuffixed(data)), }; Some(match &*suffix.as_str() { - "f32" => ast::LitKind::Float(data, ast::FloatTy::F32), - "f64" => ast::LitKind::Float(data, ast::FloatTy::F64), + "f32" => LitKind::Float(data, ast::FloatTy::F32), + "f64" => LitKind::Float(data, ast::FloatTy::F64), suf => { err!(diag, |span, diag| { if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) { @@ -477,12 +531,12 @@ fn filtered_float_lit(data: Symbol, suffix: Option, diag: Option<(Span, } }); - ast::LitKind::FloatUnsuffixed(data) + LitKind::FloatUnsuffixed(data) } }) } fn float_lit(s: &str, suffix: Option, diag: Option<(Span, &Handler)>) - -> Option { + -> Option { debug!("float_lit: {:?}, {:?}", s, suffix); // FIXME #2252: bounds checking float literals is deferred until trans @@ -499,7 +553,7 @@ fn float_lit(s: &str, suffix: Option, diag: Option<(Span, &Handler)>) } fn integer_lit(s: &str, suffix: Option, diag: Option<(Span, &Handler)>) - -> Option { + -> Option { // s can only be ascii, byte indexing is fine // Strip underscores without allocating a new String unless necessary. @@ -595,7 +649,7 @@ fn integer_lit(s: &str, suffix: Option, diag: Option<(Span, &Handler)>) string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix); Some(match u128::from_str_radix(s, base) { - Ok(r) => ast::LitKind::Int(r, ty), + Ok(r) => LitKind::Int(r, ty), Err(_) => { // small bases are lexed as if they were base 10, e.g, the string // might be `0b10201`. 
This will cause the conversion above to fail, @@ -608,7 +662,7 @@ fn integer_lit(s: &str, suffix: Option, diag: Option<(Span, &Handler)>) if !already_errored { err!(diag, |span, diag| diag.span_err(span, "int literal is too large")); } - ast::LitKind::Int(0, ty) + LitKind::Int(0, ty) } }) } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 8c0c2f4b6e2..b81f7be9c2c 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -15,7 +15,7 @@ use crate::ast::{GenericParam, GenericParamKind}; use crate::ast::GenericArg; use crate::ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind}; -use crate::ast::{Label, Lifetime, Lit, LitKind}; +use crate::ast::{Label, Lifetime, Lit}; use crate::ast::{Local, LocalSource}; use crate::ast::MacStmtStyle; use crate::ast::{Mac, Mac_, MacDelimiter}; @@ -46,7 +46,7 @@ use crate::parse::PResult; use crate::ThinVec; use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint}; -use crate::symbol::{Symbol, keywords}; +use crate::symbol::{keywords, Symbol}; use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError}; use rustc_target::spec::abi::{self, Abi}; @@ -1109,43 +1109,7 @@ fn expect_or(&mut self) -> PResult<'a, ()> { } fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option) { - match suffix { - None => {/* everything ok */} - Some(suf) => { - let text = suf.as_str(); - if text.is_empty() { - self.span_bug(sp, "found empty literal suffix in Some") - } - let mut err = if kind == "a tuple index" && - ["i32", "u32", "isize", "usize"].contains(&text.to_string().as_str()) - { - // #59553: warn instead of reject out of hand to allow the fix to percolate - // through the ecosystem when people fix their macros - let mut err = self.struct_span_warn( - sp, - &format!("suffixes on {} are invalid", kind), - ); - err.note(&format!( - "`{}` is *temporarily* accepted on tuple index fields as it was \ - incorrectly accepted on stable for a few releases", - text, - )); - err.help( - "on proc macros, you'll want to use `syn::Index::from` or \ - `proc_macro::Literal::*_unsuffixed` for code that will desugar \ - to tuple field access", - ); - err.note( - "for more context, see https://github.com/rust-lang/rust/issues/60210", - ); - err - } else { - self.struct_span_err(sp, &format!("suffixes on {} are invalid", kind)) - }; - err.span_label(sp, format!("invalid suffix `{}`", text)); - err.emit(); - } - } + parse::expect_no_suffix(sp, &self.sess.span_diagnostic, kind, suffix) } /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single @@ -1452,9 +1416,6 @@ fn span_err>(&self, sp: S, m: &str) { crate fn struct_span_err>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { self.sess.span_diagnostic.struct_span_err(sp, m) } - fn struct_span_warn>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { - self.sess.span_diagnostic.struct_span_warn(sp, m) - } crate fn span_bug>(&self, sp: S, m: &str) -> ! { self.sess.span_diagnostic.span_bug(sp, m) } @@ -2069,85 +2030,45 @@ fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option } } - /// Matches `token_lit = LIT_INTEGER | ...`. 
- fn parse_lit_token(&mut self) -> PResult<'a, (LitKind, token::Lit, Option)> { - let out = match self.token { - token::Interpolated(ref nt) => match **nt { - token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node { - ExprKind::Lit(ref lit) => { (lit.node.clone(), lit.token, lit.suffix) } - _ => { return self.unexpected_last(&self.token); } - }, - _ => { return self.unexpected_last(&self.token); } - }, - token::Literal(lit, suf) => { - let diag = Some((self.span, &self.sess.span_diagnostic)); - let (suffix_illegal, result) = parse::lit_token(lit, suf, diag); - - if suffix_illegal { - let sp = self.span; - self.expect_no_suffix(sp, &format!("a {}", lit.literal_name()), suf) - } - - (result.unwrap(), lit, suf) - } - token::Dot if self.look_ahead(1, |t| match t { - token::Literal(token::Lit::Integer(_) , _) => true, - _ => false, - }) => { // recover from `let x = .4;` - let lo = self.span; - self.bump(); - if let token::Literal( - token::Lit::Integer(val), - suffix, - ) = self.token { - let float_suffix = suffix.and_then(|s| { - let s = s.as_str(); - if s == "f32" { - Some("f32") - } else if s == "f64" { - Some("f64") - } else { - None - } - }).unwrap_or(""); - self.bump(); - let sp = lo.to(self.prev_span); - let mut err = self.diagnostic() - .struct_span_err(sp, "float literals must have an integer part"); - err.span_suggestion( - sp, - "must have an integer part", - format!("0.{}{}", val, float_suffix), - Applicability::MachineApplicable, - ); - err.emit(); - return Ok((match float_suffix { - "f32" => ast::LitKind::Float(val, ast::FloatTy::F32), - "f64" => ast::LitKind::Float(val, ast::FloatTy::F64), - _ => ast::LitKind::FloatUnsuffixed(val), - }, token::Float(val), suffix)); - } else { - unreachable!(); - }; - } - _ => { return self.unexpected_last(&self.token); } - }; - - self.bump(); - Ok(out) - } - /// Matches `lit = true | false | token_lit`. crate fn parse_lit(&mut self) -> PResult<'a, Lit> { - let lo = self.span; - let (node, token, suffix) = if self.eat_keyword(keywords::True) { - (LitKind::Bool(true), token::Bool(keywords::True.name()), None) - } else if self.eat_keyword(keywords::False) { - (LitKind::Bool(false), token::Bool(keywords::False.name()), None) - } else { - self.parse_lit_token()? - }; - Ok(Lit { node, token, suffix, span: lo.to(self.prev_span) }) + let diag = Some((self.span, &self.sess.span_diagnostic)); + if let Some(lit) = Lit::from_token(&self.token, self.span, diag) { + self.bump(); + return Ok(lit); + } else if self.token == token::Dot { + // Recover `.4` as `0.4`. + let recovered = self.look_ahead(1, |t| { + if let token::Literal(token::Integer(val), suf) = *t { + let next_span = self.look_ahead_span(1); + if self.span.hi() == next_span.lo() { + let sym = String::from("0.") + &val.as_str(); + let token = token::Literal(token::Float(Symbol::intern(&sym)), suf); + return Some((token, self.span.to(next_span))); + } + } + None + }); + if let Some((token, span)) = recovered { + self.diagnostic() + .struct_span_err(span, "float literals must have an integer part") + .span_suggestion( + span, + "must have an integer part", + pprust::token_to_string(&token), + Applicability::MachineApplicable, + ) + .emit(); + let diag = Some((span, &self.sess.span_diagnostic)); + if let Some(lit) = Lit::from_token(&token, span, diag) { + self.bump(); + self.bump(); + return Ok(lit); + } + } + } + + self.unexpected_last(&self.token) } /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`). 
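Note on the recovery path above (not part of the patch itself): when the rewritten `parse_lit` sees `.` followed directly by an integer literal, it synthesizes a single float token with a leading `0` and feeds that back through `Lit::from_token`. The fragment below is a rough standalone sketch of just that token rewrite; the `val`/`suf` bindings, the `use` lines, and the wrapper function are illustrative assumptions, while the expressions inside reuse the in-tree spellings from the hunk above (`token::Literal`, `token::Float`, `Symbol::intern`), which rely on libsyntax's internal re-exports.

    use syntax::parse::token;
    use syntax::symbol::Symbol;

    // Sketch: turn the adjacent tokens `.` and `4` into the single token `0.4`,
    // which is what the "must have an integer part" suggestion prints and what
    // recovery then parses as an ordinary float literal.
    fn recovered_float_token() -> token::Token {
        let val = Symbol::intern("4");      // integer symbol seen right after the `.`
        let suf: Option<Symbol> = None;     // any suffix is carried over unchanged
        let sym = String::from("0.") + &val.as_str();
        token::Literal(token::Float(Symbol::intern(&sym)), suf)
    }

The `self.look_ahead_span(1)` comparison in the real code additionally requires the `.` and the integer to be adjacent, so spaced input such as `x . 4` is not rewritten.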
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 48a949257ff..0c2ea70aa20 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -90,6 +90,13 @@ impl Lit { } } + crate fn may_have_suffix(&self) -> bool { + match *self { + Integer(..) | Float(..) => true, + _ => false, + } + } + // See comments in `Nonterminal::to_tokenstream` for why we care about // *probably* equal here rather than actual equality fn probably_equal_for_proc_macro(&self, other: &Lit) -> bool { diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index fa16a2b200f..0e8ac6c35b9 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -162,7 +162,7 @@ fn binop_to_string(op: BinOpToken) -> &'static str { } } -fn literal_to_string(lit: token::Lit, suffix: Option) -> String { +pub fn literal_to_string(lit: token::Lit, suffix: Option) -> String { let mut out = match lit { token::Byte(b) => format!("b'{}'", b), token::Char(c) => format!("'{}'", c), diff --git a/src/test/ui/malformed/malformed-interpolated.rs b/src/test/ui/malformed/malformed-interpolated.rs index e452435968b..7c4ca3c017e 100644 --- a/src/test/ui/malformed/malformed-interpolated.rs +++ b/src/test/ui/malformed/malformed-interpolated.rs @@ -2,8 +2,7 @@ macro_rules! check { ($expr: expr) => ( - #[my_attr = $expr] //~ ERROR suffixed literals are not allowed in attributes - //~| ERROR unexpected token: `-0` + #[my_attr = $expr] //~ ERROR unexpected token: `-0` //~| ERROR unexpected token: `0 + 0` use main as _; ); @@ -11,7 +10,7 @@ macro_rules! check { check!("0"); // OK check!(0); // OK -check!(0u8); // ERROR, see above +check!(0u8); //~ ERROR suffixed literals are not allowed in attributes check!(-0); // ERROR, see above check!(0 + 0); // ERROR, see above diff --git a/src/test/ui/malformed/malformed-interpolated.stderr b/src/test/ui/malformed/malformed-interpolated.stderr index efeede0148d..bc2146e409d 100644 --- a/src/test/ui/malformed/malformed-interpolated.stderr +++ b/src/test/ui/malformed/malformed-interpolated.stderr @@ -1,11 +1,8 @@ error: suffixed literals are not allowed in attributes - --> $DIR/malformed-interpolated.rs:5:21 + --> $DIR/malformed-interpolated.rs:13:8 | -LL | #[my_attr = $expr] - | ^^^^^ -... -LL | check!(0u8); // ERROR, see above - | ------------ in this macro invocation +LL | check!(0u8); + | ^^^ | = help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
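Usage sketch (not part of the patch): the two public constructors the commit converges on are `Lit::from_lit_kind`, which builds an AST literal from a semantic `LitKind`, and `LitKind::to_lit_token`, which recovers the token-level form when the original token is gone (the HIR pretty-printing path above). Only those two calls come from the diff itself; the `with_globals` wrapper, the interned `"done"` string, and the `DUMMY_SP` span are illustrative assumptions.

    use syntax::ast::{Lit, LitKind, StrStyle};
    use syntax::symbol::Symbol;
    use syntax_pos::DUMMY_SP;

    fn demo() {
        syntax::with_globals(|| {
            // Semantic literal for the string "done".
            let kind = LitKind::Str(Symbol::intern("done"), StrStyle::Cooked);
            // Token-level view, recovered from the semantic literal
            // (this is the conversion hir::print now goes through).
            let (token_lit, suffix) = kind.to_lit_token();
            // Full AST literal carrying both the semantic node and the token.
            let lit: Lit = Lit::from_lit_kind(kind, DUMMY_SP);
            let _ = (token_lit, suffix, lit);
        });
    }

Compared with the old pattern of assembling `Lit { node, token, suffix, span }` by hand at every call site, the constructors keep the token and semantic forms in sync in one place, which is the simplification the commit message refers to.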