Auto merge of #60679 - petrochenkov:lit2, r=matklad
Keep original literal tokens in AST The original literal tokens (`token::Lit`) are kept in AST until lowering to HIR. The tokens are kept together with their lowered "semantic" representation (`ast::LitKind`), so the size of `ast::Lit` is increased (this also increases the size of meta-item structs used for processing built-in attributes). However, the size of `ast::Expr` stays the same. The intent is to remove the "semantic" representation from AST eventually and keep literals as tokens until lowering to HIR (at least), and I'm going to work on that, but it would be good to land this sooner to unblock progress on the [lexer refactoring](https://github.com/rust-lang/rust/pull/59706). Fixes a part of https://github.com/rust-lang/rust/issues/43081 (literal tokens that are passed to proc macros are always precise, including hexadecimal numbers, strings with their original escaping, etc) Fixes a part of https://github.com/rust-lang/rust/issues/60495 (everything except for proc macro API doesn't need escaping anymore) This also allows to eliminate a certain hack from the lexer (https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/pretty-printing.20comments/near/165005357). cc @matklad
This commit is contained in:
commit
1764b29725
@ -4100,7 +4100,7 @@ fn lower_expr(&mut self, e: &Expr) -> hir::Expr {
|
|||||||
let ohs = P(self.lower_expr(ohs));
|
let ohs = P(self.lower_expr(ohs));
|
||||||
hir::ExprKind::Unary(op, ohs)
|
hir::ExprKind::Unary(op, ohs)
|
||||||
}
|
}
|
||||||
ExprKind::Lit(ref l) => hir::ExprKind::Lit((*l).clone()),
|
ExprKind::Lit(ref l) => hir::ExprKind::Lit(respan(l.span, l.node.clone())),
|
||||||
ExprKind::Cast(ref expr, ref ty) => {
|
ExprKind::Cast(ref expr, ref ty) => {
|
||||||
let expr = P(self.lower_expr(expr));
|
let expr = P(self.lower_expr(expr));
|
||||||
hir::ExprKind::Cast(expr, self.lower_ty(ty, ImplTraitContext::disallowed()))
|
hir::ExprKind::Cast(expr, self.lower_ty(ty, ImplTraitContext::disallowed()))
|
||||||
|
@ -20,7 +20,7 @@
|
|||||||
use syntax::source_map::Spanned;
|
use syntax::source_map::Spanned;
|
||||||
use rustc_target::spec::abi::Abi;
|
use rustc_target::spec::abi::Abi;
|
||||||
use syntax::ast::{self, CrateSugar, Ident, Name, NodeId, AsmDialect};
|
use syntax::ast::{self, CrateSugar, Ident, Name, NodeId, AsmDialect};
|
||||||
use syntax::ast::{Attribute, Label, Lit, StrStyle, FloatTy, IntTy, UintTy};
|
use syntax::ast::{Attribute, Label, LitKind, StrStyle, FloatTy, IntTy, UintTy};
|
||||||
use syntax::attr::{InlineAttr, OptimizeAttr};
|
use syntax::attr::{InlineAttr, OptimizeAttr};
|
||||||
use syntax::ext::hygiene::SyntaxContext;
|
use syntax::ext::hygiene::SyntaxContext;
|
||||||
use syntax::ptr::P;
|
use syntax::ptr::P;
|
||||||
@ -1331,6 +1331,9 @@ pub fn is_fn_or_closure(self) -> bool {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// A literal.
|
||||||
|
pub type Lit = Spanned<LitKind>;
|
||||||
|
|
||||||
/// A constant (expression) that's not an item or associated item,
|
/// A constant (expression) that's not an item or associated item,
|
||||||
/// but needs its own `DefId` for type-checking, const-eval, etc.
|
/// but needs its own `DefId` for type-checking, const-eval, etc.
|
||||||
/// These are usually found nested inside types (e.g., array lengths)
|
/// These are usually found nested inside types (e.g., array lengths)
|
||||||
|
@ -5,7 +5,7 @@
|
|||||||
use syntax::parse::lexer::comments;
|
use syntax::parse::lexer::comments;
|
||||||
use syntax::print::pp::{self, Breaks};
|
use syntax::print::pp::{self, Breaks};
|
||||||
use syntax::print::pp::Breaks::{Consistent, Inconsistent};
|
use syntax::print::pp::Breaks::{Consistent, Inconsistent};
|
||||||
use syntax::print::pprust::PrintState;
|
use syntax::print::pprust::{self, PrintState};
|
||||||
use syntax::ptr::P;
|
use syntax::ptr::P;
|
||||||
use syntax::symbol::keywords;
|
use syntax::symbol::keywords;
|
||||||
use syntax::util::parser::{self, AssocOp, Fixity};
|
use syntax::util::parser::{self, AssocOp, Fixity};
|
||||||
@ -18,7 +18,6 @@
|
|||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
use std::cell::Cell;
|
use std::cell::Cell;
|
||||||
use std::io::{self, Write, Read};
|
use std::io::{self, Write, Read};
|
||||||
use std::iter::Peekable;
|
|
||||||
use std::vec;
|
use std::vec;
|
||||||
|
|
||||||
pub enum AnnNode<'a> {
|
pub enum AnnNode<'a> {
|
||||||
@ -76,7 +75,6 @@ pub struct State<'a> {
|
|||||||
pub s: pp::Printer<'a>,
|
pub s: pp::Printer<'a>,
|
||||||
cm: Option<&'a SourceMap>,
|
cm: Option<&'a SourceMap>,
|
||||||
comments: Option<Vec<comments::Comment>>,
|
comments: Option<Vec<comments::Comment>>,
|
||||||
literals: Peekable<vec::IntoIter<comments::Literal>>,
|
|
||||||
cur_cmnt: usize,
|
cur_cmnt: usize,
|
||||||
boxes: Vec<pp::Breaks>,
|
boxes: Vec<pp::Breaks>,
|
||||||
ann: &'a (dyn PpAnn + 'a),
|
ann: &'a (dyn PpAnn + 'a),
|
||||||
@ -98,14 +96,6 @@ fn comments(&mut self) -> &mut Option<Vec<comments::Comment>> {
|
|||||||
fn cur_cmnt(&mut self) -> &mut usize {
|
fn cur_cmnt(&mut self) -> &mut usize {
|
||||||
&mut self.cur_cmnt
|
&mut self.cur_cmnt
|
||||||
}
|
}
|
||||||
|
|
||||||
fn cur_lit(&mut self) -> Option<&comments::Literal> {
|
|
||||||
self.literals.peek()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bump_lit(&mut self) -> Option<comments::Literal> {
|
|
||||||
self.literals.next()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(non_upper_case_globals)]
|
#[allow(non_upper_case_globals)]
|
||||||
@ -116,18 +106,16 @@ fn bump_lit(&mut self) -> Option<comments::Literal> {
|
|||||||
|
|
||||||
|
|
||||||
/// Requires you to pass an input filename and reader so that
|
/// Requires you to pass an input filename and reader so that
|
||||||
/// it can scan the input text for comments and literals to
|
/// it can scan the input text for comments to copy forward.
|
||||||
/// copy forward.
|
|
||||||
pub fn print_crate<'a>(cm: &'a SourceMap,
|
pub fn print_crate<'a>(cm: &'a SourceMap,
|
||||||
sess: &ParseSess,
|
sess: &ParseSess,
|
||||||
krate: &hir::Crate,
|
krate: &hir::Crate,
|
||||||
filename: FileName,
|
filename: FileName,
|
||||||
input: &mut dyn Read,
|
input: &mut dyn Read,
|
||||||
out: Box<dyn Write + 'a>,
|
out: Box<dyn Write + 'a>,
|
||||||
ann: &'a dyn PpAnn,
|
ann: &'a dyn PpAnn)
|
||||||
is_expanded: bool)
|
|
||||||
-> io::Result<()> {
|
-> io::Result<()> {
|
||||||
let mut s = State::new_from_input(cm, sess, filename, input, out, ann, is_expanded);
|
let mut s = State::new_from_input(cm, sess, filename, input, out, ann);
|
||||||
|
|
||||||
// When printing the AST, we sometimes need to inject `#[no_std]` here.
|
// When printing the AST, we sometimes need to inject `#[no_std]` here.
|
||||||
// Since you can't compile the HIR, it's not necessary.
|
// Since you can't compile the HIR, it's not necessary.
|
||||||
@ -143,36 +131,21 @@ pub fn new_from_input(cm: &'a SourceMap,
|
|||||||
filename: FileName,
|
filename: FileName,
|
||||||
input: &mut dyn Read,
|
input: &mut dyn Read,
|
||||||
out: Box<dyn Write + 'a>,
|
out: Box<dyn Write + 'a>,
|
||||||
ann: &'a dyn PpAnn,
|
ann: &'a dyn PpAnn)
|
||||||
is_expanded: bool)
|
|
||||||
-> State<'a> {
|
-> State<'a> {
|
||||||
let (cmnts, lits) = comments::gather_comments_and_literals(sess, filename, input);
|
let comments = comments::gather_comments(sess, filename, input);
|
||||||
|
State::new(cm, out, ann, Some(comments))
|
||||||
State::new(cm,
|
|
||||||
out,
|
|
||||||
ann,
|
|
||||||
Some(cmnts),
|
|
||||||
// If the code is post expansion, don't use the table of
|
|
||||||
// literals, since it doesn't correspond with the literals
|
|
||||||
// in the AST anymore.
|
|
||||||
if is_expanded {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(lits)
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new(cm: &'a SourceMap,
|
pub fn new(cm: &'a SourceMap,
|
||||||
out: Box<dyn Write + 'a>,
|
out: Box<dyn Write + 'a>,
|
||||||
ann: &'a dyn PpAnn,
|
ann: &'a dyn PpAnn,
|
||||||
comments: Option<Vec<comments::Comment>>,
|
comments: Option<Vec<comments::Comment>>)
|
||||||
literals: Option<Vec<comments::Literal>>)
|
|
||||||
-> State<'a> {
|
-> State<'a> {
|
||||||
State {
|
State {
|
||||||
s: pp::mk_printer(out, default_columns),
|
s: pp::mk_printer(out, default_columns),
|
||||||
cm: Some(cm),
|
cm: Some(cm),
|
||||||
comments,
|
comments,
|
||||||
literals: literals.unwrap_or_default().into_iter().peekable(),
|
|
||||||
cur_cmnt: 0,
|
cur_cmnt: 0,
|
||||||
boxes: Vec::new(),
|
boxes: Vec::new(),
|
||||||
ann,
|
ann,
|
||||||
@ -189,7 +162,6 @@ pub fn to_string<F>(ann: &dyn PpAnn, f: F) -> String
|
|||||||
s: pp::mk_printer(Box::new(&mut wr), default_columns),
|
s: pp::mk_printer(Box::new(&mut wr), default_columns),
|
||||||
cm: None,
|
cm: None,
|
||||||
comments: None,
|
comments: None,
|
||||||
literals: vec![].into_iter().peekable(),
|
|
||||||
cur_cmnt: 0,
|
cur_cmnt: 0,
|
||||||
boxes: Vec::new(),
|
boxes: Vec::new(),
|
||||||
ann,
|
ann,
|
||||||
@ -1276,6 +1248,12 @@ fn print_expr_addr_of(&mut self,
|
|||||||
self.print_expr_maybe_paren(expr, parser::PREC_PREFIX)
|
self.print_expr_maybe_paren(expr, parser::PREC_PREFIX)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn print_literal(&mut self, lit: &hir::Lit) -> io::Result<()> {
|
||||||
|
self.maybe_print_comment(lit.span.lo())?;
|
||||||
|
let (token, suffix) = lit.node.to_lit_token();
|
||||||
|
self.writer().word(pprust::literal_to_string(token, suffix))
|
||||||
|
}
|
||||||
|
|
||||||
pub fn print_expr(&mut self, expr: &hir::Expr) -> io::Result<()> {
|
pub fn print_expr(&mut self, expr: &hir::Expr) -> io::Result<()> {
|
||||||
self.maybe_print_comment(expr.span.lo())?;
|
self.maybe_print_comment(expr.span.lo())?;
|
||||||
self.print_outer_attributes(&expr.attrs)?;
|
self.print_outer_attributes(&expr.attrs)?;
|
||||||
|
@ -162,7 +162,13 @@ fn hash_stable<W: StableHasherResult>(&self,
|
|||||||
Unsuffixed
|
Unsuffixed
|
||||||
});
|
});
|
||||||
|
|
||||||
impl_stable_hash_for_spanned!(::syntax::ast::LitKind);
|
impl_stable_hash_for!(struct ::syntax::ast::Lit {
|
||||||
|
node,
|
||||||
|
token,
|
||||||
|
suffix,
|
||||||
|
span
|
||||||
|
});
|
||||||
|
|
||||||
impl_stable_hash_for!(enum ::syntax::ast::LitKind {
|
impl_stable_hash_for!(enum ::syntax::ast::LitKind {
|
||||||
Str(value, style),
|
Str(value, style),
|
||||||
Err(value),
|
Err(value),
|
||||||
@ -175,6 +181,8 @@ fn hash_stable<W: StableHasherResult>(&self,
|
|||||||
Bool(value)
|
Bool(value)
|
||||||
});
|
});
|
||||||
|
|
||||||
|
impl_stable_hash_for_spanned!(::syntax::ast::LitKind);
|
||||||
|
|
||||||
impl_stable_hash_for!(enum ::syntax::ast::IntTy { Isize, I8, I16, I32, I64, I128 });
|
impl_stable_hash_for!(enum ::syntax::ast::IntTy { Isize, I8, I16, I32, I64, I128 });
|
||||||
impl_stable_hash_for!(enum ::syntax::ast::UintTy { Usize, U8, U16, U32, U64, U128 });
|
impl_stable_hash_for!(enum ::syntax::ast::UintTy { Usize, U8, U16, U32, U64, U128 });
|
||||||
impl_stable_hash_for!(enum ::syntax::ast::FloatTy { F32, F64 });
|
impl_stable_hash_for!(enum ::syntax::ast::FloatTy { F32, F64 });
|
||||||
@ -280,6 +288,19 @@ fn hash_stable<W: StableHasherResult>(&self,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl_stable_hash_for!(enum token::Lit {
|
||||||
|
Bool(val),
|
||||||
|
Byte(val),
|
||||||
|
Char(val),
|
||||||
|
Err(val),
|
||||||
|
Integer(val),
|
||||||
|
Float(val),
|
||||||
|
Str_(val),
|
||||||
|
ByteStr(val),
|
||||||
|
StrRaw(val, n),
|
||||||
|
ByteStrRaw(val, n)
|
||||||
|
});
|
||||||
|
|
||||||
fn hash_token<'a, 'gcx, W: StableHasherResult>(
|
fn hash_token<'a, 'gcx, W: StableHasherResult>(
|
||||||
token: &token::Token,
|
token: &token::Token,
|
||||||
hcx: &mut StableHashingContext<'a>,
|
hcx: &mut StableHashingContext<'a>,
|
||||||
@ -327,22 +348,8 @@ fn hash_token<'a, 'gcx, W: StableHasherResult>(
|
|||||||
token::Token::CloseDelim(delim_token) => {
|
token::Token::CloseDelim(delim_token) => {
|
||||||
std_hash::Hash::hash(&delim_token, hasher);
|
std_hash::Hash::hash(&delim_token, hasher);
|
||||||
}
|
}
|
||||||
token::Token::Literal(ref lit, ref opt_name) => {
|
token::Token::Literal(lit, opt_name) => {
|
||||||
mem::discriminant(lit).hash_stable(hcx, hasher);
|
lit.hash_stable(hcx, hasher);
|
||||||
match *lit {
|
|
||||||
token::Lit::Byte(val) |
|
|
||||||
token::Lit::Char(val) |
|
|
||||||
token::Lit::Err(val) |
|
|
||||||
token::Lit::Integer(val) |
|
|
||||||
token::Lit::Float(val) |
|
|
||||||
token::Lit::Str_(val) |
|
|
||||||
token::Lit::ByteStr(val) => val.hash_stable(hcx, hasher),
|
|
||||||
token::Lit::StrRaw(val, n) |
|
|
||||||
token::Lit::ByteStrRaw(val, n) => {
|
|
||||||
val.hash_stable(hcx, hasher);
|
|
||||||
n.hash_stable(hcx, hasher);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
opt_name.hash_stable(hcx, hasher);
|
opt_name.hash_stable(hcx, hasher);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -805,8 +805,7 @@ pub fn print_after_hir_lowering<'tcx>(
|
|||||||
src_name,
|
src_name,
|
||||||
&mut rdr,
|
&mut rdr,
|
||||||
box out,
|
box out,
|
||||||
annotation.pp_ann(),
|
annotation.pp_ann())
|
||||||
true)
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -829,8 +828,7 @@ pub fn print_after_hir_lowering<'tcx>(
|
|||||||
src_name,
|
src_name,
|
||||||
&mut rdr,
|
&mut rdr,
|
||||||
box out,
|
box out,
|
||||||
annotation.pp_ann(),
|
annotation.pp_ann());
|
||||||
true);
|
|
||||||
for node_id in uii.all_matching_node_ids(hir_map) {
|
for node_id in uii.all_matching_node_ids(hir_map) {
|
||||||
let node = hir_map.get(node_id);
|
let node = hir_map.get(node_id);
|
||||||
pp_state.print_node(node)?;
|
pp_state.print_node(node)?;
|
||||||
|
@ -62,7 +62,7 @@ pub fn new() -> TypeLimits {
|
|||||||
/// Returns `true` iff the lint was overridden.
|
/// Returns `true` iff the lint was overridden.
|
||||||
fn lint_overflowing_range_endpoint<'a, 'tcx>(
|
fn lint_overflowing_range_endpoint<'a, 'tcx>(
|
||||||
cx: &LateContext<'a, 'tcx>,
|
cx: &LateContext<'a, 'tcx>,
|
||||||
lit: &ast::Lit,
|
lit: &hir::Lit,
|
||||||
lit_val: u128,
|
lit_val: u128,
|
||||||
max: u128,
|
max: u128,
|
||||||
expr: &'tcx hir::Expr,
|
expr: &'tcx hir::Expr,
|
||||||
@ -132,7 +132,7 @@ fn uint_ty_range(uint_ty: ast::UintTy) -> (u128, u128) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_bin_hex_repr(cx: &LateContext<'_, '_>, lit: &ast::Lit) -> Option<String> {
|
fn get_bin_hex_repr(cx: &LateContext<'_, '_>, lit: &hir::Lit) -> Option<String> {
|
||||||
let src = cx.sess().source_map().span_to_snippet(lit.span).ok()?;
|
let src = cx.sess().source_map().span_to_snippet(lit.span).ok()?;
|
||||||
let firstch = src.chars().next()?;
|
let firstch = src.chars().next()?;
|
||||||
|
|
||||||
@ -249,7 +249,7 @@ fn lint_int_literal<'a, 'tcx>(
|
|||||||
cx: &LateContext<'a, 'tcx>,
|
cx: &LateContext<'a, 'tcx>,
|
||||||
type_limits: &TypeLimits,
|
type_limits: &TypeLimits,
|
||||||
e: &'tcx hir::Expr,
|
e: &'tcx hir::Expr,
|
||||||
lit: &ast::Lit,
|
lit: &hir::Lit,
|
||||||
t: ast::IntTy,
|
t: ast::IntTy,
|
||||||
v: u128,
|
v: u128,
|
||||||
) {
|
) {
|
||||||
@ -301,7 +301,7 @@ fn lint_int_literal<'a, 'tcx>(
|
|||||||
fn lint_uint_literal<'a, 'tcx>(
|
fn lint_uint_literal<'a, 'tcx>(
|
||||||
cx: &LateContext<'a, 'tcx>,
|
cx: &LateContext<'a, 'tcx>,
|
||||||
e: &'tcx hir::Expr,
|
e: &'tcx hir::Expr,
|
||||||
lit: &ast::Lit,
|
lit: &hir::Lit,
|
||||||
t: ast::UintTy,
|
t: ast::UintTy,
|
||||||
) {
|
) {
|
||||||
let uint_type = if let ast::UintTy::Usize = t {
|
let uint_type = if let ast::UintTy::Usize = t {
|
||||||
@ -363,7 +363,7 @@ fn lint_literal<'a, 'tcx>(
|
|||||||
cx: &LateContext<'a, 'tcx>,
|
cx: &LateContext<'a, 'tcx>,
|
||||||
type_limits: &TypeLimits,
|
type_limits: &TypeLimits,
|
||||||
e: &'tcx hir::Expr,
|
e: &'tcx hir::Expr,
|
||||||
lit: &ast::Lit,
|
lit: &hir::Lit,
|
||||||
) {
|
) {
|
||||||
match cx.tables.node_type(e.hir_id).sty {
|
match cx.tables.node_type(e.hir_id).sty {
|
||||||
ty::Int(t) => {
|
ty::Int(t) => {
|
||||||
|
@ -3083,7 +3083,7 @@ fn err_args(&self, len: usize) -> Vec<Ty<'tcx>> {
|
|||||||
|
|
||||||
// AST fragment checking
|
// AST fragment checking
|
||||||
fn check_lit(&self,
|
fn check_lit(&self,
|
||||||
lit: &ast::Lit,
|
lit: &hir::Lit,
|
||||||
expected: Expectation<'tcx>)
|
expected: Expectation<'tcx>)
|
||||||
-> Ty<'tcx>
|
-> Ty<'tcx>
|
||||||
{
|
{
|
||||||
|
@ -414,10 +414,11 @@ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|||||||
mod test {
|
mod test {
|
||||||
use super::Cfg;
|
use super::Cfg;
|
||||||
|
|
||||||
use syntax::symbol::Symbol;
|
|
||||||
use syntax::ast::*;
|
|
||||||
use syntax::source_map::dummy_spanned;
|
|
||||||
use syntax_pos::DUMMY_SP;
|
use syntax_pos::DUMMY_SP;
|
||||||
|
use syntax::ast::*;
|
||||||
|
use syntax::attr;
|
||||||
|
use syntax::source_map::dummy_spanned;
|
||||||
|
use syntax::symbol::Symbol;
|
||||||
use syntax::with_globals;
|
use syntax::with_globals;
|
||||||
|
|
||||||
fn word_cfg(s: &str) -> Cfg {
|
fn word_cfg(s: &str) -> Cfg {
|
||||||
@ -592,14 +593,10 @@ fn test_parse_ok() {
|
|||||||
let mi = dummy_meta_item_word("all");
|
let mi = dummy_meta_item_word("all");
|
||||||
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("all")));
|
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("all")));
|
||||||
|
|
||||||
let mi = MetaItem {
|
let mi = attr::mk_name_value_item_str(
|
||||||
path: Path::from_ident(Ident::from_str("all")),
|
Ident::from_str("all"),
|
||||||
node: MetaItemKind::NameValue(dummy_spanned(LitKind::Str(
|
dummy_spanned(Symbol::intern("done"))
|
||||||
Symbol::intern("done"),
|
);
|
||||||
StrStyle::Cooked,
|
|
||||||
))),
|
|
||||||
span: DUMMY_SP,
|
|
||||||
};
|
|
||||||
assert_eq!(Cfg::parse(&mi), Ok(name_value_cfg("all", "done")));
|
assert_eq!(Cfg::parse(&mi), Ok(name_value_cfg("all", "done")));
|
||||||
|
|
||||||
let mi = dummy_meta_item_list!(all, [a, b]);
|
let mi = dummy_meta_item_list!(all, [a, b]);
|
||||||
@ -627,11 +624,12 @@ fn test_parse_ok() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_parse_err() {
|
fn test_parse_err() {
|
||||||
with_globals(|| {
|
with_globals(|| {
|
||||||
let mi = MetaItem {
|
let mi = attr::mk_name_value_item(
|
||||||
path: Path::from_ident(Ident::from_str("foo")),
|
DUMMY_SP,
|
||||||
node: MetaItemKind::NameValue(dummy_spanned(LitKind::Bool(false))),
|
Ident::from_str("foo"),
|
||||||
span: DUMMY_SP,
|
LitKind::Bool(false),
|
||||||
};
|
DUMMY_SP,
|
||||||
|
);
|
||||||
assert!(Cfg::parse(&mi).is_err());
|
assert!(Cfg::parse(&mi).is_err());
|
||||||
|
|
||||||
let mi = dummy_meta_item_list!(not, [a, b]);
|
let mi = dummy_meta_item_list!(not, [a, b]);
|
||||||
|
@ -318,6 +318,8 @@ fn write_token<W: Writer>(&mut self,
|
|||||||
|
|
||||||
// Number literals.
|
// Number literals.
|
||||||
token::Integer(..) | token::Float(..) => Class::Number,
|
token::Integer(..) | token::Float(..) => Class::Number,
|
||||||
|
|
||||||
|
token::Bool(..) => panic!("literal token contains `Lit::Bool`"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -6,6 +6,7 @@
|
|||||||
pub use crate::util::parser::ExprPrecedence;
|
pub use crate::util::parser::ExprPrecedence;
|
||||||
|
|
||||||
use crate::ext::hygiene::{Mark, SyntaxContext};
|
use crate::ext::hygiene::{Mark, SyntaxContext};
|
||||||
|
use crate::parse::token;
|
||||||
use crate::print::pprust;
|
use crate::print::pprust;
|
||||||
use crate::ptr::P;
|
use crate::ptr::P;
|
||||||
use crate::source_map::{dummy_spanned, respan, Spanned};
|
use crate::source_map::{dummy_spanned, respan, Spanned};
|
||||||
@ -1350,8 +1351,19 @@ pub enum StrStyle {
|
|||||||
Raw(u16),
|
Raw(u16),
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A literal.
|
/// An AST literal.
|
||||||
pub type Lit = Spanned<LitKind>;
|
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||||
|
pub struct Lit {
|
||||||
|
/// The original literal token as written in source code.
|
||||||
|
pub token: token::Lit,
|
||||||
|
/// The original literal suffix as written in source code.
|
||||||
|
pub suffix: Option<Symbol>,
|
||||||
|
/// The "semantic" representation of the literal lowered from the original tokens.
|
||||||
|
/// Strings are unescaped, hexadecimal forms are eliminated, etc.
|
||||||
|
/// FIXME: Remove this and only create the semantic representation during lowering to HIR.
|
||||||
|
pub node: LitKind,
|
||||||
|
pub span: Span,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy, Hash, PartialEq)]
|
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy, Hash, PartialEq)]
|
||||||
pub enum LitIntType {
|
pub enum LitIntType {
|
||||||
|
@ -14,9 +14,9 @@
|
|||||||
use crate::ast;
|
use crate::ast;
|
||||||
use crate::ast::{AttrId, Attribute, AttrStyle, Name, Ident, Path, PathSegment};
|
use crate::ast::{AttrId, Attribute, AttrStyle, Name, Ident, Path, PathSegment};
|
||||||
use crate::ast::{MetaItem, MetaItemKind, NestedMetaItem};
|
use crate::ast::{MetaItem, MetaItemKind, NestedMetaItem};
|
||||||
use crate::ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind, GenericParam};
|
use crate::ast::{Lit, LitKind, Expr, Item, Local, Stmt, StmtKind, GenericParam};
|
||||||
use crate::mut_visit::visit_clobber;
|
use crate::mut_visit::visit_clobber;
|
||||||
use crate::source_map::{BytePos, Spanned, respan, dummy_spanned};
|
use crate::source_map::{BytePos, Spanned, dummy_spanned};
|
||||||
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
|
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
|
||||||
use crate::parse::parser::Parser;
|
use crate::parse::parser::Parser;
|
||||||
use crate::parse::{self, ParseSess, PResult};
|
use crate::parse::{self, ParseSess, PResult};
|
||||||
@ -350,12 +350,13 @@ pub fn with_desugared_doc<T, F>(&self, f: F) -> T where
|
|||||||
/* Constructors */
|
/* Constructors */
|
||||||
|
|
||||||
pub fn mk_name_value_item_str(ident: Ident, value: Spanned<Symbol>) -> MetaItem {
|
pub fn mk_name_value_item_str(ident: Ident, value: Spanned<Symbol>) -> MetaItem {
|
||||||
let value = respan(value.span, LitKind::Str(value.node, ast::StrStyle::Cooked));
|
let lit_kind = LitKind::Str(value.node, ast::StrStyle::Cooked);
|
||||||
mk_name_value_item(ident.span.to(value.span), ident, value)
|
mk_name_value_item(ident.span.to(value.span), ident, lit_kind, value.span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn mk_name_value_item(span: Span, ident: Ident, value: ast::Lit) -> MetaItem {
|
pub fn mk_name_value_item(span: Span, ident: Ident, lit_kind: LitKind, lit_span: Span) -> MetaItem {
|
||||||
MetaItem { path: Path::from_ident(ident), span, node: MetaItemKind::NameValue(value) }
|
let lit = Lit::from_lit_kind(lit_kind, lit_span);
|
||||||
|
MetaItem { path: Path::from_ident(ident), span, node: MetaItemKind::NameValue(lit) }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn mk_list_item(span: Span, ident: Ident, items: Vec<NestedMetaItem>) -> MetaItem {
|
pub fn mk_list_item(span: Span, ident: Ident, items: Vec<NestedMetaItem>) -> MetaItem {
|
||||||
@ -417,7 +418,8 @@ pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute
|
|||||||
|
|
||||||
pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, span: Span) -> Attribute {
|
pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, span: Span) -> Attribute {
|
||||||
let style = doc_comment_style(&text.as_str());
|
let style = doc_comment_style(&text.as_str());
|
||||||
let lit = respan(span, LitKind::Str(text, ast::StrStyle::Cooked));
|
let lit_kind = LitKind::Str(text, ast::StrStyle::Cooked);
|
||||||
|
let lit = Lit::from_lit_kind(lit_kind, span);
|
||||||
Attribute {
|
Attribute {
|
||||||
id,
|
id,
|
||||||
style,
|
style,
|
||||||
@ -561,8 +563,7 @@ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemKind>
|
|||||||
Some(TokenTree::Token(_, token::Eq)) => {
|
Some(TokenTree::Token(_, token::Eq)) => {
|
||||||
tokens.next();
|
tokens.next();
|
||||||
return if let Some(TokenTree::Token(span, token)) = tokens.next() {
|
return if let Some(TokenTree::Token(span, token)) = tokens.next() {
|
||||||
LitKind::from_token(token)
|
Lit::from_token(&token, span, None).map(MetaItemKind::NameValue)
|
||||||
.map(|lit| MetaItemKind::NameValue(Spanned { node: lit, span: span }))
|
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
};
|
};
|
||||||
@ -607,9 +608,9 @@ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<NestedMetaItem>
|
|||||||
where I: Iterator<Item = TokenTree>,
|
where I: Iterator<Item = TokenTree>,
|
||||||
{
|
{
|
||||||
if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
|
if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
|
||||||
if let Some(node) = LitKind::from_token(token) {
|
if let Some(lit) = Lit::from_token(&token, span, None) {
|
||||||
tokens.next();
|
tokens.next();
|
||||||
return Some(NestedMetaItem::Literal(respan(span, node)));
|
return Some(NestedMetaItem::Literal(lit));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -617,81 +618,6 @@ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<NestedMetaItem>
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Lit {
|
|
||||||
crate fn tokens(&self) -> TokenStream {
|
|
||||||
TokenTree::Token(self.span, self.node.token()).into()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl LitKind {
|
|
||||||
fn token(&self) -> Token {
|
|
||||||
use std::ascii;
|
|
||||||
|
|
||||||
match *self {
|
|
||||||
LitKind::Str(string, ast::StrStyle::Cooked) => {
|
|
||||||
let escaped = string.as_str().escape_default().to_string();
|
|
||||||
Token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None)
|
|
||||||
}
|
|
||||||
LitKind::Str(string, ast::StrStyle::Raw(n)) => {
|
|
||||||
Token::Literal(token::Lit::StrRaw(string, n), None)
|
|
||||||
}
|
|
||||||
LitKind::ByteStr(ref bytes) => {
|
|
||||||
let string = bytes.iter().cloned().flat_map(ascii::escape_default)
|
|
||||||
.map(Into::<char>::into).collect::<String>();
|
|
||||||
Token::Literal(token::Lit::ByteStr(Symbol::intern(&string)), None)
|
|
||||||
}
|
|
||||||
LitKind::Byte(byte) => {
|
|
||||||
let string: String = ascii::escape_default(byte).map(Into::<char>::into).collect();
|
|
||||||
Token::Literal(token::Lit::Byte(Symbol::intern(&string)), None)
|
|
||||||
}
|
|
||||||
LitKind::Char(ch) => {
|
|
||||||
let string: String = ch.escape_default().map(Into::<char>::into).collect();
|
|
||||||
Token::Literal(token::Lit::Char(Symbol::intern(&string)), None)
|
|
||||||
}
|
|
||||||
LitKind::Int(n, ty) => {
|
|
||||||
let suffix = match ty {
|
|
||||||
ast::LitIntType::Unsigned(ty) => Some(Symbol::intern(ty.ty_to_string())),
|
|
||||||
ast::LitIntType::Signed(ty) => Some(Symbol::intern(ty.ty_to_string())),
|
|
||||||
ast::LitIntType::Unsuffixed => None,
|
|
||||||
};
|
|
||||||
Token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), suffix)
|
|
||||||
}
|
|
||||||
LitKind::Float(symbol, ty) => {
|
|
||||||
Token::Literal(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
|
|
||||||
}
|
|
||||||
LitKind::FloatUnsuffixed(symbol) => Token::Literal(token::Lit::Float(symbol), None),
|
|
||||||
LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(if value {
|
|
||||||
"true"
|
|
||||||
} else {
|
|
||||||
"false"
|
|
||||||
})), false),
|
|
||||||
LitKind::Err(val) => Token::Literal(token::Lit::Err(val), None),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn from_token(token: Token) -> Option<LitKind> {
|
|
||||||
match token {
|
|
||||||
Token::Ident(ident, false) if ident.name == "true" => Some(LitKind::Bool(true)),
|
|
||||||
Token::Ident(ident, false) if ident.name == "false" => Some(LitKind::Bool(false)),
|
|
||||||
Token::Interpolated(nt) => match *nt {
|
|
||||||
token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
|
|
||||||
ExprKind::Lit(ref lit) => Some(lit.node.clone()),
|
|
||||||
_ => None,
|
|
||||||
},
|
|
||||||
_ => None,
|
|
||||||
},
|
|
||||||
Token::Literal(lit, suf) => {
|
|
||||||
let (suffix_illegal, result) = parse::lit_token(lit, suf, None);
|
|
||||||
if suffix_illegal && suf.is_some() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
result
|
|
||||||
}
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait HasAttrs: Sized {
|
pub trait HasAttrs: Sized {
|
||||||
fn attrs(&self) -> &[ast::Attribute];
|
fn attrs(&self) -> &[ast::Attribute];
|
||||||
fn visit_attrs<F: FnOnce(&mut Vec<ast::Attribute>)>(&mut self, f: F);
|
fn visit_attrs<F: FnOnce(&mut Vec<ast::Attribute>)>(&mut self, f: F);
|
||||||
|
@ -697,8 +697,9 @@ fn expr_struct_ident(&self, span: Span,
|
|||||||
self.expr_struct(span, self.path_ident(span, id), fields)
|
self.expr_struct(span, self.path_ident(span, id), fields)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expr_lit(&self, sp: Span, lit: ast::LitKind) -> P<ast::Expr> {
|
fn expr_lit(&self, span: Span, lit_kind: ast::LitKind) -> P<ast::Expr> {
|
||||||
self.expr(sp, ast::ExprKind::Lit(respan(sp, lit)))
|
let lit = ast::Lit::from_lit_kind(lit_kind, span);
|
||||||
|
self.expr(span, ast::ExprKind::Lit(lit))
|
||||||
}
|
}
|
||||||
fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
|
fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
|
||||||
self.expr_lit(span, ast::LitKind::Int(i as u128,
|
self.expr_lit(span, ast::LitKind::Int(i as u128,
|
||||||
@ -1164,10 +1165,10 @@ fn meta_list(&self, sp: Span, name: ast::Name, mis: Vec<ast::NestedMetaItem>)
|
|||||||
attr::mk_list_item(sp, Ident::with_empty_ctxt(name).with_span_pos(sp), mis)
|
attr::mk_list_item(sp, Ident::with_empty_ctxt(name).with_span_pos(sp), mis)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn meta_name_value(&self, sp: Span, name: ast::Name, value: ast::LitKind)
|
fn meta_name_value(&self, span: Span, name: ast::Name, lit_kind: ast::LitKind)
|
||||||
-> ast::MetaItem {
|
-> ast::MetaItem {
|
||||||
attr::mk_name_value_item(sp, Ident::with_empty_ctxt(name).with_span_pos(sp),
|
attr::mk_name_value_item(span, Ident::with_empty_ctxt(name).with_span_pos(span),
|
||||||
respan(sp, value))
|
lit_kind, span)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn item_use(&self, sp: Span,
|
fn item_use(&self, sp: Span,
|
||||||
|
@ -25,16 +25,3 @@ pub fn expr_requires_semi_to_be_stmt(e: &ast::Expr) -> bool {
|
|||||||
_ => true,
|
_ => true,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// this statement requires a semicolon after it.
|
|
||||||
/// note that in one case (`stmt_semi`), we've already
|
|
||||||
/// seen the semicolon, and thus don't need another.
|
|
||||||
pub fn stmt_ends_with_semi(stmt: &ast::StmtKind) -> bool {
|
|
||||||
match *stmt {
|
|
||||||
ast::StmtKind::Local(_) => true,
|
|
||||||
ast::StmtKind::Expr(ref e) => expr_requires_semi_to_be_stmt(e),
|
|
||||||
ast::StmtKind::Item(_) |
|
|
||||||
ast::StmtKind::Semi(..) |
|
|
||||||
ast::StmtKind::Mac(..) => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
@ -3,8 +3,7 @@
|
|||||||
use crate::ast;
|
use crate::ast;
|
||||||
use crate::source_map::SourceMap;
|
use crate::source_map::SourceMap;
|
||||||
use crate::parse::lexer::{is_block_doc_comment, is_pattern_whitespace};
|
use crate::parse::lexer::{is_block_doc_comment, is_pattern_whitespace};
|
||||||
use crate::parse::lexer::{self, ParseSess, StringReader, TokenAndSpan};
|
use crate::parse::lexer::{self, ParseSess, StringReader};
|
||||||
use crate::print::pprust;
|
|
||||||
|
|
||||||
use syntax_pos::{BytePos, CharPos, Pos, FileName};
|
use syntax_pos::{BytePos, CharPos, Pos, FileName};
|
||||||
use log::debug;
|
use log::debug;
|
||||||
@ -339,16 +338,9 @@ fn consume_comment(rdr: &mut StringReader<'_>,
|
|||||||
debug!("<<< consume comment");
|
debug!("<<< consume comment");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct Literal {
|
|
||||||
pub lit: String,
|
|
||||||
pub pos: BytePos,
|
|
||||||
}
|
|
||||||
|
|
||||||
// it appears this function is called only from pprust... that's
|
// it appears this function is called only from pprust... that's
|
||||||
// probably not a good thing.
|
// probably not a good thing.
|
||||||
pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut dyn Read)
|
pub fn gather_comments(sess: &ParseSess, path: FileName, srdr: &mut dyn Read) -> Vec<Comment>
|
||||||
-> (Vec<Comment>, Vec<Literal>)
|
|
||||||
{
|
{
|
||||||
let mut src = String::new();
|
let mut src = String::new();
|
||||||
srdr.read_to_string(&mut src).unwrap();
|
srdr.read_to_string(&mut src).unwrap();
|
||||||
@ -357,7 +349,6 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut
|
|||||||
let mut rdr = lexer::StringReader::new_raw(sess, source_file, None);
|
let mut rdr = lexer::StringReader::new_raw(sess, source_file, None);
|
||||||
|
|
||||||
let mut comments: Vec<Comment> = Vec::new();
|
let mut comments: Vec<Comment> = Vec::new();
|
||||||
let mut literals: Vec<Literal> = Vec::new();
|
|
||||||
let mut code_to_the_left = false; // Only code
|
let mut code_to_the_left = false; // Only code
|
||||||
let mut anything_to_the_left = false; // Code or comments
|
let mut anything_to_the_left = false; // Code or comments
|
||||||
|
|
||||||
@ -382,26 +373,12 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let bstart = rdr.pos;
|
|
||||||
rdr.next_token();
|
rdr.next_token();
|
||||||
// discard, and look ahead; we're working with internal state
|
|
||||||
let TokenAndSpan { tok, sp } = rdr.peek();
|
|
||||||
if tok.is_lit() {
|
|
||||||
rdr.with_str_from(bstart, |s| {
|
|
||||||
debug!("tok lit: {}", s);
|
|
||||||
literals.push(Literal {
|
|
||||||
lit: s.to_string(),
|
|
||||||
pos: sp.lo(),
|
|
||||||
});
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
debug!("tok: {}", pprust::token_to_string(&tok));
|
|
||||||
}
|
|
||||||
code_to_the_left = true;
|
code_to_the_left = true;
|
||||||
anything_to_the_left = true;
|
anything_to_the_left = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
(comments, literals)
|
comments
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
@ -262,18 +262,6 @@ fn new_raw_internal(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new(sess: &'a ParseSess,
|
|
||||||
source_file: Lrc<syntax_pos::SourceFile>,
|
|
||||||
override_span: Option<Span>) -> Self {
|
|
||||||
let mut sr = StringReader::new_raw(sess, source_file, override_span);
|
|
||||||
if sr.advance_token().is_err() {
|
|
||||||
sr.emit_fatal_errors();
|
|
||||||
FatalError.raise();
|
|
||||||
}
|
|
||||||
|
|
||||||
sr
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn new_or_buffered_errs(sess: &'a ParseSess,
|
pub fn new_or_buffered_errs(sess: &'a ParseSess,
|
||||||
source_file: Lrc<syntax_pos::SourceFile>,
|
source_file: Lrc<syntax_pos::SourceFile>,
|
||||||
override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
|
override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
|
||||||
@ -1627,7 +1615,12 @@ fn setup<'a>(sm: &SourceMap,
|
|||||||
teststr: String)
|
teststr: String)
|
||||||
-> StringReader<'a> {
|
-> StringReader<'a> {
|
||||||
let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
|
let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
|
||||||
StringReader::new(sess, sf, None)
|
let mut sr = StringReader::new_raw(sess, sf, None);
|
||||||
|
if sr.advance_token().is_err() {
|
||||||
|
sr.emit_fatal_errors();
|
||||||
|
FatalError.raise();
|
||||||
|
}
|
||||||
|
sr
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
487
src/libsyntax/parse/literal.rs
Normal file
487
src/libsyntax/parse/literal.rs
Normal file
@ -0,0 +1,487 @@
|
|||||||
|
//! Code related to parsing literals.
|
||||||
|
|
||||||
|
use crate::ast::{self, Ident, Lit, LitKind};
|
||||||
|
use crate::parse::parser::Parser;
|
||||||
|
use crate::parse::PResult;
|
||||||
|
use crate::parse::token::{self, Token};
|
||||||
|
use crate::parse::unescape::{unescape_str, unescape_char, unescape_byte_str, unescape_byte};
|
||||||
|
use crate::print::pprust;
|
||||||
|
use crate::symbol::{keywords, Symbol};
|
||||||
|
use crate::tokenstream::{TokenStream, TokenTree};
|
||||||
|
|
||||||
|
use errors::{Applicability, Handler};
|
||||||
|
use log::debug;
|
||||||
|
use rustc_data_structures::sync::Lrc;
|
||||||
|
use syntax_pos::Span;
|
||||||
|
|
||||||
|
use std::ascii;
|
||||||
|
|
||||||
|
macro_rules! err {
|
||||||
|
($opt_diag:expr, |$span:ident, $diag:ident| $($body:tt)*) => {
|
||||||
|
match $opt_diag {
|
||||||
|
Some(($span, $diag)) => { $($body)* }
|
||||||
|
None => return None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl LitKind {
|
||||||
|
/// Converts literal token with a suffix into a semantic literal.
|
||||||
|
/// Works speculatively and may return `None` if diagnostic handler is not passed.
|
||||||
|
/// If diagnostic handler is passed, always returns `Some`,
|
||||||
|
/// possibly after reporting non-fatal errors and recovery.
|
||||||
|
fn from_lit_token(
|
||||||
|
lit: token::Lit,
|
||||||
|
suf: Option<Symbol>,
|
||||||
|
diag: Option<(Span, &Handler)>
|
||||||
|
) -> Option<LitKind> {
|
||||||
|
if suf.is_some() && !lit.may_have_suffix() {
|
||||||
|
err!(diag, |span, diag| {
|
||||||
|
expect_no_suffix(span, diag, &format!("a {}", lit.literal_name()), suf)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(match lit {
|
||||||
|
token::Bool(i) => {
|
||||||
|
assert!(i == keywords::True.name() || i == keywords::False.name());
|
||||||
|
LitKind::Bool(i == keywords::True.name())
|
||||||
|
}
|
||||||
|
token::Byte(i) => {
|
||||||
|
match unescape_byte(&i.as_str()) {
|
||||||
|
Ok(c) => LitKind::Byte(c),
|
||||||
|
Err(_) => LitKind::Err(i),
|
||||||
|
}
|
||||||
|
},
|
||||||
|
token::Char(i) => {
|
||||||
|
match unescape_char(&i.as_str()) {
|
||||||
|
Ok(c) => LitKind::Char(c),
|
||||||
|
Err(_) => LitKind::Err(i),
|
||||||
|
}
|
||||||
|
},
|
||||||
|
token::Err(i) => LitKind::Err(i),
|
||||||
|
|
||||||
|
// There are some valid suffixes for integer and float literals,
|
||||||
|
// so all the handling is done internally.
|
||||||
|
token::Integer(s) => return integer_lit(&s.as_str(), suf, diag),
|
||||||
|
token::Float(s) => return float_lit(&s.as_str(), suf, diag),
|
||||||
|
|
||||||
|
token::Str_(mut sym) => {
|
||||||
|
// If there are no characters requiring special treatment we can
|
||||||
|
// reuse the symbol from the Token. Otherwise, we must generate a
|
||||||
|
// new symbol because the string in the LitKind is different to the
|
||||||
|
// string in the Token.
|
||||||
|
let mut has_error = false;
|
||||||
|
let s = &sym.as_str();
|
||||||
|
if s.as_bytes().iter().any(|&c| c == b'\\' || c == b'\r') {
|
||||||
|
let mut buf = String::with_capacity(s.len());
|
||||||
|
unescape_str(s, &mut |_, unescaped_char| {
|
||||||
|
match unescaped_char {
|
||||||
|
Ok(c) => buf.push(c),
|
||||||
|
Err(_) => has_error = true,
|
||||||
|
}
|
||||||
|
});
|
||||||
|
if has_error {
|
||||||
|
return Some(LitKind::Err(sym));
|
||||||
|
}
|
||||||
|
sym = Symbol::intern(&buf)
|
||||||
|
}
|
||||||
|
|
||||||
|
LitKind::Str(sym, ast::StrStyle::Cooked)
|
||||||
|
}
|
||||||
|
token::StrRaw(mut sym, n) => {
|
||||||
|
// Ditto.
|
||||||
|
let s = &sym.as_str();
|
||||||
|
if s.contains('\r') {
|
||||||
|
sym = Symbol::intern(&raw_str_lit(s));
|
||||||
|
}
|
||||||
|
LitKind::Str(sym, ast::StrStyle::Raw(n))
|
||||||
|
}
|
||||||
|
token::ByteStr(i) => {
|
||||||
|
let s = &i.as_str();
|
||||||
|
let mut buf = Vec::with_capacity(s.len());
|
||||||
|
let mut has_error = false;
|
||||||
|
unescape_byte_str(s, &mut |_, unescaped_byte| {
|
||||||
|
match unescaped_byte {
|
||||||
|
Ok(c) => buf.push(c),
|
||||||
|
Err(_) => has_error = true,
|
||||||
|
}
|
||||||
|
});
|
||||||
|
if has_error {
|
||||||
|
return Some(LitKind::Err(i));
|
||||||
|
}
|
||||||
|
buf.shrink_to_fit();
|
||||||
|
LitKind::ByteStr(Lrc::new(buf))
|
||||||
|
}
|
||||||
|
token::ByteStrRaw(i, _) => {
|
||||||
|
LitKind::ByteStr(Lrc::new(i.to_string().into_bytes()))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Attempts to recover a token from semantic literal.
|
||||||
|
/// This function is used when the original token doesn't exist (e.g. the literal is created
|
||||||
|
/// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
|
||||||
|
pub fn to_lit_token(&self) -> (token::Lit, Option<Symbol>) {
|
||||||
|
match *self {
|
||||||
|
LitKind::Str(string, ast::StrStyle::Cooked) => {
|
||||||
|
let escaped = string.as_str().escape_default().to_string();
|
||||||
|
(token::Lit::Str_(Symbol::intern(&escaped)), None)
|
||||||
|
}
|
||||||
|
LitKind::Str(string, ast::StrStyle::Raw(n)) => {
|
||||||
|
(token::Lit::StrRaw(string, n), None)
|
||||||
|
}
|
||||||
|
LitKind::ByteStr(ref bytes) => {
|
||||||
|
let string = bytes.iter().cloned().flat_map(ascii::escape_default)
|
||||||
|
.map(Into::<char>::into).collect::<String>();
|
||||||
|
(token::Lit::ByteStr(Symbol::intern(&string)), None)
|
||||||
|
}
|
||||||
|
LitKind::Byte(byte) => {
|
||||||
|
let string: String = ascii::escape_default(byte).map(Into::<char>::into).collect();
|
||||||
|
(token::Lit::Byte(Symbol::intern(&string)), None)
|
||||||
|
}
|
||||||
|
LitKind::Char(ch) => {
|
||||||
|
let string: String = ch.escape_default().map(Into::<char>::into).collect();
|
||||||
|
(token::Lit::Char(Symbol::intern(&string)), None)
|
||||||
|
}
|
||||||
|
LitKind::Int(n, ty) => {
|
||||||
|
let suffix = match ty {
|
||||||
|
ast::LitIntType::Unsigned(ty) => Some(Symbol::intern(ty.ty_to_string())),
|
||||||
|
ast::LitIntType::Signed(ty) => Some(Symbol::intern(ty.ty_to_string())),
|
||||||
|
ast::LitIntType::Unsuffixed => None,
|
||||||
|
};
|
||||||
|
(token::Lit::Integer(Symbol::intern(&n.to_string())), suffix)
|
||||||
|
}
|
||||||
|
LitKind::Float(symbol, ty) => {
|
||||||
|
(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
|
||||||
|
}
|
||||||
|
LitKind::FloatUnsuffixed(symbol) => (token::Lit::Float(symbol), None),
|
||||||
|
LitKind::Bool(value) => {
|
||||||
|
let kw = if value { keywords::True } else { keywords::False };
|
||||||
|
(token::Lit::Bool(kw.name()), None)
|
||||||
|
}
|
||||||
|
LitKind::Err(val) => (token::Lit::Err(val), None),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Lit {
|
||||||
|
/// Converts literal token with a suffix into an AST literal.
|
||||||
|
/// Works speculatively and may return `None` if diagnostic handler is not passed.
|
||||||
|
/// If diagnostic handler is passed, may return `Some`,
|
||||||
|
/// possibly after reporting non-fatal errors and recovery, or `None` for irrecoverable errors.
|
||||||
|
crate fn from_token(
|
||||||
|
token: &token::Token,
|
||||||
|
span: Span,
|
||||||
|
diag: Option<(Span, &Handler)>,
|
||||||
|
) -> Option<Lit> {
|
||||||
|
let (token, suffix) = match *token {
|
||||||
|
token::Ident(ident, false) if ident.name == keywords::True.name() ||
|
||||||
|
ident.name == keywords::False.name() =>
|
||||||
|
(token::Bool(ident.name), None),
|
||||||
|
token::Literal(token, suffix) =>
|
||||||
|
(token, suffix),
|
||||||
|
token::Interpolated(ref nt) => {
|
||||||
|
if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt {
|
||||||
|
if let ast::ExprKind::Lit(lit) = &expr.node {
|
||||||
|
return Some(lit.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
_ => return None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let node = LitKind::from_lit_token(token, suffix, diag)?;
|
||||||
|
Some(Lit { node, token, suffix, span })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Attempts to recover an AST literal from semantic literal.
|
||||||
|
/// This function is used when the original token doesn't exist (e.g. the literal is created
|
||||||
|
/// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
|
||||||
|
pub fn from_lit_kind(node: LitKind, span: Span) -> Lit {
|
||||||
|
let (token, suffix) = node.to_lit_token();
|
||||||
|
Lit { node, token, suffix, span }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Losslessly convert an AST literal into a token stream.
|
||||||
|
crate fn tokens(&self) -> TokenStream {
|
||||||
|
let token = match self.token {
|
||||||
|
token::Bool(symbol) => Token::Ident(Ident::with_empty_ctxt(symbol), false),
|
||||||
|
token => Token::Literal(token, self.suffix),
|
||||||
|
};
|
||||||
|
TokenTree::Token(self.span, token).into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Parser<'a> {
|
||||||
|
/// Matches `lit = true | false | token_lit`.
|
||||||
|
crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
|
||||||
|
let diag = Some((self.span, &self.sess.span_diagnostic));
|
||||||
|
if let Some(lit) = Lit::from_token(&self.token, self.span, diag) {
|
||||||
|
self.bump();
|
||||||
|
return Ok(lit);
|
||||||
|
} else if self.token == token::Dot {
|
||||||
|
// Recover `.4` as `0.4`.
|
||||||
|
let recovered = self.look_ahead(1, |t| {
|
||||||
|
if let token::Literal(token::Integer(val), suf) = *t {
|
||||||
|
let next_span = self.look_ahead_span(1);
|
||||||
|
if self.span.hi() == next_span.lo() {
|
||||||
|
let sym = String::from("0.") + &val.as_str();
|
||||||
|
let token = token::Literal(token::Float(Symbol::intern(&sym)), suf);
|
||||||
|
return Some((token, self.span.to(next_span)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
});
|
||||||
|
if let Some((token, span)) = recovered {
|
||||||
|
self.diagnostic()
|
||||||
|
.struct_span_err(span, "float literals must have an integer part")
|
||||||
|
.span_suggestion(
|
||||||
|
span,
|
||||||
|
"must have an integer part",
|
||||||
|
pprust::token_to_string(&token),
|
||||||
|
Applicability::MachineApplicable,
|
||||||
|
)
|
||||||
|
.emit();
|
||||||
|
let diag = Some((span, &self.sess.span_diagnostic));
|
||||||
|
if let Some(lit) = Lit::from_token(&token, span, diag) {
|
||||||
|
self.bump();
|
||||||
|
self.bump();
|
||||||
|
return Ok(lit);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Err(self.span_fatal(self.span, &format!("unexpected token: {}", self.this_token_descr())))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
crate fn expect_no_suffix(sp: Span, diag: &Handler, kind: &str, suffix: Option<ast::Name>) {
|
||||||
|
match suffix {
|
||||||
|
None => {/* everything ok */}
|
||||||
|
Some(suf) => {
|
||||||
|
let text = suf.as_str();
|
||||||
|
if text.is_empty() {
|
||||||
|
diag.span_bug(sp, "found empty literal suffix in Some")
|
||||||
|
}
|
||||||
|
let mut err = if kind == "a tuple index" &&
|
||||||
|
["i32", "u32", "isize", "usize"].contains(&text.to_string().as_str())
|
||||||
|
{
|
||||||
|
// #59553: warn instead of reject out of hand to allow the fix to percolate
|
||||||
|
// through the ecosystem when people fix their macros
|
||||||
|
let mut err = diag.struct_span_warn(
|
||||||
|
sp,
|
||||||
|
&format!("suffixes on {} are invalid", kind),
|
||||||
|
);
|
||||||
|
err.note(&format!(
|
||||||
|
"`{}` is *temporarily* accepted on tuple index fields as it was \
|
||||||
|
incorrectly accepted on stable for a few releases",
|
||||||
|
text,
|
||||||
|
));
|
||||||
|
err.help(
|
||||||
|
"on proc macros, you'll want to use `syn::Index::from` or \
|
||||||
|
`proc_macro::Literal::*_unsuffixed` for code that will desugar \
|
||||||
|
to tuple field access",
|
||||||
|
);
|
||||||
|
err.note(
|
||||||
|
"for more context, see https://github.com/rust-lang/rust/issues/60210",
|
||||||
|
);
|
||||||
|
err
|
||||||
|
} else {
|
||||||
|
diag.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
|
||||||
|
};
|
||||||
|
err.span_label(sp, format!("invalid suffix `{}`", text));
|
||||||
|
err.emit();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parses a string representing a raw string literal into its final form. The
|
||||||
|
/// only operation this does is convert embedded CRLF into a single LF.
|
||||||
|
fn raw_str_lit(lit: &str) -> String {
|
||||||
|
debug!("raw_str_lit: given {}", lit.escape_default());
|
||||||
|
let mut res = String::with_capacity(lit.len());
|
||||||
|
|
||||||
|
let mut chars = lit.chars().peekable();
|
||||||
|
while let Some(c) = chars.next() {
|
||||||
|
if c == '\r' {
|
||||||
|
if *chars.peek().unwrap() != '\n' {
|
||||||
|
panic!("lexer accepted bare CR");
|
||||||
|
}
|
||||||
|
chars.next();
|
||||||
|
res.push('\n');
|
||||||
|
} else {
|
||||||
|
res.push(c);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
res.shrink_to_fit();
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|
||||||
|
// check if `s` looks like i32 or u1234 etc.
|
||||||
|
fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
|
||||||
|
s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
|
||||||
|
-> Option<LitKind> {
|
||||||
|
debug!("filtered_float_lit: {}, {:?}", data, suffix);
|
||||||
|
let suffix = match suffix {
|
||||||
|
Some(suffix) => suffix,
|
||||||
|
None => return Some(LitKind::FloatUnsuffixed(data)),
|
||||||
|
};
|
||||||
|
|
||||||
|
Some(match &*suffix.as_str() {
|
||||||
|
"f32" => LitKind::Float(data, ast::FloatTy::F32),
|
||||||
|
"f64" => LitKind::Float(data, ast::FloatTy::F64),
|
||||||
|
suf => {
|
||||||
|
err!(diag, |span, diag| {
|
||||||
|
if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
|
||||||
|
// if it looks like a width, lets try to be helpful.
|
||||||
|
let msg = format!("invalid width `{}` for float literal", &suf[1..]);
|
||||||
|
diag.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit()
|
||||||
|
} else {
|
||||||
|
let msg = format!("invalid suffix `{}` for float literal", suf);
|
||||||
|
diag.struct_span_err(span, &msg)
|
||||||
|
.span_label(span, format!("invalid suffix `{}`", suf))
|
||||||
|
.help("valid suffixes are `f32` and `f64`")
|
||||||
|
.emit();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
LitKind::FloatUnsuffixed(data)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
|
||||||
|
-> Option<LitKind> {
|
||||||
|
debug!("float_lit: {:?}, {:?}", s, suffix);
|
||||||
|
// FIXME #2252: bounds checking float literals is deferred until trans
|
||||||
|
|
||||||
|
// Strip underscores without allocating a new String unless necessary.
|
||||||
|
let s2;
|
||||||
|
let s = if s.chars().any(|c| c == '_') {
|
||||||
|
s2 = s.chars().filter(|&c| c != '_').collect::<String>();
|
||||||
|
&s2
|
||||||
|
} else {
|
||||||
|
s
|
||||||
|
};
|
||||||
|
|
||||||
|
filtered_float_lit(Symbol::intern(s), suffix, diag)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
|
||||||
|
-> Option<LitKind> {
|
||||||
|
// s can only be ascii, byte indexing is fine
|
||||||
|
|
||||||
|
// Strip underscores without allocating a new String unless necessary.
|
||||||
|
let s2;
|
||||||
|
let mut s = if s.chars().any(|c| c == '_') {
|
||||||
|
s2 = s.chars().filter(|&c| c != '_').collect::<String>();
|
||||||
|
&s2
|
||||||
|
} else {
|
||||||
|
s
|
||||||
|
};
|
||||||
|
|
||||||
|
debug!("integer_lit: {}, {:?}", s, suffix);
|
||||||
|
|
||||||
|
let mut base = 10;
|
||||||
|
let orig = s;
|
||||||
|
let mut ty = ast::LitIntType::Unsuffixed;
|
||||||
|
|
||||||
|
if s.starts_with('0') && s.len() > 1 {
|
||||||
|
match s.as_bytes()[1] {
|
||||||
|
b'x' => base = 16,
|
||||||
|
b'o' => base = 8,
|
||||||
|
b'b' => base = 2,
|
||||||
|
_ => { }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 1f64 and 2f32 etc. are valid float literals.
|
||||||
|
if let Some(suf) = suffix {
|
||||||
|
if looks_like_width_suffix(&['f'], &suf.as_str()) {
|
||||||
|
let err = match base {
|
||||||
|
16 => Some("hexadecimal float literal is not supported"),
|
||||||
|
8 => Some("octal float literal is not supported"),
|
||||||
|
2 => Some("binary float literal is not supported"),
|
||||||
|
_ => None,
|
||||||
|
};
|
||||||
|
if let Some(err) = err {
|
||||||
|
err!(diag, |span, diag| {
|
||||||
|
diag.struct_span_err(span, err)
|
||||||
|
.span_label(span, "not supported")
|
||||||
|
.emit();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return filtered_float_lit(Symbol::intern(s), Some(suf), diag)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if base != 10 {
|
||||||
|
s = &s[2..];
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(suf) = suffix {
|
||||||
|
if suf.as_str().is_empty() {
|
||||||
|
err!(diag, |span, diag| diag.span_bug(span, "found empty literal suffix in Some"));
|
||||||
|
}
|
||||||
|
ty = match &*suf.as_str() {
|
||||||
|
"isize" => ast::LitIntType::Signed(ast::IntTy::Isize),
|
||||||
|
"i8" => ast::LitIntType::Signed(ast::IntTy::I8),
|
||||||
|
"i16" => ast::LitIntType::Signed(ast::IntTy::I16),
|
||||||
|
"i32" => ast::LitIntType::Signed(ast::IntTy::I32),
|
||||||
|
"i64" => ast::LitIntType::Signed(ast::IntTy::I64),
|
||||||
|
"i128" => ast::LitIntType::Signed(ast::IntTy::I128),
|
||||||
|
"usize" => ast::LitIntType::Unsigned(ast::UintTy::Usize),
|
||||||
|
"u8" => ast::LitIntType::Unsigned(ast::UintTy::U8),
|
||||||
|
"u16" => ast::LitIntType::Unsigned(ast::UintTy::U16),
|
||||||
|
"u32" => ast::LitIntType::Unsigned(ast::UintTy::U32),
|
||||||
|
"u64" => ast::LitIntType::Unsigned(ast::UintTy::U64),
|
||||||
|
"u128" => ast::LitIntType::Unsigned(ast::UintTy::U128),
|
||||||
|
suf => {
|
||||||
|
// i<digits> and u<digits> look like widths, so lets
|
||||||
|
// give an error message along those lines
|
||||||
|
err!(diag, |span, diag| {
|
||||||
|
if looks_like_width_suffix(&['i', 'u'], suf) {
|
||||||
|
let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
|
||||||
|
diag.struct_span_err(span, &msg)
|
||||||
|
.help("valid widths are 8, 16, 32, 64 and 128")
|
||||||
|
.emit();
|
||||||
|
} else {
|
||||||
|
let msg = format!("invalid suffix `{}` for numeric literal", suf);
|
||||||
|
diag.struct_span_err(span, &msg)
|
||||||
|
.span_label(span, format!("invalid suffix `{}`", suf))
|
||||||
|
.help("the suffix must be one of the integral types \
|
||||||
|
(`u32`, `isize`, etc)")
|
||||||
|
.emit();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
ty
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
debug!("integer_lit: the type is {:?}, base {:?}, the new string is {:?}, the original \
|
||||||
|
string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix);
|
||||||
|
|
||||||
|
Some(match u128::from_str_radix(s, base) {
|
||||||
|
Ok(r) => LitKind::Int(r, ty),
|
||||||
|
Err(_) => {
|
||||||
|
// small bases are lexed as if they were base 10, e.g, the string
|
||||||
|
// might be `0b10201`. This will cause the conversion above to fail,
|
||||||
|
// but these cases have errors in the lexer: we don't want to emit
|
||||||
|
// two errors, and we especially don't want to emit this error since
|
||||||
|
// it isn't necessarily true.
|
||||||
|
let already_errored = base < 10 &&
|
||||||
|
s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base));
|
||||||
|
|
||||||
|
if !already_errored {
|
||||||
|
err!(diag, |span, diag| diag.span_err(span, "int literal is too large"));
|
||||||
|
}
|
||||||
|
LitKind::Int(0, ty)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
@ -5,7 +5,6 @@
|
|||||||
use crate::source_map::{SourceMap, FilePathMapping};
|
use crate::source_map::{SourceMap, FilePathMapping};
|
||||||
use crate::feature_gate::UnstableFeatures;
|
use crate::feature_gate::UnstableFeatures;
|
||||||
use crate::parse::parser::Parser;
|
use crate::parse::parser::Parser;
|
||||||
use crate::symbol::Symbol;
|
|
||||||
use crate::syntax::parse::parser::emit_unclosed_delims;
|
use crate::syntax::parse::parser::emit_unclosed_delims;
|
||||||
use crate::tokenstream::{TokenStream, TokenTree};
|
use crate::tokenstream::{TokenStream, TokenTree};
|
||||||
use crate::diagnostics::plugin::ErrorMap;
|
use crate::diagnostics::plugin::ErrorMap;
|
||||||
@ -14,7 +13,6 @@
|
|||||||
use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
|
use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
|
||||||
use rustc_data_structures::sync::{Lrc, Lock};
|
use rustc_data_structures::sync::{Lrc, Lock};
|
||||||
use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
|
use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
|
||||||
use log::debug;
|
|
||||||
|
|
||||||
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
|
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
@ -25,18 +23,15 @@
|
|||||||
|
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
pub mod parser;
|
pub mod parser;
|
||||||
|
pub mod attr;
|
||||||
pub mod lexer;
|
pub mod lexer;
|
||||||
pub mod token;
|
pub mod token;
|
||||||
pub mod attr;
|
|
||||||
pub mod diagnostics;
|
|
||||||
|
|
||||||
pub mod classify;
|
crate mod classify;
|
||||||
|
crate mod diagnostics;
|
||||||
pub(crate) mod unescape;
|
crate mod literal;
|
||||||
use unescape::{unescape_str, unescape_char, unescape_byte_str, unescape_byte};
|
crate mod unescape;
|
||||||
|
crate mod unescape_error_reporting;
|
||||||
pub(crate) mod unescape_error_reporting;
|
|
||||||
|
|
||||||
/// Info about a parsing session.
|
/// Info about a parsing session.
|
||||||
pub struct ParseSess {
|
pub struct ParseSess {
|
||||||
@ -334,284 +329,6 @@ pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser<'_> {
|
|||||||
Parser::new(sess, stream, None, true, false)
|
Parser::new(sess, stream, None, true, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Parses a string representing a raw string literal into its final form. The
|
|
||||||
/// only operation this does is convert embedded CRLF into a single LF.
|
|
||||||
fn raw_str_lit(lit: &str) -> String {
|
|
||||||
debug!("raw_str_lit: given {}", lit.escape_default());
|
|
||||||
let mut res = String::with_capacity(lit.len());
|
|
||||||
|
|
||||||
let mut chars = lit.chars().peekable();
|
|
||||||
while let Some(c) = chars.next() {
|
|
||||||
if c == '\r' {
|
|
||||||
if *chars.peek().unwrap() != '\n' {
|
|
||||||
panic!("lexer accepted bare CR");
|
|
||||||
}
|
|
||||||
chars.next();
|
|
||||||
res.push('\n');
|
|
||||||
} else {
|
|
||||||
res.push(c);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
res.shrink_to_fit();
|
|
||||||
res
|
|
||||||
}
|
|
||||||
|
|
||||||
// check if `s` looks like i32 or u1234 etc.
|
|
||||||
fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
|
|
||||||
s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
|
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! err {
|
|
||||||
($opt_diag:expr, |$span:ident, $diag:ident| $($body:tt)*) => {
|
|
||||||
match $opt_diag {
|
|
||||||
Some(($span, $diag)) => { $($body)* }
|
|
||||||
None => return None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
crate fn lit_token(lit: token::Lit, suf: Option<Symbol>, diag: Option<(Span, &Handler)>)
|
|
||||||
-> (bool /* suffix illegal? */, Option<ast::LitKind>) {
|
|
||||||
use ast::LitKind;
|
|
||||||
|
|
||||||
match lit {
|
|
||||||
token::Byte(i) => {
|
|
||||||
let lit_kind = match unescape_byte(&i.as_str()) {
|
|
||||||
Ok(c) => LitKind::Byte(c),
|
|
||||||
Err(_) => LitKind::Err(i),
|
|
||||||
};
|
|
||||||
(true, Some(lit_kind))
|
|
||||||
},
|
|
||||||
token::Char(i) => {
|
|
||||||
let lit_kind = match unescape_char(&i.as_str()) {
|
|
||||||
Ok(c) => LitKind::Char(c),
|
|
||||||
Err(_) => LitKind::Err(i),
|
|
||||||
};
|
|
||||||
(true, Some(lit_kind))
|
|
||||||
},
|
|
||||||
token::Err(i) => (true, Some(LitKind::Err(i))),
|
|
||||||
|
|
||||||
// There are some valid suffixes for integer and float literals,
|
|
||||||
// so all the handling is done internally.
|
|
||||||
token::Integer(s) => (false, integer_lit(&s.as_str(), suf, diag)),
|
|
||||||
token::Float(s) => (false, float_lit(&s.as_str(), suf, diag)),
|
|
||||||
|
|
||||||
token::Str_(mut sym) => {
|
|
||||||
// If there are no characters requiring special treatment we can
|
|
||||||
// reuse the symbol from the Token. Otherwise, we must generate a
|
|
||||||
// new symbol because the string in the LitKind is different to the
|
|
||||||
// string in the Token.
|
|
||||||
let mut has_error = false;
|
|
||||||
let s = &sym.as_str();
|
|
||||||
if s.as_bytes().iter().any(|&c| c == b'\\' || c == b'\r') {
|
|
||||||
let mut buf = String::with_capacity(s.len());
|
|
||||||
unescape_str(s, &mut |_, unescaped_char| {
|
|
||||||
match unescaped_char {
|
|
||||||
Ok(c) => buf.push(c),
|
|
||||||
Err(_) => has_error = true,
|
|
||||||
}
|
|
||||||
});
|
|
||||||
if has_error {
|
|
||||||
return (true, Some(LitKind::Err(sym)));
|
|
||||||
}
|
|
||||||
sym = Symbol::intern(&buf)
|
|
||||||
}
|
|
||||||
|
|
||||||
(true, Some(LitKind::Str(sym, ast::StrStyle::Cooked)))
|
|
||||||
}
|
|
||||||
token::StrRaw(mut sym, n) => {
|
|
||||||
// Ditto.
|
|
||||||
let s = &sym.as_str();
|
|
||||||
if s.contains('\r') {
|
|
||||||
sym = Symbol::intern(&raw_str_lit(s));
|
|
||||||
}
|
|
||||||
(true, Some(LitKind::Str(sym, ast::StrStyle::Raw(n))))
|
|
||||||
}
|
|
||||||
token::ByteStr(i) => {
|
|
||||||
let s = &i.as_str();
|
|
||||||
let mut buf = Vec::with_capacity(s.len());
|
|
||||||
let mut has_error = false;
|
|
||||||
unescape_byte_str(s, &mut |_, unescaped_byte| {
|
|
||||||
match unescaped_byte {
|
|
||||||
Ok(c) => buf.push(c),
|
|
||||||
Err(_) => has_error = true,
|
|
||||||
}
|
|
||||||
});
|
|
||||||
if has_error {
|
|
||||||
return (true, Some(LitKind::Err(i)));
|
|
||||||
}
|
|
||||||
buf.shrink_to_fit();
|
|
||||||
(true, Some(LitKind::ByteStr(Lrc::new(buf))))
|
|
||||||
}
|
|
||||||
token::ByteStrRaw(i, _) => {
|
|
||||||
(true, Some(LitKind::ByteStr(Lrc::new(i.to_string().into_bytes()))))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
|
|
||||||
-> Option<ast::LitKind> {
|
|
||||||
debug!("filtered_float_lit: {}, {:?}", data, suffix);
|
|
||||||
let suffix = match suffix {
|
|
||||||
Some(suffix) => suffix,
|
|
||||||
None => return Some(ast::LitKind::FloatUnsuffixed(data)),
|
|
||||||
};
|
|
||||||
|
|
||||||
Some(match &*suffix.as_str() {
|
|
||||||
"f32" => ast::LitKind::Float(data, ast::FloatTy::F32),
|
|
||||||
"f64" => ast::LitKind::Float(data, ast::FloatTy::F64),
|
|
||||||
suf => {
|
|
||||||
err!(diag, |span, diag| {
|
|
||||||
if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
|
|
||||||
// if it looks like a width, lets try to be helpful.
|
|
||||||
let msg = format!("invalid width `{}` for float literal", &suf[1..]);
|
|
||||||
diag.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit()
|
|
||||||
} else {
|
|
||||||
let msg = format!("invalid suffix `{}` for float literal", suf);
|
|
||||||
diag.struct_span_err(span, &msg)
|
|
||||||
.span_label(span, format!("invalid suffix `{}`", suf))
|
|
||||||
.help("valid suffixes are `f32` and `f64`")
|
|
||||||
.emit();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
ast::LitKind::FloatUnsuffixed(data)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
|
|
||||||
-> Option<ast::LitKind> {
|
|
||||||
debug!("float_lit: {:?}, {:?}", s, suffix);
|
|
||||||
// FIXME #2252: bounds checking float literals is deferred until trans
|
|
||||||
|
|
||||||
// Strip underscores without allocating a new String unless necessary.
|
|
||||||
let s2;
|
|
||||||
let s = if s.chars().any(|c| c == '_') {
|
|
||||||
s2 = s.chars().filter(|&c| c != '_').collect::<String>();
|
|
||||||
&s2
|
|
||||||
} else {
|
|
||||||
s
|
|
||||||
};
|
|
||||||
|
|
||||||
filtered_float_lit(Symbol::intern(s), suffix, diag)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Parses the textual body of an integer literal (optionally with a radix
/// prefix such as `0x` and/or a type suffix such as `u32`) into an
/// `ast::LitKind`, reporting problems through `diag`.
///
/// NOTE(review): `err!` presumably returns `None` from this function when
/// `diag` is `None` — confirm at the macro's definition.
fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
               -> Option<ast::LitKind> {
    // s can only be ascii, byte indexing is fine

    // Strip underscores without allocating a new String unless necessary.
    let s2;
    let mut s = if s.chars().any(|c| c == '_') {
        s2 = s.chars().filter(|&c| c != '_').collect::<String>();
        &s2
    } else {
        s
    };

    debug!("integer_lit: {}, {:?}", s, suffix);

    let mut base = 10;
    let orig = s;
    let mut ty = ast::LitIntType::Unsuffixed;

    // Detect a radix prefix (`0x`, `0o`, `0b`); a bare `0` stays decimal.
    if s.starts_with('0') && s.len() > 1 {
        match s.as_bytes()[1] {
            b'x' => base = 16,
            b'o' => base = 8,
            b'b' => base = 2,
            _ => { }
        }
    }

    // 1f64 and 2f32 etc. are valid float literals.
    if let Some(suf) = suffix {
        if looks_like_width_suffix(&['f'], &suf.as_str()) {
            // A float suffix cannot be combined with a radix prefix; report
            // that, but still recover by handing off to the float path.
            let err = match base {
                16 => Some("hexadecimal float literal is not supported"),
                8 => Some("octal float literal is not supported"),
                2 => Some("binary float literal is not supported"),
                _ => None,
            };
            if let Some(err) = err {
                err!(diag, |span, diag| {
                    diag.struct_span_err(span, err)
                        .span_label(span, "not supported")
                        .emit();
                });
            }
            return filtered_float_lit(Symbol::intern(s), Some(suf), diag)
        }
    }

    // Drop the two-character radix prefix before digit conversion.
    if base != 10 {
        s = &s[2..];
    }

    // Map the type suffix (if any) to a concrete integer type.
    if let Some(suf) = suffix {
        if suf.as_str().is_empty() {
            err!(diag, |span, diag| diag.span_bug(span, "found empty literal suffix in Some"));
        }
        ty = match &*suf.as_str() {
            "isize" => ast::LitIntType::Signed(ast::IntTy::Isize),
            "i8" => ast::LitIntType::Signed(ast::IntTy::I8),
            "i16" => ast::LitIntType::Signed(ast::IntTy::I16),
            "i32" => ast::LitIntType::Signed(ast::IntTy::I32),
            "i64" => ast::LitIntType::Signed(ast::IntTy::I64),
            "i128" => ast::LitIntType::Signed(ast::IntTy::I128),
            "usize" => ast::LitIntType::Unsigned(ast::UintTy::Usize),
            "u8" => ast::LitIntType::Unsigned(ast::UintTy::U8),
            "u16" => ast::LitIntType::Unsigned(ast::UintTy::U16),
            "u32" => ast::LitIntType::Unsigned(ast::UintTy::U32),
            "u64" => ast::LitIntType::Unsigned(ast::UintTy::U64),
            "u128" => ast::LitIntType::Unsigned(ast::UintTy::U128),
            suf => {
                // i<digits> and u<digits> look like widths, so lets
                // give an error message along those lines
                err!(diag, |span, diag| {
                    if looks_like_width_suffix(&['i', 'u'], suf) {
                        let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
                        diag.struct_span_err(span, &msg)
                            .help("valid widths are 8, 16, 32, 64 and 128")
                            .emit();
                    } else {
                        let msg = format!("invalid suffix `{}` for numeric literal", suf);
                        diag.struct_span_err(span, &msg)
                            .span_label(span, format!("invalid suffix `{}`", suf))
                            .help("the suffix must be one of the integral types \
                                   (`u32`, `isize`, etc)")
                            .emit();
                    }
                });

                // Recover: keep the current (unsuffixed) type and continue.
                ty
            }
        }
    }

    debug!("integer_lit: the type is {:?}, base {:?}, the new string is {:?}, the original \
            string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix);

    Some(match u128::from_str_radix(s, base) {
        Ok(r) => ast::LitKind::Int(r, ty),
        Err(_) => {
            // small bases are lexed as if they were base 10, e.g, the string
            // might be `0b10201`. This will cause the conversion above to fail,
            // but these cases have errors in the lexer: we don't want to emit
            // two errors, and we especially don't want to emit this error since
            // it isn't necessarily true.
            let already_errored = base < 10 &&
                s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base));

            if !already_errored {
                err!(diag, |span, diag| diag.span_err(span, "int literal is too large"));
            }
            // Recover with a dummy value so parsing can continue.
            ast::LitKind::Int(0, ty)
        }
    })
}
|
|
||||||
|
|
||||||
/// A sequence separator.
|
/// A sequence separator.
|
||||||
pub struct SeqSep {
|
pub struct SeqSep {
|
||||||
/// The separator token.
|
/// The separator token.
|
||||||
|
@ -15,7 +15,7 @@
|
|||||||
use crate::ast::{GenericParam, GenericParamKind};
|
use crate::ast::{GenericParam, GenericParamKind};
|
||||||
use crate::ast::GenericArg;
|
use crate::ast::GenericArg;
|
||||||
use crate::ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
|
use crate::ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
|
||||||
use crate::ast::{Label, Lifetime, Lit, LitKind};
|
use crate::ast::{Label, Lifetime};
|
||||||
use crate::ast::{Local, LocalSource};
|
use crate::ast::{Local, LocalSource};
|
||||||
use crate::ast::MacStmtStyle;
|
use crate::ast::MacStmtStyle;
|
||||||
use crate::ast::{Mac, Mac_, MacDelimiter};
|
use crate::ast::{Mac, Mac_, MacDelimiter};
|
||||||
@ -35,7 +35,7 @@
|
|||||||
use crate::{ast, attr};
|
use crate::{ast, attr};
|
||||||
use crate::ext::base::DummyResult;
|
use crate::ext::base::DummyResult;
|
||||||
use crate::source_map::{self, SourceMap, Spanned, respan};
|
use crate::source_map::{self, SourceMap, Spanned, respan};
|
||||||
use crate::parse::{self, SeqSep, classify, token};
|
use crate::parse::{SeqSep, classify, literal, token};
|
||||||
use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
|
use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
|
||||||
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
|
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
|
||||||
use crate::parse::token::DelimToken;
|
use crate::parse::token::DelimToken;
|
||||||
@ -46,7 +46,7 @@
|
|||||||
use crate::parse::PResult;
|
use crate::parse::PResult;
|
||||||
use crate::ThinVec;
|
use crate::ThinVec;
|
||||||
use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
|
use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
|
||||||
use crate::symbol::{Symbol, keywords};
|
use crate::symbol::{keywords, Symbol};
|
||||||
|
|
||||||
use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
|
use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
|
||||||
use rustc_target::spec::abi::{self, Abi};
|
use rustc_target::spec::abi::{self, Abi};
|
||||||
@ -613,7 +613,7 @@ fn token_descr(&self) -> Option<&'static str> {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn this_token_descr(&self) -> String {
|
crate fn this_token_descr(&self) -> String {
|
||||||
if let Some(prefix) = self.token_descr() {
|
if let Some(prefix) = self.token_descr() {
|
||||||
format!("{} `{}`", prefix, self.this_token_to_string())
|
format!("{} `{}`", prefix, self.this_token_to_string())
|
||||||
} else {
|
} else {
|
||||||
@ -621,11 +621,6 @@ fn this_token_descr(&self) -> String {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn unexpected_last<T>(&self, t: &token::Token) -> PResult<'a, T> {
|
|
||||||
let token_str = pprust::token_to_string(t);
|
|
||||||
Err(self.span_fatal(self.prev_span, &format!("unexpected token: `{}`", token_str)))
|
|
||||||
}
|
|
||||||
|
|
||||||
crate fn unexpected<T>(&mut self) -> PResult<'a, T> {
|
crate fn unexpected<T>(&mut self) -> PResult<'a, T> {
|
||||||
match self.expect_one_of(&[], &[]) {
|
match self.expect_one_of(&[], &[]) {
|
||||||
Err(e) => Err(e),
|
Err(e) => Err(e),
|
||||||
@ -1109,43 +1104,7 @@ fn expect_or(&mut self) -> PResult<'a, ()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
|
fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
|
||||||
match suffix {
|
literal::expect_no_suffix(sp, &self.sess.span_diagnostic, kind, suffix)
|
||||||
None => {/* everything ok */}
|
|
||||||
Some(suf) => {
|
|
||||||
let text = suf.as_str();
|
|
||||||
if text.is_empty() {
|
|
||||||
self.span_bug(sp, "found empty literal suffix in Some")
|
|
||||||
}
|
|
||||||
let mut err = if kind == "a tuple index" &&
|
|
||||||
["i32", "u32", "isize", "usize"].contains(&text.to_string().as_str())
|
|
||||||
{
|
|
||||||
// #59553: warn instead of reject out of hand to allow the fix to percolate
|
|
||||||
// through the ecosystem when people fix their macros
|
|
||||||
let mut err = self.struct_span_warn(
|
|
||||||
sp,
|
|
||||||
&format!("suffixes on {} are invalid", kind),
|
|
||||||
);
|
|
||||||
err.note(&format!(
|
|
||||||
"`{}` is *temporarily* accepted on tuple index fields as it was \
|
|
||||||
incorrectly accepted on stable for a few releases",
|
|
||||||
text,
|
|
||||||
));
|
|
||||||
err.help(
|
|
||||||
"on proc macros, you'll want to use `syn::Index::from` or \
|
|
||||||
`proc_macro::Literal::*_unsuffixed` for code that will desugar \
|
|
||||||
to tuple field access",
|
|
||||||
);
|
|
||||||
err.note(
|
|
||||||
"for more context, see https://github.com/rust-lang/rust/issues/60210",
|
|
||||||
);
|
|
||||||
err
|
|
||||||
} else {
|
|
||||||
self.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
|
|
||||||
};
|
|
||||||
err.span_label(sp, format!("invalid suffix `{}`", text));
|
|
||||||
err.emit();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
|
/// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
|
||||||
@ -1423,7 +1382,7 @@ pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn look_ahead_span(&self, dist: usize) -> Span {
|
crate fn look_ahead_span(&self, dist: usize) -> Span {
|
||||||
if dist == 0 {
|
if dist == 0 {
|
||||||
return self.span
|
return self.span
|
||||||
}
|
}
|
||||||
@ -1452,9 +1411,6 @@ fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
|
|||||||
crate fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
|
crate fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
|
||||||
self.sess.span_diagnostic.struct_span_err(sp, m)
|
self.sess.span_diagnostic.struct_span_err(sp, m)
|
||||||
}
|
}
|
||||||
fn struct_span_warn<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
|
|
||||||
self.sess.span_diagnostic.struct_span_warn(sp, m)
|
|
||||||
}
|
|
||||||
crate fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
|
crate fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
|
||||||
self.sess.span_diagnostic.span_bug(sp, m)
|
self.sess.span_diagnostic.span_bug(sp, m)
|
||||||
}
|
}
|
||||||
@ -2069,88 +2025,6 @@ fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option<P<ast::Expr>
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
    /// Matches `token_lit = LIT_INTEGER | ...`.
    ///
    /// Converts the current token into a semantic `ast::LitKind`, handling
    /// three cases: an interpolated literal/expression from a macro, a plain
    /// literal token, and a recovery path for a float written without an
    /// integer part (e.g. `.4`).
    fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
        let out = match self.token {
            // An interpolated token (from a macro expansion): accept it only
            // if the interpolated expression is itself a literal.
            token::Interpolated(ref nt) => match **nt {
                token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
                    ExprKind::Lit(ref lit) => { lit.node.clone() }
                    _ => { return self.unexpected_last(&self.token); }
                },
                _ => { return self.unexpected_last(&self.token); }
            },
            // An ordinary literal token: lower it to a `LitKind`.
            token::Literal(lit, suf) => {
                let diag = Some((self.span, &self.sess.span_diagnostic));
                // NOTE(review): `suffix_illegal` appears to mean this literal
                // kind never takes a suffix — confirm at `parse::lit_token`.
                let (suffix_illegal, result) = parse::lit_token(lit, suf, diag);

                if suffix_illegal {
                    let sp = self.span;
                    self.expect_no_suffix(sp, &format!("a {}", lit.literal_name()), suf)
                }

                result.unwrap()
            }
            // Recovery: `.` directly followed by an integer literal token,
            // i.e. a float missing its integer part.
            token::Dot if self.look_ahead(1, |t| match t {
                token::Literal(parse::token::Lit::Integer(_) , _) => true,
                _ => false,
            }) => { // recover from `let x = .4;`
                let lo = self.span;
                self.bump();
                if let token::Literal(
                    parse::token::Lit::Integer(val),
                    suffix,
                ) = self.token {
                    // Only `f32`/`f64` suffixes are meaningful here; anything
                    // else is dropped for the suggestion.
                    let suffix = suffix.and_then(|s| {
                        let s = s.as_str();
                        if s == "f32" {
                            Some("f32")
                        } else if s == "f64" {
                            Some("f64")
                        } else {
                            None
                        }
                    }).unwrap_or("");
                    self.bump();
                    let sp = lo.to(self.prev_span);
                    let mut err = self.diagnostic()
                        .struct_span_err(sp, "float literals must have an integer part");
                    // Suggest prefixing with `0`, e.g. `.4` -> `0.4`.
                    err.span_suggestion(
                        sp,
                        "must have an integer part",
                        format!("0.{}{}", val, suffix),
                        Applicability::MachineApplicable,
                    );
                    err.emit();
                    return Ok(match suffix {
                        "f32" => ast::LitKind::Float(val, ast::FloatTy::F32),
                        "f64" => ast::LitKind::Float(val, ast::FloatTy::F64),
                        _ => ast::LitKind::FloatUnsuffixed(val),
                    });
                } else {
                    // Guarded by the `look_ahead` above.
                    unreachable!();
                };
            }
            _ => { return self.unexpected_last(&self.token); }
        };

        // Consume the literal token we just lowered.
        self.bump();
        Ok(out)
    }
|
|
||||||
|
|
||||||
/// Matches `lit = true | false | token_lit`.
|
|
||||||
crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
|
|
||||||
let lo = self.span;
|
|
||||||
let lit = if self.eat_keyword(keywords::True) {
|
|
||||||
LitKind::Bool(true)
|
|
||||||
} else if self.eat_keyword(keywords::False) {
|
|
||||||
LitKind::Bool(false)
|
|
||||||
} else {
|
|
||||||
let lit = self.parse_lit_token()?;
|
|
||||||
lit
|
|
||||||
};
|
|
||||||
Ok(source_map::Spanned { node: lit, span: lo.to(self.prev_span) })
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
|
/// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
|
||||||
crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
|
crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
|
||||||
maybe_whole_expr!(self);
|
maybe_whole_expr!(self);
|
||||||
|
@ -61,6 +61,7 @@ pub fn is_empty(self) -> bool {
|
|||||||
|
|
||||||
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
|
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
|
||||||
pub enum Lit {
|
pub enum Lit {
|
||||||
|
Bool(ast::Name), // AST only, must never appear in a `Token`
|
||||||
Byte(ast::Name),
|
Byte(ast::Name),
|
||||||
Char(ast::Name),
|
Char(ast::Name),
|
||||||
Err(ast::Name),
|
Err(ast::Name),
|
||||||
@ -72,9 +73,13 @@ pub enum Lit {
|
|||||||
ByteStrRaw(ast::Name, u16), /* raw byte str delimited by n hash symbols */
|
ByteStrRaw(ast::Name, u16), /* raw byte str delimited by n hash symbols */
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(target_arch = "x86_64")]
|
||||||
|
static_assert!(MEM_SIZE_OF_LIT: mem::size_of::<Lit>() == 8);
|
||||||
|
|
||||||
impl Lit {
|
impl Lit {
|
||||||
crate fn literal_name(&self) -> &'static str {
|
crate fn literal_name(&self) -> &'static str {
|
||||||
match *self {
|
match *self {
|
||||||
|
Bool(_) => panic!("literal token contains `Lit::Bool`"),
|
||||||
Byte(_) => "byte literal",
|
Byte(_) => "byte literal",
|
||||||
Char(_) => "char literal",
|
Char(_) => "char literal",
|
||||||
Err(_) => "invalid literal",
|
Err(_) => "invalid literal",
|
||||||
@ -85,6 +90,13 @@ impl Lit {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
crate fn may_have_suffix(&self) -> bool {
|
||||||
|
match *self {
|
||||||
|
Integer(..) | Float(..) => true,
|
||||||
|
_ => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// See comments in `Nonterminal::to_tokenstream` for why we care about
|
// See comments in `Nonterminal::to_tokenstream` for why we care about
|
||||||
// *probably* equal here rather than actual equality
|
// *probably* equal here rather than actual equality
|
||||||
fn probably_equal_for_proc_macro(&self, other: &Lit) -> bool {
|
fn probably_equal_for_proc_macro(&self, other: &Lit) -> bool {
|
||||||
|
@ -20,10 +20,8 @@
|
|||||||
use syntax_pos::{self, BytePos};
|
use syntax_pos::{self, BytePos};
|
||||||
use syntax_pos::{DUMMY_SP, FileName};
|
use syntax_pos::{DUMMY_SP, FileName};
|
||||||
|
|
||||||
use std::ascii;
|
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
use std::io::{self, Write, Read};
|
use std::io::{self, Write, Read};
|
||||||
use std::iter::Peekable;
|
|
||||||
use std::vec;
|
use std::vec;
|
||||||
|
|
||||||
pub enum AnnNode<'a> {
|
pub enum AnnNode<'a> {
|
||||||
@ -49,8 +47,7 @@ impl PpAnn for NoAnn {}
|
|||||||
pub struct State<'a> {
|
pub struct State<'a> {
|
||||||
pub s: pp::Printer<'a>,
|
pub s: pp::Printer<'a>,
|
||||||
cm: Option<&'a SourceMap>,
|
cm: Option<&'a SourceMap>,
|
||||||
comments: Option<Vec<comments::Comment> >,
|
comments: Option<Vec<comments::Comment>>,
|
||||||
literals: Peekable<vec::IntoIter<comments::Literal>>,
|
|
||||||
cur_cmnt: usize,
|
cur_cmnt: usize,
|
||||||
boxes: Vec<pp::Breaks>,
|
boxes: Vec<pp::Breaks>,
|
||||||
ann: &'a (dyn PpAnn+'a),
|
ann: &'a (dyn PpAnn+'a),
|
||||||
@ -62,7 +59,6 @@ fn rust_printer<'a>(writer: Box<dyn Write+'a>, ann: &'a dyn PpAnn) -> State<'a>
|
|||||||
s: pp::mk_printer(writer, DEFAULT_COLUMNS),
|
s: pp::mk_printer(writer, DEFAULT_COLUMNS),
|
||||||
cm: None,
|
cm: None,
|
||||||
comments: None,
|
comments: None,
|
||||||
literals: vec![].into_iter().peekable(),
|
|
||||||
cur_cmnt: 0,
|
cur_cmnt: 0,
|
||||||
boxes: Vec::new(),
|
boxes: Vec::new(),
|
||||||
ann,
|
ann,
|
||||||
@ -75,8 +71,7 @@ fn rust_printer<'a>(writer: Box<dyn Write+'a>, ann: &'a dyn PpAnn) -> State<'a>
|
|||||||
pub const DEFAULT_COLUMNS: usize = 78;
|
pub const DEFAULT_COLUMNS: usize = 78;
|
||||||
|
|
||||||
/// Requires you to pass an input filename and reader so that
|
/// Requires you to pass an input filename and reader so that
|
||||||
/// it can scan the input text for comments and literals to
|
/// it can scan the input text for comments to copy forward.
|
||||||
/// copy forward.
|
|
||||||
pub fn print_crate<'a>(cm: &'a SourceMap,
|
pub fn print_crate<'a>(cm: &'a SourceMap,
|
||||||
sess: &ParseSess,
|
sess: &ParseSess,
|
||||||
krate: &ast::Crate,
|
krate: &ast::Crate,
|
||||||
@ -118,36 +113,23 @@ pub fn new_from_input(cm: &'a SourceMap,
|
|||||||
out: Box<dyn Write+'a>,
|
out: Box<dyn Write+'a>,
|
||||||
ann: &'a dyn PpAnn,
|
ann: &'a dyn PpAnn,
|
||||||
is_expanded: bool) -> State<'a> {
|
is_expanded: bool) -> State<'a> {
|
||||||
let (cmnts, lits) = comments::gather_comments_and_literals(sess, filename, input);
|
let comments = comments::gather_comments(sess, filename, input);
|
||||||
|
State::new(cm, out, ann, Some(comments), is_expanded)
|
||||||
State::new(
|
|
||||||
cm,
|
|
||||||
out,
|
|
||||||
ann,
|
|
||||||
Some(cmnts),
|
|
||||||
// If the code is post expansion, don't use the table of
|
|
||||||
// literals, since it doesn't correspond with the literals
|
|
||||||
// in the AST anymore.
|
|
||||||
if is_expanded { None } else { Some(lits) },
|
|
||||||
is_expanded
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new(cm: &'a SourceMap,
|
pub fn new(cm: &'a SourceMap,
|
||||||
out: Box<dyn Write+'a>,
|
out: Box<dyn Write+'a>,
|
||||||
ann: &'a dyn PpAnn,
|
ann: &'a dyn PpAnn,
|
||||||
comments: Option<Vec<comments::Comment>>,
|
comments: Option<Vec<comments::Comment>>,
|
||||||
literals: Option<Vec<comments::Literal>>,
|
|
||||||
is_expanded: bool) -> State<'a> {
|
is_expanded: bool) -> State<'a> {
|
||||||
State {
|
State {
|
||||||
s: pp::mk_printer(out, DEFAULT_COLUMNS),
|
s: pp::mk_printer(out, DEFAULT_COLUMNS),
|
||||||
cm: Some(cm),
|
cm: Some(cm),
|
||||||
comments,
|
comments,
|
||||||
literals: literals.unwrap_or_default().into_iter().peekable(),
|
|
||||||
cur_cmnt: 0,
|
cur_cmnt: 0,
|
||||||
boxes: Vec::new(),
|
boxes: Vec::new(),
|
||||||
ann,
|
ann,
|
||||||
is_expanded: is_expanded
|
is_expanded,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -180,6 +162,31 @@ fn binop_to_string(op: BinOpToken) -> &'static str {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn literal_to_string(lit: token::Lit, suffix: Option<ast::Name>) -> String {
|
||||||
|
let mut out = match lit {
|
||||||
|
token::Byte(b) => format!("b'{}'", b),
|
||||||
|
token::Char(c) => format!("'{}'", c),
|
||||||
|
token::Err(c) => format!("'{}'", c),
|
||||||
|
token::Bool(c) |
|
||||||
|
token::Float(c) |
|
||||||
|
token::Integer(c) => c.to_string(),
|
||||||
|
token::Str_(s) => format!("\"{}\"", s),
|
||||||
|
token::StrRaw(s, n) => format!("r{delim}\"{string}\"{delim}",
|
||||||
|
delim="#".repeat(n as usize),
|
||||||
|
string=s),
|
||||||
|
token::ByteStr(v) => format!("b\"{}\"", v),
|
||||||
|
token::ByteStrRaw(s, n) => format!("br{delim}\"{string}\"{delim}",
|
||||||
|
delim="#".repeat(n as usize),
|
||||||
|
string=s),
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(suffix) = suffix {
|
||||||
|
out.push_str(&suffix.as_str())
|
||||||
|
}
|
||||||
|
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
pub fn token_to_string(tok: &Token) -> String {
|
pub fn token_to_string(tok: &Token) -> String {
|
||||||
match *tok {
|
match *tok {
|
||||||
token::Eq => "=".to_string(),
|
token::Eq => "=".to_string(),
|
||||||
@ -223,29 +230,7 @@ pub fn token_to_string(tok: &Token) -> String {
|
|||||||
token::SingleQuote => "'".to_string(),
|
token::SingleQuote => "'".to_string(),
|
||||||
|
|
||||||
/* Literals */
|
/* Literals */
|
||||||
token::Literal(lit, suf) => {
|
token::Literal(lit, suf) => literal_to_string(lit, suf),
|
||||||
let mut out = match lit {
|
|
||||||
token::Byte(b) => format!("b'{}'", b),
|
|
||||||
token::Char(c) => format!("'{}'", c),
|
|
||||||
token::Err(c) => format!("'{}'", c),
|
|
||||||
token::Float(c) |
|
|
||||||
token::Integer(c) => c.to_string(),
|
|
||||||
token::Str_(s) => format!("\"{}\"", s),
|
|
||||||
token::StrRaw(s, n) => format!("r{delim}\"{string}\"{delim}",
|
|
||||||
delim="#".repeat(n as usize),
|
|
||||||
string=s),
|
|
||||||
token::ByteStr(v) => format!("b\"{}\"", v),
|
|
||||||
token::ByteStrRaw(s, n) => format!("br{delim}\"{string}\"{delim}",
|
|
||||||
delim="#".repeat(n as usize),
|
|
||||||
string=s),
|
|
||||||
};
|
|
||||||
|
|
||||||
if let Some(s) = suf {
|
|
||||||
out.push_str(&s.as_str())
|
|
||||||
}
|
|
||||||
|
|
||||||
out
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Name components */
|
/* Name components */
|
||||||
token::Ident(s, false) => s.to_string(),
|
token::Ident(s, false) => s.to_string(),
|
||||||
@ -438,8 +423,6 @@ pub trait PrintState<'a> {
|
|||||||
fn boxes(&mut self) -> &mut Vec<pp::Breaks>;
|
fn boxes(&mut self) -> &mut Vec<pp::Breaks>;
|
||||||
fn comments(&mut self) -> &mut Option<Vec<comments::Comment>>;
|
fn comments(&mut self) -> &mut Option<Vec<comments::Comment>>;
|
||||||
fn cur_cmnt(&mut self) -> &mut usize;
|
fn cur_cmnt(&mut self) -> &mut usize;
|
||||||
fn cur_lit(&mut self) -> Option<&comments::Literal>;
|
|
||||||
fn bump_lit(&mut self) -> Option<comments::Literal>;
|
|
||||||
|
|
||||||
fn word_space<S: Into<Cow<'static, str>>>(&mut self, w: S) -> io::Result<()> {
|
fn word_space<S: Into<Cow<'static, str>>>(&mut self, w: S) -> io::Result<()> {
|
||||||
self.writer().word(w)?;
|
self.writer().word(w)?;
|
||||||
@ -504,21 +487,6 @@ fn commasep<T, F>(&mut self, b: Breaks, elts: &[T], mut op: F) -> io::Result<()>
|
|||||||
self.end()
|
self.end()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn next_lit(&mut self, pos: BytePos) -> Option<comments::Literal> {
|
|
||||||
while let Some(ltrl) = self.cur_lit().cloned() {
|
|
||||||
if ltrl.pos > pos { break; }
|
|
||||||
|
|
||||||
// we don't need the value here since we're forced to clone cur_lit
|
|
||||||
// due to lack of NLL.
|
|
||||||
self.bump_lit();
|
|
||||||
if ltrl.pos == pos {
|
|
||||||
return Some(ltrl);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
fn maybe_print_comment(&mut self, pos: BytePos) -> io::Result<()> {
|
fn maybe_print_comment(&mut self, pos: BytePos) -> io::Result<()> {
|
||||||
while let Some(ref cmnt) = self.next_comment() {
|
while let Some(ref cmnt) = self.next_comment() {
|
||||||
if cmnt.pos < pos {
|
if cmnt.pos < pos {
|
||||||
@ -606,60 +574,7 @@ fn next_comment(&mut self) -> Option<comments::Comment> {
|
|||||||
|
|
||||||
fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
|
fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
|
||||||
self.maybe_print_comment(lit.span.lo())?;
|
self.maybe_print_comment(lit.span.lo())?;
|
||||||
if let Some(ltrl) = self.next_lit(lit.span.lo()) {
|
self.writer().word(literal_to_string(lit.token, lit.suffix))
|
||||||
return self.writer().word(ltrl.lit.clone());
|
|
||||||
}
|
|
||||||
match lit.node {
|
|
||||||
ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style),
|
|
||||||
ast::LitKind::Err(st) => {
|
|
||||||
let st = st.as_str().escape_debug().to_string();
|
|
||||||
let mut res = String::with_capacity(st.len() + 2);
|
|
||||||
res.push('\'');
|
|
||||||
res.push_str(&st);
|
|
||||||
res.push('\'');
|
|
||||||
self.writer().word(res)
|
|
||||||
}
|
|
||||||
ast::LitKind::Byte(byte) => {
|
|
||||||
let mut res = String::from("b'");
|
|
||||||
res.extend(ascii::escape_default(byte).map(|c| c as char));
|
|
||||||
res.push('\'');
|
|
||||||
self.writer().word(res)
|
|
||||||
}
|
|
||||||
ast::LitKind::Char(ch) => {
|
|
||||||
let mut res = String::from("'");
|
|
||||||
res.extend(ch.escape_default());
|
|
||||||
res.push('\'');
|
|
||||||
self.writer().word(res)
|
|
||||||
}
|
|
||||||
ast::LitKind::Int(i, t) => {
|
|
||||||
match t {
|
|
||||||
ast::LitIntType::Signed(st) => {
|
|
||||||
self.writer().word(st.val_to_string(i as i128))
|
|
||||||
}
|
|
||||||
ast::LitIntType::Unsigned(ut) => {
|
|
||||||
self.writer().word(ut.val_to_string(i))
|
|
||||||
}
|
|
||||||
ast::LitIntType::Unsuffixed => {
|
|
||||||
self.writer().word(i.to_string())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ast::LitKind::Float(ref f, t) => {
|
|
||||||
self.writer().word(format!("{}{}", &f, t.ty_to_string()))
|
|
||||||
}
|
|
||||||
ast::LitKind::FloatUnsuffixed(ref f) => self.writer().word(f.as_str().to_string()),
|
|
||||||
ast::LitKind::Bool(val) => {
|
|
||||||
if val { self.writer().word("true") } else { self.writer().word("false") }
|
|
||||||
}
|
|
||||||
ast::LitKind::ByteStr(ref v) => {
|
|
||||||
let mut escaped: String = String::new();
|
|
||||||
for &ch in v.iter() {
|
|
||||||
escaped.extend(ascii::escape_default(ch)
|
|
||||||
.map(|c| c as char));
|
|
||||||
}
|
|
||||||
self.writer().word(format!("b\"{}\"", escaped))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn print_string(&mut self, st: &str,
|
fn print_string(&mut self, st: &str,
|
||||||
@ -880,14 +795,6 @@ fn comments(&mut self) -> &mut Option<Vec<comments::Comment>> {
|
|||||||
fn cur_cmnt(&mut self) -> &mut usize {
|
fn cur_cmnt(&mut self) -> &mut usize {
|
||||||
&mut self.cur_cmnt
|
&mut self.cur_cmnt
|
||||||
}
|
}
|
||||||
|
|
||||||
fn cur_lit(&mut self) -> Option<&comments::Literal> {
|
|
||||||
self.literals.peek()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bump_lit(&mut self) -> Option<comments::Literal> {
|
|
||||||
self.literals.next()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> State<'a> {
|
impl<'a> State<'a> {
|
||||||
|
@ -1,8 +1,8 @@
|
|||||||
error: unexpected token: `!`
|
error: unexpected token: `!`
|
||||||
--> $DIR/attr-eq-token-tree.rs:3:11
|
--> $DIR/attr-eq-token-tree.rs:3:13
|
||||||
|
|
|
|
||||||
LL | #[my_attr = !]
|
LL | #[my_attr = !]
|
||||||
| ^
|
| ^
|
||||||
|
|
||||||
error: aborting due to previous error
|
error: aborting due to previous error
|
||||||
|
|
||||||
|
@ -1,8 +1,8 @@
|
|||||||
error: unexpected token: `,`
|
error: unexpected token: `,`
|
||||||
--> $DIR/exclusive_range_pattern_syntax_collision.rs:5:15
|
--> $DIR/exclusive_range_pattern_syntax_collision.rs:5:17
|
||||||
|
|
|
|
||||||
LL | [_, 99.., _] => {},
|
LL | [_, 99.., _] => {},
|
||||||
| ^^
|
| ^
|
||||||
|
|
||||||
error: aborting due to previous error
|
error: aborting due to previous error
|
||||||
|
|
||||||
|
@ -1,8 +1,8 @@
|
|||||||
error: unexpected token: `]`
|
error: unexpected token: `]`
|
||||||
--> $DIR/exclusive_range_pattern_syntax_collision2.rs:5:15
|
--> $DIR/exclusive_range_pattern_syntax_collision2.rs:5:17
|
||||||
|
|
|
|
||||||
LL | [_, 99..] => {},
|
LL | [_, 99..] => {},
|
||||||
| ^^
|
| ^
|
||||||
|
|
||||||
error: aborting due to previous error
|
error: aborting due to previous error
|
||||||
|
|
||||||
|
@ -1,8 +1,8 @@
|
|||||||
error: unexpected token: `$`
|
error: unexpected token: `$`
|
||||||
--> $DIR/macro-attribute.rs:1:7
|
--> $DIR/macro-attribute.rs:1:9
|
||||||
|
|
|
|
||||||
LL | #[doc = $not_there]
|
LL | #[doc = $not_there]
|
||||||
| ^
|
| ^
|
||||||
|
|
||||||
error: aborting due to previous error
|
error: aborting due to previous error
|
||||||
|
|
||||||
|
@ -2,8 +2,7 @@
|
|||||||
|
|
||||||
macro_rules! check {
|
macro_rules! check {
|
||||||
($expr: expr) => (
|
($expr: expr) => (
|
||||||
#[my_attr = $expr] //~ ERROR suffixed literals are not allowed in attributes
|
#[my_attr = $expr] //~ ERROR unexpected token: `-0`
|
||||||
//~| ERROR unexpected token: `-0`
|
|
||||||
//~| ERROR unexpected token: `0 + 0`
|
//~| ERROR unexpected token: `0 + 0`
|
||||||
use main as _;
|
use main as _;
|
||||||
);
|
);
|
||||||
@ -11,7 +10,7 @@ macro_rules! check {
|
|||||||
|
|
||||||
check!("0"); // OK
|
check!("0"); // OK
|
||||||
check!(0); // OK
|
check!(0); // OK
|
||||||
check!(0u8); // ERROR, see above
|
check!(0u8); //~ ERROR suffixed literals are not allowed in attributes
|
||||||
check!(-0); // ERROR, see above
|
check!(-0); // ERROR, see above
|
||||||
check!(0 + 0); // ERROR, see above
|
check!(0 + 0); // ERROR, see above
|
||||||
|
|
||||||
|
@ -1,28 +1,25 @@
|
|||||||
error: suffixed literals are not allowed in attributes
|
error: suffixed literals are not allowed in attributes
|
||||||
|
--> $DIR/malformed-interpolated.rs:13:8
|
||||||
|
|
|
||||||
|
LL | check!(0u8);
|
||||||
|
| ^^^
|
||||||
|
|
|
||||||
|
= help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
|
||||||
|
|
||||||
|
error: unexpected token: `-0`
|
||||||
--> $DIR/malformed-interpolated.rs:5:21
|
--> $DIR/malformed-interpolated.rs:5:21
|
||||||
|
|
|
|
||||||
LL | #[my_attr = $expr]
|
LL | #[my_attr = $expr]
|
||||||
| ^^^^^
|
| ^^^^^
|
||||||
...
|
...
|
||||||
LL | check!(0u8); // ERROR, see above
|
|
||||||
| ------------ in this macro invocation
|
|
||||||
|
|
|
||||||
= help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
|
|
||||||
|
|
||||||
error: unexpected token: `-0`
|
|
||||||
--> $DIR/malformed-interpolated.rs:5:19
|
|
||||||
|
|
|
||||||
LL | #[my_attr = $expr]
|
|
||||||
| ^
|
|
||||||
...
|
|
||||||
LL | check!(-0); // ERROR, see above
|
LL | check!(-0); // ERROR, see above
|
||||||
| ----------- in this macro invocation
|
| ----------- in this macro invocation
|
||||||
|
|
||||||
error: unexpected token: `0 + 0`
|
error: unexpected token: `0 + 0`
|
||||||
--> $DIR/malformed-interpolated.rs:5:19
|
--> $DIR/malformed-interpolated.rs:5:21
|
||||||
|
|
|
|
||||||
LL | #[my_attr = $expr]
|
LL | #[my_attr = $expr]
|
||||||
| ^
|
| ^^^^^
|
||||||
...
|
...
|
||||||
LL | check!(0 + 0); // ERROR, see above
|
LL | check!(0 + 0); // ERROR, see above
|
||||||
| -------------- in this macro invocation
|
| -------------- in this macro invocation
|
||||||
|
@ -1,8 +1,8 @@
|
|||||||
error: unexpected token: `]`
|
error: unexpected token: `]`
|
||||||
--> $DIR/attr-bad-meta-2.rs:1:8
|
--> $DIR/attr-bad-meta-2.rs:1:9
|
||||||
|
|
|
|
||||||
LL | #[path =]
|
LL | #[path =]
|
||||||
| ^
|
| ^
|
||||||
|
|
||||||
error: aborting due to previous error
|
error: aborting due to previous error
|
||||||
|
|
||||||
|
@ -1,8 +1,8 @@
|
|||||||
error: unexpected token: `)`
|
error: unexpected token: `)`
|
||||||
--> $DIR/pat-tuple-5.rs:3:14
|
--> $DIR/pat-tuple-5.rs:3:16
|
||||||
|
|
|
|
||||||
LL | (pat ..) => {}
|
LL | (pat ..) => {}
|
||||||
| ^^
|
| ^
|
||||||
|
|
||||||
error: aborting due to previous error
|
error: aborting due to previous error
|
||||||
|
|
||||||
|
Loading…
Reference in New Issue
Block a user