Rollup merge of #84717 - dtolnay:literalfromstr, r=petrochenkov
impl FromStr for proc_macro::Literal

Note that unlike `impl FromStr for proc_macro::TokenStream`, this impl does not permit whitespace or comments. The input string must consist of nothing but your literal.

- `"1".parse::<Literal>()` ⟶ ok
- `"1.0".parse::<Literal>()` ⟶ ok
- `"'a'".parse::<Literal>()` ⟶ ok
- `"\"\n\"".parse::<Literal>()` ⟶ ok
- `"0 1".parse::<Literal>()` ⟶ LexError
- `" 0".parse::<Literal>()` ⟶ LexError
- `"0 ".parse::<Literal>()` ⟶ LexError
- `"/* comment */0".parse::<Literal>()` ⟶ LexError
- `"0/* comment */".parse::<Literal>()` ⟶ LexError
- `"0// comment".parse::<Literal>()` ⟶ LexError

---

## Use case

```rust
let hex_int: Literal = format!("0x{:x}", int).parse().unwrap();
```

The only way this is expressible in the current API is significantly worse:

```rust
let hex_int = match format!("0x{:x}", int)
    .parse::<TokenStream>()
    .unwrap()
    .into_iter()
    .next()
    .unwrap()
{
    TokenTree::Literal(literal) => literal,
    _ => unreachable!(),
};
```
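As a further illustration (not part of this PR), here is a hedged sketch of how a function-like proc macro might use the new impl end to end. The macro name `hexify` and its input handling are hypothetical:

```rust
extern crate proc_macro;
use proc_macro::{Literal, TokenStream, TokenTree};

/// Hypothetical macro: re-emits an integer literal in hexadecimal form.
#[proc_macro]
pub fn hexify(input: TokenStream) -> TokenStream {
    // Assume the invocation is `hexify!(255)`, i.e. a single unsuffixed integer literal.
    let int: u64 = input.to_string().trim().parse().expect("expected an integer literal");
    // With this PR, a Literal can be parsed directly from its string form.
    let hex_int: Literal = format!("0x{:x}", int).parse().unwrap();
    TokenStream::from(TokenTree::Literal(hex_int))
}
```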
Commit a1ac372894
```diff
@@ -1,9 +1,7 @@
 use crate::base::{ExtCtxt, ResolverExpand};
 
 use rustc_ast as ast;
-use rustc_ast::token;
-use rustc_ast::token::Nonterminal;
-use rustc_ast::token::NtIdent;
+use rustc_ast::token::{self, Nonterminal, NtIdent, TokenKind};
 use rustc_ast::tokenstream::{self, CanSynthesizeMissingTokens};
 use rustc_ast::tokenstream::{DelimSpan, Spacing::*, TokenStream, TreeAndSpacing};
 use rustc_ast_pretty::pprust;
```
```diff
@@ -541,6 +539,33 @@ impl server::Ident for Rustc<'_> {
 }
 
 impl server::Literal for Rustc<'_> {
+    fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
+        let override_span = None;
+        let stream = parse_stream_from_source_str(
+            FileName::proc_macro_source_code(s),
+            s.to_owned(),
+            self.sess,
+            override_span,
+        );
+        if stream.len() != 1 {
+            return Err(());
+        }
+        let tree = stream.into_trees().next().unwrap();
+        let token = match tree {
+            tokenstream::TokenTree::Token(token) => token,
+            tokenstream::TokenTree::Delimited { .. } => return Err(()),
+        };
+        let span_data = token.span.data();
+        if (span_data.hi.0 - span_data.lo.0) as usize != s.len() {
+            // There is a comment or whitespace adjacent to the literal.
+            return Err(());
+        }
+        let lit = match token.kind {
+            TokenKind::Literal(lit) => lit,
+            _ => return Err(()),
+        };
+        Ok(Literal { lit, span: self.call_site })
+    }
     fn debug_kind(&mut self, literal: &Self::Literal) -> String {
         format!("{:?}", literal.lit.kind)
     }
```
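One detail of the server-side `from_str` worth spelling out: the parser happily produces a single token for inputs such as `" 0"` or `"0// comment"`, so rejecting surrounding trivia is done by comparing the token's span length against the length of the input string. A minimal standalone sketch of that idea, with a hypothetical `Token` type standing in for rustc's span data:

```rust
// Hypothetical stand-in for a lexed token carrying byte offsets into the input.
struct Token {
    lo: usize,
    hi: usize,
}

// Mirrors the `(hi - lo) as usize != s.len()` check above: the single token
// must cover every byte of the input, otherwise whitespace or a comment was
// skipped around the literal.
fn covers_entire_input(token: &Token, input: &str) -> bool {
    token.hi - token.lo == input.len()
}

fn main() {
    // " 0" lexes to one token spanning bytes 1..2 of a 2-byte input: rejected.
    assert!(!covers_entire_input(&Token { lo: 1, hi: 2 }, " 0"));
    // "0" lexes to one token spanning bytes 0..1 of a 1-byte input: accepted.
    assert!(covers_entire_input(&Token { lo: 0, hi: 1 }, "0"));
}
```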
```diff
@@ -107,6 +107,7 @@ macro_rules! with_api {
             Literal {
                 fn drop($self: $S::Literal);
                 fn clone($self: &$S::Literal) -> $S::Literal;
+                fn from_str(s: &str) -> Result<$S::Literal, ()>;
                 fn debug_kind($self: &$S::Literal) -> String;
                 fn symbol($self: &$S::Literal) -> String;
                 fn suffix($self: &$S::Literal) -> Option<String>;
```
```diff
@@ -315,6 +316,19 @@ impl<T: Unmark> Unmark for Option<T> {
     }
 }
 
+impl<T: Mark, E: Mark> Mark for Result<T, E> {
+    type Unmarked = Result<T::Unmarked, E::Unmarked>;
+    fn mark(unmarked: Self::Unmarked) -> Self {
+        unmarked.map(T::mark).map_err(E::mark)
+    }
+}
+impl<T: Unmark, E: Unmark> Unmark for Result<T, E> {
+    type Unmarked = Result<T::Unmarked, E::Unmarked>;
+    fn unmark(self) -> Self::Unmarked {
+        self.map(T::unmark).map_err(E::unmark)
+    }
+}
+
 macro_rules! mark_noop {
     ($($ty:ty),* $(,)?) => {
         $(
```
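These `Mark`/`Unmark` additions are presumably needed because `from_str` returns `Result<$S::Literal, ()>` across the bridge, so `Result` has to participate in the same marking scheme that `Option` already does. A toy, self-contained sketch of the delegation pattern (the trait definitions here are simplified stand-ins, not the real bridge traits):

```rust
// Simplified stand-ins for the bridge's Mark/Unmark traits.
trait Mark {
    type Unmarked;
    fn mark(unmarked: Self::Unmarked) -> Self;
}
trait Unmark {
    type Unmarked;
    fn unmark(self) -> Self::Unmarked;
}

// The pattern added above: Result simply delegates to the impls of its
// Ok and Err payloads via map / map_err.
impl<T: Mark, E: Mark> Mark for Result<T, E> {
    type Unmarked = Result<T::Unmarked, E::Unmarked>;
    fn mark(unmarked: Self::Unmarked) -> Self {
        unmarked.map(T::mark).map_err(E::mark)
    }
}
```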
```diff
@@ -91,6 +91,12 @@ pub struct LexError {
     _inner: (),
 }
 
+impl LexError {
+    fn new() -> Self {
+        LexError { _inner: () }
+    }
+}
+
 #[stable(feature = "proc_macro_lexerror_impls", since = "1.44.0")]
 impl fmt::Display for LexError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
```
```diff
@@ -1171,6 +1177,28 @@ impl Literal {
     }
 }
 
+/// Parse a single literal from its stringified representation.
+///
+/// In order to parse successfully, the input string must not contain anything
+/// but the literal token. Specifically, it must not contain whitespace or
+/// comments in addition to the literal.
+///
+/// The resulting literal token will have a `Span::call_site()` span.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We
+/// reserve the right to change these errors into `LexError`s later.
+#[stable(feature = "proc_macro_literal_parse", since = "1.54.0")]
+impl FromStr for Literal {
+    type Err = LexError;
+
+    fn from_str(src: &str) -> Result<Self, LexError> {
+        match bridge::client::Literal::from_str(src) {
+            Ok(literal) => Ok(Literal(literal)),
+            Err(()) => Err(LexError::new()),
+        }
+    }
+}
+
 // N.B., the bridge only provides `to_string`, implement `fmt::Display`
 // based on it (the reverse of the usual relationship between the two).
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
```
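From the caller's side, the documented behavior looks like the following hedged sketch. The `build_literals` helper is illustrative only, and it is meaningful only inside a proc macro expansion, since `Literal` values cannot be constructed in ordinary code at runtime:

```rust
extern crate proc_macro;
use proc_macro::Literal;

// Somewhere inside a proc macro's expansion logic:
fn build_literals(n: u64) {
    // A lone literal, including its suffix, parses successfully.
    let width: Literal = "4096u32".parse().unwrap();
    // Formatting first makes it easy to produce e.g. hexadecimal literals.
    let hex: Literal = format!("0x{:x}", n).parse().unwrap();
    // Surrounding whitespace or comments are rejected with a LexError.
    assert!(" 1".parse::<Literal>().is_err());
    let _ = (width, hex);
}
```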
```diff
@@ -1,8 +1,10 @@
-#![feature(proc_macro_span)]
+use proc_macro::{LineColumn, Punct, Spacing};
 
-use proc_macro::{LineColumn, Punct};
-
-#[test]
+pub fn test() {
+    test_line_column_ord();
+    test_punct_eq();
+}
+
 fn test_line_column_ord() {
     let line0_column0 = LineColumn { line: 0, column: 0 };
     let line0_column1 = LineColumn { line: 0, column: 1 };
```
```diff
@@ -11,10 +13,9 @@ fn test_line_column_ord() {
     assert!(line0_column1 < line1_column0);
 }
 
-#[test]
 fn test_punct_eq() {
-    // Good enough if it typechecks, since proc_macro::Punct can't exist in a test.
-    fn _check(punct: Punct) {
-        let _ = punct == ':';
-    }
+    let colon_alone = Punct::new(':', Spacing::Alone);
+    assert_eq!(colon_alone, ':');
+    let colon_joint = Punct::new(':', Spacing::Joint);
+    assert_eq!(colon_joint, ':');
 }
```
src/test/ui/proc-macro/auxiliary/api/mod.rs (new file, 24 lines)

```diff
@@ -0,0 +1,24 @@
+// force-host
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+#![crate_name = "proc_macro_api_tests"]
+#![feature(proc_macro_span)]
+#![deny(dead_code)] // catch if a test function is never called
+
+extern crate proc_macro;
+
+mod cmp;
+mod parse;
+
+use proc_macro::TokenStream;
+
+#[proc_macro]
+pub fn run(input: TokenStream) -> TokenStream {
+    assert!(input.is_empty());
+
+    cmp::test();
+    parse::test();
+
+    TokenStream::new()
+}
```
src/test/ui/proc-macro/auxiliary/api/parse.rs (new file, 23 lines)

```diff
@@ -0,0 +1,23 @@
+use proc_macro::Literal;
+
+pub fn test() {
+    test_parse_literal();
+}
+
+fn test_parse_literal() {
+    assert_eq!("1".parse::<Literal>().unwrap().to_string(), "1");
+    assert_eq!("1.0".parse::<Literal>().unwrap().to_string(), "1.0");
+    assert_eq!("'a'".parse::<Literal>().unwrap().to_string(), "'a'");
+    assert_eq!("\"\n\"".parse::<Literal>().unwrap().to_string(), "\"\n\"");
+    assert_eq!("b\"\"".parse::<Literal>().unwrap().to_string(), "b\"\"");
+    assert_eq!("r##\"\"##".parse::<Literal>().unwrap().to_string(), "r##\"\"##");
+    assert_eq!("10ulong".parse::<Literal>().unwrap().to_string(), "10ulong");
+
+    assert!("0 1".parse::<Literal>().is_err());
+    assert!("'a".parse::<Literal>().is_err());
+    assert!(" 0".parse::<Literal>().is_err());
+    assert!("0 ".parse::<Literal>().is_err());
+    assert!("/* comment */0".parse::<Literal>().is_err());
+    assert!("0/* comment */".parse::<Literal>().is_err());
+    assert!("0// comment".parse::<Literal>().is_err());
+}
```
src/test/ui/proc-macro/test.rs (new file, 12 lines)

```diff
@@ -0,0 +1,12 @@
+// check-pass
+// aux-build:api/mod.rs
+
+//! This is for everything that *would* be a #[test] inside of libproc_macro,
+//! except for the fact that proc_macro objects are not capable of existing
+//! inside of an ordinary Rust test execution, only inside a macro.
+
+extern crate proc_macro_api_tests;
+
+proc_macro_api_tests::run!();
+
+fn main() {}
```