Auto merge of #14580 - lowr:patch/parse-more-unstable-pat, r=Veykril

Parse more exclusive range patterns and inline const patterns

Closes #13955

This PR:
- implements exclusive range patterns without a start bound (tracking issue: rust-lang/rust#37854)
  - additionally, rest-pattern handling is moved into the same place, since the two only differ in whether an end-bound pattern follows; this also resolves some FIXMEs
- updates the `PATTERN_FIRST` token set to include the `const` token so that inline const patterns can be parsed inside nested patterns
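
For reference, the snippet below (adapted from the parser test sources added in this diff, not taken verbatim) shows the pattern forms that now parse; the syntax is still unstable and only compiles on a nightly toolchain with the corresponding feature gates (e.g. `exclusive_range_pattern`, `inline_const_pat`) enabled:

```rust
fn main() {
    match 42 {
        // exclusive range pattern without a start bound
        ..0 => (),
        // exclusive range pattern with both bounds
        1..2 => (),
        // inline const patterns as range bounds
        const { 0 } .. const { 1 } => (),
        const { 2 } .. => (),
        _ => (),
    }

    // inline const pattern nested inside a tuple pattern
    let (const { () },) = ();
}
```
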
bors 2023-04-15 20:11:30 +00:00
commit c28f538568
7 changed files with 253 additions and 38 deletions


@@ -106,7 +106,6 @@ macro_rules! m {
 
 #[test]
 fn range_patterns() {
-    // FIXME: rustc thinks there are three patterns here, not one.
     check(
         r#"
 macro_rules! m {
@@ -118,7 +117,7 @@ macro_rules! m {
 macro_rules! m {
     ($($p:pat)*) => (stringify!($($p |)*);)
 }
-stringify!(.. .. .. |);
+stringify!(.. | .. | .. |);
 "#]],
     );
 }


@@ -5,6 +5,7 @@
         T![box],
         T![ref],
         T![mut],
+        T![const],
         T!['('],
         T!['['],
         T![&],
@@ -15,6 +16,10 @@
 const PAT_TOP_FIRST: TokenSet = PATTERN_FIRST.union(TokenSet::new(&[T![|]]));
 
+/// Set of possible tokens at the start of a range pattern's end bound.
+const RANGE_PAT_END_FIRST: TokenSet =
+    expressions::LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[T![-], T![const]]));
+
 pub(crate) fn pattern(p: &mut Parser<'_>) {
     pattern_r(p, PAT_RECOVERY_SET);
 }
@@ -105,6 +110,52 @@ fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
         return;
     }
 
+    // test exclusive_range_pat
+    // fn main() {
+    //     match 42 {
+    //         ..0 => {}
+    //         1..2 => {}
+    //     }
+    // }
+
+    // test dot_dot_pat
+    // fn main() {
+    //     let .. = ();
+    //     //
+    //     // Tuples
+    //     //
+    //     let (a, ..) = ();
+    //     let (a, ..,) = ();
+    //     let Tuple(a, ..) = ();
+    //     let Tuple(a, ..,) = ();
+    //     let (.., ..) = ();
+    //     let Tuple(.., ..) = ();
+    //     let (.., a, ..) = ();
+    //     let Tuple(.., a, ..) = ();
+    //     //
+    //     // Slices
+    //     //
+    //     let [..] = ();
+    //     let [head, ..] = ();
+    //     let [head, tail @ ..] = ();
+    //     let [head, .., cons] = ();
+    //     let [head, mid @ .., cons] = ();
+    //     let [head, .., .., cons] = ();
+    //     let [head, .., mid, tail @ ..] = ();
+    //     let [head, .., mid, .., cons] = ();
+    // }
+    if p.at(T![..]) {
+        let m = p.start();
+        p.bump(T![..]);
+        if p.at_ts(RANGE_PAT_END_FIRST) {
+            atom_pat(p, recovery_set);
+            m.complete(p, RANGE_PAT);
+        } else {
+            m.complete(p, REST_PAT);
+        }
+        return;
+    }
+
     if let Some(lhs) = atom_pat(p, recovery_set) {
         for range_op in [T![...], T![..=], T![..]] {
             if p.at(range_op) {
@@ -173,7 +224,6 @@ fn atom_pat(p: &mut Parser<'_>, recovery_set: TokenSet) -> Option<CompletedMarker>
         _ if paths::is_path_start(p) => path_or_macro_pat(p),
         _ if is_literal_pat_start(p) => literal_pat(p),
-        T![.] if p.at(T![..]) => rest_pat(p),
         T![_] => wildcard_pat(p),
         T![&] => ref_pat(p),
         T!['('] => tuple_pat(p),
@@ -334,39 +384,6 @@ fn wildcard_pat(p: &mut Parser<'_>) -> CompletedMarker {
     m.complete(p, WILDCARD_PAT)
 }
 
-// test dot_dot_pat
-// fn main() {
-//     let .. = ();
-//     //
-//     // Tuples
-//     //
-//     let (a, ..) = ();
-//     let (a, ..,) = ();
-//     let Tuple(a, ..) = ();
-//     let Tuple(a, ..,) = ();
-//     let (.., ..) = ();
-//     let Tuple(.., ..) = ();
-//     let (.., a, ..) = ();
-//     let Tuple(.., a, ..) = ();
-//     //
-//     // Slices
-//     //
-//     let [..] = ();
-//     let [head, ..] = ();
-//     let [head, tail @ ..] = ();
-//     let [head, .., cons] = ();
-//     let [head, mid @ .., cons] = ();
-//     let [head, .., .., cons] = ();
-//     let [head, .., mid, tail @ ..] = ();
-//     let [head, .., mid, .., cons] = ();
-// }
-fn rest_pat(p: &mut Parser<'_>) -> CompletedMarker {
-    assert!(p.at(T![..]));
-    let m = p.start();
-    p.bump(T![..]);
-    m.complete(p, REST_PAT)
-}
-
 // test ref_pat
 // fn main() {
 //     let &a = ();
@@ -483,6 +500,14 @@ fn box_pat(p: &mut Parser<'_>) -> CompletedMarker {
 // fn main() {
 //     let const { 15 } = ();
 //     let const { foo(); bar() } = ();
+//
+//     match 42 {
+//         const { 0 } .. const { 1 } => (),
+//         .. const { 0 } => (),
+//         const { 2 } .. => (),
+//     }
+//
+//     let (const { () },) = ();
 // }
 fn const_block_pat(p: &mut Parser<'_>) -> CompletedMarker {
     assert!(p.at(T![const]));


@@ -33,8 +33,7 @@ fn stmt() {
 fn pat() {
     check(PrefixEntryPoint::Pat, "x y", "x");
     check(PrefixEntryPoint::Pat, "fn f() {}", "fn");
-    // FIXME: This one is wrong, we should consume only one pattern.
-    check(PrefixEntryPoint::Pat, ".. ..", ".. ..");
+    check(PrefixEntryPoint::Pat, ".. ..", "..");
 }
 
 #[test]


@@ -74,6 +74,126 @@ SOURCE_FILE
 L_PAREN "("
 R_PAREN ")"
 SEMICOLON ";"
+WHITESPACE "\n\n "
+EXPR_STMT
+MATCH_EXPR
+MATCH_KW "match"
+WHITESPACE " "
+LITERAL
+INT_NUMBER "42"
+WHITESPACE " "
+MATCH_ARM_LIST
+L_CURLY "{"
+WHITESPACE "\n "
+MATCH_ARM
+RANGE_PAT
+CONST_BLOCK_PAT
+CONST_KW "const"
+WHITESPACE " "
+BLOCK_EXPR
+STMT_LIST
+L_CURLY "{"
+WHITESPACE " "
+LITERAL
+INT_NUMBER "0"
+WHITESPACE " "
+R_CURLY "}"
+WHITESPACE " "
+DOT2 ".."
+WHITESPACE " "
+CONST_BLOCK_PAT
+CONST_KW "const"
+WHITESPACE " "
+BLOCK_EXPR
+STMT_LIST
+L_CURLY "{"
+WHITESPACE " "
+LITERAL
+INT_NUMBER "1"
+WHITESPACE " "
+R_CURLY "}"
+WHITESPACE " "
+FAT_ARROW "=>"
+WHITESPACE " "
+TUPLE_EXPR
+L_PAREN "("
+R_PAREN ")"
+COMMA ","
+WHITESPACE "\n "
+MATCH_ARM
+RANGE_PAT
+DOT2 ".."
+WHITESPACE " "
+CONST_BLOCK_PAT
+CONST_KW "const"
+WHITESPACE " "
+BLOCK_EXPR
+STMT_LIST
+L_CURLY "{"
+WHITESPACE " "
+LITERAL
+INT_NUMBER "0"
+WHITESPACE " "
+R_CURLY "}"
+WHITESPACE " "
+FAT_ARROW "=>"
+WHITESPACE " "
+TUPLE_EXPR
+L_PAREN "("
+R_PAREN ")"
+COMMA ","
+WHITESPACE "\n "
+MATCH_ARM
+RANGE_PAT
+CONST_BLOCK_PAT
+CONST_KW "const"
+WHITESPACE " "
+BLOCK_EXPR
+STMT_LIST
+L_CURLY "{"
+WHITESPACE " "
+LITERAL
+INT_NUMBER "2"
+WHITESPACE " "
+R_CURLY "}"
+WHITESPACE " "
+DOT2 ".."
+WHITESPACE " "
+FAT_ARROW "=>"
+WHITESPACE " "
+TUPLE_EXPR
+L_PAREN "("
+R_PAREN ")"
+COMMA ","
+WHITESPACE "\n "
+R_CURLY "}"
+WHITESPACE "\n\n "
+LET_STMT
+LET_KW "let"
+WHITESPACE " "
+TUPLE_PAT
+L_PAREN "("
+CONST_BLOCK_PAT
+CONST_KW "const"
+WHITESPACE " "
+BLOCK_EXPR
+STMT_LIST
+L_CURLY "{"
+WHITESPACE " "
+TUPLE_EXPR
+L_PAREN "("
+R_PAREN ")"
+WHITESPACE " "
+R_CURLY "}"
+COMMA ","
+R_PAREN ")"
+WHITESPACE " "
+EQ "="
+WHITESPACE " "
+TUPLE_EXPR
+L_PAREN "("
+R_PAREN ")"
+SEMICOLON ";"
 WHITESPACE "\n"
 R_CURLY "}"
 WHITESPACE "\n"


@@ -1,4 +1,12 @@
 fn main() {
     let const { 15 } = ();
     let const { foo(); bar() } = ();
+
+    match 42 {
+        const { 0 } .. const { 1 } => (),
+        .. const { 0 } => (),
+        const { 2 } .. => (),
+    }
+
+    let (const { () },) = ();
 }


@@ -0,0 +1,58 @@
+SOURCE_FILE
+FN
+FN_KW "fn"
+WHITESPACE " "
+NAME
+IDENT "main"
+PARAM_LIST
+L_PAREN "("
+R_PAREN ")"
+WHITESPACE " "
+BLOCK_EXPR
+STMT_LIST
+L_CURLY "{"
+WHITESPACE "\n "
+MATCH_EXPR
+MATCH_KW "match"
+WHITESPACE " "
+LITERAL
+INT_NUMBER "42"
+WHITESPACE " "
+MATCH_ARM_LIST
+L_CURLY "{"
+WHITESPACE "\n "
+MATCH_ARM
+RANGE_PAT
+DOT2 ".."
+LITERAL_PAT
+LITERAL
+INT_NUMBER "0"
+WHITESPACE " "
+FAT_ARROW "=>"
+WHITESPACE " "
+BLOCK_EXPR
+STMT_LIST
+L_CURLY "{"
+R_CURLY "}"
+WHITESPACE "\n "
+MATCH_ARM
+RANGE_PAT
+LITERAL_PAT
+LITERAL
+INT_NUMBER "1"
+DOT2 ".."
+LITERAL_PAT
+LITERAL
+INT_NUMBER "2"
+WHITESPACE " "
+FAT_ARROW "=>"
+WHITESPACE " "
+BLOCK_EXPR
+STMT_LIST
+L_CURLY "{"
+R_CURLY "}"
+WHITESPACE "\n "
+R_CURLY "}"
+WHITESPACE "\n"
+R_CURLY "}"
+WHITESPACE "\n"


@@ -0,0 +1,6 @@
+fn main() {
+    match 42 {
+        ..0 => {}
+        1..2 => {}
+    }
+}