Merge pull request #18417 from ChayimFriedman2/hash-string

fix: Correctly handle `#""` in edition <2024
Lukas Wirth 2024-10-27 09:28:47 +00:00 committed by GitHub
commit 59dd6422cc
4 changed files with 30 additions and 7 deletions
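
For context on the fix: since edition 2024, rustc's lexer reserves `#"` and `##` as a single `GuardedStrPrefix` token (reserved guarded string syntax), while earlier editions lex the same input as two separate tokens. A hypothetical, self-contained illustration of the difference (not part of this PR; the macro name is made up):

// Under edition <=2021, `#"foo"` lexes as POUND followed by STRING, so a
// macro can match the two tokens separately. Under edition 2024 the `#"`
// prefix is one reserved token and the same input is a lex error instead.
macro_rules! pound_string {
    (# $s:literal) => {
        $s
    };
}

fn main() {
    let s = pound_string!(#"foo"); // ok pre-2024, error in edition 2024
    assert_eq!(s, "foo");
}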

crates/parser/src/lexed_str.rs

@@ -39,7 +39,9 @@ pub fn new(edition: Edition, text: &'a str) -> LexedStr<'a> {
         conv.offset = shebang_len;
     };
-    for token in rustc_lexer::tokenize(&text[conv.offset..]) {
+    // Re-create the tokenizer from scratch every token because `GuardedStrPrefix` is one token in the lexer
+    // but we want to split it to two in edition <2024.
+    while let Some(token) = rustc_lexer::tokenize(&text[conv.offset..]).next() {
         let token_text = &text[conv.offset..][..token.len as usize];
         conv.extend_token(&token.kind, token_text);
@@ -158,7 +160,7 @@ fn push(&mut self, kind: SyntaxKind, len: usize, err: Option<&str>) {
         }
     }
-    fn extend_token(&mut self, kind: &rustc_lexer::TokenKind, token_text: &str) {
+    fn extend_token(&mut self, kind: &rustc_lexer::TokenKind, mut token_text: &str) {
         // A note on an intended tradeoff:
         // We drop some useful information here (see patterns with double dots `..`)
         // Storing that info in `SyntaxKind` is not possible due to its layout requirements of
@@ -189,10 +191,15 @@ fn extend_token(&mut self, kind: &rustc_lexer::TokenKind, token_text: &str) {
             rustc_lexer::TokenKind::RawIdent => IDENT,
             rustc_lexer::TokenKind::GuardedStrPrefix if self.edition.at_least_2024() => {
+                // FIXME: rustc does something better for recovery.
                 err = "Invalid string literal (reserved syntax)";
                 ERROR
             }
-            rustc_lexer::TokenKind::GuardedStrPrefix => POUND,
+            rustc_lexer::TokenKind::GuardedStrPrefix => {
+                // The token is `#"` or `##`, split it into two.
+                token_text = &token_text[1..];
+                POUND
+            }
             rustc_lexer::TokenKind::Literal { kind, .. } => {
                 self.extend_literal(token_text.len(), kind);
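
Why re-lexing per token makes the split work: the converter advances its offset by the length of the (now truncated) token text, so after emitting a one-character `#` token the next `tokenize` call starts at the quote and sees an ordinary string literal. A minimal standalone sketch of this strategy, assuming the one-argument `rustc_lexer::tokenize` signature used in the diff above (the function name here is made up):

fn split_tokens(text: &str) -> Vec<String> {
    let mut offset = 0;
    let mut out = Vec::new();
    // Re-create the lexer each iteration, as the loop above does, so a
    // shortened token makes the next iteration re-lex the remainder.
    while let Some(token) = rustc_lexer::tokenize(&text[offset..]).next() {
        let mut len = token.len as usize;
        if matches!(token.kind, rustc_lexer::TokenKind::GuardedStrPrefix) {
            len = 1; // keep only the `#`; `"foo"` is re-lexed next time around
        }
        out.push(text[offset..offset + len].to_owned());
        offset += len;
    }
    out
}

// split_tokens(r##"#"foo""##) == ["#", "\"foo\""]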

crates/parser/src/tests.rs

@@ -15,11 +15,20 @@
 #[path = "../test_data/generated/runner.rs"]
 mod runner;
+fn infer_edition(file_path: &Path) -> Edition {
+    let file_content = std::fs::read_to_string(file_path).unwrap();
+    if let Some(edition) = file_content.strip_prefix("//@ edition: ") {
+        edition[..4].parse().expect("invalid edition directive")
+    } else {
+        Edition::CURRENT
+    }
+}
+
 #[test]
 fn lex_ok() {
     for case in TestCase::list("lexer/ok") {
         let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
-        let actual = lex(&case.text);
+        let actual = lex(&case.text, infer_edition(&case.rs));
         expect_file![case.rast].assert_eq(&actual)
     }
 }
@@ -28,13 +37,13 @@ fn lex_ok() {
 fn lex_err() {
     for case in TestCase::list("lexer/err") {
         let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
-        let actual = lex(&case.text);
+        let actual = lex(&case.text, infer_edition(&case.rs));
         expect_file![case.rast].assert_eq(&actual)
     }
 }
 
-fn lex(text: &str) -> String {
-    let lexed = LexedStr::new(Edition::CURRENT, text);
+fn lex(text: &str, edition: Edition) -> String {
+    let lexed = LexedStr::new(edition, text);
     let mut res = String::new();
     for i in 0..lexed.len() {
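
The directive handling in `infer_edition` is deliberately minimal: it fires only when `//@ edition: ` is the very first text in the file, reads exactly four characters, and relies on `Edition`'s `FromStr` impl for validation. A standalone sketch of just the string handling (hypothetical function name; `Edition` itself lives in the parser crate):

fn edition_directive(file_content: &str) -> Option<&str> {
    // `strip_prefix` matches only at the start of the file, so a directive
    // on any later line is ignored and the default edition is used.
    file_content.strip_prefix("//@ edition: ").map(|rest| &rest[..4])
}

fn main() {
    assert_eq!(edition_directive("//@ edition: 2021\n\n#\"foo\"\n"), Some("2021"));
    assert_eq!(edition_directive("fn main() {}\n"), None);
}

Falling back to `Edition::CURRENT` keeps every existing test file's behavior unchanged; only files that opt in with the directive, like the new test below, are lexed under an older edition.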

New test data file: crates/parser/test_data/lexer/ok/*.rast

@@ -0,0 +1,4 @@
+COMMENT "//@ edition: 2021"
+WHITESPACE "\n\n"
+POUND "#"
+STRING "\"foo\""

New test data file: crates/parser/test_data/lexer/ok/*.rs

@@ -0,0 +1,3 @@
+//@ edition: 2021
+
+#"foo"