From 88de2e111504439f76315548dd3e442999b46e95 Mon Sep 17 00:00:00 2001
From: yukang
Date: Wed, 22 Feb 2023 06:09:57 +0000
Subject: [PATCH] no need to return unmatched_delims from tokentrees

---
 compiler/rustc_expand/src/tests.rs |  1 -
 compiler/rustc_parse/src/lib.rs    | 16 ++++++----------
 2 files changed, 6 insertions(+), 11 deletions(-)

diff --git a/compiler/rustc_expand/src/tests.rs b/compiler/rustc_expand/src/tests.rs
index 14918d3c190..480d95b77e9 100644
--- a/compiler/rustc_expand/src/tests.rs
+++ b/compiler/rustc_expand/src/tests.rs
@@ -43,7 +43,6 @@ pub(crate) fn string_to_stream(source_str: String) -> TokenStream {
         ps.source_map().new_source_file(PathBuf::from("bogofile").into(), source_str),
         None,
     )
-    .0
 }
 
 /// Parses a string, returns a crate.
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index b6e54b53e26..eede0991010 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -30,7 +30,7 @@ pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
 
 #[macro_use]
 pub mod parser;
-use parser::{emit_unclosed_delims, make_unclosed_delims_error, Parser};
+use parser::{make_unclosed_delims_error, Parser};
 pub mod lexer;
 pub mod validate_attr;
 
@@ -96,10 +96,7 @@ pub fn parse_stream_from_source_str(
     sess: &ParseSess,
     override_span: Option<Span>,
 ) -> TokenStream {
-    let (stream, mut errors) =
-        source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span);
-    emit_unclosed_delims(&mut errors, &sess);
-    stream
+    source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }
 
 /// Creates a new parser from a source string.
@@ -135,9 +132,8 @@ fn maybe_source_file_to_parser(
     source_file: Lrc<SourceFile>,
 ) -> Result<Parser<'_>, Vec<Diagnostic>> {
     let end_pos = source_file.end_pos;
-    let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
+    let stream = maybe_file_to_stream(sess, source_file, None)?;
     let mut parser = stream_to_parser(sess, stream, None);
-    parser.unclosed_delims = unclosed_delims;
     if parser.token == token::Eof {
         parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
     }
@@ -182,7 +178,7 @@ pub fn source_file_to_stream(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
-) -> (TokenStream, Vec<lexer::UnmatchedDelim>) {
+) -> TokenStream {
     panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
 }
 
@@ -192,7 +188,7 @@ pub fn maybe_file_to_stream(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
-) -> Result<(TokenStream, Vec<lexer::UnmatchedDelim>), Vec<Diagnostic>> {
+) -> Result<TokenStream, Vec<Diagnostic>> {
     let src = source_file.src.as_ref().unwrap_or_else(|| {
         sess.span_diagnostic.bug(&format!(
             "cannot lex `source_file` without source: {}",
@@ -204,7 +200,7 @@ pub fn maybe_file_to_stream(
         lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span);
 
     match token_trees {
-        Ok(stream) if unmatched_delims.is_empty() => Ok((stream, unmatched_delims)),
+        Ok(stream) if unmatched_delims.is_empty() => Ok(stream),
         _ => {
             // Return error if there are unmatched delimiters or unclosng delimiters.
             // We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch