| field | value | date |
|---|---|---|
| author | yukang <moorekang@gmail.com> | 2023-02-22 06:09:57 +0000 |
| committer | yukang <moorekang@gmail.com> | 2023-02-28 07:57:17 +0000 |
| commit | 88de2e111504439f76315548dd3e442999b46e95 | |
| tree | be975670d4d641d28ee45969c941664b276754f1 /compiler | |
| parent | 9ce7472db46f62bbc328dbe9e627d4a85a11913c | |
no need to return unmatched_delims from tokentrees
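The change is easiest to see as a before/after of the entry-point signature. The sketch below is standalone illustration, not rustc code: `TokenStream`, `UnmatchedDelim`, and the `_before`/`_after` function names are placeholders invented to mirror the shape of the change. Since the old `Ok` path already required `unmatched_delims` to be empty, the returned vector carried no information, which is what the commit removes.

```rust
// Standalone sketch, not rustc code: `TokenStream` and `UnmatchedDelim` are
// placeholder types, and the `_before`/`_after` functions are invented to
// mirror the signature change.
#[derive(Debug)]
struct TokenStream;
#[derive(Debug)]
struct UnmatchedDelim;

// Before: the lexing entry point returned the stream together with any
// unmatched delimiters, and callers had to forward or emit that vector.
fn source_file_to_stream_before() -> (TokenStream, Vec<UnmatchedDelim>) {
    (TokenStream, Vec::new())
}

// After: unmatched delimiters are reported on the error path inside the
// lexing code, so the entry point hands back only the stream.
fn source_file_to_stream_after() -> TokenStream {
    TokenStream
}

fn main() {
    // Old call sites destructured the tuple or tacked on `.0`
    // (cf. the `.0` removed from compiler/rustc_expand/src/tests.rs).
    let (stream, unmatched) = source_file_to_stream_before();
    println!("before: {stream:?}, {} unmatched delims returned", unmatched.len());

    // New call sites use the stream directly.
    let stream = source_file_to_stream_after();
    println!("after: {stream:?}");
}
```

Callers such as `parse_stream_from_source_str` and the `string_to_stream` test helper then drop the tuple handling, as the hunks below show.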
Diffstat (limited to 'compiler')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | compiler/rustc_expand/src/tests.rs | 1 |
| -rw-r--r-- | compiler/rustc_parse/src/lib.rs | 16 |
2 files changed, 6 insertions, 11 deletions
diff --git a/compiler/rustc_expand/src/tests.rs b/compiler/rustc_expand/src/tests.rs
index 14918d3c190..480d95b77e9 100644
--- a/compiler/rustc_expand/src/tests.rs
+++ b/compiler/rustc_expand/src/tests.rs
@@ -43,7 +43,6 @@ pub(crate) fn string_to_stream(source_str: String) -> TokenStream {
         ps.source_map().new_source_file(PathBuf::from("bogofile").into(), source_str),
         None,
     )
-    .0
 }
 
 /// Parses a string, returns a crate.
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index b6e54b53e26..eede0991010 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -30,7 +30,7 @@ pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
 
 #[macro_use]
 pub mod parser;
-use parser::{emit_unclosed_delims, make_unclosed_delims_error, Parser};
+use parser::{make_unclosed_delims_error, Parser};
 pub mod lexer;
 pub mod validate_attr;
 
@@ -96,10 +96,7 @@ pub fn parse_stream_from_source_str(
     sess: &ParseSess,
     override_span: Option<Span>,
 ) -> TokenStream {
-    let (stream, mut errors) =
-        source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span);
-    emit_unclosed_delims(&mut errors, &sess);
-    stream
+    source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }
 
 /// Creates a new parser from a source string.
@@ -135,9 +132,8 @@ fn maybe_source_file_to_parser(
     source_file: Lrc<SourceFile>,
 ) -> Result<Parser<'_>, Vec<Diagnostic>> {
     let end_pos = source_file.end_pos;
-    let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
+    let stream = maybe_file_to_stream(sess, source_file, None)?;
     let mut parser = stream_to_parser(sess, stream, None);
-    parser.unclosed_delims = unclosed_delims;
     if parser.token == token::Eof {
         parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
     }
@@ -182,7 +178,7 @@ pub fn source_file_to_stream(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
-) -> (TokenStream, Vec<lexer::UnmatchedDelim>) {
+) -> TokenStream {
     panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
 }
 
@@ -192,7 +188,7 @@ pub fn maybe_file_to_stream(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
-) -> Result<(TokenStream, Vec<lexer::UnmatchedDelim>), Vec<Diagnostic>> {
+) -> Result<TokenStream, Vec<Diagnostic>> {
     let src = source_file.src.as_ref().unwrap_or_else(|| {
         sess.span_diagnostic.bug(&format!(
             "cannot lex `source_file` without source: {}",
@@ -204,7 +200,7 @@ pub fn maybe_file_to_stream(
         lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span);
 
     match token_trees {
-        Ok(stream) if unmatched_delims.is_empty() => Ok((stream, unmatched_delims)),
+        Ok(stream) if unmatched_delims.is_empty() => Ok(stream),
         _ => {
             // Return error if there are unmatched delimiters or unclosng delimiters.
             // We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
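The final hunk keeps the guard that made the returned vector redundant: the `Ok` arm only fires when `unmatched_delims` is empty, and the fallback arm buffers the delimiter-mismatch errors before any error from building the token trees. Below is a standalone sketch of that pattern; `TokenStream`, `UnmatchedDelim`, `Diagnostic`, and `stream_or_errors` are invented placeholders, not the rustc APIs.

```rust
// Standalone sketch with placeholder types: `TokenStream`, `UnmatchedDelim`,
// `Diagnostic`, and `stream_or_errors` are invented for illustration and are
// not the rustc APIs.
#[derive(Debug)]
struct TokenStream;
#[derive(Debug)]
struct UnmatchedDelim;
#[derive(Debug)]
struct Diagnostic(String);

fn stream_or_errors(
    token_trees: Result<TokenStream, Vec<Diagnostic>>,
    unmatched_delims: Vec<UnmatchedDelim>,
) -> Result<TokenStream, Vec<Diagnostic>> {
    match token_trees {
        // Happy path: a stream was built and no delimiters were left unmatched.
        Ok(stream) if unmatched_delims.is_empty() => Ok(stream),
        _ => {
            // Delimiter-mismatch errors go into the buffer first ...
            let mut buffer: Vec<Diagnostic> = unmatched_delims
                .into_iter()
                .map(|d| Diagnostic(format!("unmatched delimiter: {d:?}")))
                .collect();
            // ... followed by any error from building the token trees.
            if let Err(mut errs) = token_trees {
                buffer.append(&mut errs);
            }
            Err(buffer)
        }
    }
}

fn main() {
    let errs = stream_or_errors(Ok(TokenStream), vec![UnmatchedDelim]).unwrap_err();
    println!("{} buffered error(s): {errs:?}", errs.len());

    let stream = stream_or_errors(Ok(TokenStream), vec![]).unwrap();
    println!("ok: {stream:?}");
}
```

The real code builds the buffered diagnostics through `make_unclosed_delims_error` (visible in the import hunk above) rather than the string-based `Diagnostic` stand-in used here.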