diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 034442b798b29..8099b8a24658d 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -1,22 +1,19 @@
 use rustc_ast::ast::AttrStyle;
 use rustc_ast::token::{self, CommentKind, Token, TokenKind};
-use rustc_ast::tokenstream::IsJoint;
-use rustc_data_structures::sync::Lrc;
-use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError};
-use rustc_lexer::Base;
-use rustc_lexer::{unescape, RawStrError};
+use rustc_ast::tokenstream::{IsJoint, TokenStream};
+use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError, PResult};
+use rustc_lexer::unescape::{self, Mode};
+use rustc_lexer::{Base, DocStyle, RawStrError};
 use rustc_session::parse::ParseSess;
 use rustc_span::symbol::{sym, Symbol};
 use rustc_span::{BytePos, Pos, Span};
 
-use std::char;
 use tracing::debug;
 
 mod tokentrees;
 mod unescape_error_reporting;
 mod unicode_chars;
 
-use rustc_lexer::{unescape::Mode, DocStyle};
 use unescape_error_reporting::{emit_unescape_error, push_escaped_char};
 
 #[derive(Clone, Debug)]
@@ -28,7 +25,17 @@ pub struct UnmatchedBrace {
     pub candidate_span: Option<Span>,
 }
 
-crate struct StringReader<'a> {
+crate fn parse_token_trees<'a>(
+    sess: &'a ParseSess,
+    src: &'a str,
+    start_pos: BytePos,
+    override_span: Option<Span>,
+) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+    StringReader { sess, start_pos, pos: start_pos, end_src_index: src.len(), src, override_span }
+        .into_token_trees()
+}
+
+struct StringReader<'a> {
     sess: &'a ParseSess,
     /// Initial position, read-only.
     start_pos: BytePos,
@@ -37,31 +44,11 @@ crate struct StringReader<'a> {
     /// Stop reading src at this index.
     end_src_index: usize,
     /// Source text to tokenize.
-    src: Lrc<String>,
+    src: &'a str,
     override_span: Option<Span>,
 }
 
 impl<'a> StringReader<'a> {
-    crate fn new(
-        sess: &'a ParseSess,
-        source_file: Lrc<rustc_span::SourceFile>,
-        override_span: Option<Span>,
-    ) -> Self {
-        let src = source_file.src.clone().unwrap_or_else(|| {
-            sess.span_diagnostic
-                .bug(&format!("cannot lex `source_file` without source: {}", source_file.name));
-        });
-
-        StringReader {
-            sess,
-            start_pos: source_file.start_pos,
-            pos: source_file.start_pos,
-            end_src_index: src.len(),
-            src,
-            override_span,
-        }
-    }
-
     fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
         self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
     }
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index d5977ca3c7d2f..357b10ab89d41 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -12,7 +12,7 @@ use rustc_errors::PResult;
 use rustc_span::Span;
 
 impl<'a> StringReader<'a> {
-    crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+    pub(super) fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
         let mut tt_reader = TokenTreesReader {
             string_reader: self,
             token: Token::dummy(),
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index 8dc0db01ecb51..40e2e34aa0589 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -332,7 +332,7 @@ const ASCII_ARRAY: &[(char, &str, Option<token::TokenKind>)] = &[
     ('"', "Quotation Mark", None),
 ];
 
-crate fn check_for_substitution<'a>(
+pub(super) fn check_for_substitution<'a>(
     reader: &StringReader<'a>,
     pos: BytePos,
     ch: char,
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 462279b0a9e03..b804e8a825f37 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -200,8 +200,13 @@ pub fn maybe_file_to_stream(
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
 ) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
-    let srdr = lexer::StringReader::new(sess, source_file, override_span);
-    let (token_trees, unmatched_braces) = srdr.into_token_trees();
+    let src = source_file.src.as_ref().unwrap_or_else(|| {
+        sess.span_diagnostic
+            .bug(&format!("cannot lex `source_file` without source: {}", source_file.name));
+    });
+
+    let (token_trees, unmatched_braces) =
+        lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span);
 
     match token_trees {
         Ok(stream) => Ok((stream, unmatched_braces)),