From b2605c118de31db9d61a9f7d0d158b9bf35f7a4a Mon Sep 17 00:00:00 2001
From: Vadim Petrochenkov
Date: Mon, 24 Feb 2020 13:04:13 +0300
Subject: [PATCH] parser: `token` -> `normalized_token`, `nonnormalized_token` -> `token`

---
 src/librustc_builtin_macros/format.rs         |  2 +-
 src/librustc_expand/mbe/macro_parser.rs       |  3 +-
 src/librustc_parse/parser/diagnostics.rs      |  6 +--
 src/librustc_parse/parser/expr.rs             | 39 +++++++--------
 src/librustc_parse/parser/item.rs             | 14 +++---
 src/librustc_parse/parser/mod.rs              | 48 +++++++++----------
 src/librustc_parse/parser/path.rs             |  7 ++-
 src/librustc_parse/parser/ty.rs               |  7 ++-
 .../ui/borrowck/move-error-snippets.stderr    | 20 ++++----
 .../directory_ownership/macro-expanded-mod.rs |  3 +-
 .../macro-expanded-mod.stderr                 |  9 +++-
 src/test/ui/hygiene/fields-definition.stderr  |  4 +-
 src/test/ui/issues/issue-39848.rs             |  3 +-
 src/test/ui/issues/issue-39848.stderr         | 12 +++--
 14 files changed, 88 insertions(+), 89 deletions(-)

diff --git a/src/librustc_builtin_macros/format.rs b/src/librustc_builtin_macros/format.rs
index a9298abe2d759..072c987a5230e 100644
--- a/src/librustc_builtin_macros/format.rs
+++ b/src/librustc_builtin_macros/format.rs
@@ -158,7 +158,7 @@ fn parse_args<'a>(
         } // accept trailing commas
         if p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq) {
             named = true;
-            let name = if let token::Ident(name, _) = p.token.kind {
+            let name = if let token::Ident(name, _) = p.normalized_token.kind {
                 p.bump();
                 name
             } else {
diff --git a/src/librustc_expand/mbe/macro_parser.rs b/src/librustc_expand/mbe/macro_parser.rs
index b0bbed6fabfdc..2a53d600c5bcf 100644
--- a/src/librustc_expand/mbe/macro_parser.rs
+++ b/src/librustc_expand/mbe/macro_parser.rs
@@ -889,9 +889,8 @@ fn parse_nt_inner<'a>(p: &mut Parser<'a>, sp: Span, name: Symbol) -> PResult<'a,
         // this could be handled like a token, since it is one
         sym::ident => {
             if let Some((name, is_raw)) = get_macro_name(&p.token) {
-                let span = p.token.span;
                 p.bump();
-                token::NtIdent(Ident::new(name, span), is_raw)
+                token::NtIdent(Ident::new(name, p.normalized_prev_token.span), is_raw)
             } else {
                 let token_str = pprust::token_to_string(&p.token);
                 let msg = &format!("expected ident, found {}", &token_str);
diff --git a/src/librustc_parse/parser/diagnostics.rs b/src/librustc_parse/parser/diagnostics.rs
index 09b47df2b19ba..00f5fb9705286 100644
--- a/src/librustc_parse/parser/diagnostics.rs
+++ b/src/librustc_parse/parser/diagnostics.rs
@@ -192,12 +192,12 @@ impl<'a> Parser<'a> {
             TokenKind::CloseDelim(token::DelimToken::Brace),
             TokenKind::CloseDelim(token::DelimToken::Paren),
         ];
-        if let token::Ident(name, false) = self.token.kind {
-            if Ident::new(name, self.token.span).is_raw_guess()
+        if let token::Ident(name, false) = self.normalized_token.kind {
+            if Ident::new(name, self.normalized_token.span).is_raw_guess()
                 && self.look_ahead(1, |t| valid_follow.contains(&t.kind))
             {
                 err.span_suggestion(
-                    self.token.span,
+                    self.normalized_token.span,
                     "you can escape reserved keywords to use them as identifiers",
                     format!("r#{}", name),
                     Applicability::MaybeIncorrect,
diff --git a/src/librustc_parse/parser/expr.rs b/src/librustc_parse/parser/expr.rs
index 3ae97ed5f8822..859e53127b7b0 100644
--- a/src/librustc_parse/parser/expr.rs
+++ b/src/librustc_parse/parser/expr.rs
@@ -97,15 +97,14 @@ impl<'a> Parser<'a> {
     fn parse_expr_catch_underscore(&mut self) -> PResult<'a, P<Expr>> {
         match self.parse_expr() {
             Ok(expr) => Ok(expr),
-            Err(mut err) => match self.token.kind {
+            Err(mut err) => match self.normalized_token.kind {
                 token::Ident(name, false)
                    if name == kw::Underscore && self.look_ahead(1, |t| t == &token::Comma) =>
                 {
                     // Special-case handling of `foo(_, _, _)`
                     err.emit();
-                    let sp = self.token.span;
                     self.bump();
-                    Ok(self.mk_expr(sp, ExprKind::Err, AttrVec::new()))
+                    Ok(self.mk_expr(self.prev_token.span, ExprKind::Err, AttrVec::new()))
                 }
                 _ => Err(err),
             },
@@ -166,7 +165,7 @@ impl<'a> Parser<'a> {
         while let Some(op) = self.check_assoc_op() {
             // Adjust the span for interpolated LHS to point to the `$lhs` token
             // and not to what it refers to.
-            let lhs_span = match self.unnormalized_prev_token.kind {
+            let lhs_span = match self.prev_token.kind {
                 TokenKind::Interpolated(..) => self.prev_span,
                 _ => lhs.span,
             };
@@ -337,7 +336,7 @@ impl<'a> Parser<'a> {
     /// Also performs recovery for `and` / `or` which are mistaken for `&&` and `||` respectively.
     fn check_assoc_op(&self) -> Option<Spanned<AssocOp>> {
         Some(Spanned {
-            node: match (AssocOp::from_token(&self.token), &self.token.kind) {
+            node: match (AssocOp::from_token(&self.token), &self.normalized_token.kind) {
                 (Some(op), _) => op,
                 (None, token::Ident(sym::and, false)) => {
                     self.error_bad_logical_op("and", "&&", "conjunction");
@@ -349,7 +348,7 @@ impl<'a> Parser<'a> {
                 }
                 _ => return None,
             },
-            span: self.token.span,
+            span: self.normalized_token.span,
         })
     }
 
@@ -441,7 +440,7 @@ impl<'a> Parser<'a> {
         let attrs = self.parse_or_use_outer_attributes(attrs)?;
         let lo = self.token.span;
         // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
-        let (hi, ex) = match self.token.kind {
+        let (hi, ex) = match self.normalized_token.kind {
             token::Not => self.parse_unary_expr(lo, UnOp::Not), // `!expr`
             token::Tilde => self.recover_tilde_expr(lo), // `~expr`
             token::BinOp(token::Minus) => self.parse_unary_expr(lo, UnOp::Neg), // `-expr`
@@ -527,7 +526,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, (Span, P<Expr>)> {
         expr.map(|e| {
             (
-                match self.unnormalized_prev_token.kind {
+                match self.prev_token.kind {
                     TokenKind::Interpolated(..) => self.prev_span,
                     _ => e.span,
                 },
@@ -708,7 +707,7 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
-        match self.token.kind {
+        match self.normalized_token.kind {
             token::Ident(..) => self.parse_dot_suffix(base, lo),
             token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
                 Ok(self.parse_tuple_field_access_expr(lo, base, symbol, suffix))
@@ -777,8 +776,8 @@ impl<'a> Parser<'a> {
         field: Symbol,
         suffix: Option<Symbol>,
     ) -> P<Expr> {
-        let span = self.token.span;
         self.bump();
+        let span = self.prev_token.span;
         let field = ExprKind::Field(base, Ident::new(field, span));
         self.expect_no_suffix(span, "a tuple index", suffix);
         self.mk_expr(lo.to(span), field, AttrVec::new())
@@ -802,7 +801,7 @@ impl<'a> Parser<'a> {
 
     /// Assuming we have just parsed `.`, continue parsing into an expression.
     fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
-        if self.token.span.rust_2018() && self.eat_keyword(kw::Await) {
+        if self.normalized_token.span.rust_2018() && self.eat_keyword(kw::Await) {
             return self.mk_await_expr(self_arg, lo);
         }
 
@@ -916,7 +915,7 @@ impl<'a> Parser<'a> {
             // | ^ expected expression
             self.bump();
             Ok(self.mk_expr_err(self.token.span))
-        } else if self.token.span.rust_2018() {
+        } else if self.normalized_token.span.rust_2018() {
             // `Span::rust_2018()` is somewhat expensive; don't get it repeatedly.
            if self.check_keyword(kw::Async) {
                 if self.is_async_block() {
@@ -1346,7 +1345,7 @@ impl<'a> Parser<'a> {
             if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };
         let asyncness =
-            if self.token.span.rust_2018() { self.parse_asyncness() } else { Async::No };
+            if self.normalized_token.span.rust_2018() { self.parse_asyncness() } else { Async::No };
         if asyncness.is_async() {
             // Feature-gate `async ||` closures.
             self.sess.gated_spans.gate(sym::async_closure, self.prev_span);
         }
@@ -1560,9 +1559,8 @@ impl<'a> Parser<'a> {
     fn eat_label(&mut self) -> Option<Label> {
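
Note (not part of the patch): after this rename, `token` and `prev_token` hold the raw tokens exactly as they appear in the token stream, while `normalized_token` and `normalized_prev_token` hold a view in which an interpolated `NtIdent` is unwrapped into a plain identifier token, which is why the `token::Ident(..)` matches above switch to the normalized field. Below is a minimal standalone sketch of that relationship; it is not rustc code, and the simplified `Token`, `TokenKind`, and `normalize` names are invented for illustration.

    // Standalone sketch, not rustc code: simplified types modeling how a raw
    // token and its "normalized" view can be kept side by side in a parser.

    #[derive(Clone, Debug)]
    enum TokenKind {
        // A plain identifier token.
        Ident(String),
        // A macro-interpolated identifier, playing the role of rustc's `NtIdent`.
        InterpolatedIdent(String),
        Eof,
    }

    #[derive(Clone, Debug)]
    struct Token {
        kind: TokenKind,
    }

    // Unwrap interpolated identifiers so that code matching on `Ident` keeps working.
    fn normalize(raw: &Token) -> Token {
        match &raw.kind {
            TokenKind::InterpolatedIdent(name) => Token { kind: TokenKind::Ident(name.clone()) },
            _ => raw.clone(),
        }
    }

    struct Parser {
        // The token exactly as it came from the stream (the patch's `token`).
        token: Token,
        // The same token with interpolation unwrapped (the patch's `normalized_token`).
        normalized_token: Token,
    }

    impl Parser {
        fn set_token(&mut self, raw: Token) {
            self.normalized_token = normalize(&raw);
            self.token = raw;
        }
    }

    fn main() {
        let mut p = Parser {
            token: Token { kind: TokenKind::Eof },
            normalized_token: Token { kind: TokenKind::Eof },
        };
        // A `$name:ident` substitution arrives as an interpolated token...
        p.set_token(Token { kind: TokenKind::InterpolatedIdent("foo".to_string()) });
        // ...so matches on the normalized view see a plain identifier,
        // while the raw `token` keeps the original form.
        assert!(matches!(p.normalized_token.kind, TokenKind::Ident(_)));
        assert!(matches!(p.token.kind, TokenKind::InterpolatedIdent(_)));
        println!("raw: {:?}, normalized: {:?}", p.token.kind, p.normalized_token.kind);
    }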