Speed up Parser::expected_tokens #133793

Merged
merged 6 commits on Dec 19, 2024
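
The change is mechanical across the diff below: call sites such as `p.eat(&token::Comma)` or `p.eat_keyword(sym::options)` become `p.eat(exp!(Comma))` and `p.eat_keyword(exp!(Options))`, and the parser's per-call `expected_tokens` vector becomes an `expected_token_types` set updated with `insert`. The following is a minimal standalone sketch of that pattern, not rustc's actual code; the concrete shapes (the toy `Token`/`TokenType` enums, the `u64` bitset, the `ExpTokenPair` fields) are assumptions for illustration, while the names mirror the ones visible in the diff.

```rust
// Minimal standalone sketch (not rustc's actual types) of the pattern the
// diff below applies at every call site: `exp!(...)` bundles the token to
// match with a precomputed `TokenType` index, so recording "this token was
// expected here" becomes a cheap bit insert instead of a `Vec` push.

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Token {
    Comma,
    OpenParen,
    CloseParen,
}

// Dense index used only for the "expected tokens" bookkeeping.
#[derive(Clone, Copy, Debug)]
enum TokenType {
    Comma,
    OpenParen,
    CloseParen,
}

// What `exp!(Comma)` expands to in this sketch: the token plus its type index.
#[derive(Clone, Copy)]
struct ExpTokenPair {
    tok: Token,
    token_type: TokenType,
}

macro_rules! exp {
    ($name:ident) => {
        ExpTokenPair { tok: Token::$name, token_type: TokenType::$name }
    };
}

struct Parser {
    tokens: Vec<Token>,
    pos: usize,
    // Before this PR: roughly a `Vec<TokenType>` pushed to on every check.
    // After: a small set of bits, cleared/queried only for diagnostics.
    expected_token_types: u64,
}

impl Parser {
    fn check(&mut self, exp: ExpTokenPair) -> bool {
        // Record the expectation with one OR instead of a heap push.
        self.expected_token_types |= 1u64 << (exp.token_type as u32);
        self.tokens.get(self.pos) == Some(&exp.tok)
    }

    fn eat(&mut self, exp: ExpTokenPair) -> bool {
        let matched = self.check(exp);
        if matched {
            self.pos += 1;
        }
        matched
    }
}

fn main() {
    let mut p = Parser {
        tokens: vec![Token::OpenParen, Token::CloseParen],
        pos: 0,
        expected_token_types: 0,
    };
    assert!(p.eat(exp!(OpenParen)));
    assert!(!p.eat(exp!(Comma))); // records `,` as expected, does not advance
    assert!(p.eat(exp!(CloseParen)));
    println!("expected-token bits: {:#b}", p.expected_token_types);
}
```
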
112 changes: 56 additions & 56 deletions compiler/rustc_builtin_macros/src/asm.rs
@@ -1,16 +1,16 @@
use ast::token::IdentIsRaw;
use lint::BuiltinLintDiag;
use rustc_ast::AsmMacro;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::{AsmMacro, token};
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_errors::PResult;
use rustc_expand::base::*;
use rustc_index::bit_set::GrowableBitSet;
use rustc_parse::parser::Parser;
use rustc_parse::exp;
use rustc_parse::parser::{ExpKeywordPair, Parser};
use rustc_session::lint;
use rustc_span::{ErrorGuaranteed, Ident, InnerSpan, Span, Symbol, kw, sym};
use rustc_span::{ErrorGuaranteed, Ident, InnerSpan, Span, Symbol, kw};
use rustc_target::asm::InlineAsmArch;
use smallvec::smallvec;
use {rustc_ast as ast, rustc_parse_format as parse};
@@ -38,16 +38,16 @@ pub struct AsmArgs {
/// - `Err(_)` if the current token matches the keyword, but was not expected
fn eat_operand_keyword<'a>(
p: &mut Parser<'a>,
symbol: Symbol,
exp: ExpKeywordPair,
asm_macro: AsmMacro,
) -> PResult<'a, bool> {
if matches!(asm_macro, AsmMacro::Asm) {
Ok(p.eat_keyword(symbol))
Ok(p.eat_keyword(exp))
} else {
let span = p.token.span;
if p.eat_keyword_noexpect(symbol) {
if p.eat_keyword_noexpect(exp.kw) {
// in gets printed as `r#in` otherwise
let symbol = if symbol == kw::In { "in" } else { symbol.as_str() };
let symbol = if exp.kw == kw::In { "in" } else { exp.kw.as_str() };
Err(p.dcx().create_err(errors::AsmUnsupportedOperand {
span,
symbol,
@@ -95,28 +95,28 @@ pub fn parse_asm_args<'a>(

let mut allow_templates = true;
while p.token != token::Eof {
if !p.eat(&token::Comma) {
if !p.eat(exp!(Comma)) {
if allow_templates {
// After a template string, we always expect *only* a comma...
return Err(dcx.create_err(errors::AsmExpectedComma { span: p.token.span }));
} else {
// ...after that delegate to `expect` to also include the other expected tokens.
return Err(p.expect(&token::Comma).err().unwrap());
return Err(p.expect(exp!(Comma)).err().unwrap());
}
}
if p.token == token::Eof {
break;
} // accept trailing commas

// Parse clobber_abi
if p.eat_keyword(sym::clobber_abi) {
if p.eat_keyword(exp!(ClobberAbi)) {
parse_clobber_abi(p, &mut args)?;
allow_templates = false;
continue;
}

// Parse options
if p.eat_keyword(sym::options) {
if p.eat_keyword(exp!(Options)) {
parse_options(p, &mut args, asm_macro)?;
allow_templates = false;
continue;
@@ -128,65 +128,65 @@ pub fn parse_asm_args<'a>(
let name = if p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq) {
let (ident, _) = p.token.ident().unwrap();
p.bump();
p.expect(&token::Eq)?;
p.expect(exp!(Eq))?;
allow_templates = false;
Some(ident.name)
} else {
None
};

let mut explicit_reg = false;
let op = if eat_operand_keyword(p, kw::In, asm_macro)? {
let op = if eat_operand_keyword(p, exp!(In), asm_macro)? {
let reg = parse_reg(p, &mut explicit_reg)?;
if p.eat_keyword(kw::Underscore) {
if p.eat_keyword(exp!(Underscore)) {
let err = dcx.create_err(errors::AsmUnderscoreInput { span: p.token.span });
return Err(err);
}
let expr = p.parse_expr()?;
ast::InlineAsmOperand::In { reg, expr }
} else if eat_operand_keyword(p, sym::out, asm_macro)? {
} else if eat_operand_keyword(p, exp!(Out), asm_macro)? {
let reg = parse_reg(p, &mut explicit_reg)?;
let expr = if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
let expr = if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
ast::InlineAsmOperand::Out { reg, expr, late: false }
} else if eat_operand_keyword(p, sym::lateout, asm_macro)? {
} else if eat_operand_keyword(p, exp!(Lateout), asm_macro)? {
let reg = parse_reg(p, &mut explicit_reg)?;
let expr = if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
let expr = if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
ast::InlineAsmOperand::Out { reg, expr, late: true }
} else if eat_operand_keyword(p, sym::inout, asm_macro)? {
} else if eat_operand_keyword(p, exp!(Inout), asm_macro)? {
let reg = parse_reg(p, &mut explicit_reg)?;
if p.eat_keyword(kw::Underscore) {
if p.eat_keyword(exp!(Underscore)) {
let err = dcx.create_err(errors::AsmUnderscoreInput { span: p.token.span });
return Err(err);
}
let expr = p.parse_expr()?;
if p.eat(&token::FatArrow) {
if p.eat(exp!(FatArrow)) {
let out_expr =
if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
ast::InlineAsmOperand::SplitInOut { reg, in_expr: expr, out_expr, late: false }
} else {
ast::InlineAsmOperand::InOut { reg, expr, late: false }
}
} else if eat_operand_keyword(p, sym::inlateout, asm_macro)? {
} else if eat_operand_keyword(p, exp!(Inlateout), asm_macro)? {
let reg = parse_reg(p, &mut explicit_reg)?;
if p.eat_keyword(kw::Underscore) {
if p.eat_keyword(exp!(Underscore)) {
let err = dcx.create_err(errors::AsmUnderscoreInput { span: p.token.span });
return Err(err);
}
let expr = p.parse_expr()?;
if p.eat(&token::FatArrow) {
if p.eat(exp!(FatArrow)) {
let out_expr =
if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
ast::InlineAsmOperand::SplitInOut { reg, in_expr: expr, out_expr, late: true }
} else {
ast::InlineAsmOperand::InOut { reg, expr, late: true }
}
} else if eat_operand_keyword(p, sym::label, asm_macro)? {
} else if eat_operand_keyword(p, exp!(Label), asm_macro)? {
let block = p.parse_block()?;
ast::InlineAsmOperand::Label { block }
} else if p.eat_keyword(kw::Const) {
} else if p.eat_keyword(exp!(Const)) {
let anon_const = p.parse_expr_anon_const()?;
ast::InlineAsmOperand::Const { anon_const }
} else if p.eat_keyword(sym::sym) {
} else if p.eat_keyword(exp!(Sym)) {
let expr = p.parse_expr()?;
let ast::ExprKind::Path(qself, path) = &expr.kind else {
let err = dcx.create_err(errors::AsmSymNoPath { span: expr.span });
@@ -389,31 +389,31 @@ fn parse_options<'a>(
) -> PResult<'a, ()> {
let span_start = p.prev_token.span;

p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;

while !p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
const OPTIONS: [(Symbol, ast::InlineAsmOptions); ast::InlineAsmOptions::COUNT] = [
(sym::pure, ast::InlineAsmOptions::PURE),
(sym::nomem, ast::InlineAsmOptions::NOMEM),
(sym::readonly, ast::InlineAsmOptions::READONLY),
(sym::preserves_flags, ast::InlineAsmOptions::PRESERVES_FLAGS),
(sym::noreturn, ast::InlineAsmOptions::NORETURN),
(sym::nostack, ast::InlineAsmOptions::NOSTACK),
(sym::may_unwind, ast::InlineAsmOptions::MAY_UNWIND),
(sym::att_syntax, ast::InlineAsmOptions::ATT_SYNTAX),
(kw::Raw, ast::InlineAsmOptions::RAW),
p.expect(exp!(OpenParen))?;

while !p.eat(exp!(CloseParen)) {
const OPTIONS: [(ExpKeywordPair, ast::InlineAsmOptions); ast::InlineAsmOptions::COUNT] = [
(exp!(Pure), ast::InlineAsmOptions::PURE),
(exp!(Nomem), ast::InlineAsmOptions::NOMEM),
(exp!(Readonly), ast::InlineAsmOptions::READONLY),
(exp!(PreservesFlags), ast::InlineAsmOptions::PRESERVES_FLAGS),
(exp!(Noreturn), ast::InlineAsmOptions::NORETURN),
(exp!(Nostack), ast::InlineAsmOptions::NOSTACK),
(exp!(MayUnwind), ast::InlineAsmOptions::MAY_UNWIND),
(exp!(AttSyntax), ast::InlineAsmOptions::ATT_SYNTAX),
(exp!(Raw), ast::InlineAsmOptions::RAW),
];

'blk: {
for (symbol, option) in OPTIONS {
for (exp, option) in OPTIONS {
let kw_matched = if asm_macro.is_supported_option(option) {
p.eat_keyword(symbol)
p.eat_keyword(exp)
} else {
p.eat_keyword_noexpect(symbol)
p.eat_keyword_noexpect(exp.kw)
};

if kw_matched {
try_set_option(p, args, asm_macro, symbol, option);
try_set_option(p, args, asm_macro, exp.kw, option);
break 'blk;
}
}
@@ -422,10 +422,10 @@ fn parse_options<'a>(
}

// Allow trailing commas
if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
if p.eat(exp!(CloseParen)) {
break;
}
p.expect(&token::Comma)?;
p.expect(exp!(Comma))?;
}

let new_span = span_start.to(p.prev_token.span);
@@ -437,14 +437,14 @@ fn parse_options<'a>(
fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a, ()> {
let span_start = p.prev_token.span;

p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
p.expect(exp!(OpenParen))?;

if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
if p.eat(exp!(CloseParen)) {
return Err(p.dcx().create_err(errors::NonABI { span: p.token.span }));
}

let mut new_abis = Vec::new();
while !p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
while !p.eat(exp!(CloseParen)) {
match p.parse_str_lit() {
Ok(str_lit) => {
new_abis.push((str_lit.symbol_unescaped, str_lit.span));
@@ -456,10 +456,10 @@ fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a,
};

// Allow trailing commas
if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
if p.eat(exp!(CloseParen)) {
break;
}
p.expect(&token::Comma)?;
p.expect(exp!(Comma))?;
}

let full_span = span_start.to(p.prev_token.span);
@@ -482,7 +482,7 @@ fn parse_reg<'a>(
p: &mut Parser<'a>,
explicit_reg: &mut bool,
) -> PResult<'a, ast::InlineAsmRegOrRegClass> {
p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
p.expect(exp!(OpenParen))?;
let result = match p.token.uninterpolate().kind {
token::Ident(name, IdentIsRaw::No) => ast::InlineAsmRegOrRegClass::RegClass(name),
token::Literal(token::Lit { kind: token::LitKind::Str, symbol, suffix: _ }) => {
@@ -496,7 +496,7 @@ fn parse_reg<'a>(
}
};
p.bump();
p.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
p.expect(exp!(CloseParen))?;
Ok(result)
}

3 changes: 2 additions & 1 deletion compiler/rustc_builtin_macros/src/assert.rs
@@ -7,6 +7,7 @@ use rustc_ast::{DelimArgs, Expr, ExprKind, MacCall, Path, PathSegment, UnOp, tok
use rustc_ast_pretty::pprust;
use rustc_errors::PResult;
use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt, MacEager, MacroExpanderResult};
use rustc_parse::exp;
use rustc_parse::parser::Parser;
use rustc_span::{DUMMY_SP, Ident, Span, Symbol, sym};
use thin_vec::thin_vec;
@@ -143,7 +144,7 @@ fn parse_assert<'a>(cx: &ExtCtxt<'a>, sp: Span, stream: TokenStream) -> PResult<
cx.dcx().emit_err(errors::AssertMissingComma { span: parser.token.span, comma });

parse_custom_message(&mut parser)
} else if parser.eat(&token::Comma) {
} else if parser.eat(exp!(Comma)) {
parse_custom_message(&mut parser)
} else {
None
5 changes: 3 additions & 2 deletions compiler/rustc_builtin_macros/src/cfg.rs
@@ -6,6 +6,7 @@ use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
use rustc_errors::PResult;
use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt, MacEager, MacroExpanderResult};
use rustc_parse::exp;
use rustc_span::Span;
use {rustc_ast as ast, rustc_attr_parsing as attr};

@@ -48,9 +49,9 @@ fn parse_cfg<'a>(

let cfg = p.parse_meta_item_inner()?;

let _ = p.eat(&token::Comma);
let _ = p.eat(exp!(Comma));

if !p.eat(&token::Eof) {
if !p.eat(exp!(Eof)) {
return Err(cx.dcx().create_err(errors::OneCfgPattern { span }));
}

9 changes: 5 additions & 4 deletions compiler/rustc_builtin_macros/src/format.rs
@@ -12,6 +12,7 @@ use rustc_errors::{Applicability, Diag, MultiSpan, PResult, SingleLabelManySpans
use rustc_expand::base::*;
use rustc_lint_defs::builtin::NAMED_ARGUMENTS_USED_POSITIONALLY;
use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiag, LintId};
use rustc_parse::exp;
use rustc_parse_format as parse;
use rustc_span::{BytePos, ErrorGuaranteed, Ident, InnerSpan, Span, Symbol};

@@ -93,12 +94,12 @@ fn parse_args<'a>(ecx: &ExtCtxt<'a>, sp: Span, tts: TokenStream) -> PResult<'a,
let mut first = true;

while p.token != token::Eof {
if !p.eat(&token::Comma) {
if !p.eat(exp!(Comma)) {
if first {
p.clear_expected_tokens();
p.clear_expected_token_types();
}

match p.expect(&token::Comma) {
match p.expect(exp!(Comma)) {
Err(err) => {
match token::TokenKind::Comma.similar_tokens() {
Some(tks) if tks.contains(&p.token.kind) => {
@@ -122,7 +123,7 @@ fn parse_args<'a>(ecx: &ExtCtxt<'a>, sp: Span, tts: TokenStream) -> PResult<'a,
match p.token.ident() {
Some((ident, _)) if p.look_ahead(1, |t| *t == token::Eq) => {
p.bump();
p.expect(&token::Eq)?;
p.expect(exp!(Eq))?;
let expr = p.parse_expr()?;
if let Some((_, prev)) = args.by_name(ident.name) {
ecx.dcx().emit_err(errors::FormatDuplicateArg {
5 changes: 3 additions & 2 deletions compiler/rustc_builtin_macros/src/pattern_type.rs
@@ -3,7 +3,8 @@ use rustc_ast::tokenstream::TokenStream;
use rustc_ast::{Pat, Ty, ast};
use rustc_errors::PResult;
use rustc_expand::base::{self, DummyResult, ExpandResult, ExtCtxt, MacroExpanderResult};
use rustc_span::{Span, sym};
use rustc_parse::exp;
use rustc_span::Span;

pub(crate) fn expand<'cx>(
cx: &'cx mut ExtCtxt<'_>,
@@ -24,7 +25,7 @@ fn parse_pat_ty<'a>(cx: &mut ExtCtxt<'a>, stream: TokenStream) -> PResult<'a, (P
let mut parser = cx.new_parser_from_tts(stream);

let ty = parser.parse_ty()?;
parser.expect_keyword(sym::is)?;
parser.expect_keyword(exp!(Is))?;
let pat = parser.parse_pat_no_top_alt(None, None)?;

Ok((ty, pat))
6 changes: 3 additions & 3 deletions compiler/rustc_builtin_macros/src/util.rs
@@ -7,7 +7,7 @@ use rustc_expand::expand::AstFragment;
use rustc_feature::AttributeTemplate;
use rustc_lint_defs::BuiltinLintDiag;
use rustc_lint_defs::builtin::DUPLICATE_MACRO_ATTRIBUTES;
use rustc_parse::{parser, validate_attr};
use rustc_parse::{exp, parser, validate_attr};
use rustc_session::errors::report_lit_error;
use rustc_span::{BytePos, Span, Symbol};

@@ -204,7 +204,7 @@ pub(crate) fn get_single_expr_from_tts(
Ok(ret) => ret,
Err(guar) => return ExpandResult::Ready(Err(guar)),
};
let _ = p.eat(&token::Comma);
let _ = p.eat(exp!(Comma));

if p.token != token::Eof {
cx.dcx().emit_err(errors::OnlyOneArgument { span, name });
@@ -237,7 +237,7 @@ pub(crate) fn get_exprs_from_tts(
let expr = cx.expander().fully_expand_fragment(AstFragment::Expr(expr)).make_expr();

es.push(expr);
if p.eat(&token::Comma) {
if p.eat(exp!(Comma)) {
continue;
}
if p.token != token::Eof {
6 changes: 3 additions & 3 deletions compiler/rustc_expand/src/module.rs
@@ -2,9 +2,9 @@ use std::iter::once;
use std::path::{self, Path, PathBuf};

use rustc_ast::ptr::P;
use rustc_ast::{AttrVec, Attribute, Inline, Item, ModSpans, token};
use rustc_ast::{AttrVec, Attribute, Inline, Item, ModSpans};
use rustc_errors::{Diag, ErrorGuaranteed};
use rustc_parse::{new_parser_from_file, unwrap_or_emit_fatal, validate_attr};
use rustc_parse::{exp, new_parser_from_file, unwrap_or_emit_fatal, validate_attr};
use rustc_session::Session;
use rustc_session::parse::ParseSess;
use rustc_span::{Ident, Span, sym};
@@ -70,7 +70,7 @@ pub(crate) fn parse_external_mod(
let mut parser =
unwrap_or_emit_fatal(new_parser_from_file(&sess.psess, &mp.file_path, Some(span)));
let (inner_attrs, items, inner_span) =
parser.parse_mod(&token::Eof).map_err(|err| ModError::ParserError(err))?;
parser.parse_mod(exp!(Eof)).map_err(|err| ModError::ParserError(err))?;
attrs.extend(inner_attrs);
(items, inner_span, mp.file_path)
};
4 changes: 2 additions & 2 deletions compiler/rustc_expand/src/proc_macro_server.rs
@@ -15,7 +15,7 @@ use rustc_data_structures::sync::Lrc;
use rustc_errors::{Diag, ErrorGuaranteed, MultiSpan, PResult};
use rustc_parse::lexer::nfc_normalize;
use rustc_parse::parser::Parser;
use rustc_parse::{new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};
use rustc_parse::{exp, new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};
use rustc_session::parse::ParseSess;
use rustc_span::def_id::CrateNum;
use rustc_span::{BytePos, FileName, Pos, SourceFile, Span, Symbol, sym};
@@ -473,7 +473,7 @@ impl server::FreeFunctions for Rustc<'_, '_> {
unwrap_or_emit_fatal(new_parser_from_source_str(self.psess(), name, s.to_owned()));

let first_span = parser.token.span.data();
let minus_present = parser.eat(&token::BinOp(token::Minus));
let minus_present = parser.eat(exp!(Minus));

let lit_span = parser.token.span.data();
let token::Literal(mut lit) = parser.token.kind else {
41 changes: 20 additions & 21 deletions compiler/rustc_parse/src/parser/attr.rs
@@ -1,16 +1,15 @@
use rustc_ast::token::{self, Delimiter};
use rustc_ast::{self as ast, Attribute, attr};
use rustc_ast::{self as ast, Attribute, attr, token};
use rustc_errors::codes::*;
use rustc_errors::{Diag, PResult};
use rustc_span::{BytePos, Span, kw};
use rustc_span::{BytePos, Span};
use thin_vec::ThinVec;
use tracing::debug;

use super::{
AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing,
UsePreAttrPos,
};
use crate::{errors, fluent_generated as fluent, maybe_whole};
use crate::{errors, exp, fluent_generated as fluent, maybe_whole};

// Public for rustfmt usage
#[derive(Debug)]
@@ -45,7 +44,7 @@ impl<'a> Parser<'a> {
let mut just_parsed_doc_comment = false;
let start_pos = self.num_bump_calls;
loop {
let attr = if self.check(&token::Pound) {
let attr = if self.check(exp!(Pound)) {
let prev_outer_attr_sp = outer_attrs.last().map(|attr: &Attribute| attr.span);

let inner_error_reason = if just_parsed_doc_comment {
@@ -126,14 +125,14 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
// Attributes can't have attributes of their own [Editor's note: not with that attitude]
self.collect_tokens_no_attrs(|this| {
assert!(this.eat(&token::Pound), "parse_attribute called in non-attribute position");
assert!(this.eat(exp!(Pound)), "parse_attribute called in non-attribute position");

let style =
if this.eat(&token::Not) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
if this.eat(exp!(Not)) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };

this.expect(&token::OpenDelim(Delimiter::Bracket))?;
this.expect(exp!(OpenBracket))?;
let item = this.parse_attr_item(ForceCollect::No)?;
this.expect(&token::CloseDelim(Delimiter::Bracket))?;
this.expect(exp!(CloseBracket))?;
let attr_sp = lo.to(this.prev_token.span);

// Emit error if inner attribute is encountered and forbidden.
@@ -274,10 +273,10 @@ impl<'a> Parser<'a> {

// Attr items don't have attributes.
self.collect_tokens(None, AttrWrapper::empty(), force_collect, |this, _empty_attrs| {
let is_unsafe = this.eat_keyword(kw::Unsafe);
let is_unsafe = this.eat_keyword(exp!(Unsafe));
let unsafety = if is_unsafe {
let unsafe_span = this.prev_token.span;
this.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
this.expect(exp!(OpenParen))?;
ast::Safety::Unsafe(unsafe_span)
} else {
ast::Safety::Default
@@ -286,7 +285,7 @@ impl<'a> Parser<'a> {
let path = this.parse_path(PathStyle::Mod)?;
let args = this.parse_attr_args()?;
if is_unsafe {
this.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
this.expect(exp!(CloseParen))?;
}
Ok((
ast::AttrItem { unsafety, path, args, tokens: None },
@@ -306,7 +305,7 @@ impl<'a> Parser<'a> {
loop {
let start_pos = self.num_bump_calls;
// Only try to parse if it is an inner attribute (has `!`).
let attr = if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
let attr = if self.check(exp!(Pound)) && self.look_ahead(1, |t| t == &token::Not) {
Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
} else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
if attr_style == ast::AttrStyle::Inner {
@@ -358,15 +357,15 @@ impl<'a> Parser<'a> {
&mut self,
) -> PResult<'a, (ast::MetaItemInner, Vec<(ast::AttrItem, Span)>)> {
let cfg_predicate = self.parse_meta_item_inner()?;
self.expect(&token::Comma)?;
self.expect(exp!(Comma))?;

// Presumably, the majority of the time there will only be one attr.
let mut expanded_attrs = Vec::with_capacity(1);
while self.token != token::Eof {
let lo = self.token.span;
let item = self.parse_attr_item(ForceCollect::Yes)?;
expanded_attrs.push((item, lo.to(self.prev_token.span)));
if !self.eat(&token::Comma) {
if !self.eat(exp!(Comma)) {
break;
}
}
@@ -380,7 +379,7 @@ impl<'a> Parser<'a> {
let mut nmis = ThinVec::with_capacity(1);
while self.token != token::Eof {
nmis.push(self.parse_meta_item_inner()?);
if !self.eat(&token::Comma) {
if !self.eat(exp!(Comma)) {
break;
}
}
@@ -413,13 +412,13 @@ impl<'a> Parser<'a> {

let lo = self.token.span;
let is_unsafe = if unsafe_allowed == AllowLeadingUnsafe::Yes {
self.eat_keyword(kw::Unsafe)
self.eat_keyword(exp!(Unsafe))
} else {
false
};
let unsafety = if is_unsafe {
let unsafe_span = self.prev_token.span;
self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
self.expect(exp!(OpenParen))?;

ast::Safety::Unsafe(unsafe_span)
} else {
@@ -429,17 +428,17 @@ impl<'a> Parser<'a> {
let path = self.parse_path(PathStyle::Mod)?;
let kind = self.parse_meta_item_kind()?;
if is_unsafe {
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;
}
let span = lo.to(self.prev_token.span);

Ok(ast::MetaItem { unsafety, path, kind, span })
}

pub(crate) fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
Ok(if self.eat(&token::Eq) {
Ok(if self.eat(exp!(Eq)) {
ast::MetaItemKind::NameValue(self.parse_unsuffixed_meta_item_lit()?)
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
} else if self.check(exp!(OpenParen)) {
let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?;
ast::MetaItemKind::List(list)
} else {
156 changes: 61 additions & 95 deletions compiler/rustc_parse/src/parser/diagnostics.rs

Large diffs are not rendered by default.

308 changes: 151 additions & 157 deletions compiler/rustc_parse/src/parser/expr.rs

Large diffs are not rendered by default.

33 changes: 17 additions & 16 deletions compiler/rustc_parse/src/parser/generics.rs
@@ -13,6 +13,7 @@ use crate::errors::{
UnexpectedSelfInGenericParameters, WhereClauseBeforeTupleStructBody,
WhereClauseBeforeTupleStructBodySugg,
};
use crate::exp;

enum PredicateKindOrStructBody {
PredicateKind(ast::WherePredicateKind),
@@ -52,7 +53,7 @@ impl<'a> Parser<'a> {

// Parse optional colon and param bounds.
let mut colon_span = None;
let bounds = if self.eat(&token::Colon) {
let bounds = if self.eat(exp!(Colon)) {
colon_span = Some(self.prev_token.span);
// recover from `impl Trait` in type param bound
if self.token.is_keyword(kw::Impl) {
@@ -89,7 +90,7 @@ impl<'a> Parser<'a> {
Vec::new()
};

let default = if self.eat(&token::Eq) { Some(self.parse_ty()?) } else { None };
let default = if self.eat(exp!(Eq)) { Some(self.parse_ty()?) } else { None };
Ok(GenericParam {
ident,
id: ast::DUMMY_NODE_ID,
@@ -107,13 +108,13 @@ impl<'a> Parser<'a> {
) -> PResult<'a, GenericParam> {
let const_span = self.token.span;

self.expect_keyword(kw::Const)?;
self.expect_keyword(exp!(Const))?;
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
self.expect(exp!(Colon))?;
let ty = self.parse_ty()?;

// Parse optional const generics default value.
let default = if self.eat(&token::Eq) { Some(self.parse_const_arg()?) } else { None };
let default = if self.eat(exp!(Eq)) { Some(self.parse_const_arg()?) } else { None };

Ok(GenericParam {
ident,
@@ -132,11 +133,11 @@ impl<'a> Parser<'a> {
mistyped_const_ident: Ident,
) -> PResult<'a, GenericParam> {
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
self.expect(exp!(Colon))?;
let ty = self.parse_ty()?;

// Parse optional const generics default value.
let default = if self.eat(&token::Eq) { Some(self.parse_const_arg()?) } else { None };
let default = if self.eat(exp!(Eq)) { Some(self.parse_const_arg()?) } else { None };

self.dcx()
.struct_span_err(
@@ -177,13 +178,13 @@ impl<'a> Parser<'a> {
.emit_err(UnexpectedSelfInGenericParameters { span: this.prev_token.span });

// Eat a trailing comma, if it exists.
let _ = this.eat(&token::Comma);
let _ = this.eat(exp!(Comma));
}

let param = if this.check_lifetime() {
let lifetime = this.expect_lifetime();
// Parse lifetime parameter.
let (colon_span, bounds) = if this.eat(&token::Colon) {
let (colon_span, bounds) = if this.eat(exp!(Colon)) {
(Some(this.prev_token.span), this.parse_lt_param_bounds())
} else {
(None, Vec::new())
@@ -209,7 +210,7 @@ impl<'a> Parser<'a> {
is_placeholder: false,
colon_span,
})
} else if this.check_keyword(kw::Const) {
} else if this.check_keyword(exp!(Const)) {
// Parse const parameter.
Some(this.parse_const_param(attrs)?)
} else if this.check_ident() {
@@ -246,7 +247,7 @@ impl<'a> Parser<'a> {
return Ok((None, Trailing::No, UsePreAttrPos::No));
};

if !this.eat(&token::Comma) {
if !this.eat(exp!(Comma)) {
done = true;
}
// We just ate the comma, so no need to capture the trailing token.
@@ -324,7 +325,7 @@ impl<'a> Parser<'a> {
};
let mut tuple_struct_body = None;

if !self.eat_keyword(kw::Where) {
if !self.eat_keyword(exp!(Where)) {
return Ok((where_clause, None));
}
where_clause.has_where_token = true;
@@ -344,7 +345,7 @@ impl<'a> Parser<'a> {
let kind = if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
let lifetime = self.expect_lifetime();
// Bounds starting with a colon are mandatory, but possibly empty.
self.expect(&token::Colon)?;
self.expect(exp!(Colon))?;
let bounds = self.parse_lt_param_bounds();
ast::WherePredicateKind::RegionPredicate(ast::WhereRegionPredicate {
lifetime,
@@ -370,7 +371,7 @@ impl<'a> Parser<'a> {
});

let prev_token = self.prev_token.span;
let ate_comma = self.eat(&token::Comma);
let ate_comma = self.eat(exp!(Comma));

if self.eat_keyword_noexpect(kw::Where) {
self.dcx().emit_err(MultipleWhereClauses {
@@ -464,7 +465,7 @@ impl<'a> Parser<'a> {
// Parse type with mandatory colon and (possibly empty) bounds,
// or with mandatory equality sign and the second type.
let ty = self.parse_ty_for_where_clause()?;
if self.eat(&token::Colon) {
if self.eat(exp!(Colon)) {
let bounds = self.parse_generic_bounds()?;
Ok(ast::WherePredicateKind::BoundPredicate(ast::WhereBoundPredicate {
bound_generic_params: lifetime_defs,
@@ -473,7 +474,7 @@ impl<'a> Parser<'a> {
}))
// FIXME: Decide what should be used here, `=` or `==`.
// FIXME: We are just dropping the binders in lifetime_defs on the floor here.
} else if self.eat(&token::Eq) || self.eat(&token::EqEq) {
} else if self.eat(exp!(Eq)) || self.eat(exp!(EqEq)) {
let rhs_ty = self.parse_ty()?;
Ok(ast::WherePredicateKind::EqPredicate(ast::WhereEqPredicate { lhs_ty: ty, rhs_ty }))
} else {
372 changes: 191 additions & 181 deletions compiler/rustc_parse/src/parser/item.rs

Large diffs are not rendered by default.

305 changes: 136 additions & 169 deletions compiler/rustc_parse/src/parser/mod.rs

Large diffs are not rendered by default.

77 changes: 39 additions & 38 deletions compiler/rustc_parse/src/parser/pat.rs
@@ -30,7 +30,7 @@ use crate::errors::{
UnexpectedVertVertInPattern, WrapInParens,
};
use crate::parser::expr::{DestructuredFloat, could_be_unclosed_char_literal};
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use crate::{exp, maybe_recover_from_interpolated_ty_qpath, maybe_whole};

#[derive(PartialEq, Copy, Clone)]
pub enum Expected {
@@ -110,7 +110,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, P<Pat>> {
let pat = self.parse_pat_no_top_guard(expected, rc, ra, rt)?;

if self.eat_keyword(kw::If) {
if self.eat_keyword(exp!(If)) {
let cond = self.parse_expr()?;
// Feature-gate guard patterns
self.psess.gated_spans.gate(sym::guard_patterns, cond.span);
@@ -193,7 +193,7 @@ impl<'a> Parser<'a> {

// If the next token is not a `|`,
// this is not an or-pattern and we should exit here.
if !self.check(&token::BinOp(token::Or)) && self.token != token::OrOr {
if !self.check(exp!(Or)) && self.token != token::OrOr {
// If we parsed a leading `|` which should be gated,
// then we should really gate the leading `|`.
// This complicated procedure is done purely for diagnostics UX.
@@ -263,7 +263,7 @@ impl<'a> Parser<'a> {
CommaRecoveryMode::LikelyTuple,
Some(syntax_loc),
)?;
let colon = self.eat(&token::Colon);
let colon = self.eat(exp!(Colon));

if let PatKind::Or(pats) = &pat.kind {
let span = pat.span;
@@ -327,7 +327,7 @@ impl<'a> Parser<'a> {
self.dcx().emit_err(UnexpectedVertVertInPattern { span: self.token.span, start: lo });
self.bump();
EatOrResult::AteOr
} else if self.eat(&token::BinOp(token::Or)) {
} else if self.eat(exp!(Or)) {
EatOrResult::AteOr
} else {
EatOrResult::None
@@ -714,40 +714,41 @@ impl<'a> Parser<'a> {
lo = self.token.span;
}

let pat = if self.check(&token::BinOp(token::And)) || self.token == token::AndAnd {
let pat = if self.check(exp!(And)) || self.token == token::AndAnd {
self.parse_pat_deref(expected)?
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
} else if self.check(exp!(OpenParen)) {
self.parse_pat_tuple_or_parens()?
} else if self.check(&token::OpenDelim(Delimiter::Bracket)) {
} else if self.check(exp!(OpenBracket)) {
// Parse `[pat, pat,...]` as a slice pattern.
let (pats, _) = self.parse_delim_comma_seq(Delimiter::Bracket, |p| {
p.parse_pat_allow_top_guard(
None,
RecoverComma::No,
RecoverColon::No,
CommaRecoveryMode::EitherTupleOrPipe,
)
})?;
let (pats, _) =
self.parse_delim_comma_seq(exp!(OpenBracket), exp!(CloseBracket), |p| {
p.parse_pat_allow_top_guard(
None,
RecoverComma::No,
RecoverColon::No,
CommaRecoveryMode::EitherTupleOrPipe,
)
})?;
PatKind::Slice(pats)
} else if self.check(&token::DotDot) && !self.is_pat_range_end_start(1) {
} else if self.check(exp!(DotDot)) && !self.is_pat_range_end_start(1) {
// A rest pattern `..`.
self.bump(); // `..`
PatKind::Rest
} else if self.check(&token::DotDotDot) && !self.is_pat_range_end_start(1) {
} else if self.check(exp!(DotDotDot)) && !self.is_pat_range_end_start(1) {
self.recover_dotdotdot_rest_pat(lo)
} else if let Some(form) = self.parse_range_end() {
self.parse_pat_range_to(form)? // `..=X`, `...X`, or `..X`.
} else if self.eat(&token::Not) {
} else if self.eat(exp!(Not)) {
// Parse `!`
self.psess.gated_spans.gate(sym::never_patterns, self.prev_token.span);
PatKind::Never
} else if self.eat_keyword(kw::Underscore) {
} else if self.eat_keyword(exp!(Underscore)) {
// Parse `_`
PatKind::Wild
} else if self.eat_keyword(kw::Mut) {
} else if self.eat_keyword(exp!(Mut)) {
self.parse_pat_ident_mut()?
} else if self.eat_keyword(kw::Ref) {
if self.check_keyword(kw::Box) {
} else if self.eat_keyword(exp!(Ref)) {
if self.check_keyword(exp!(Box)) {
// Suggest `box ref`.
let span = self.prev_token.span.to(self.token.span);
self.bump();
@@ -756,7 +757,7 @@ impl<'a> Parser<'a> {
// Parse ref ident @ pat / ref mut ident @ pat
let mutbl = self.parse_mutability();
self.parse_pat_ident(BindingMode(ByRef::Yes(mutbl), Mutability::Not), syntax_loc)?
} else if self.eat_keyword(kw::Box) {
} else if self.eat_keyword(exp!(Box)) {
self.parse_pat_box()?
} else if self.check_inline_const(0) {
// Parse `const pat`
@@ -793,14 +794,14 @@ impl<'a> Parser<'a> {
};
let span = lo.to(self.prev_token.span);

if qself.is_none() && self.check(&token::Not) {
if qself.is_none() && self.check(exp!(Not)) {
self.parse_pat_mac_invoc(path)?
} else if let Some(form) = self.parse_range_end() {
let begin = self.mk_expr(span, ExprKind::Path(qself, path));
self.parse_pat_range_begin_with(begin, form)?
} else if self.check(&token::OpenDelim(Delimiter::Brace)) {
} else if self.check(exp!(OpenBrace)) {
self.parse_pat_struct(qself, path)?
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
} else if self.check(exp!(OpenParen)) {
self.parse_pat_tuple_struct(qself, path)?
} else {
match self.maybe_recover_trailing_expr(span, false) {
@@ -1106,7 +1107,7 @@ impl<'a> Parser<'a> {
/// Eat any extraneous `mut`s and error + recover if we ate any.
fn recover_additional_muts(&mut self) {
let lo = self.token.span;
while self.eat_keyword(kw::Mut) {}
while self.eat_keyword(exp!(Mut)) {}
if lo == self.token.span {
return;
}
@@ -1147,11 +1148,11 @@ impl<'a> Parser<'a> {

/// Parses the range pattern end form `".." | "..." | "..=" ;`.
fn parse_range_end(&mut self) -> Option<Spanned<RangeEnd>> {
let re = if self.eat(&token::DotDotDot) {
let re = if self.eat(exp!(DotDotDot)) {
RangeEnd::Included(RangeSyntax::DotDotDot)
} else if self.eat(&token::DotDotEq) {
} else if self.eat(exp!(DotDotEq)) {
RangeEnd::Included(RangeSyntax::DotDotEq)
} else if self.eat(&token::DotDot) {
} else if self.eat(exp!(DotDot)) {
RangeEnd::Excluded
} else {
return None;
@@ -1271,7 +1272,7 @@ impl<'a> Parser<'a> {

// recover trailing `)`
if let Some(open_paren) = open_paren {
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;

self.dcx().emit_err(UnexpectedParenInRangePat {
span: vec![open_paren, self.prev_token.span],
@@ -1331,7 +1332,7 @@ impl<'a> Parser<'a> {
}));
}

let sub = if self.eat(&token::At) {
let sub = if self.eat(exp!(At)) {
Some(self.parse_pat_no_top_alt(Some(Expected::BindingPattern), None)?)
} else {
None
@@ -1447,7 +1448,7 @@ impl<'a> Parser<'a> {

// We cannot use `parse_pat_ident()` since it will complain `box`
// is not an identifier.
let sub = if self.eat(&token::At) {
let sub = if self.eat(exp!(At)) {
Some(self.parse_pat_no_top_alt(Some(Expected::BindingPattern), None)?)
} else {
None
@@ -1504,9 +1505,9 @@ impl<'a> Parser<'a> {
}
ate_comma = false;

if self.check(&token::DotDot)
if self.check(exp!(DotDot))
|| self.check_noexpect(&token::DotDotDot)
|| self.check_keyword(kw::Underscore)
|| self.check_keyword(exp!(Underscore))
{
etc = PatFieldsRest::Rest;
let mut etc_sp = self.token.span;
@@ -1594,7 +1595,7 @@ impl<'a> Parser<'a> {
return Err(err);
}
}?;
ate_comma = this.eat(&token::Comma);
ate_comma = this.eat(exp!(Comma));

last_non_comma_dotdot_span = Some(this.prev_token.span);

@@ -1706,7 +1707,7 @@ impl<'a> Parser<'a> {
(pat, fieldname, false)
} else {
// Parsing a pattern of the form `(box) (ref) (mut) fieldname`.
let is_box = self.eat_keyword(kw::Box);
let is_box = self.eat_keyword(exp!(Box));
let boxed_span = self.token.span;
let mutability = self.parse_mutability();
let by_ref = self.parse_byref();
34 changes: 16 additions & 18 deletions compiler/rustc_parse/src/parser/path.rs
@@ -17,7 +17,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{Parser, Restrictions, TokenType};
use crate::errors::{PathSingleColon, PathTripleColon};
use crate::parser::{CommaRecoveryMode, RecoverColon, RecoverComma};
use crate::{errors, maybe_whole};
use crate::{errors, exp, maybe_whole};

/// Specifies how to parse a path.
#[derive(Copy, Clone, PartialEq)]
@@ -80,7 +80,7 @@ impl<'a> Parser<'a> {
// above). `path_span` has the span of that path, or an empty
// span in the case of something like `<T>::Bar`.
let (mut path, path_span);
if self.eat_keyword(kw::As) {
if self.eat_keyword(exp!(As)) {
let path_lo = self.token.span;
path = self.parse_path(PathStyle::Type)?;
path_span = path_lo.to(self.prev_token.span);
@@ -90,15 +90,15 @@ impl<'a> Parser<'a> {
}

// See doc comment for `unmatched_angle_bracket_count`.
self.expect(&token::Gt)?;
self.expect(exp!(Gt))?;
if self.unmatched_angle_bracket_count > 0 {
self.unmatched_angle_bracket_count -= 1;
debug!("parse_qpath: (decrement) count={:?}", self.unmatched_angle_bracket_count);
}

let is_import_coupler = self.is_import_coupler();
if !is_import_coupler && !self.recover_colon_before_qpath_proj() {
self.expect(&token::PathSep)?;
self.expect(exp!(PathSep))?;
}

let qself = P(QSelf { ty, path_span, position: path.segments.len() });
@@ -242,7 +242,7 @@ impl<'a> Parser<'a> {
// `PathStyle::Expr` is only provided at the root invocation and never in
// `parse_path_segment` to recurse and therefore can be checked to maintain
// this invariant.
self.check_trailing_angle_brackets(&segment, &[&token::PathSep]);
self.check_trailing_angle_brackets(&segment, &[exp!(PathSep)]);
}
segments.push(segment);

@@ -275,7 +275,7 @@ impl<'a> Parser<'a> {
/// Eat `::` or, potentially, `:::`.
#[must_use]
pub(super) fn eat_path_sep(&mut self) -> bool {
let result = self.eat(&token::PathSep);
let result = self.eat(exp!(PathSep));
if result && self.may_recover() {
if self.eat_noexpect(&token::Colon) {
self.dcx().emit_err(PathTripleColon { span: self.prev_token.span });
@@ -300,10 +300,8 @@ impl<'a> Parser<'a> {
)
};
let check_args_start = |this: &mut Self| {
this.expected_tokens.extend_from_slice(&[
TokenType::Token(token::Lt),
TokenType::Token(token::OpenDelim(Delimiter::Parenthesis)),
]);
this.expected_token_types.insert(TokenType::Lt);
this.expected_token_types.insert(TokenType::OpenParen);
is_args_start(&this.token)
};

@@ -367,7 +365,7 @@ impl<'a> Parser<'a> {
{
self.bump(); // (
self.bump(); // ..
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;
let span = lo.to(self.prev_token.span);

self.psess.gated_spans.gate(sym::return_type_notation, span);
@@ -661,12 +659,12 @@ impl<'a> Parser<'a> {
let mut args = ThinVec::new();
while let Some(arg) = self.parse_angle_arg(ty_generics)? {
args.push(arg);
if !self.eat(&token::Comma) {
if !self.eat(exp!(Comma)) {
if self.check_noexpect(&TokenKind::Semi)
&& self.look_ahead(1, |t| t.is_ident() || t.is_lifetime())
{
// Add `>` to the list of expected tokens.
self.check(&token::Gt);
self.check(exp!(Gt));
// Handle `,` to `;` substitution
let mut err = self.unexpected().unwrap_err();
self.bump();
@@ -705,7 +703,7 @@ impl<'a> Parser<'a> {
// is present and then use that info to push the other token onto the tokens list
let separated =
self.check_noexpect(&token::Colon) || self.check_noexpect(&token::Eq);
if separated && (self.check(&token::Colon) | self.check(&token::Eq)) {
if separated && (self.check(exp!(Colon)) | self.check(exp!(Eq))) {
let arg_span = arg.span();
let (binder, ident, gen_args) = match self.get_ident_from_generic_arg(&arg) {
Ok(ident_gen_args) => ident_gen_args,
@@ -720,9 +718,9 @@ impl<'a> Parser<'a> {
"`for<...>` is not allowed on associated type bounds",
));
}
let kind = if self.eat(&token::Colon) {
let kind = if self.eat(exp!(Colon)) {
AssocItemConstraintKind::Bound { bounds: self.parse_generic_bounds()? }
} else if self.eat(&token::Eq) {
} else if self.eat(exp!(Eq)) {
self.parse_assoc_equality_term(
ident,
gen_args.as_ref(),
@@ -743,8 +741,8 @@ impl<'a> Parser<'a> {
if self.prev_token.is_ident()
&& (self.token.is_ident() || self.look_ahead(1, |token| token.is_ident()))
{
self.check(&token::Colon);
self.check(&token::Eq);
self.check(exp!(Colon));
self.check(exp!(Eq));
}
Ok(Some(AngleBracketedArg::Arg(arg)))
}
27 changes: 13 additions & 14 deletions compiler/rustc_parse/src/parser/stmt.rs
@@ -24,7 +24,7 @@ use super::{
Trailing, UsePreAttrPos,
};
use crate::errors::MalformedLoopLabel;
use crate::{errors, maybe_whole};
use crate::{errors, exp, maybe_whole};

impl<'a> Parser<'a> {
/// Parses a statement. This stops just before trailing semicolons on everything but items.
@@ -71,7 +71,7 @@ impl<'a> Parser<'a> {

let stmt = if self.token.is_keyword(kw::Let) {
self.collect_tokens(None, attrs, force_collect, |this, attrs| {
this.expect_keyword(kw::Let)?;
this.expect_keyword(exp!(Let))?;
let local = this.parse_local(attrs)?;
let trailing = Trailing::from(capture_semi && this.token == token::Semi);
Ok((
@@ -140,7 +140,7 @@ impl<'a> Parser<'a> {
force_collect,
)? {
self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
} else if self.eat(&token::Semi) {
} else if self.eat(exp!(Semi)) {
// Do not attempt to parse an expression if we're done here.
self.error_outer_attrs(attrs);
self.mk_stmt(lo, StmtKind::Empty)
@@ -156,7 +156,7 @@ impl<'a> Parser<'a> {
Ok((expr, Trailing::No, UsePreAttrPos::Yes))
},
)?;
if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(kw::Else) {
if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(exp!(Else)) {
let bl = self.parse_block()?;
// Destructuring assignment ... else.
// This is not allowed, but point it out in a nice way.
@@ -176,7 +176,7 @@ impl<'a> Parser<'a> {
let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let path = this.parse_path(PathStyle::Expr)?;

if this.eat(&token::Not) {
if this.eat(exp!(Not)) {
let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
return Ok((
stmt_mac,
@@ -185,7 +185,7 @@ impl<'a> Parser<'a> {
));
}

let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) {
let expr = if this.eat(exp!(OpenBrace)) {
this.parse_expr_struct(None, path, true)?
} else {
let hi = this.prev_token.span;
@@ -370,7 +370,7 @@ impl<'a> Parser<'a> {
let kind = match init {
None => LocalKind::Decl,
Some(init) => {
if self.eat_keyword(kw::Else) {
if self.eat_keyword(exp!(Else)) {
if self.token.is_keyword(kw::If) {
// `let...else if`. Emit the same error that `parse_block()` would,
// but explicitly point out that this pattern is not allowed.
@@ -449,7 +449,7 @@ impl<'a> Parser<'a> {
self.bump();
true
}
_ => self.eat(&token::Eq),
_ => self.eat(exp!(Eq)),
};

Ok(if eq_consumed || eq_optional { Some(self.parse_expr()?) } else { None })
@@ -509,7 +509,7 @@ impl<'a> Parser<'a> {
Ok(Some(Stmt { kind: StmtKind::Empty, .. })) => {}
Ok(Some(stmt)) => {
let stmt_own_line = self.psess.source_map().is_line_before_span_empty(sp);
let stmt_span = if stmt_own_line && self.eat(&token::Semi) {
let stmt_span = if stmt_own_line && self.eat(exp!(Semi)) {
// Expand the span to include the semicolon.
stmt.span.with_hi(self.prev_token.span.hi())
} else {
@@ -651,7 +651,7 @@ impl<'a> Parser<'a> {

let maybe_ident = self.prev_token.clone();
self.maybe_recover_unexpected_block_label();
if !self.eat(&token::OpenDelim(Delimiter::Brace)) {
if !self.eat(exp!(OpenBrace)) {
return self.error_block_no_opening_brace();
}

@@ -678,7 +678,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, P<Block>> {
let mut stmts = ThinVec::new();
let mut snapshot = None;
while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
while !self.eat(exp!(CloseBrace)) {
if self.token == token::Eof {
break;
}
@@ -781,8 +781,7 @@ impl<'a> Parser<'a> {
{
// Just check for errors and recover; do not eat semicolon yet.

let expect_result =
self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]);
let expect_result = self.expect_one_of(&[], &[exp!(Semi), exp!(CloseBrace)]);

// Try to both emit a better diagnostic, and avoid further errors by replacing
// the `expr` with `ExprKind::Err`.
@@ -930,7 +929,7 @@ impl<'a> Parser<'a> {
}
}

if add_semi_to_stmt || (eat_semi && self.eat(&token::Semi)) {
if add_semi_to_stmt || (eat_semi && self.eat(exp!(Semi))) {
stmt = stmt.add_trailing_semicolon();
}

631 changes: 631 additions & 0 deletions compiler/rustc_parse/src/parser/token_type.rs

Large diffs are not rendered by default.
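
The new `token_type.rs` (631 added lines, not rendered) holds the `TokenType` enum and the set type behind `expected_token_types`; the unrendered `diagnostics.rs` changes read that set back when building "expected one of ..." errors. Below is a hedged standalone sketch of how such a set could be drained for diagnostics; the `u128` representation, the toy enum contents, and the `describe` strings are assumptions, only the `TokenType` / `expected_token_types` / clearing names come from the diff.

```rust
// Sketch only: turning a bitset of expected token types back into the sorted
// list a parser error message needs. Names mirror the PR (`TokenType`,
// `expected_token_types`); the representation is assumed.

#[derive(Clone, Copy, Debug)]
enum TokenType {
    Eq,
    Lt,
    Gt,
    Comma,
    Semi,
    OpenParen,
    Lifetime,
    // ... the real enum covers every token/keyword the parser can expect.
}

impl TokenType {
    const ALL: [TokenType; 7] = [
        TokenType::Eq,
        TokenType::Lt,
        TokenType::Gt,
        TokenType::Comma,
        TokenType::Semi,
        TokenType::OpenParen,
        TokenType::Lifetime,
    ];

    fn describe(self) -> &'static str {
        match self {
            TokenType::Eq => "`=`",
            TokenType::Lt => "`<`",
            TokenType::Gt => "`>`",
            TokenType::Comma => "`,`",
            TokenType::Semi => "`;`",
            TokenType::OpenParen => "`(`",
            TokenType::Lifetime => "lifetime",
        }
    }
}

/// A tiny fixed-size set keyed by the enum's discriminant.
#[derive(Default)]
struct TokenTypeSet(u128);

impl TokenTypeSet {
    fn insert(&mut self, t: TokenType) {
        self.0 |= 1u128 << (t as u32);
    }
    fn clear(&mut self) {
        self.0 = 0;
    }
    fn contains(&self, t: TokenType) -> bool {
        (self.0 & (1u128 << (t as u32))) != 0
    }
}

fn main() {
    let mut expected_token_types = TokenTypeSet::default();
    // check()/eat() calls would record expectations as they fail to match:
    expected_token_types.insert(TokenType::Comma);
    expected_token_types.insert(TokenType::Gt);

    // Diagnostics walk the dense enum once and collect what was expected.
    let expected: Vec<&str> = TokenType::ALL
        .iter()
        .copied()
        .filter(|t| expected_token_types.contains(*t))
        .map(TokenType::describe)
        .collect();
    println!("expected one of {}", expected.join(", "));

    expected_token_types.clear(); // `clear_expected_token_types` equivalent
    assert!(!expected_token_types.contains(TokenType::Comma));
}
```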

92 changes: 46 additions & 46 deletions compiler/rustc_parse/src/parser/ty.rs
@@ -18,7 +18,7 @@ use crate::errors::{
HelpUseLatestEdition, InvalidDynKeyword, LifetimeAfterMut, NeedPlusAfterTraitObjectLifetime,
NestedCVariadicType, ReturnTypesUseThinArrow,
};
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use crate::{exp, maybe_recover_from_interpolated_ty_qpath, maybe_whole};

/// Signals whether parsing a type should allow `+`.
///
@@ -203,7 +203,7 @@ impl<'a> Parser<'a> {
recover_return_sign: RecoverReturnSign,
) -> PResult<'a, FnRetTy> {
let lo = self.prev_token.span;
Ok(if self.eat(&token::RArrow) {
Ok(if self.eat(exp!(RArrow)) {
// FIXME(Centril): Can we unconditionally `allow_plus`?
let ty = self.parse_ty_common(
allow_plus,
@@ -251,28 +251,28 @@ impl<'a> Parser<'a> {

let lo = self.token.span;
let mut impl_dyn_multi = false;
let kind = if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
let kind = if self.check(exp!(OpenParen)) {
self.parse_ty_tuple_or_parens(lo, allow_plus)?
} else if self.eat(&token::Not) {
} else if self.eat(exp!(Not)) {
// Never type `!`
TyKind::Never
} else if self.eat(&token::BinOp(token::Star)) {
} else if self.eat(exp!(Star)) {
self.parse_ty_ptr()?
} else if self.eat(&token::OpenDelim(Delimiter::Bracket)) {
} else if self.eat(exp!(OpenBracket)) {
self.parse_array_or_slice_ty()?
} else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) {
} else if self.check(exp!(And)) || self.check(exp!(AndAnd)) {
// Reference
self.expect_and()?;
self.parse_borrowed_pointee()?
} else if self.eat_keyword_noexpect(kw::Typeof) {
self.parse_typeof_ty()?
} else if self.eat_keyword(kw::Underscore) {
} else if self.eat_keyword(exp!(Underscore)) {
// A type to be inferred `_`
TyKind::Infer
} else if self.check_fn_front_matter(false, Case::Sensitive) {
// Function pointer type
self.parse_ty_bare_fn(lo, ThinVec::new(), None, recover_return_sign)?
} else if self.check_keyword(kw::For) {
} else if self.check_keyword(exp!(For)) {
// Function pointer type or bound list (trait object type) starting with a poly-trait.
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
// `for<'lt> Trait1<'lt> + Trait2 + 'a`
@@ -324,7 +324,7 @@ impl<'a> Parser<'a> {
self.parse_remaining_bounds_path(lifetime_defs, path, lo, parse_plus)?
}
}
} else if self.eat_keyword(kw::Impl) {
} else if self.eat_keyword(exp!(Impl)) {
self.parse_impl_ty(&mut impl_dyn_multi)?
} else if self.is_explicit_dyn_type() {
self.parse_dyn_ty(&mut impl_dyn_multi)?
@@ -336,7 +336,7 @@ impl<'a> Parser<'a> {
self.parse_path_start_ty(lo, allow_plus, ty_generics)?
} else if self.can_begin_bound() {
self.parse_bare_trait_object(lo, allow_plus)?
} else if self.eat(&token::DotDotDot) {
} else if self.eat(exp!(DotDotDot)) {
match allow_c_variadic {
AllowCVariadic::Yes => TyKind::CVarArgs,
AllowCVariadic::No => {
@@ -347,7 +347,7 @@ impl<'a> Parser<'a> {
TyKind::Err(guar)
}
}
} else if self.check_keyword(kw::Unsafe)
} else if self.check_keyword(exp!(Unsafe))
&& self.look_ahead(1, |tok| matches!(tok.kind, token::Lt))
{
self.parse_unsafe_binder_ty()?
@@ -374,7 +374,7 @@ impl<'a> Parser<'a> {

fn parse_unsafe_binder_ty(&mut self) -> PResult<'a, TyKind> {
let lo = self.token.span;
assert!(self.eat_keyword(kw::Unsafe));
assert!(self.eat_keyword(exp!(Unsafe)));
self.expect_lt()?;
let generic_params = self.parse_generic_params()?;
self.expect_gt()?;
@@ -487,16 +487,16 @@ impl<'a> Parser<'a> {
Err(err) => return Err(err),
};

let ty = if self.eat(&token::Semi) {
let ty = if self.eat(exp!(Semi)) {
let mut length = self.parse_expr_anon_const()?;
if let Err(e) = self.expect(&token::CloseDelim(Delimiter::Bracket)) {
if let Err(e) = self.expect(exp!(CloseBracket)) {
// Try to recover from `X<Y, ...>` when `X::<Y, ...>` works
self.check_mistyped_turbofish_with_multiple_type_params(e, &mut length.value)?;
self.expect(&token::CloseDelim(Delimiter::Bracket))?;
self.expect(exp!(CloseBracket))?;
}
TyKind::Array(elt_ty, length)
} else {
self.expect(&token::CloseDelim(Delimiter::Bracket))?;
self.expect(exp!(CloseBracket))?;
TyKind::Slice(elt_ty)
};

@@ -579,9 +579,9 @@ impl<'a> Parser<'a> {
// Parses the `typeof(EXPR)`.
// To avoid ambiguity, the type is surrounded by parentheses.
fn parse_typeof_ty(&mut self) -> PResult<'a, TyKind> {
self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
self.expect(exp!(OpenParen))?;
let expr = self.parse_expr_anon_const()?;
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;
Ok(TyKind::Typeof(expr))
}

@@ -697,15 +697,15 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
self.expect_lt()?;
let (args, _, _) = self.parse_seq_to_before_tokens(
&[&TokenKind::Gt],
&[exp!(Gt)],
&[
&TokenKind::Ge,
&TokenKind::BinOp(BinOpToken::Shr),
&TokenKind::BinOpEq(BinOpToken::Shr),
],
SeqSep::trailing_allowed(token::Comma),
SeqSep::trailing_allowed(exp!(Comma)),
|self_| {
if self_.check_keyword(kw::SelfUpper) {
if self_.check_keyword(exp!(SelfUpper)) {
self_.bump();
Ok(PreciseCapturingArg::Arg(
ast::Path::from_ident(self_.prev_token.ident().unwrap().0),
@@ -729,7 +729,7 @@ impl<'a> Parser<'a> {

/// Is a `dyn B0 + ... + Bn` type allowed here?
fn is_explicit_dyn_type(&mut self) -> bool {
self.check_keyword(kw::Dyn)
self.check_keyword(exp!(Dyn))
&& (self.token.uninterpolated_span().at_least_rust_2018()
|| self.look_ahead(1, |t| {
(can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::BinOp(token::Star))
@@ -745,7 +745,7 @@ impl<'a> Parser<'a> {
self.bump(); // `dyn`

// parse dyn* types
let syntax = if self.eat(&TokenKind::BinOp(token::Star)) {
let syntax = if self.eat(exp!(Star)) {
self.psess.gated_spans.gate(sym::dyn_star, lo.to(self.prev_token.span));
TraitObjectSyntax::DynStar
} else {
@@ -772,7 +772,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, TyKind> {
// Simple path
let path = self.parse_path_inner(PathStyle::Type, ty_generics)?;
if self.eat(&token::Not) {
if self.eat(exp!(Not)) {
// Macro invocation in type position
Ok(TyKind::MacCall(P(MacCall { path, args: self.parse_delim_args()? })))
} else if allow_plus == AllowPlus::Yes && self.check_plus() {
@@ -825,14 +825,14 @@ impl<'a> Parser<'a> {
fn can_begin_bound(&mut self) -> bool {
self.check_path()
|| self.check_lifetime()
|| self.check(&token::Not)
|| self.check(&token::Question)
|| self.check(&token::Tilde)
|| self.check_keyword(kw::For)
|| self.check(&token::OpenDelim(Delimiter::Parenthesis))
|| self.check_keyword(kw::Const)
|| self.check_keyword(kw::Async)
|| self.check_keyword(kw::Use)
|| self.check(exp!(Not))
|| self.check(exp!(Question))
|| self.check(exp!(Tilde))
|| self.check_keyword(exp!(For))
|| self.check(exp!(OpenParen))
|| self.check_keyword(exp!(Const))
|| self.check_keyword(exp!(Async))
|| self.check_keyword(exp!(Use))
}

/// Parses a bound according to the grammar:
@@ -842,11 +842,11 @@ impl<'a> Parser<'a> {
fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> {
let lo = self.token.span;
let leading_token = self.prev_token.clone();
let has_parens = self.eat(&token::OpenDelim(Delimiter::Parenthesis));
let has_parens = self.eat(exp!(OpenParen));

let bound = if self.token.is_lifetime() {
self.parse_generic_lt_bound(lo, has_parens)?
} else if self.eat_keyword(kw::Use) {
} else if self.eat_keyword(exp!(Use)) {
// parse precise captures, if any. This is `use<'lt, 'lt, P, P>`; a list of
// lifetimes and ident params (including SelfUpper). These are validated later
// for order, duplication, and whether they actually reference params.
@@ -919,7 +919,7 @@ impl<'a> Parser<'a> {

/// Recover on `('lifetime)` with `(` already eaten.
fn recover_paren_lifetime(&mut self, lo: Span) -> PResult<'a, ()> {
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;
let span = lo.to(self.prev_token.span);
let sugg = errors::RemoveParens { lo, hi: self.prev_token.span };

@@ -940,21 +940,21 @@ impl<'a> Parser<'a> {
/// See `parse_generic_ty_bound` for the complete grammar of trait bound modifiers.
fn parse_trait_bound_modifiers(&mut self) -> PResult<'a, TraitBoundModifiers> {
let modifier_lo = self.token.span;
let constness = if self.eat(&token::Tilde) {
let constness = if self.eat(exp!(Tilde)) {
let tilde = self.prev_token.span;
self.expect_keyword(kw::Const)?;
self.expect_keyword(exp!(Const))?;
let span = tilde.to(self.prev_token.span);
self.psess.gated_spans.gate(sym::const_trait_impl, span);
BoundConstness::Maybe(span)
} else if self.eat_keyword(kw::Const) {
} else if self.eat_keyword(exp!(Const)) {
self.psess.gated_spans.gate(sym::const_trait_impl, self.prev_token.span);
BoundConstness::Always(self.prev_token.span)
} else {
BoundConstness::Never
};

let asyncness = if self.token.uninterpolated_span().at_least_rust_2018()
&& self.eat_keyword(kw::Async)
&& self.eat_keyword(exp!(Async))
{
self.psess.gated_spans.gate(sym::async_trait_bounds, self.prev_token.span);
BoundAsyncness::Async(self.prev_token.span)
@@ -974,9 +974,9 @@ impl<'a> Parser<'a> {
};
let modifier_hi = self.prev_token.span;

let polarity = if self.eat(&token::Question) {
let polarity = if self.eat(exp!(Question)) {
BoundPolarity::Maybe(self.prev_token.span)
} else if self.eat(&token::Not) {
} else if self.eat(exp!(Not)) {
self.psess.gated_spans.gate(sym::negative_bounds, self.prev_token.span);
BoundPolarity::Negative(self.prev_token.span)
} else {
@@ -1122,7 +1122,7 @@ impl<'a> Parser<'a> {
if self.token.is_like_plus() && leading_token.is_keyword(kw::Dyn) {
let bounds = vec![];
self.parse_remaining_bounds(bounds, true)?;
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;
self.dcx().emit_err(errors::IncorrectParensTraitBounds {
span: vec![lo, self.prev_token.span],
sugg: errors::IncorrectParensTraitBoundsSugg {
@@ -1131,7 +1131,7 @@ impl<'a> Parser<'a> {
},
});
} else {
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;
}
}

@@ -1176,7 +1176,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_late_bound_lifetime_defs(
&mut self,
) -> PResult<'a, (ThinVec<GenericParam>, Option<Span>)> {
if self.eat_keyword(kw::For) {
if self.eat_keyword(exp!(For)) {
let lo = self.token.span;
self.expect_lt()?;
let params = self.parse_generic_params()?;
@@ -1280,7 +1280,7 @@ impl<'a> Parser<'a> {
}

pub(super) fn check_lifetime(&mut self) -> bool {
self.expected_tokens.push(TokenType::Lifetime);
self.expected_token_types.insert(TokenType::Lifetime);
self.token.is_lifetime()
}

11 changes: 6 additions & 5 deletions src/tools/rustfmt/src/parse/macros/cfg_if.rs
@@ -2,6 +2,7 @@ use std::panic::{AssertUnwindSafe, catch_unwind};

use rustc_ast::ast;
use rustc_ast::token::{Delimiter, TokenKind};
use rustc_parse::exp;
use rustc_parse::parser::ForceCollect;
use rustc_span::symbol::kw;

@@ -31,7 +32,7 @@ fn parse_cfg_if_inner<'a>(

while parser.token.kind != TokenKind::Eof {
if process_if_cfg {
if !parser.eat_keyword(kw::If) {
if !parser.eat_keyword(exp!(If)) {
return Err("Expected `if`");
}

@@ -55,7 +56,7 @@ fn parse_cfg_if_inner<'a>(
})?;
}

if !parser.eat(&TokenKind::OpenDelim(Delimiter::Brace)) {
if !parser.eat(exp!(OpenBrace)) {
return Err("Expected an opening brace");
}

@@ -78,15 +79,15 @@ fn parse_cfg_if_inner<'a>(
}
}

if !parser.eat(&TokenKind::CloseDelim(Delimiter::Brace)) {
if !parser.eat(exp!(CloseBrace)) {
return Err("Expected a closing brace");
}

if parser.eat(&TokenKind::Eof) {
if parser.eat(exp!(Eof)) {
break;
}

if !parser.eat_keyword(kw::Else) {
if !parser.eat_keyword(exp!(Else)) {
return Err("Expected `else`");
}

17 changes: 9 additions & 8 deletions src/tools/rustfmt/src/parse/macros/lazy_static.rs
@@ -1,8 +1,9 @@
use rustc_ast::ast;
use rustc_ast::ptr::P;
use rustc_ast::token::TokenKind;
use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
use rustc_span::symbol::{self, kw};
use rustc_parse::exp;
use rustc_span::symbol;

use crate::rewrite::RewriteContext;

@@ -31,19 +32,19 @@ pub(crate) fn parse_lazy_static(
}
}
}
while parser.token.kind != TokenKind::Eof {
while parser.token.kind != token::Eof {
// Parse a `lazy_static!` item.
// FIXME: These `eat_*` calls should be converted to `parse_or` to avoid
// silently formatting malformed lazy-statics.
let vis = parse_or!(parse_visibility, rustc_parse::parser::FollowedByType::No);
let _ = parser.eat_keyword(kw::Static);
let _ = parser.eat_keyword(kw::Ref);
let _ = parser.eat_keyword(exp!(Static));
let _ = parser.eat_keyword(exp!(Ref));
let id = parse_or!(parse_ident);
let _ = parser.eat(&TokenKind::Colon);
let _ = parser.eat(exp!(Colon));
let ty = parse_or!(parse_ty);
let _ = parser.eat(&TokenKind::Eq);
let _ = parser.eat(exp!(Eq));
let expr = parse_or!(parse_expr);
let _ = parser.eat(&TokenKind::Semi);
let _ = parser.eat(exp!(Semi));
result.push((vis, id, ty, expr));
}

5 changes: 2 additions & 3 deletions src/tools/rustfmt/src/parse/parser.rs
@@ -1,11 +1,10 @@
use std::panic::{AssertUnwindSafe, catch_unwind};
use std::path::{Path, PathBuf};

use rustc_ast::token::TokenKind;
use rustc_ast::{ast, attr, ptr};
use rustc_errors::Diag;
use rustc_parse::parser::Parser as RawParser;
use rustc_parse::{new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal};
use rustc_parse::{exp, new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal};
use rustc_span::{Span, sym};
use thin_vec::ThinVec;

@@ -107,7 +106,7 @@ impl<'a> Parser<'a> {
let result = catch_unwind(AssertUnwindSafe(|| {
let mut parser =
unwrap_or_emit_fatal(new_parser_from_file(psess.inner(), path, Some(span)));
match parser.parse_mod(&TokenKind::Eof) {
match parser.parse_mod(exp!(Eof)) {
Ok((a, i, spans)) => Some((a, i, spans.inner_span)),
Err(e) => {
e.emit();