
Commit f5b2896

Move more things around in collect_tokens_trailing_token.

To make things a little clearer, and to avoid some `mut` variables.

1 parent 8b5a7eb

1 file changed: +18 −23 lines

compiler/rustc_parse/src/parser/attr_wrapper.rs

@@ -276,37 +276,32 @@ impl<'a> Parser<'a> {
 
         let replace_ranges_end = self.capture_state.replace_ranges.len();
 
-        let mut end_pos = self.num_bump_calls;
-
-        let mut captured_trailing = false;
-
         // Capture a trailing token if requested by the callback 'f'
-        match trailing {
-            TrailingToken::None => {}
+        let captured_trailing = match trailing {
+            TrailingToken::None => false,
             TrailingToken::Gt => {
                 assert_eq!(self.token.kind, token::Gt);
+                false
             }
             TrailingToken::Semi => {
                 assert_eq!(self.token.kind, token::Semi);
-                end_pos += 1;
-                captured_trailing = true;
+                true
             }
-            TrailingToken::MaybeComma => {
-                if self.token.kind == token::Comma {
-                    end_pos += 1;
-                    captured_trailing = true;
-                }
-            }
-        }
+            TrailingToken::MaybeComma => self.token.kind == token::Comma,
+        };
 
-        // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens),
-        // then extend the range of captured tokens to include it, since the parser
-        // was not actually bumped past it. When the `LazyAttrTokenStream` gets converted
-        // into an `AttrTokenStream`, we will create the proper token.
-        if self.break_last_token {
-            assert!(!captured_trailing, "Cannot set break_last_token and have trailing token");
-            end_pos += 1;
-        }
+        assert!(
+            !(self.break_last_token && captured_trailing),
+            "Cannot set break_last_token and have trailing token"
+        );
+
+        let end_pos = self.num_bump_calls
+            + captured_trailing as usize
+            // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens), then
+            // extend the range of captured tokens to include it, since the parser was not actually
+            // bumped past it. When the `LazyAttrTokenStream` gets converted into an
+            // `AttrTokenStream`, we will create the proper token.
+            + self.break_last_token as usize;
 
         let num_calls = end_pos - start_pos;
 
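For illustration, the refactoring applies two reusable Rust patterns: a `mut` flag assigned from inside a `match` statement becomes a `match` expression that evaluates to the flag (so the binding is immutable and every arm must state its value), and conditional `end_pos += 1` bumps become `bool as usize` terms (a Rust `bool` casts to exactly 0 or 1) summed into one immutable binding. Here is a minimal, runnable sketch of both patterns; `Token` and the two free functions are simplified stand-ins, not rustc's actual types.

// Standalone model of the refactoring in this commit. `Token` and the
// free functions are hypothetical stand-ins for the parser's real state.
#[derive(PartialEq)]
enum Token {
    Gt,
    Semi,
    Comma,
    Other,
}

enum TrailingToken {
    None,
    Gt,
    Semi,
    MaybeComma,
}

// Before: two `mut` locals threaded through a `match` statement.
fn end_pos_before(
    trailing: TrailingToken,
    token: &Token,
    num_bump_calls: usize,
    break_last_token: bool,
) -> usize {
    let mut end_pos = num_bump_calls;
    let mut captured_trailing = false;
    match trailing {
        TrailingToken::None => {}
        TrailingToken::Gt => assert!(*token == Token::Gt),
        TrailingToken::Semi => {
            assert!(*token == Token::Semi);
            end_pos += 1;
            captured_trailing = true;
        }
        TrailingToken::MaybeComma => {
            if *token == Token::Comma {
                end_pos += 1;
                captured_trailing = true;
            }
        }
    }
    if break_last_token {
        assert!(!captured_trailing);
        end_pos += 1;
    }
    end_pos
}

// After: the `match` is an expression yielding the flag, and `end_pos`
// is a single immutable binding built from `bool as usize` terms.
fn end_pos_after(
    trailing: TrailingToken,
    token: &Token,
    num_bump_calls: usize,
    break_last_token: bool,
) -> usize {
    let captured_trailing = match trailing {
        TrailingToken::None => false,
        TrailingToken::Gt => {
            assert!(*token == Token::Gt);
            false
        }
        TrailingToken::Semi => {
            assert!(*token == Token::Semi);
            true
        }
        TrailingToken::MaybeComma => *token == Token::Comma,
    };
    assert!(!(break_last_token && captured_trailing));
    num_bump_calls + captured_trailing as usize + break_last_token as usize
}

fn main() {
    // Both formulations agree on representative inputs.
    assert_eq!(
        end_pos_before(TrailingToken::Gt, &Token::Gt, 10, false),
        end_pos_after(TrailingToken::Gt, &Token::Gt, 10, false),
    );
    assert_eq!(
        end_pos_before(TrailingToken::Semi, &Token::Semi, 10, false),
        end_pos_after(TrailingToken::Semi, &Token::Semi, 10, false),
    );
    assert_eq!(
        end_pos_before(TrailingToken::MaybeComma, &Token::Comma, 10, false),
        end_pos_after(TrailingToken::MaybeComma, &Token::Comma, 10, false),
    );
    assert_eq!(
        end_pos_before(TrailingToken::None, &Token::Other, 10, true),
        end_pos_after(TrailingToken::None, &Token::Other, 10, true),
    );
    println!("both versions agree");
}

Both formulations compute the same value; the expression form makes each arm's contribution to `captured_trailing` explicit and lets the compiler enforce that every arm yields a value, which is exactly why the commit can drop the `mut` variables.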
