@@ -25,20 +25,89 @@ use crate::mbe::macro_parser::NamedMatch::*;
 use crate::mbe::metavar_expr::{MetaVarExprConcatElem, RAW_IDENT_ERR};
 use crate::mbe::{self, KleeneOp, MetaVarExpr};

-// A Marker adds the given mark to the syntax context.
-struct Marker(LocalExpnId, Transparency, FxHashMap<SyntaxContext, SyntaxContext>);
+/// Context needed to perform transcription of metavariable expressions.
+struct MacroTcbCtx<'psess, 'itp> {
+    psess: &'psess ParseSess,
+
+    /// Map from metavars to matched tokens
+    interp: &'itp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
+
+    /// Allow marking spans.
+    marker: Marker,
+
+    /// The stack of things yet to be completely expanded.
+    ///
+    /// We descend into the RHS (`src`), expanding things as we go. This stack contains the things
+    /// we have yet to expand/are still expanding. We start the stack off with the whole RHS. The
+    /// choice of spacing values doesn't matter.
+    stack: SmallVec<[Frame<'itp>; 1]>,
+
+    /// A stack of where we are in the repeat expansion.
+    ///
+    /// As we descend in the RHS, we will need to be able to match nested sequences of matchers.
+    /// `repeats` keeps track of where we are in matching at each level, with the last element
+    /// being the most deeply nested sequence. This is used as a stack.
+    repeats: Vec<(usize, usize)>,
+
+    /// The resulting token stream from the `TokenTree` we just finished processing.
+    ///
+    /// At the end, this will contain the full result of transcription, but at arbitrary points
+    /// during `transcribe`, `result` will contain subsets of the final result.
+    ///
+    /// Specifically, as we descend into each TokenTree, we will push the existing results onto the
+    /// `result_stack` and clear `results`. We will then produce the results of transcribing the
+    /// TokenTree into `results`. Then, as we unwind back out of the `TokenTree`, we will pop the
+    /// `result_stack` and append `results` to it to produce the new `results` up to that point.
+    ///
+    /// Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
+    /// again, and we are done transcribing.
+    result: Vec<TokenTree>,
+
+    /// The in-progress `result` lives at the top of this stack. Each entered `TokenTree` adds a
+    /// new entry.
+    result_stack: Vec<Vec<TokenTree>>,
+}
+
+impl<'psess> MacroTcbCtx<'psess, '_> {
+    fn dcx(&self) -> DiagCtxtHandle<'psess> {
+        self.psess.dcx()
+    }
+
+    fn symbol_gallery(&self) -> &'psess SymbolGallery {
+        &self.psess.symbol_gallery
+    }
+
+    /// Span marked with the correct expansion and transparency.
+    fn visited_dspan(&mut self, dspan: DelimSpan) -> Span {
+        let mut span = dspan.entire();
+        self.marker.mark_span(&mut span);
+        span
+    }
+}
+
+/// A Marker adds the given mark to the syntax context.
+struct Marker {
+    expn_id: LocalExpnId,
+    transparency: Transparency,
+    cache: FxHashMap<SyntaxContext, SyntaxContext>,
+}

 impl Marker {
+    fn new(expn_id: LocalExpnId, transparency: Transparency) -> Self {
+        Self { expn_id, transparency, cache: Default::default() }
+    }
+
+    /// Mark a span with the stored expansion ID and transparency.
     fn mark_span(&mut self, span: &mut Span) {
         // `apply_mark` is a relatively expensive operation, both due to taking hygiene lock, and
         // by itself. All tokens in a macro body typically have the same syntactic context, unless
         // it's some advanced case with macro-generated macros. So if we cache the marked version
         // of that context once, we'll typically have a 100% cache hit rate after that.
-        let Marker(expn_id, transparency, ref mut cache) = *self;
         *span = span.map_ctxt(|ctxt| {
-            *cache
+            *self
+                .cache
                 .entry(ctxt)
-                .or_insert_with(|| ctxt.apply_mark(expn_id.to_expn_id(), transparency))
+                .or_insert_with(|| ctxt.apply_mark(self.expn_id.to_expn_id(), self.transparency))
         });
     }
 }
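
The comment in `mark_span` above describes plain per-key memoization of `apply_mark`. A minimal standalone sketch of the same caching pattern, using a hypothetical `expensive_mark` function in place of the hygiene machinery (illustrative only, not rustc API):

use std::collections::HashMap;

/// Hypothetical stand-in for `ctxt.apply_mark(...)`: costly to compute, but
/// deterministic for a given key, so the result can be cached per key.
fn expensive_mark(key: u32) -> u32 {
    key.wrapping_mul(2_654_435_761)
}

struct CachedMarker {
    cache: HashMap<u32, u32>,
}

impl CachedMarker {
    fn mark(&mut self, key: u32) -> u32 {
        // Compute at most once per distinct key; repeated calls with the same key
        // hit the cache, mirroring the "most tokens share one syntax context" case.
        *self.cache.entry(key).or_insert_with(|| expensive_mark(key))
    }
}
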
@@ -116,52 +185,36 @@ pub(super) fn transcribe<'a>(
         return Ok(TokenStream::default());
     }

-    // We descend into the RHS (`src`), expanding things as we go. This stack contains the things
-    // we have yet to expand/are still expanding. We start the stack off with the whole RHS. The
-    // choice of spacing values doesn't matter.
-    let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new_delimited(
-        src,
-        src_span,
-        DelimSpacing::new(Spacing::Alone, Spacing::Alone)
-    )];
-
-    // As we descend in the RHS, we will need to be able to match nested sequences of matchers.
-    // `repeats` keeps track of where we are in matching at each level, with the last element being
-    // the most deeply nested sequence. This is used as a stack.
-    let mut repeats: Vec<(usize, usize)> = Vec::new();
-
-    // `result` contains resulting token stream from the TokenTree we just finished processing. At
-    // the end, this will contain the full result of transcription, but at arbitrary points during
-    // `transcribe`, `result` will contain subsets of the final result.
-    //
-    // Specifically, as we descend into each TokenTree, we will push the existing results onto the
-    // `result_stack` and clear `results`. We will then produce the results of transcribing the
-    // TokenTree into `results`. Then, as we unwind back out of the `TokenTree`, we will pop the
-    // `result_stack` and append `results` too it to produce the new `results` up to that point.
-    //
-    // Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
-    // again, and we are done transcribing.
-    let mut result: Vec<TokenTree> = Vec::new();
-    let mut result_stack = Vec::new();
-    let mut marker = Marker(expand_id, transparency, Default::default());
-
-    let dcx = psess.dcx();
+    let mut mcx = MacroTcbCtx {
+        psess,
+        interp,
+        marker: Marker::new(expand_id, transparency),
+        repeats: Vec::new(),
+        stack: smallvec![Frame::new_delimited(
+            src,
+            src_span,
+            DelimSpacing::new(Spacing::Alone, Spacing::Alone)
+        )],
+        result: Vec::new(),
+        result_stack: Vec::new(),
+    };
+
     loop {
         // Look at the last frame on the stack.
         // If it still has a TokenTree we have not looked at yet, use that tree.
-        let Some(tree) = stack.last_mut().unwrap().next() else {
+        let Some(tree) = mcx.stack.last_mut().unwrap().next() else {
             // This else-case never produces a value for `tree` (it `continue`s or `return`s).

             // Otherwise, if we have just reached the end of a sequence and we can keep repeating,
             // go back to the beginning of the sequence.
-            let frame = stack.last_mut().unwrap();
+            let frame = mcx.stack.last_mut().unwrap();
             if let FrameKind::Sequence { sep, .. } = &frame.kind {
-                let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
+                let (repeat_idx, repeat_len) = mcx.repeats.last_mut().unwrap();
                 *repeat_idx += 1;
                 if repeat_idx < repeat_len {
                     frame.idx = 0;
                     if let Some(sep) = sep {
-                        result.push(TokenTree::Token(*sep, Spacing::Alone));
+                        mcx.result.push(TokenTree::Token(*sep, Spacing::Alone));
                     }
                     continue;
                 }
@@ -170,10 +223,10 @@ pub(super) fn transcribe<'a>(
             // We are done with the top of the stack. Pop it. Depending on what it was, we do
             // different things. Note that the outermost item must be the delimited, wrapped RHS
             // that was passed in originally to `transcribe`.
-            match stack.pop().unwrap().kind {
+            match mcx.stack.pop().unwrap().kind {
                 // Done with a sequence. Pop from repeats.
                 FrameKind::Sequence { .. } => {
-                    repeats.pop();
+                    mcx.repeats.pop();
                 }

                 // We are done processing a Delimited. If this is the top-level delimited, we are
@@ -185,15 +238,16 @@ pub(super) fn transcribe<'a>(
                     if delim == Delimiter::Bracket {
                         spacing.close = Spacing::Alone;
                     }
-                    if result_stack.is_empty() {
+                    if mcx.result_stack.is_empty() {
                         // No results left to compute! We are back at the top-level.
-                        return Ok(TokenStream::new(result));
+                        return Ok(TokenStream::new(mcx.result));
                     }

                     // Step back into the parent Delimited.
-                    let tree = TokenTree::Delimited(span, spacing, delim, TokenStream::new(result));
-                    result = result_stack.pop().unwrap();
-                    result.push(tree);
+                    let tree =
+                        TokenTree::Delimited(span, spacing, delim, TokenStream::new(mcx.result));
+                    mcx.result = mcx.result_stack.pop().unwrap();
+                    mcx.result.push(tree);
                 }
             }
             continue;
@@ -202,223 +256,19 @@ pub(super) fn transcribe<'a>(
         // At this point, we know we are in the middle of a TokenTree (the last one on `stack`).
         // `tree` contains the next `TokenTree` to be processed.
         match tree {
-            // We are descending into a sequence. We first make sure that the matchers in the RHS
-            // and the matches in `interp` have the same shape. Otherwise, either the caller or the
-            // macro writer has made a mistake.
+            // Replace the sequence with its expansion.
             seq @ mbe::TokenTree::Sequence(_, seq_rep) => {
-                match lockstep_iter_size(seq, interp, &repeats) {
-                    LockstepIterSize::Unconstrained => {
-                        return Err(dcx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() }));
-                    }
-
-                    LockstepIterSize::Contradiction(msg) => {
-                        // FIXME: this really ought to be caught at macro definition time... It
-                        // happens when two meta-variables are used in the same repetition in a
-                        // sequence, but they come from different sequence matchers and repeat
-                        // different amounts.
-                        return Err(
-                            dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg })
-                        );
-                    }
-
-                    LockstepIterSize::Constraint(len, _) => {
-                        // We do this to avoid an extra clone above. We know that this is a
-                        // sequence already.
-                        let mbe::TokenTree::Sequence(sp, seq) = seq else { unreachable!() };
-
-                        // Is the repetition empty?
-                        if len == 0 {
-                            if seq.kleene.op == KleeneOp::OneOrMore {
-                                // FIXME: this really ought to be caught at macro definition
-                                // time... It happens when the Kleene operator in the matcher and
-                                // the body for the same meta-variable do not match.
-                                return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
-                            }
-                        } else {
-                            // 0 is the initial counter (we have done 0 repetitions so far). `len`
-                            // is the total number of repetitions we should generate.
-                            repeats.push((0, len));
-
-                            // The first time we encounter the sequence we push it to the stack. It
-                            // then gets reused (see the beginning of the loop) until we are done
-                            // repeating.
-                            stack.push(Frame::new_sequence(
-                                seq_rep,
-                                seq.separator.clone(),
-                                seq.kleene.op,
-                            ));
-                        }
-                    }
-                }
+                transcribe_sequence(&mut mcx, seq, seq_rep)?;
             }

             // Replace the meta-var with the matched token tree from the invocation.
-            &mbe::TokenTree::MetaVar(mut sp, mut original_ident) => {
-                // Find the matched nonterminal from the macro invocation, and use it to replace
-                // the meta-var.
-                //
-                // We use `Spacing::Alone` everywhere here, because that's the conservative choice
-                // and spacing of declarative macros is tricky. E.g. in this macro:
-                // ```
-                // macro_rules! idents {
-                //     ($($a:ident,)*) => { stringify!($($a)*) }
-                // }
-                // ```
-                // `$a` has no whitespace after it and will be marked `JointHidden`. If you then
-                // call `idents!(x,y,z,)`, each of `x`, `y`, and `z` will be marked as `Joint`. So
-                // if you choose to use `$x`'s spacing or the identifier's spacing, you'll end up
-                // producing "xyz", which is bad because it effectively merges tokens.
-                // `Spacing::Alone` is the safer option. Fortunately, `space_between` will avoid
-                // some of the unnecessary whitespace.
-                let ident = MacroRulesNormalizedIdent::new(original_ident);
-                if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
-                    // We wrap the tokens in invisible delimiters, unless they are already wrapped
-                    // in invisible delimiters with the same `MetaVarKind`. Because some proc
-                    // macros can't handle multiple layers of invisible delimiters of the same
-                    // `MetaVarKind`. This loses some span info, though it hopefully won't matter.
-                    let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
-                        if stream.len() == 1 {
-                            let tree = stream.iter().next().unwrap();
-                            if let TokenTree::Delimited(_, _, delim, inner) = tree
-                                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim
-                                && mv_kind == *mvk
-                            {
-                                stream = inner.clone();
-                            }
-                        }
-
-                        // Emit as a token stream within `Delimiter::Invisible` to maintain
-                        // parsing priorities.
-                        marker.mark_span(&mut sp);
-                        with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
-                        // Both the open delim and close delim get the same span, which covers the
-                        // `$foo` in the decl macro RHS.
-                        TokenTree::Delimited(
-                            DelimSpan::from_single(sp),
-                            DelimSpacing::new(Spacing::Alone, Spacing::Alone),
-                            Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)),
-                            stream,
-                        )
-                    };
-                    let tt = match cur_matched {
-                        MatchedSingle(ParseNtResult::Tt(tt)) => {
-                            // `tt`s are emitted into the output stream directly as "raw tokens",
-                            // without wrapping them into groups. Other variables are emitted into
-                            // the output stream as groups with `Delimiter::Invisible` to maintain
-                            // parsing priorities.
-                            maybe_use_metavar_location(psess, &stack, sp, tt, &mut marker)
-                        }
-                        MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => {
-                            marker.mark_span(&mut sp);
-                            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
-                            let kind = token::NtIdent(*ident, *is_raw);
-                            TokenTree::token_alone(kind, sp)
-                        }
-                        MatchedSingle(ParseNtResult::Lifetime(ident, is_raw)) => {
-                            marker.mark_span(&mut sp);
-                            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
-                            let kind = token::NtLifetime(*ident, *is_raw);
-                            TokenTree::token_alone(kind, sp)
-                        }
-                        MatchedSingle(ParseNtResult::Item(item)) => {
-                            mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
-                        }
-                        MatchedSingle(ParseNtResult::Block(block)) => mk_delimited(
-                            block.span,
-                            MetaVarKind::Block,
-                            TokenStream::from_ast(block),
-                        ),
-                        MatchedSingle(ParseNtResult::Stmt(stmt)) => {
-                            let stream = if let StmtKind::Empty = stmt.kind {
-                                // FIXME: Properly collect tokens for empty statements.
-                                TokenStream::token_alone(token::Semi, stmt.span)
-                            } else {
-                                TokenStream::from_ast(stmt)
-                            };
-                            mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
-                        }
-                        MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => mk_delimited(
-                            pat.span,
-                            MetaVarKind::Pat(*pat_kind),
-                            TokenStream::from_ast(pat),
-                        ),
-                        MatchedSingle(ParseNtResult::Expr(expr, kind)) => {
-                            let (can_begin_literal_maybe_minus, can_begin_string_literal) =
-                                match &expr.kind {
-                                    ExprKind::Lit(_) => (true, true),
-                                    ExprKind::Unary(UnOp::Neg, e)
-                                        if matches!(&e.kind, ExprKind::Lit(_)) =>
-                                    {
-                                        (true, false)
-                                    }
-                                    _ => (false, false),
-                                };
-                            mk_delimited(
-                                expr.span,
-                                MetaVarKind::Expr {
-                                    kind: *kind,
-                                    can_begin_literal_maybe_minus,
-                                    can_begin_string_literal,
-                                },
-                                TokenStream::from_ast(expr),
-                            )
-                        }
-                        MatchedSingle(ParseNtResult::Literal(lit)) => {
-                            mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
-                        }
-                        MatchedSingle(ParseNtResult::Ty(ty)) => {
-                            let is_path = matches!(&ty.kind, TyKind::Path(None, _path));
-                            mk_delimited(
-                                ty.span,
-                                MetaVarKind::Ty { is_path },
-                                TokenStream::from_ast(ty),
-                            )
-                        }
-                        MatchedSingle(ParseNtResult::Meta(attr_item)) => {
-                            let has_meta_form = attr_item.meta_kind().is_some();
-                            mk_delimited(
-                                attr_item.span(),
-                                MetaVarKind::Meta { has_meta_form },
-                                TokenStream::from_ast(attr_item),
-                            )
-                        }
-                        MatchedSingle(ParseNtResult::Path(path)) => {
-                            mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
-                        }
-                        MatchedSingle(ParseNtResult::Vis(vis)) => {
-                            mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
-                        }
-                        MatchedSeq(..) => {
-                            // We were unable to descend far enough. This is an error.
-                            return Err(dcx.create_err(VarStillRepeating { span: sp, ident }));
-                        }
-                    };
-                    result.push(tt)
-                } else {
-                    // If we aren't able to match the meta-var, we push it back into the result but
-                    // with modified syntax context. (I believe this supports nested macros).
-                    marker.mark_span(&mut sp);
-                    marker.mark_span(&mut original_ident.span);
-                    result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
-                    result.push(TokenTree::Token(
-                        Token::from_ast_ident(original_ident),
-                        Spacing::Alone,
-                    ));
-                }
+            &mbe::TokenTree::MetaVar(sp, original_ident) => {
+                transcribe_metavar(&mut mcx, sp, original_ident)?;
             }

             // Replace meta-variable expressions with the result of their expansion.
-            mbe::TokenTree::MetaVarExpr(sp, expr) => {
-                transcribe_metavar_expr(
-                    dcx,
-                    expr,
-                    interp,
-                    &mut marker,
-                    &repeats,
-                    &mut result,
-                    sp,
-                    &psess.symbol_gallery,
-                )?;
+            mbe::TokenTree::MetaVarExpr(dspan, expr) => {
+                transcribe_metavar_expr(&mut mcx, *dspan, expr)?;
             }

             // If we are entering a new delimiter, we push its contents to the `stack` to be
@@ -427,21 +277,21 @@ pub(super) fn transcribe<'a>(
             // jump back out of the Delimited, pop the result_stack and add the new results back to
             // the previous results (from outside the Delimited).
             &mbe::TokenTree::Delimited(mut span, ref spacing, ref delimited) => {
-                marker.mark_span(&mut span.open);
-                marker.mark_span(&mut span.close);
-                stack.push(Frame::new_delimited(delimited, span, *spacing));
-                result_stack.push(mem::take(&mut result));
+                mcx.marker.mark_span(&mut span.open);
+                mcx.marker.mark_span(&mut span.close);
+                mcx.stack.push(Frame::new_delimited(delimited, span, *spacing));
+                mcx.result_stack.push(mem::take(&mut mcx.result));
             }

             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
             &mbe::TokenTree::Token(mut token) => {
-                marker.mark_span(&mut token.span);
+                mcx.marker.mark_span(&mut token.span);
                 if let token::NtIdent(ident, _) | token::NtLifetime(ident, _) = &mut token.kind {
-                    marker.mark_span(&mut ident.span);
+                    mcx.marker.mark_span(&mut ident.span);
                 }
                 let tt = TokenTree::Token(token, Spacing::Alone);
-                result.push(tt);
+                mcx.result.push(tt);
             }

             // There should be no meta-var declarations in the invocation of a macro.
@@ -450,6 +300,302 @@ pub(super) fn transcribe<'a>(
         }
     }
 }

+/// Turn `$(...)*` sequences into tokens.
+fn transcribe_sequence<'mcx, 'itp>(
+    mcx: &mut MacroTcbCtx<'mcx, 'itp>,
+    seq: &mbe::TokenTree,
+    seq_rep: &'itp mbe::SequenceRepetition,
+) -> PResult<'mcx, ()> {
+    let dcx = mcx.dcx();
+
+    // We are descending into a sequence. We first make sure that the matchers in the RHS
+    // and the matches in `interp` have the same shape. Otherwise, either the caller or the
+    // macro writer has made a mistake.
+    match lockstep_iter_size(seq, mcx.interp, &mcx.repeats) {
+        LockstepIterSize::Unconstrained => {
+            return Err(dcx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() }));
+        }
+
+        LockstepIterSize::Contradiction(msg) => {
+            // FIXME: this really ought to be caught at macro definition time... It
+            // happens when two meta-variables are used in the same repetition in a
+            // sequence, but they come from different sequence matchers and repeat
+            // different amounts.
+            return Err(dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg }));
+        }
+
+        LockstepIterSize::Constraint(len, _) => {
+            // We do this to avoid an extra clone above. We know that this is a
+            // sequence already.
+            let mbe::TokenTree::Sequence(sp, seq) = seq else { unreachable!() };
+
+            // Is the repetition empty?
+            if len == 0 {
+                if seq.kleene.op == KleeneOp::OneOrMore {
+                    // FIXME: this really ought to be caught at macro definition
+                    // time... It happens when the Kleene operator in the matcher and
+                    // the body for the same meta-variable do not match.
+                    return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
+                }
+            } else {
+                // 0 is the initial counter (we have done 0 repetitions so far). `len`
+                // is the total number of repetitions we should generate.
+                mcx.repeats.push((0, len));
+
+                // The first time we encounter the sequence we push it to the stack. It
+                // then gets reused (see the beginning of the loop) until we are done
+                // repeating.
+                mcx.stack.push(Frame::new_sequence(seq_rep, seq.separator.clone(), seq.kleene.op));
+            }
+        }
+    }
+
+    Ok(())
+}
+
+/// Find the matched nonterminal from the macro invocation, and use it to replace
+/// the meta-var.
+///
+/// We use `Spacing::Alone` everywhere here, because that's the conservative choice
+/// and spacing of declarative macros is tricky. E.g. in this macro:
+/// ```
+/// macro_rules! idents {
+///     ($($a:ident,)*) => { stringify!($($a)*) }
+/// }
+/// ```
+/// `$a` has no whitespace after it and will be marked `JointHidden`. If you then
+/// call `idents!(x,y,z,)`, each of `x`, `y`, and `z` will be marked as `Joint`. So
+/// if you choose to use `$x`'s spacing or the identifier's spacing, you'll end up
+/// producing "xyz", which is bad because it effectively merges tokens.
+/// `Spacing::Alone` is the safer option. Fortunately, `space_between` will avoid
+/// some of the unnecessary whitespace.
+fn transcribe_metavar<'mcx>(
+    mcx: &mut MacroTcbCtx<'mcx, '_>,
+    mut sp: Span,
+    mut original_ident: Ident,
+) -> PResult<'mcx, ()> {
+    let dcx = mcx.psess.dcx();
+
+    let ident = MacroRulesNormalizedIdent::new(original_ident);
+    let Some(cur_matched) = lookup_cur_matched(ident, mcx.interp, &mcx.repeats) else {
+        // If we aren't able to match the meta-var, we push it back into the result but
+        // with modified syntax context. (I believe this supports nested macros).
+        mcx.marker.mark_span(&mut sp);
+        mcx.marker.mark_span(&mut original_ident.span);
+        mcx.result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
+        mcx.result.push(TokenTree::Token(Token::from_ast_ident(original_ident), Spacing::Alone));
+        return Ok(());
+    };
+
+    // We wrap the tokens in invisible delimiters, unless they are already wrapped
+    // in invisible delimiters with the same `MetaVarKind`. Because some proc
+    // macros can't handle multiple layers of invisible delimiters of the same
+    // `MetaVarKind`. This loses some span info, though it hopefully won't matter.
+    let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
+        if stream.len() == 1 {
+            let tree = stream.iter().next().unwrap();
+            if let TokenTree::Delimited(_, _, delim, inner) = tree
+                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim
+                && mv_kind == *mvk
+            {
+                stream = inner.clone();
+            }
+        }
+
+        // Emit as a token stream within `Delimiter::Invisible` to maintain
+        // parsing priorities.
+        mcx.marker.mark_span(&mut sp);
+        with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
+        // Both the open delim and close delim get the same span, which covers the
+        // `$foo` in the decl macro RHS.
+        TokenTree::Delimited(
+            DelimSpan::from_single(sp),
+            DelimSpacing::new(Spacing::Alone, Spacing::Alone),
+            Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)),
+            stream,
+        )
+    };
+
+    let tt = match cur_matched {
+        MatchedSingle(ParseNtResult::Tt(tt)) => {
+            // `tt`s are emitted into the output stream directly as "raw tokens",
+            // without wrapping them into groups. Other variables are emitted into
+            // the output stream as groups with `Delimiter::Invisible` to maintain
+            // parsing priorities.
+            maybe_use_metavar_location(mcx.psess, &mcx.stack, sp, tt, &mut mcx.marker)
+        }
+        MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => {
+            mcx.marker.mark_span(&mut sp);
+            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
+            let kind = token::NtIdent(*ident, *is_raw);
+            TokenTree::token_alone(kind, sp)
+        }
+        MatchedSingle(ParseNtResult::Lifetime(ident, is_raw)) => {
+            mcx.marker.mark_span(&mut sp);
+            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
+            let kind = token::NtLifetime(*ident, *is_raw);
+            TokenTree::token_alone(kind, sp)
+        }
+        MatchedSingle(ParseNtResult::Item(item)) => {
+            mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
+        }
+        MatchedSingle(ParseNtResult::Block(block)) => {
+            mk_delimited(block.span, MetaVarKind::Block, TokenStream::from_ast(block))
+        }
+        MatchedSingle(ParseNtResult::Stmt(stmt)) => {
+            let stream = if let StmtKind::Empty = stmt.kind {
+                // FIXME: Properly collect tokens for empty statements.
+                TokenStream::token_alone(token::Semi, stmt.span)
+            } else {
+                TokenStream::from_ast(stmt)
+            };
+            mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
+        }
+        MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => {
+            mk_delimited(pat.span, MetaVarKind::Pat(*pat_kind), TokenStream::from_ast(pat))
+        }
+        MatchedSingle(ParseNtResult::Expr(expr, kind)) => {
+            let (can_begin_literal_maybe_minus, can_begin_string_literal) = match &expr.kind {
+                ExprKind::Lit(_) => (true, true),
+                ExprKind::Unary(UnOp::Neg, e) if matches!(&e.kind, ExprKind::Lit(_)) => {
+                    (true, false)
+                }
+                _ => (false, false),
+            };
+            mk_delimited(
+                expr.span,
+                MetaVarKind::Expr {
+                    kind: *kind,
+                    can_begin_literal_maybe_minus,
+                    can_begin_string_literal,
+                },
+                TokenStream::from_ast(expr),
+            )
+        }
+        MatchedSingle(ParseNtResult::Literal(lit)) => {
+            mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
+        }
+        MatchedSingle(ParseNtResult::Ty(ty)) => {
+            let is_path = matches!(&ty.kind, TyKind::Path(None, _path));
+            mk_delimited(ty.span, MetaVarKind::Ty { is_path }, TokenStream::from_ast(ty))
+        }
+        MatchedSingle(ParseNtResult::Meta(attr_item)) => {
+            let has_meta_form = attr_item.meta_kind().is_some();
+            mk_delimited(
+                attr_item.span(),
+                MetaVarKind::Meta { has_meta_form },
+                TokenStream::from_ast(attr_item),
+            )
+        }
+        MatchedSingle(ParseNtResult::Path(path)) => {
+            mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
+        }
+        MatchedSingle(ParseNtResult::Vis(vis)) => {
+            mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
+        }
+        MatchedSeq(..) => {
+            // We were unable to descend far enough. This is an error.
+            return Err(dcx.create_err(VarStillRepeating { span: sp, ident }));
+        }
+    };
+
+    mcx.result.push(tt);
+    Ok(())
+}
+
+/// Turn `${expr(...)}` metavariable expressions into tokens.
+fn transcribe_metavar_expr<'mcx>(
+    mcx: &mut MacroTcbCtx<'mcx, '_>,
+    dspan: DelimSpan,
+    expr: &MetaVarExpr,
+) -> PResult<'mcx, ()> {
+    let dcx = mcx.psess.dcx();
+    let tt = match *expr {
+        MetaVarExpr::Concat(ref elements) => metavar_expr_concat(mcx, dspan, elements)?,
+        MetaVarExpr::Count(original_ident, depth) => {
+            let matched = matched_from_ident(dcx, original_ident, mcx.interp)?;
+            let count = count_repetitions(&mcx, depth, matched, &dspan)?;
+            TokenTree::token_alone(
+                TokenKind::lit(token::Integer, sym::integer(count), None),
+                mcx.visited_dspan(dspan),
+            )
+        }
+        MetaVarExpr::Ignore(original_ident) => {
+            // Used to ensure that `original_ident` is present in the LHS
+            let _ = matched_from_ident(dcx, original_ident, mcx.interp)?;
+            return Ok(());
+        }
+        MetaVarExpr::Index(depth) => match mcx.repeats.iter().nth_back(depth) {
+            Some((index, _)) => TokenTree::token_alone(
+                TokenKind::lit(token::Integer, sym::integer(*index), None),
+                mcx.visited_dspan(dspan),
+            ),
+            None => {
+                return Err(out_of_bounds_err(dcx, mcx.repeats.len(), dspan.entire(), "index"));
+            }
+        },
+        MetaVarExpr::Len(depth) => match mcx.repeats.iter().nth_back(depth) {
+            Some((_, length)) => TokenTree::token_alone(
+                TokenKind::lit(token::Integer, sym::integer(*length), None),
+                mcx.visited_dspan(dspan),
+            ),
+            None => {
+                return Err(out_of_bounds_err(dcx, mcx.repeats.len(), dspan.entire(), "len"));
+            }
+        },
+    };
+    mcx.result.push(tt);
+    Ok(())
+}
+
+/// Handle the `${concat(...)}` metavariable expression.
+fn metavar_expr_concat<'mcx>(
+    mcx: &mut MacroTcbCtx<'mcx, '_>,
+    dspan: DelimSpan,
+    elements: &[MetaVarExprConcatElem],
+) -> PResult<'mcx, TokenTree> {
+    let dcx = mcx.psess.dcx();
+    let mut concatenated = String::new();
+    for element in elements.into_iter() {
+        let symbol = match element {
+            MetaVarExprConcatElem::Ident(elem) => elem.name,
+            MetaVarExprConcatElem::Literal(elem) => *elem,
+            MetaVarExprConcatElem::Var(ident) => match matched_from_ident(dcx, *ident, mcx.interp)?
+            {
+                NamedMatch::MatchedSeq(named_matches) => {
+                    let Some((curr_idx, _)) = mcx.repeats.last() else {
+                        return Err(dcx.struct_span_err(dspan.entire(), "invalid syntax"));
+                    };
+                    match &named_matches[*curr_idx] {
+                        // FIXME(c410-f3r) Nested repetitions are unimplemented
+                        MatchedSeq(_) => unimplemented!(),
+                        MatchedSingle(pnr) => extract_symbol_from_pnr(dcx, pnr, ident.span)?,
+                    }
+                }
+                NamedMatch::MatchedSingle(pnr) => extract_symbol_from_pnr(dcx, pnr, ident.span)?,
+            },
+        };
+        concatenated.push_str(symbol.as_str());
+    }
+    let symbol = nfc_normalize(&concatenated);
+    let concatenated_span = mcx.visited_dspan(dspan);
+    if !rustc_lexer::is_ident(symbol.as_str()) {
+        return Err(dcx.struct_span_err(
+            concatenated_span,
+            "`${concat(..)}` is not generating a valid identifier",
+        ));
+    }
+    mcx.symbol_gallery().insert(symbol, concatenated_span);
+
+    // The current implementation marks the span as coming from the macro regardless of
+    // contexts of the concatenated identifiers but this behavior may change in the
+    // future.
+    Ok(TokenTree::Token(
+        Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
+        Spacing::Alone,
+    ))
+}
+

 /// Store the metavariable span for this original span into a side table.
 /// FIXME: Try to put the metavariable span into `SpanData` instead of a side table (#118517).
 /// An optimal encoding for inlined spans will need to be selected to minimize regressions.
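
For context, this is roughly what the `${concat(...)}` handling above looks like from the macro author's side. The sketch below is illustrative only: `${concat(...)}` is gated behind the unstable `macro_metavar_expr_concat` feature, so it needs a nightly toolchain, and the struct and macro names are invented for the example.

#![feature(macro_metavar_expr_concat)]

macro_rules! make_getters {
    ($($field:ident),* $(,)?) => {
        $(
            // `${concat(get_, $field)}` is transcribed by `metavar_expr_concat` into
            // a single fresh identifier (e.g. `get_width`), after checking that the
            // concatenation forms a valid identifier.
            fn ${concat(get_, $field)}(&self) -> u32 {
                self.$field
            }
        )*
    };
}

struct Size {
    width: u32,
    height: u32,
}

impl Size {
    make_getters!(width, height);
}

fn main() {
    let s = Size { width: 3, height: 4 };
    assert_eq!(s.get_width() + s.get_height(), 7);
}
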
@@ -671,13 +817,12 @@ fn lockstep_iter_size(
 /// * `[ $( ${count(foo, 0)} ),* ]` will be the same as `[ $( ${count(foo)} ),* ]`
 /// * `[ $( ${count(foo, 1)} ),* ]` will return an error because `${count(foo, 1)}` is
 ///   declared inside a single repetition and the index `1` implies two nested repetitions.
-fn count_repetitions<'a>(
-    dcx: DiagCtxtHandle<'a>,
+fn count_repetitions<'mcx, 'b>(
+    mcx: &MacroTcbCtx<'mcx, 'b>,
     depth_user: usize,
     mut matched: &NamedMatch,
-    repeats: &[(usize, usize)],
     sp: &DelimSpan,
-) -> PResult<'a, usize> {
+) -> PResult<'mcx, usize> {
     // Recursively count the number of matches in `matched` at given depth
     // (or at the top-level of `matched` if no depth is given).
     fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> {
@@ -706,10 +851,10 @@ fn count_repetitions<'a>(

     let depth_max = depth(0, matched)
         .checked_sub(1)
-        .and_then(|el| el.checked_sub(repeats.len()))
+        .and_then(|el| el.checked_sub(mcx.repeats.len()))
         .unwrap_or_default();
     if depth_user > depth_max {
-        return Err(out_of_bounds_err(dcx, depth_max + 1, sp.entire(), "count"));
+        return Err(out_of_bounds_err(mcx.dcx(), depth_max + 1, sp.entire(), "count"));
     }

     // `repeats` records all of the nested levels at which we are currently
@@ -718,14 +863,14 @@ fn count_repetitions<'a>(
     // are currently transcribing, so we need to descend to that subtree
     // before we start counting. `matched` contains the various levels of the
    // tree as we descend, and its final value is the subtree we are currently at.
-    for &(idx, _) in repeats {
+    for &(idx, _) in &mcx.repeats {
         if let MatchedSeq(ads) = matched {
             matched = &ads[idx];
         }
     }

     if let MatchedSingle(_) = matched {
-        return Err(dcx.create_err(CountRepetitionMisplaced { span: sp.entire() }));
+        return Err(mcx.dcx().create_err(CountRepetitionMisplaced { span: sp.entire() }));
     }

     count(depth_user, depth_max, matched)
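
A small illustration of what `count_repetitions` computes: `${count(...)}` transcribes to an integer literal giving how many times a metavariable repeats. Illustrative only; the expression is gated behind the unstable `macro_metavar_expr` feature (nightly), and the inner `$` reflects the newer surface syntax, whereas the doc comment above writes the older `${count(foo)}` form.

#![feature(macro_metavar_expr)]

macro_rules! how_many {
    ($($x:tt)*) => {
        // Transcribed as the integer literal `3` for `how_many!(a b c)`: the
        // `(index, length)` bookkeeping on the `repeats` stack is what
        // `count_repetitions` walks to produce this number.
        ${count($x)}
    };
}

fn main() {
    assert_eq!(how_many!(a b c), 3);
}
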
@@ -762,102 +907,6 @@ fn out_of_bounds_err<'a>(dcx: DiagCtxtHandle<'a>, max: usize, span: Span, ty: &s
     dcx.struct_span_err(span, msg)
 }

-fn transcribe_metavar_expr<'a>(
-    dcx: DiagCtxtHandle<'a>,
-    expr: &MetaVarExpr,
-    interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
-    marker: &mut Marker,
-    repeats: &[(usize, usize)],
-    result: &mut Vec<TokenTree>,
-    sp: &DelimSpan,
-    symbol_gallery: &SymbolGallery,
-) -> PResult<'a, ()> {
-    let mut visited_span = || {
-        let mut span = sp.entire();
-        marker.mark_span(&mut span);
-        span
-    };
-    match *expr {
-        MetaVarExpr::Concat(ref elements) => {
-            let mut concatenated = String::new();
-            for element in elements.into_iter() {
-                let symbol = match element {
-                    MetaVarExprConcatElem::Ident(elem) => elem.name,
-                    MetaVarExprConcatElem::Literal(elem) => *elem,
-                    MetaVarExprConcatElem::Var(ident) => {
-                        match matched_from_ident(dcx, *ident, interp)? {
-                            NamedMatch::MatchedSeq(named_matches) => {
-                                let Some((curr_idx, _)) = repeats.last() else {
-                                    return Err(dcx.struct_span_err(sp.entire(), "invalid syntax"));
-                                };
-                                match &named_matches[*curr_idx] {
-                                    // FIXME(c410-f3r) Nested repetitions are unimplemented
-                                    MatchedSeq(_) => unimplemented!(),
-                                    MatchedSingle(pnr) => {
-                                        extract_symbol_from_pnr(dcx, pnr, ident.span)?
-                                    }
-                                }
-                            }
-                            NamedMatch::MatchedSingle(pnr) => {
-                                extract_symbol_from_pnr(dcx, pnr, ident.span)?
-                            }
-                        }
-                    }
-                };
-                concatenated.push_str(symbol.as_str());
-            }
-            let symbol = nfc_normalize(&concatenated);
-            let concatenated_span = visited_span();
-            if !rustc_lexer::is_ident(symbol.as_str()) {
-                return Err(dcx.struct_span_err(
-                    concatenated_span,
-                    "`${concat(..)}` is not generating a valid identifier",
-                ));
-            }
-            symbol_gallery.insert(symbol, concatenated_span);
-            // The current implementation marks the span as coming from the macro regardless of
-            // contexts of the concatenated identifiers but this behavior may change in the
-            // future.
-            result.push(TokenTree::Token(
-                Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
-                Spacing::Alone,
-            ));
-        }
-        MetaVarExpr::Count(original_ident, depth) => {
-            let matched = matched_from_ident(dcx, original_ident, interp)?;
-            let count = count_repetitions(dcx, depth, matched, repeats, sp)?;
-            let tt = TokenTree::token_alone(
-                TokenKind::lit(token::Integer, sym::integer(count), None),
-                visited_span(),
-            );
-            result.push(tt);
-        }
-        MetaVarExpr::Ignore(original_ident) => {
-            // Used to ensure that `original_ident` is present in the LHS
-            let _ = matched_from_ident(dcx, original_ident, interp)?;
-        }
-        MetaVarExpr::Index(depth) => match repeats.iter().nth_back(depth) {
-            Some((index, _)) => {
-                result.push(TokenTree::token_alone(
-                    TokenKind::lit(token::Integer, sym::integer(*index), None),
-                    visited_span(),
-                ));
-            }
-            None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "index")),
-        },
-        MetaVarExpr::Len(depth) => match repeats.iter().nth_back(depth) {
-            Some((_, length)) => {
-                result.push(TokenTree::token_alone(
-                    TokenKind::lit(token::Integer, sym::integer(*length), None),
-                    visited_span(),
-                ));
-            }
-            None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "len")),
-        },
-    }
-    Ok(())
-}
-
 /// Extracts an metavariable symbol that can be an identifier, a token tree or a literal.
 fn extract_symbol_from_pnr<'a>(
     dcx: DiagCtxtHandle<'a>,