// rustc_expand/mbe/transcribe.rs

use std::mem;

use rustc_ast::token::{
    self, Delimiter, IdentIsRaw, InvisibleOrigin, Lit, LitKind, MetaVarKind, Token, TokenKind,
};
use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
use rustc_ast::{ExprKind, StmtKind, TyKind, UnOp};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{Diag, DiagCtxtHandle, PResult, listify, pluralize};
use rustc_parse::lexer::nfc_normalize;
use rustc_parse::parser::ParseNtResult;
use rustc_session::parse::ParseSess;
use rustc_span::hygiene::{LocalExpnId, Transparency};
use rustc_span::{
    BytePos, Ident, MacroRulesNormalizedIdent, Span, Symbol, SyntaxContext, kw, sym,
    with_metavar_spans,
};
use smallvec::{SmallVec, smallvec};

use crate::errors::{
    ConcatInvalidIdent, CountRepetitionMisplaced, InvalidIdentReason, MacroVarStillRepeating,
    MetaVarsDifSeqMatchers, MustRepeatOnce, MveUnrecognizedVar, NoRepeatableVar,
    NoSyntaxVarsExprRepeat, VarNoTypo, VarTypoSuggestionRepeatable, VarTypoSuggestionUnrepeatable,
    VarTypoSuggestionUnrepeatableLabel,
};
use crate::mbe::macro_parser::NamedMatch;
use crate::mbe::macro_parser::NamedMatch::*;
use crate::mbe::metavar_expr::{MetaVarExprConcatElem, RAW_IDENT_ERR};
use crate::mbe::{self, KleeneOp, MetaVarExpr};

/// Context needed to perform transcription of metavariable expressions.
struct TranscrCtx<'psess, 'itp> {
    /// Parse session, used for diagnostics and the symbol gallery.
    psess: &'psess ParseSess,

    /// Map from metavars to matched tokens
    interp: &'itp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,

    /// Allow marking spans.
    marker: Marker,

    /// The stack of things yet to be completely expanded.
    ///
    /// We descend into the RHS (`src`), expanding things as we go. This stack contains the things
    /// we have yet to expand/are still expanding. We start the stack off with the whole RHS. The
    /// choice of spacing values doesn't matter.
    stack: SmallVec<[Frame<'itp>; 1]>,

    /// A stack of where we are in the repeat expansion.
    ///
    /// As we descend in the RHS, we will need to be able to match nested sequences of matchers.
    /// `repeats` keeps track of where we are in matching at each level, with the last element
    /// being the most deeply nested sequence. This is used as a stack.
    repeats: Vec<(usize, usize)>,

    /// The resulting token stream from the `TokenTree` we just finished processing.
    ///
    /// At the end, this will contain the full result of transcription, but at arbitrary points
    /// during `transcribe`, `result` will contain subsets of the final result.
    ///
    /// Specifically, as we descend into each TokenTree, we will push the existing results onto the
    /// `result_stack` and clear `results`. We will then produce the results of transcribing the
    /// TokenTree into `results`. Then, as we unwind back out of the `TokenTree`, we will pop the
    /// `result_stack` and append `results` to it to produce the new `results` up to that point.
    ///
    /// Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
    /// again, and we are done transcribing.
    result: Vec<TokenTree>,

    /// The in-progress `result` lives at the top of this stack. Each entered `TokenTree` adds a
    /// new entry.
    result_stack: Vec<Vec<TokenTree>>,
}

74impl<'psess> TranscrCtx<'psess, '_> {
75    /// Span marked with the correct expansion and transparency.
76    fn visited_dspan(&mut self, dspan: DelimSpan) -> Span {
77        let mut span = dspan.entire();
78        self.marker.mark_span(&mut span);
79        span
80    }
81}
82
/// A Marker adds the given mark to the syntax context.
struct Marker {
    /// Expansion ID applied to every span passed through `mark_span`.
    expand_id: LocalExpnId,
    /// Hygiene transparency used when applying the mark.
    transparency: Transparency,
    /// Memoized marked syntax contexts; most tokens in a macro body share one context,
    /// so this usually hits after the first marking.
    cache: FxHashMap<SyntaxContext, SyntaxContext>,
}

90impl Marker {
91    /// Mark a span with the stored expansion ID and transparency.
92    fn mark_span(&mut self, span: &mut Span) {
93        // `apply_mark` is a relatively expensive operation, both due to taking hygiene lock, and
94        // by itself. All tokens in a macro body typically have the same syntactic context, unless
95        // it's some advanced case with macro-generated macros. So if we cache the marked version
96        // of that context once, we'll typically have a 100% cache hit rate after that.
97        *span = span.map_ctxt(|ctxt| {
98            *self
99                .cache
100                .entry(ctxt)
101                .or_insert_with(|| ctxt.apply_mark(self.expand_id.to_expn_id(), self.transparency))
102        });
103    }
104}
105
/// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
struct Frame<'a> {
    /// The token trees being iterated over.
    tts: &'a [mbe::TokenTree],
    /// Index of the next token tree to yield.
    idx: usize,
    /// Whether this frame is a delimited group or a repetition sequence.
    kind: FrameKind,
}

/// Distinguishes the two kinds of [`Frame`].
enum FrameKind {
    /// A delimited token tree, e.g. `{ ... }`.
    Delimited { delim: Delimiter, span: DelimSpan, spacing: DelimSpacing },
    /// A sequence repetition, e.g. `$(...)*`, with its optional separator token.
    Sequence { sep: Option<Token>, kleene_op: KleeneOp },
}

118impl<'a> Frame<'a> {
119    fn new_delimited(src: &'a mbe::Delimited, span: DelimSpan, spacing: DelimSpacing) -> Frame<'a> {
120        Frame {
121            tts: &src.tts,
122            idx: 0,
123            kind: FrameKind::Delimited { delim: src.delim, span, spacing },
124        }
125    }
126
127    fn new_sequence(
128        src: &'a mbe::SequenceRepetition,
129        sep: Option<Token>,
130        kleene_op: KleeneOp,
131    ) -> Frame<'a> {
132        Frame { tts: &src.tts, idx: 0, kind: FrameKind::Sequence { sep, kleene_op } }
133    }
134}
135
136impl<'a> Iterator for Frame<'a> {
137    type Item = &'a mbe::TokenTree;
138
139    fn next(&mut self) -> Option<&'a mbe::TokenTree> {
140        let res = self.tts.get(self.idx);
141        self.idx += 1;
142        res
143    }
144}
145
146/// This can do Macro-By-Example transcription.
147/// - `interp` is a map of meta-variables to the tokens (non-terminals) they matched in the
148///   invocation. We are assuming we already know there is a match.
149/// - `src` is the RHS of the MBE, that is, the "example" we are filling in.
150///
151/// For example,
152///
153/// ```rust
154/// macro_rules! foo {
155///     ($id:ident) => { println!("{}", stringify!($id)); }
156/// }
157///
158/// foo!(bar);
159/// ```
160///
161/// `interp` would contain `$id => bar` and `src` would contain `println!("{}", stringify!($id));`.
162///
163/// `transcribe` would return a `TokenStream` containing `println!("{}", stringify!(bar));`.
164///
165/// Along the way, we do some additional error checking.
166pub(super) fn transcribe<'a>(
167    psess: &'a ParseSess,
168    interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
169    src: &mbe::Delimited,
170    src_span: DelimSpan,
171    transparency: Transparency,
172    expand_id: LocalExpnId,
173) -> PResult<'a, TokenStream> {
174    // Nothing for us to transcribe...
175    if src.tts.is_empty() {
176        return Ok(TokenStream::default());
177    }
178
179    let mut tscx = TranscrCtx {
180        psess,
181        interp,
182        marker: Marker { expand_id, transparency, cache: Default::default() },
183        repeats: Vec::new(),
184        stack: {
    let count = 0usize + 1usize;
    let mut vec = ::smallvec::SmallVec::new();
    if count <= vec.inline_size() {
        vec.push(Frame::new_delimited(src, src_span,
                DelimSpacing::new(Spacing::Alone, Spacing::Alone)));
        vec
    } else {
        ::smallvec::SmallVec::from_vec(::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
                    [Frame::new_delimited(src, src_span,
                                DelimSpacing::new(Spacing::Alone, Spacing::Alone))])))
    }
}smallvec![Frame::new_delimited(
185            src,
186            src_span,
187            DelimSpacing::new(Spacing::Alone, Spacing::Alone)
188        )],
189        result: Vec::new(),
190        result_stack: Vec::new(),
191    };
192
193    loop {
194        // Look at the last frame on the stack.
195        // If it still has a TokenTree we have not looked at yet, use that tree.
196        let Some(tree) = tscx.stack.last_mut().unwrap().next() else {
197            // This else-case never produces a value for `tree` (it `continue`s or `return`s).
198
199            // Otherwise, if we have just reached the end of a sequence and we can keep repeating,
200            // go back to the beginning of the sequence.
201            let frame = tscx.stack.last_mut().unwrap();
202            if let FrameKind::Sequence { sep, .. } = &frame.kind {
203                let (repeat_idx, repeat_len) = tscx.repeats.last_mut().unwrap();
204                *repeat_idx += 1;
205                if repeat_idx < repeat_len {
206                    frame.idx = 0;
207                    if let Some(sep) = sep {
208                        tscx.result.push(TokenTree::Token(*sep, Spacing::Alone));
209                    }
210                    continue;
211                }
212            }
213
214            // We are done with the top of the stack. Pop it. Depending on what it was, we do
215            // different things. Note that the outermost item must be the delimited, wrapped RHS
216            // that was passed in originally to `transcribe`.
217            match tscx.stack.pop().unwrap().kind {
218                // Done with a sequence. Pop from repeats.
219                FrameKind::Sequence { .. } => {
220                    tscx.repeats.pop();
221                }
222
223                // We are done processing a Delimited. If this is the top-level delimited, we are
224                // done. Otherwise, we unwind the result_stack to append what we have produced to
225                // any previous results.
226                FrameKind::Delimited { delim, span, mut spacing, .. } => {
227                    // Hack to force-insert a space after `]` in certain case.
228                    // See discussion of the `hex-literal` crate in #114571.
229                    if delim == Delimiter::Bracket {
230                        spacing.close = Spacing::Alone;
231                    }
232                    if tscx.result_stack.is_empty() {
233                        // No results left to compute! We are back at the top-level.
234                        return Ok(TokenStream::new(tscx.result));
235                    }
236
237                    // Step back into the parent Delimited.
238                    let tree =
239                        TokenTree::Delimited(span, spacing, delim, TokenStream::new(tscx.result));
240                    tscx.result = tscx.result_stack.pop().unwrap();
241                    tscx.result.push(tree);
242                }
243            }
244            continue;
245        };
246
247        // At this point, we know we are in the middle of a TokenTree (the last one on `stack`).
248        // `tree` contains the next `TokenTree` to be processed.
249        match tree {
250            // Replace the sequence with its expansion.
251            seq @ mbe::TokenTree::Sequence(_, seq_rep) => {
252                transcribe_sequence(&mut tscx, seq, seq_rep, interp)?;
253            }
254
255            // Replace the meta-var with the matched token tree from the invocation.
256            &mbe::TokenTree::MetaVar(sp, original_ident) => {
257                transcribe_metavar(&mut tscx, sp, original_ident)?;
258            }
259
260            // Replace meta-variable expressions with the result of their expansion.
261            mbe::TokenTree::MetaVarExpr(dspan, expr) => {
262                transcribe_metavar_expr(&mut tscx, *dspan, expr)?;
263            }
264
265            // If we are entering a new delimiter, we push its contents to the `stack` to be
266            // processed, and we push all of the currently produced results to the `result_stack`.
267            // We will produce all of the results of the inside of the `Delimited` and then we will
268            // jump back out of the Delimited, pop the result_stack and add the new results back to
269            // the previous results (from outside the Delimited).
270            &mbe::TokenTree::Delimited(mut span, ref spacing, ref delimited) => {
271                tscx.marker.mark_span(&mut span.open);
272                tscx.marker.mark_span(&mut span.close);
273                tscx.stack.push(Frame::new_delimited(delimited, span, *spacing));
274                tscx.result_stack.push(mem::take(&mut tscx.result));
275            }
276
277            // Nothing much to do here. Just push the token to the result, being careful to
278            // preserve syntax context.
279            &mbe::TokenTree::Token(mut token) => {
280                tscx.marker.mark_span(&mut token.span);
281                if let token::NtIdent(ident, _) | token::NtLifetime(ident, _) = &mut token.kind {
282                    tscx.marker.mark_span(&mut ident.span);
283                }
284                let tt = TokenTree::Token(token, Spacing::Alone);
285                tscx.result.push(tt);
286            }
287
288            // There should be no meta-var declarations in the invocation of a macro.
289            mbe::TokenTree::MetaVarDecl { .. } => {
    ::core::panicking::panic_fmt(format_args!("unexpected `TokenTree::MetaVarDecl`"));
}panic!("unexpected `TokenTree::MetaVarDecl`"),
290        }
291    }
292}
293
294/// Turn `$(...)*` sequences into tokens.
295fn transcribe_sequence<'tx, 'itp>(
296    tscx: &mut TranscrCtx<'tx, 'itp>,
297    seq: &mbe::TokenTree,
298    seq_rep: &'itp mbe::SequenceRepetition,
299    // Used only for better diagnostics in the face of typos.
300    interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
301) -> PResult<'tx, ()> {
302    let dcx = tscx.psess.dcx();
303
304    // We are descending into a sequence. We first make sure that the matchers in the RHS
305    // and the matches in `interp` have the same shape. Otherwise, either the caller or the
306    // macro writer has made a mistake.
307    match lockstep_iter_size(seq, tscx.interp, &tscx.repeats) {
308        LockstepIterSize::Unconstrained => {
309            let mut repeatables = Vec::new();
310            let mut non_repeatables = Vec::new();
311
312            #[allow(rustc::potential_query_instability)]
313            for (name, matcher) in interp.iter() {
314                if matcher.is_repeatable() {
315                    repeatables.push(name);
316                } else {
317                    non_repeatables.push(name);
318                }
319            }
320
321            let repeatable_names: Vec<Symbol> =
322                repeatables.iter().map(|&name| name.symbol()).collect();
323            let non_repeatable_names: Vec<Symbol> =
324                non_repeatables.iter().map(|&name| name.symbol()).collect();
325            let mut meta_vars = ::alloc::vec::Vec::new()vec![];
326            seq.meta_vars(&mut meta_vars);
327            let mut typo_repeatable = None;
328            let mut typo_unrepeatable = None;
329            let mut typo_unrepeatable_label = None;
330            let mut var_no_typo = None;
331            let mut no_repeatable_var = None;
332
333            for ident in meta_vars {
334                if let Some(name) = rustc_span::edit_distance::find_best_match_for_name(
335                    &repeatable_names[..],
336                    ident.name,
337                    None,
338                ) {
339                    typo_repeatable = Some(VarTypoSuggestionRepeatable { span: ident.span, name });
340                } else if let Some(name) = rustc_span::edit_distance::find_best_match_for_name(
341                    &non_repeatable_names[..],
342                    ident.name,
343                    None,
344                ) {
345                    typo_unrepeatable = Some(VarTypoSuggestionUnrepeatable { span: ident.span });
346                    if let Some(&orig_ident) = non_repeatables.iter().find(|n| n.symbol() == name) {
347                        typo_unrepeatable_label = Some(VarTypoSuggestionUnrepeatableLabel {
348                            span: orig_ident.ident().span,
349                        });
350                    }
351                } else {
352                    if !repeatable_names.is_empty()
353                        && let Some(msg) = listify(&repeatable_names, |s| ::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("`${0}`", s))
    })format!("`${s}`"))
354                    {
355                        var_no_typo = Some(VarNoTypo { span: ident.span, msg });
356                    } else {
357                        no_repeatable_var = Some(NoRepeatableVar { span: ident.span });
358                    }
359                }
360            }
361            return Err(dcx.create_err(NoSyntaxVarsExprRepeat {
362                span: seq.span(),
363                typo_unrepeatable,
364                typo_repeatable,
365                typo_unrepeatable_label,
366                var_no_typo,
367                no_repeatable_var,
368            }));
369        }
370
371        LockstepIterSize::Contradiction(msg) => {
372            // FIXME: this really ought to be caught at macro definition time... It
373            // happens when two meta-variables are used in the same repetition in a
374            // sequence, but they come from different sequence matchers and repeat
375            // different amounts.
376            return Err(dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg }));
377        }
378
379        LockstepIterSize::Constraint(len, _) => {
380            // We do this to avoid an extra clone above. We know that this is a
381            // sequence already.
382            let mbe::TokenTree::Sequence(sp, seq) = seq else { ::core::panicking::panic("internal error: entered unreachable code")unreachable!() };
383
384            // Is the repetition empty?
385            if len == 0 {
386                if seq.kleene.op == KleeneOp::OneOrMore {
387                    // FIXME: this really ought to be caught at macro definition
388                    // time... It happens when the Kleene operator in the matcher and
389                    // the body for the same meta-variable do not match.
390                    return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
391                }
392            } else {
393                // 0 is the initial counter (we have done 0 repetitions so far). `len`
394                // is the total number of repetitions we should generate.
395                tscx.repeats.push((0, len));
396
397                // The first time we encounter the sequence we push it to the stack. It
398                // then gets reused (see the beginning of the loop) until we are done
399                // repeating.
400                tscx.stack.push(Frame::new_sequence(seq_rep, seq.separator, seq.kleene.op));
401            }
402        }
403    }
404
405    Ok(())
406}
407
408/// Find the matched nonterminal from the macro invocation, and use it to replace
409/// the meta-var.
410///
411/// We use `Spacing::Alone` everywhere here, because that's the conservative choice
412/// and spacing of declarative macros is tricky. E.g. in this macro:
413/// ```
414/// macro_rules! idents {
415///     ($($a:ident,)*) => { stringify!($($a)*) }
416/// }
417/// ```
418/// `$a` has no whitespace after it and will be marked `JointHidden`. If you then
419/// call `idents!(x,y,z,)`, each of `x`, `y`, and `z` will be marked as `Joint`. So
420/// if you choose to use `$x`'s spacing or the identifier's spacing, you'll end up
421/// producing "xyz", which is bad because it effectively merges tokens.
422/// `Spacing::Alone` is the safer option. Fortunately, `space_between` will avoid
423/// some of the unnecessary whitespace.
424fn transcribe_metavar<'tx>(
425    tscx: &mut TranscrCtx<'tx, '_>,
426    mut sp: Span,
427    mut original_ident: Ident,
428) -> PResult<'tx, ()> {
429    let dcx = tscx.psess.dcx();
430
431    let ident = MacroRulesNormalizedIdent::new(original_ident);
432    let Some(cur_matched) = lookup_cur_matched(ident, tscx.interp, &tscx.repeats) else {
433        // If we aren't able to match the meta-var, we push it back into the result but
434        // with modified syntax context. (I believe this supports nested macros).
435        tscx.marker.mark_span(&mut sp);
436        tscx.marker.mark_span(&mut original_ident.span);
437        tscx.result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
438        tscx.result.push(TokenTree::Token(Token::from_ast_ident(original_ident), Spacing::Alone));
439        return Ok(());
440    };
441
442    let MatchedSingle(pnr) = cur_matched else {
443        // We were unable to descend far enough. This is an error.
444        return Err(dcx.create_err(MacroVarStillRepeating { span: sp, ident }));
445    };
446
447    transcribe_pnr(tscx, sp, pnr)
448}
449
450fn transcribe_pnr<'tx>(
451    tscx: &mut TranscrCtx<'tx, '_>,
452    mut sp: Span,
453    pnr: &ParseNtResult,
454) -> PResult<'tx, ()> {
455    // We wrap the tokens in invisible delimiters, unless they are already wrapped
456    // in invisible delimiters with the same `MetaVarKind`. Because some proc
457    // macros can't handle multiple layers of invisible delimiters of the same
458    // `MetaVarKind`. This loses some span info, though it hopefully won't matter.
459    let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
460        if stream.len() == 1 {
461            let tree = stream.iter().next().unwrap();
462            if let TokenTree::Delimited(_, _, delim, inner) = tree
463                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim
464                && mv_kind == *mvk
465            {
466                stream = inner.clone();
467            }
468        }
469
470        // Emit as a token stream within `Delimiter::Invisible` to maintain
471        // parsing priorities.
472        tscx.marker.mark_span(&mut sp);
473        with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
474        // Both the open delim and close delim get the same span, which covers the
475        // `$foo` in the decl macro RHS.
476        TokenTree::Delimited(
477            DelimSpan::from_single(sp),
478            DelimSpacing::new(Spacing::Alone, Spacing::Alone),
479            Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)),
480            stream,
481        )
482    };
483
484    let tt = match pnr {
485        ParseNtResult::Tt(tt) => {
486            // `tt`s are emitted into the output stream directly as "raw tokens",
487            // without wrapping them into groups. Other variables are emitted into
488            // the output stream as groups with `Delimiter::Invisible` to maintain
489            // parsing priorities.
490            maybe_use_metavar_location(tscx.psess, &tscx.stack, sp, tt, &mut tscx.marker)
491        }
492        ParseNtResult::Ident(ident, is_raw) => {
493            tscx.marker.mark_span(&mut sp);
494            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
495            let kind = token::NtIdent(*ident, *is_raw);
496            TokenTree::token_alone(kind, sp)
497        }
498        ParseNtResult::Lifetime(ident, is_raw) => {
499            tscx.marker.mark_span(&mut sp);
500            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
501            let kind = token::NtLifetime(*ident, *is_raw);
502            TokenTree::token_alone(kind, sp)
503        }
504        ParseNtResult::Item(item) => {
505            mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
506        }
507        ParseNtResult::Block(block) => {
508            mk_delimited(block.span, MetaVarKind::Block, TokenStream::from_ast(block))
509        }
510        ParseNtResult::Stmt(stmt) => {
511            let stream = if let StmtKind::Empty = stmt.kind {
512                // FIXME: Properly collect tokens for empty statements.
513                TokenStream::token_alone(token::Semi, stmt.span)
514            } else {
515                TokenStream::from_ast(stmt)
516            };
517            mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
518        }
519        ParseNtResult::Pat(pat, pat_kind) => {
520            mk_delimited(pat.span, MetaVarKind::Pat(*pat_kind), TokenStream::from_ast(pat))
521        }
522        ParseNtResult::Expr(expr, kind) => {
523            let (can_begin_literal_maybe_minus, can_begin_string_literal) = match &expr.kind {
524                ExprKind::Lit(_) => (true, true),
525                ExprKind::Unary(UnOp::Neg, e) if #[allow(non_exhaustive_omitted_patterns)] match &e.kind {
    ExprKind::Lit(_) => true,
    _ => false,
}matches!(&e.kind, ExprKind::Lit(_)) => {
526                    (true, false)
527                }
528                _ => (false, false),
529            };
530            mk_delimited(
531                expr.span,
532                MetaVarKind::Expr {
533                    kind: *kind,
534                    can_begin_literal_maybe_minus,
535                    can_begin_string_literal,
536                },
537                TokenStream::from_ast(expr),
538            )
539        }
540        ParseNtResult::Literal(lit) => {
541            mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
542        }
543        ParseNtResult::Ty(ty) => {
544            let is_path = #[allow(non_exhaustive_omitted_patterns)] match &ty.kind {
    TyKind::Path(None, _path) => true,
    _ => false,
}matches!(&ty.kind, TyKind::Path(None, _path));
545            mk_delimited(ty.span, MetaVarKind::Ty { is_path }, TokenStream::from_ast(ty))
546        }
547        ParseNtResult::Meta(attr_item) => {
548            let has_meta_form = attr_item.meta_kind().is_some();
549            mk_delimited(
550                attr_item.span(),
551                MetaVarKind::Meta { has_meta_form },
552                TokenStream::from_ast(attr_item),
553            )
554        }
555        ParseNtResult::Path(path) => {
556            mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
557        }
558        ParseNtResult::Vis(vis) => {
559            mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
560        }
561        ParseNtResult::Guard(guard) => {
562            // FIXME(macro_guard_matcher):
563            // Perhaps it would be better to treat the leading `if` as part of `ast::Guard` during parsing?
564            // Currently they are separate, but in macros we match and emit the leading `if` for `:guard` matchers, which creates some inconsistency.
565
566            let leading_if_span =
567                guard.span_with_leading_if.with_hi(guard.span_with_leading_if.lo() + BytePos(2));
568            let mut ts =
569                TokenStream::token_alone(token::Ident(kw::If, IdentIsRaw::No), leading_if_span);
570            ts.push_stream(TokenStream::from_ast(&guard.cond));
571
572            mk_delimited(guard.span_with_leading_if, MetaVarKind::Guard, ts)
573        }
574    };
575
576    tscx.result.push(tt);
577    Ok(())
578}
579
580/// Turn `${expr(...)}` metavariable expressionss into tokens.
581fn transcribe_metavar_expr<'tx>(
582    tscx: &mut TranscrCtx<'tx, '_>,
583    dspan: DelimSpan,
584    expr: &MetaVarExpr,
585) -> PResult<'tx, ()> {
586    let dcx = tscx.psess.dcx();
587    let tt = match *expr {
588        MetaVarExpr::Concat(ref elements) => metavar_expr_concat(tscx, dspan, elements)?,
589        MetaVarExpr::Count(original_ident, depth) => {
590            let matched = matched_from_ident(dcx, original_ident, tscx.interp)?;
591            let count = count_repetitions(dcx, depth, matched, &tscx.repeats, &dspan)?;
592            TokenTree::token_alone(
593                TokenKind::lit(token::Integer, sym::integer(count), None),
594                tscx.visited_dspan(dspan),
595            )
596        }
597        MetaVarExpr::Ignore(original_ident) => {
598            // Used to ensure that `original_ident` is present in the LHS
599            let _ = matched_from_ident(dcx, original_ident, tscx.interp)?;
600            return Ok(());
601        }
602        MetaVarExpr::Index(depth) => match tscx.repeats.iter().nth_back(depth) {
603            Some((index, _)) => TokenTree::token_alone(
604                TokenKind::lit(token::Integer, sym::integer(*index), None),
605                tscx.visited_dspan(dspan),
606            ),
607            None => {
608                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "index"));
609            }
610        },
611        MetaVarExpr::Len(depth) => match tscx.repeats.iter().nth_back(depth) {
612            Some((_, length)) => TokenTree::token_alone(
613                TokenKind::lit(token::Integer, sym::integer(*length), None),
614                tscx.visited_dspan(dspan),
615            ),
616            None => {
617                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "len"));
618            }
619        },
620    };
621    tscx.result.push(tt);
622    Ok(())
623}
624
625/// Handle the `${concat(...)}` metavariable expression.
626fn metavar_expr_concat<'tx>(
627    tscx: &mut TranscrCtx<'tx, '_>,
628    dspan: DelimSpan,
629    elements: &[MetaVarExprConcatElem],
630) -> PResult<'tx, TokenTree> {
631    let dcx = tscx.psess.dcx();
632    let mut concatenated = String::new();
633    for element in elements {
634        let symbol = match element {
635            MetaVarExprConcatElem::Ident(elem) => elem.name,
636            MetaVarExprConcatElem::Literal(elem) => *elem,
637            MetaVarExprConcatElem::Var(ident) => {
638                let key = MacroRulesNormalizedIdent::new(*ident);
639                match lookup_cur_matched(key, tscx.interp, &tscx.repeats) {
640                    Some(NamedMatch::MatchedSingle(pnr)) => {
641                        extract_symbol_from_pnr(dcx, pnr, ident.span)?
642                    }
643                    Some(NamedMatch::MatchedSeq(..)) => {
644                        return Err(dcx.struct_span_err(
645                            ident.span,
646                            "`${concat(...)}` variable is still repeating at this depth",
647                        ));
648                    }
649                    None => {
650                        return Err(dcx.create_err(MveUnrecognizedVar { span: ident.span, key }));
651                    }
652                }
653            }
654        };
655        concatenated.push_str(symbol.as_str());
656    }
657    let symbol = nfc_normalize(&concatenated);
658    let concatenated_span = tscx.visited_dspan(dspan);
659    if !rustc_lexer::is_ident(symbol.as_str()) {
660        return Err(dcx.create_err(ConcatInvalidIdent {
661            span: concatenated_span,
662            reason: InvalidIdentReason::new(symbol),
663        }));
664    }
665    tscx.psess.symbol_gallery.insert(symbol, concatenated_span);
666
667    // The current implementation marks the span as coming from the macro regardless of
668    // contexts of the concatenated identifiers but this behavior may change in the
669    // future.
670    Ok(TokenTree::Token(
671        Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
672        Spacing::Alone,
673    ))
674}
675
676/// Store the metavariable span for this original span into a side table.
677/// FIXME: Try to put the metavariable span into `SpanData` instead of a side table (#118517).
678/// An optimal encoding for inlined spans will need to be selected to minimize regressions.
679/// The side table approach is relatively good, but not perfect due to collisions.
680/// In particular, collisions happen when token is passed as an argument through several macro
681/// calls, like in recursive macros.
682/// The old heuristic below is used to improve spans in case of collisions, but diagnostics are
683/// still degraded sometimes in those cases.
684///
685/// The old heuristic:
686///
687/// Usually metavariables `$var` produce interpolated tokens, which have an additional place for
688/// keeping both the original span and the metavariable span. For `tt` metavariables that's not the
689/// case however, and there's no place for keeping a second span. So we try to give the single
690/// produced span a location that would be most useful in practice (the hygiene part of the span
691/// must not be changed).
692///
693/// Different locations are useful for different purposes:
694/// - The original location is useful when we need to report a diagnostic for the original token in
695///   isolation, without combining it with any surrounding tokens. This case occurs, but it is not
696///   very common in practice.
697/// - The metavariable location is useful when we need to somehow combine the token span with spans
698///   of its surrounding tokens. This is the most common way to use token spans.
699///
700/// So this function replaces the original location with the metavariable location in all cases
701/// except these two:
702/// - The metavariable is an element of undelimited sequence `$($tt)*`.
703///   These are typically used for passing larger amounts of code, and tokens in that code usually
704///   combine with each other and not with tokens outside of the sequence.
705/// - The metavariable span comes from a different crate, then we prefer the more local span.
706fn maybe_use_metavar_location(
707    psess: &ParseSess,
708    stack: &[Frame<'_>],
709    mut metavar_span: Span,
710    orig_tt: &TokenTree,
711    marker: &mut Marker,
712) -> TokenTree {
713    let undelimited_seq = #[allow(non_exhaustive_omitted_patterns)] match stack.last() {
    Some(Frame {
        tts: [_],
        kind: FrameKind::Sequence {
            sep: None, kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
            ..
            }, .. }) => true,
    _ => false,
}matches!(
714        stack.last(),
715        Some(Frame {
716            tts: [_],
717            kind: FrameKind::Sequence {
718                sep: None,
719                kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
720                ..
721            },
722            ..
723        })
724    );
725    if undelimited_seq {
726        // Do not record metavar spans for tokens from undelimited sequences, for perf reasons.
727        return orig_tt.clone();
728    }
729
730    marker.mark_span(&mut metavar_span);
731    let no_collision = match orig_tt {
732        TokenTree::Token(token, ..) => {
733            with_metavar_spans(|mspans| mspans.insert(token.span, metavar_span))
734        }
735        TokenTree::Delimited(dspan, ..) => with_metavar_spans(|mspans| {
736            mspans.insert(dspan.open, metavar_span)
737                && mspans.insert(dspan.close, metavar_span)
738                && mspans.insert(dspan.entire(), metavar_span)
739        }),
740    };
741    if no_collision || psess.source_map().is_imported(metavar_span) {
742        return orig_tt.clone();
743    }
744
745    // Setting metavar spans for the heuristic spans gives better opportunities for combining them
746    // with neighboring spans even despite their different syntactic contexts.
747    match orig_tt {
748        TokenTree::Token(Token { kind, span }, spacing) => {
749            let span = metavar_span.with_ctxt(span.ctxt());
750            with_metavar_spans(|mspans| mspans.insert(span, metavar_span));
751            TokenTree::Token(Token { kind: *kind, span }, *spacing)
752        }
753        TokenTree::Delimited(dspan, dspacing, delimiter, tts) => {
754            let open = metavar_span.with_ctxt(dspan.open.ctxt());
755            let close = metavar_span.with_ctxt(dspan.close.ctxt());
756            with_metavar_spans(|mspans| {
757                mspans.insert(open, metavar_span) && mspans.insert(close, metavar_span)
758            });
759            let dspan = DelimSpan::from_pair(open, close);
760            TokenTree::Delimited(dspan, *dspacing, *delimiter, tts.clone())
761        }
762    }
763}
764
765/// Lookup the meta-var named `ident` and return the matched token tree from the invocation using
766/// the set of matches `interpolations`.
767///
768/// See the definition of `repeats` in the `transcribe` function. `repeats` is used to descend
769/// into the right place in nested matchers. If we attempt to descend too far, the macro writer has
770/// made a mistake, and we return `None`.
771fn lookup_cur_matched<'a>(
772    ident: MacroRulesNormalizedIdent,
773    interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
774    repeats: &[(usize, usize)],
775) -> Option<&'a NamedMatch> {
776    interpolations.get(&ident).map(|mut matched| {
777        for &(idx, _) in repeats {
778            match matched {
779                MatchedSingle(_) => break,
780                MatchedSeq(ads) => matched = ads.get(idx).unwrap(),
781            }
782        }
783
784        matched
785    })
786}
787
788/// An accumulator over a TokenTree to be used with `fold`. During transcription, we need to make
789/// sure that the size of each sequence and all of its nested sequences are the same as the sizes
790/// of all the matched (nested) sequences in the macro invocation. If they don't match, somebody
791/// has made a mistake (either the macro writer or caller).
792#[derive(#[automatically_derived]
impl ::core::clone::Clone for LockstepIterSize {
    #[inline]
    fn clone(&self) -> LockstepIterSize {
        match self {
            LockstepIterSize::Unconstrained =>
                LockstepIterSize::Unconstrained,
            LockstepIterSize::Constraint(__self_0, __self_1) =>
                LockstepIterSize::Constraint(::core::clone::Clone::clone(__self_0),
                    ::core::clone::Clone::clone(__self_1)),
            LockstepIterSize::Contradiction(__self_0) =>
                LockstepIterSize::Contradiction(::core::clone::Clone::clone(__self_0)),
        }
    }
}Clone)]
793enum LockstepIterSize {
794    /// No constraints on length of matcher. This is true for any TokenTree variants except a
795    /// `MetaVar` with an actual `MatchedSeq` (as opposed to a `MatchedNonterminal`).
796    Unconstrained,
797
798    /// A `MetaVar` with an actual `MatchedSeq`. The length of the match and the name of the
799    /// meta-var are returned.
800    Constraint(usize, MacroRulesNormalizedIdent),
801
802    /// Two `Constraint`s on the same sequence had different lengths. This is an error.
803    Contradiction(String),
804}
805
806impl LockstepIterSize {
807    /// Find incompatibilities in matcher/invocation sizes.
808    /// - `Unconstrained` is compatible with everything.
809    /// - `Contradiction` is incompatible with everything.
810    /// - `Constraint(len)` is only compatible with other constraints of the same length.
811    fn with(self, other: LockstepIterSize) -> LockstepIterSize {
812        match self {
813            LockstepIterSize::Unconstrained => other,
814            LockstepIterSize::Contradiction(_) => self,
815            LockstepIterSize::Constraint(l_len, l_id) => match other {
816                LockstepIterSize::Unconstrained => self,
817                LockstepIterSize::Contradiction(_) => other,
818                LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
819                LockstepIterSize::Constraint(r_len, r_id) => {
820                    let msg = ::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("meta-variable `{0}` repeats {1} time{2}, but `{3}` repeats {4} time{5}",
                l_id, l_len, if l_len == 1 { "" } else { "s" }, r_id, r_len,
                if r_len == 1 { "" } else { "s" }))
    })format!(
821                        "meta-variable `{}` repeats {} time{}, but `{}` repeats {} time{}",
822                        l_id,
823                        l_len,
824                        pluralize!(l_len),
825                        r_id,
826                        r_len,
827                        pluralize!(r_len),
828                    );
829                    LockstepIterSize::Contradiction(msg)
830                }
831            },
832        }
833    }
834}
835
836/// Given a `tree`, make sure that all sequences have the same length as the matches for the
837/// appropriate meta-vars in `interpolations`.
838///
839/// Note that if `repeats` does not match the exact correct depth of a meta-var,
840/// `lookup_cur_matched` will return `None`, which is why this still works even in the presence of
841/// multiple nested matcher sequences.
842///
843/// Example: `$($($x $y)+*);+` -- we need to make sure that `x` and `y` repeat the same amount as
844/// each other at the given depth when the macro was invoked. If they don't it might mean they were
845/// declared at depths which weren't equal or there was a compiler bug. For example, if we have 3 repetitions of
846/// the outer sequence and 4 repetitions of the inner sequence for `x`, we should have the same for
847/// `y`; otherwise, we can't transcribe them both at the given depth.
848fn lockstep_iter_size(
849    tree: &mbe::TokenTree,
850    interpolations: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
851    repeats: &[(usize, usize)],
852) -> LockstepIterSize {
853    use mbe::TokenTree;
854    match tree {
855        TokenTree::Delimited(.., delimited) => {
856            delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
857                size.with(lockstep_iter_size(tt, interpolations, repeats))
858            })
859        }
860        TokenTree::Sequence(_, seq) => {
861            seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
862                size.with(lockstep_iter_size(tt, interpolations, repeats))
863            })
864        }
865        TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl { name, .. } => {
866            let name = MacroRulesNormalizedIdent::new(*name);
867            match lookup_cur_matched(name, interpolations, repeats) {
868                Some(matched) => match matched {
869                    MatchedSingle(_) => LockstepIterSize::Unconstrained,
870                    MatchedSeq(ads) => LockstepIterSize::Constraint(ads.len(), name),
871                },
872                _ => LockstepIterSize::Unconstrained,
873            }
874        }
875        TokenTree::MetaVarExpr(_, expr) => {
876            expr.for_each_metavar(LockstepIterSize::Unconstrained, |lis, ident| {
877                lis.with(lockstep_iter_size(
878                    &TokenTree::MetaVar(ident.span, *ident),
879                    interpolations,
880                    repeats,
881                ))
882            })
883        }
884        TokenTree::Token(..) => LockstepIterSize::Unconstrained,
885    }
886}
887
888/// Used solely by the `count` meta-variable expression, counts the outermost repetitions at a
889/// given optional nested depth.
890///
891/// For example, a macro parameter of `$( { $( $foo:ident ),* } )*` called with `{ a, b } { c }`:
892///
893/// * `[ $( ${count(foo)} ),* ]` will return [2, 1] with a, b = 2 and c = 1
894/// * `[ $( ${count(foo, 0)} ),* ]` will be the same as `[ $( ${count(foo)} ),* ]`
895/// * `[ $( ${count(foo, 1)} ),* ]` will return an error because `${count(foo, 1)}` is
896///   declared inside a single repetition and the index `1` implies two nested repetitions.
fn count_repetitions<'dx>(
    dcx: DiagCtxtHandle<'dx>,
    depth_user: usize,
    mut matched: &NamedMatch,
    repeats: &[(usize, usize)],
    sp: &DelimSpan,
) -> PResult<'dx, usize> {
    // Recursively count the number of matches in `matched` at given depth
    // (or at the top-level of `matched` if no depth is given).
    fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> {
        match matched {
            MatchedSingle(_) => Ok(1),
            MatchedSeq(named_matches) => {
                if depth_curr == depth_max {
                    // At the requested depth: the count is the sequence length itself.
                    Ok(named_matches.len())
                } else {
                    // Above the requested depth: sum the counts of all nested sequences.
                    named_matches.iter().map(|elem| count(depth_curr + 1, depth_max, elem)).sum()
                }
            }
        }
    }

    /// Maximum depth
    // Walks down the first element at each level; assumes sibling matches share the same
    // nesting depth, so following `first()` is representative of the whole tree.
    fn depth(counter: usize, matched: &NamedMatch) -> usize {
        match matched {
            MatchedSingle(_) => counter,
            MatchedSeq(named_matches) => {
                let rslt = counter + 1;
                if let Some(elem) = named_matches.first() { depth(rslt, elem) } else { rslt }
            }
        }
    }

    // Convert the absolute tree depth into a depth relative to the current transcription
    // position: subtract 1 (depths are 0-based) and the levels we have already descended
    // (`repeats.len()`). Saturates to 0 via `unwrap_or_default` if we are already deeper
    // than the match tree.
    let depth_max = depth(0, matched)
        .checked_sub(1)
        .and_then(|el| el.checked_sub(repeats.len()))
        .unwrap_or_default();
    if depth_user > depth_max {
        // The user-supplied depth exceeds what the match tree provides at this position.
        return Err(out_of_bounds_err(dcx, depth_max + 1, sp.entire(), "count"));
    }

    // `repeats` records all of the nested levels at which we are currently
    // matching meta-variables. The meta-var-expr `count($x)` only counts
    // matches that occur in this "subtree" of the `NamedMatch` where we
    // are currently transcribing, so we need to descend to that subtree
    // before we start counting. `matched` contains the various levels of the
    // tree as we descend, and its final value is the subtree we are currently at.
    for &(idx, _) in repeats {
        if let MatchedSeq(ads) = matched {
            matched = &ads[idx];
        }
    }

    // `count(..)` is only meaningful on a repetition; a single match here means the
    // expression was written outside (or too deep inside) any repetition.
    if let MatchedSingle(_) = matched {
        return Err(dcx.create_err(CountRepetitionMisplaced { span: sp.entire() }));
    }

    count(depth_user, depth_max, matched)
}
956
957/// Returns a `NamedMatch` item declared on the LHS given an arbitrary [Ident]
958fn matched_from_ident<'ctx, 'interp, 'rslt>(
959    dcx: DiagCtxtHandle<'ctx>,
960    ident: Ident,
961    interp: &'interp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
962) -> PResult<'ctx, &'rslt NamedMatch>
963where
964    'interp: 'rslt,
965{
966    let span = ident.span;
967    let key = MacroRulesNormalizedIdent::new(ident);
968    interp.get(&key).ok_or_else(|| dcx.create_err(MveUnrecognizedVar { span, key }))
969}
970
971/// Used by meta-variable expressions when an user input is out of the actual declared bounds. For
972/// example, index(999999) in an repetition of only three elements.
973fn out_of_bounds_err<'a>(dcx: DiagCtxtHandle<'a>, max: usize, span: Span, ty: &str) -> Diag<'a> {
974    let msg = if max == 0 {
975        ::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("meta-variable expression `{0}` with depth parameter must be called inside of a macro repetition",
                ty))
    })format!(
976            "meta-variable expression `{ty}` with depth parameter \
977             must be called inside of a macro repetition"
978        )
979    } else {
980        ::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("depth parameter of meta-variable expression `{0}` must be less than {1}",
                ty, max))
    })format!(
981            "depth parameter of meta-variable expression `{ty}` \
982             must be less than {max}"
983        )
984    };
985    dcx.struct_span_err(span, msg)
986}
987
988/// Extracts an metavariable symbol that can be an identifier, a token tree or a literal.
989fn extract_symbol_from_pnr<'a>(
990    dcx: DiagCtxtHandle<'a>,
991    pnr: &ParseNtResult,
992    span_err: Span,
993) -> PResult<'a, Symbol> {
994    match pnr {
995        ParseNtResult::Ident(nt_ident, is_raw) => {
996            if let IdentIsRaw::Yes = is_raw {
997                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
998            } else {
999                Ok(nt_ident.name)
1000            }
1001        }
1002        ParseNtResult::Tt(TokenTree::Token(
1003            Token { kind: TokenKind::Ident(symbol, is_raw), .. },
1004            _,
1005        )) => {
1006            if let IdentIsRaw::Yes = is_raw {
1007                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
1008            } else {
1009                Ok(*symbol)
1010            }
1011        }
1012        ParseNtResult::Tt(TokenTree::Token(
1013            Token {
1014                kind: TokenKind::Literal(Lit { kind: LitKind::Str, symbol, suffix: None }),
1015                ..
1016            },
1017            _,
1018        )) => Ok(*symbol),
1019        ParseNtResult::Literal(expr)
1020            if let ExprKind::Lit(Lit { kind: LitKind::Str, symbol, suffix: None }) = &expr.kind =>
1021        {
1022            Ok(*symbol)
1023        }
1024        ParseNtResult::Literal(expr)
1025            if let ExprKind::Lit(lit @ Lit { kind: LitKind::Integer, symbol, suffix }) =
1026                &expr.kind =>
1027        {
1028            if lit.is_semantic_float() {
1029                Err(dcx
1030                    .struct_err("floats are not supported as metavariables of `${concat(..)}`")
1031                    .with_span(span_err))
1032            } else if suffix.is_none() {
1033                Ok(*symbol)
1034            } else {
1035                Err(dcx
1036                    .struct_err("integer metavariables of `${concat(..)}` must not be suffixed")
1037                    .with_span(span_err))
1038            }
1039        }
1040        _ => Err(dcx
1041            .struct_err(
1042                "metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`",
1043            )
1044            .with_note("currently only string and integer literals are supported")
1045            .with_span(span_err)),
1046    }
1047}