1use std::mem;
2
3use rustc_ast::token::{
4 self, Delimiter, IdentIsRaw, InvisibleOrigin, Lit, LitKind, MetaVarKind, Token, TokenKind,
5};
6use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
7use rustc_ast::{ExprKind, StmtKind, TyKind, UnOp};
8use rustc_data_structures::fx::FxHashMap;
9use rustc_errors::{Diag, DiagCtxtHandle, PResult, listify, pluralize};
10use rustc_parse::lexer::nfc_normalize;
11use rustc_parse::parser::ParseNtResult;
12use rustc_session::parse::ParseSess;
13use rustc_span::hygiene::{LocalExpnId, Transparency};
14use rustc_span::{
15 BytePos, Ident, MacroRulesNormalizedIdent, Span, Symbol, SyntaxContext, kw, sym,
16 with_metavar_spans,
17};
18use smallvec::{SmallVec, smallvec};
19
20use crate::errors::{
21 ConcatInvalidIdent, CountRepetitionMisplaced, InvalidIdentReason, MacroVarStillRepeating,
22 MetaVarsDifSeqMatchers, MustRepeatOnce, MveUnrecognizedVar, NoRepeatableVar,
23 NoSyntaxVarsExprRepeat, VarNoTypo, VarTypoSuggestionRepeatable, VarTypoSuggestionUnrepeatable,
24 VarTypoSuggestionUnrepeatableLabel,
25};
26use crate::mbe::macro_parser::NamedMatch;
27use crate::mbe::macro_parser::NamedMatch::*;
28use crate::mbe::metavar_expr::{MetaVarExprConcatElem, RAW_IDENT_ERR};
29use crate::mbe::{self, KleeneOp, MetaVarExpr};
30
/// State for a single `transcribe` invocation: the inputs being substituted,
/// hygiene-marking state, and the in-progress output buffers.
struct TranscrCtx<'psess, 'itp> {
    /// Parse session; used for diagnostics and the symbol gallery.
    psess: &'psess ParseSess,

    /// Metavariable bindings produced by matching the macro's LHS.
    interp: &'itp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,

    /// Applies the expansion's hygiene mark to spans as they are emitted.
    marker: Marker,

    /// Stack of frames still being transcribed (delimited groups and sequence
    /// repetitions); inline capacity of 1 since it is usually shallow.
    stack: SmallVec<[Frame<'itp>; 1]>,

    /// `(current index, total length)` for each enclosing repetition,
    /// outermost first.
    repeats: Vec<(usize, usize)>,

    /// Tokens produced so far at the current nesting level.
    result: Vec<TokenTree>,

    /// Saved `result` buffers for the enclosing delimited groups.
    result_stack: Vec<Vec<TokenTree>>,
}
73
impl<'psess> TranscrCtx<'psess, '_> {
    /// Returns the entire span of `dspan` with the hygiene mark applied.
    fn visited_dspan(&mut self, dspan: DelimSpan) -> Span {
        let mut span = dspan.entire();
        self.marker.mark_span(&mut span);
        span
    }
}
82
/// Applies one macro expansion's hygiene mark to spans, memoizing marked
/// syntax contexts so repeated contexts are only marked once.
struct Marker {
    expand_id: LocalExpnId,
    transparency: Transparency,
    // ctxt -> already-marked ctxt; many spans share a context, so this saves
    // repeated `apply_mark` calls.
    cache: FxHashMap<SyntaxContext, SyntaxContext>,
}
89
impl Marker {
    /// Marks `span`'s syntax context with this expansion in place, consulting
    /// the cache first.
    fn mark_span(&mut self, span: &mut Span) {
        *span = span.map_ctxt(|ctxt| {
            *self
                .cache
                .entry(ctxt)
                .or_insert_with(|| ctxt.apply_mark(self.expand_id.to_expn_id(), self.transparency))
        });
    }
}
105
/// One level of the transcription stack: a slice of template token trees plus
/// a cursor into it and what kind of construct produced the frame.
struct Frame<'a> {
    tts: &'a [mbe::TokenTree],
    // Index of the next tree to transcribe; advanced by `Iterator::next`.
    idx: usize,
    kind: FrameKind,
}
112
/// The template construct a `Frame` is transcribing.
enum FrameKind {
    /// A delimited group, e.g. `(...)`, `[...]`, or `{...}`.
    Delimited { delim: Delimiter, span: DelimSpan, spacing: DelimSpacing },
    /// One pass over a `$(...)` repetition, with its optional separator token.
    Sequence { sep: Option<Token>, kleene_op: KleeneOp },
}
117
118impl<'a> Frame<'a> {
119 fn new_delimited(src: &'a mbe::Delimited, span: DelimSpan, spacing: DelimSpacing) -> Frame<'a> {
120 Frame {
121 tts: &src.tts,
122 idx: 0,
123 kind: FrameKind::Delimited { delim: src.delim, span, spacing },
124 }
125 }
126
127 fn new_sequence(
128 src: &'a mbe::SequenceRepetition,
129 sep: Option<Token>,
130 kleene_op: KleeneOp,
131 ) -> Frame<'a> {
132 Frame { tts: &src.tts, idx: 0, kind: FrameKind::Sequence { sep, kleene_op } }
133 }
134}
135
136impl<'a> Iterator for Frame<'a> {
137 type Item = &'a mbe::TokenTree;
138
139 fn next(&mut self) -> Option<&'a mbe::TokenTree> {
140 let res = self.tts.get(self.idx);
141 self.idx += 1;
142 res
143 }
144}
145
146pub(super) fn transcribe<'a>(
167 psess: &'a ParseSess,
168 interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
169 src: &mbe::Delimited,
170 src_span: DelimSpan,
171 transparency: Transparency,
172 expand_id: LocalExpnId,
173) -> PResult<'a, TokenStream> {
174 if src.tts.is_empty() {
176 return Ok(TokenStream::default());
177 }
178
179 let mut tscx = TranscrCtx {
180 psess,
181 interp,
182 marker: Marker { expand_id, transparency, cache: Default::default() },
183 repeats: Vec::new(),
184 stack: {
let count = 0usize + 1usize;
let mut vec = ::smallvec::SmallVec::new();
if count <= vec.inline_size() {
vec.push(Frame::new_delimited(src, src_span,
DelimSpacing::new(Spacing::Alone, Spacing::Alone)));
vec
} else {
::smallvec::SmallVec::from_vec(::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
[Frame::new_delimited(src, src_span,
DelimSpacing::new(Spacing::Alone, Spacing::Alone))])))
}
}smallvec![Frame::new_delimited(
185 src,
186 src_span,
187 DelimSpacing::new(Spacing::Alone, Spacing::Alone)
188 )],
189 result: Vec::new(),
190 result_stack: Vec::new(),
191 };
192
193 loop {
194 let Some(tree) = tscx.stack.last_mut().unwrap().next() else {
197 let frame = tscx.stack.last_mut().unwrap();
202 if let FrameKind::Sequence { sep, .. } = &frame.kind {
203 let (repeat_idx, repeat_len) = tscx.repeats.last_mut().unwrap();
204 *repeat_idx += 1;
205 if repeat_idx < repeat_len {
206 frame.idx = 0;
207 if let Some(sep) = sep {
208 tscx.result.push(TokenTree::Token(*sep, Spacing::Alone));
209 }
210 continue;
211 }
212 }
213
214 match tscx.stack.pop().unwrap().kind {
218 FrameKind::Sequence { .. } => {
220 tscx.repeats.pop();
221 }
222
223 FrameKind::Delimited { delim, span, mut spacing, .. } => {
227 if delim == Delimiter::Bracket {
230 spacing.close = Spacing::Alone;
231 }
232 if tscx.result_stack.is_empty() {
233 return Ok(TokenStream::new(tscx.result));
235 }
236
237 let tree =
239 TokenTree::Delimited(span, spacing, delim, TokenStream::new(tscx.result));
240 tscx.result = tscx.result_stack.pop().unwrap();
241 tscx.result.push(tree);
242 }
243 }
244 continue;
245 };
246
247 match tree {
250 seq @ mbe::TokenTree::Sequence(_, seq_rep) => {
252 transcribe_sequence(&mut tscx, seq, seq_rep, interp)?;
253 }
254
255 &mbe::TokenTree::MetaVar(sp, original_ident) => {
257 transcribe_metavar(&mut tscx, sp, original_ident)?;
258 }
259
260 mbe::TokenTree::MetaVarExpr(dspan, expr) => {
262 transcribe_metavar_expr(&mut tscx, *dspan, expr)?;
263 }
264
265 &mbe::TokenTree::Delimited(mut span, ref spacing, ref delimited) => {
271 tscx.marker.mark_span(&mut span.open);
272 tscx.marker.mark_span(&mut span.close);
273 tscx.stack.push(Frame::new_delimited(delimited, span, *spacing));
274 tscx.result_stack.push(mem::take(&mut tscx.result));
275 }
276
277 &mbe::TokenTree::Token(mut token) => {
280 tscx.marker.mark_span(&mut token.span);
281 if let token::NtIdent(ident, _) | token::NtLifetime(ident, _) = &mut token.kind {
282 tscx.marker.mark_span(&mut ident.span);
283 }
284 let tt = TokenTree::Token(token, Spacing::Alone);
285 tscx.result.push(tt);
286 }
287
288 mbe::TokenTree::MetaVarDecl { .. } => {
::core::panicking::panic_fmt(format_args!("unexpected `TokenTree::MetaVarDecl`"));
}panic!("unexpected `TokenTree::MetaVarDecl`"),
290 }
291 }
292}
293
294fn transcribe_sequence<'tx, 'itp>(
296 tscx: &mut TranscrCtx<'tx, 'itp>,
297 seq: &mbe::TokenTree,
298 seq_rep: &'itp mbe::SequenceRepetition,
299 interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
301) -> PResult<'tx, ()> {
302 let dcx = tscx.psess.dcx();
303
304 match lockstep_iter_size(seq, tscx.interp, &tscx.repeats) {
308 LockstepIterSize::Unconstrained => {
309 let mut repeatables = Vec::new();
310 let mut non_repeatables = Vec::new();
311
312 #[allow(rustc::potential_query_instability)]
313 for (name, matcher) in interp.iter() {
314 if matcher.is_repeatable() {
315 repeatables.push(name);
316 } else {
317 non_repeatables.push(name);
318 }
319 }
320
321 let repeatable_names: Vec<Symbol> =
322 repeatables.iter().map(|&name| name.symbol()).collect();
323 let non_repeatable_names: Vec<Symbol> =
324 non_repeatables.iter().map(|&name| name.symbol()).collect();
325 let mut meta_vars = ::alloc::vec::Vec::new()vec![];
326 seq.meta_vars(&mut meta_vars);
327 let mut typo_repeatable = None;
328 let mut typo_unrepeatable = None;
329 let mut typo_unrepeatable_label = None;
330 let mut var_no_typo = None;
331 let mut no_repeatable_var = None;
332
333 for ident in meta_vars {
334 if let Some(name) = rustc_span::edit_distance::find_best_match_for_name(
335 &repeatable_names[..],
336 ident.name,
337 None,
338 ) {
339 typo_repeatable = Some(VarTypoSuggestionRepeatable { span: ident.span, name });
340 } else if let Some(name) = rustc_span::edit_distance::find_best_match_for_name(
341 &non_repeatable_names[..],
342 ident.name,
343 None,
344 ) {
345 typo_unrepeatable = Some(VarTypoSuggestionUnrepeatable { span: ident.span });
346 if let Some(&orig_ident) = non_repeatables.iter().find(|n| n.symbol() == name) {
347 typo_unrepeatable_label = Some(VarTypoSuggestionUnrepeatableLabel {
348 span: orig_ident.ident().span,
349 });
350 }
351 } else {
352 if !repeatable_names.is_empty()
353 && let Some(msg) = listify(&repeatable_names, |s| ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("`${0}`", s))
})format!("`${s}`"))
354 {
355 var_no_typo = Some(VarNoTypo { span: ident.span, msg });
356 } else {
357 no_repeatable_var = Some(NoRepeatableVar { span: ident.span });
358 }
359 }
360 }
361 return Err(dcx.create_err(NoSyntaxVarsExprRepeat {
362 span: seq.span(),
363 typo_unrepeatable,
364 typo_repeatable,
365 typo_unrepeatable_label,
366 var_no_typo,
367 no_repeatable_var,
368 }));
369 }
370
371 LockstepIterSize::Contradiction(msg) => {
372 return Err(dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg }));
377 }
378
379 LockstepIterSize::Constraint(len, _) => {
380 let mbe::TokenTree::Sequence(sp, seq) = seq else { ::core::panicking::panic("internal error: entered unreachable code")unreachable!() };
383
384 if len == 0 {
386 if seq.kleene.op == KleeneOp::OneOrMore {
387 return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
391 }
392 } else {
393 tscx.repeats.push((0, len));
396
397 tscx.stack.push(Frame::new_sequence(seq_rep, seq.separator, seq.kleene.op));
401 }
402 }
403 }
404
405 Ok(())
406}
407
/// Transcribes a single metavariable reference (`$ident`).
///
/// If `ident` has no binding in the interpolations, the `$` and the
/// identifier are emitted literally (with hygiene marks applied). If it is
/// bound but still a sequence at the current repetition depth, that is an
/// error; otherwise the matched fragment is emitted via `transcribe_pnr`.
fn transcribe_metavar<'tx>(
    tscx: &mut TranscrCtx<'tx, '_>,
    mut sp: Span,
    mut original_ident: Ident,
) -> PResult<'tx, ()> {
    let dcx = tscx.psess.dcx();

    let ident = MacroRulesNormalizedIdent::new(original_ident);
    let Some(cur_matched) = lookup_cur_matched(ident, tscx.interp, &tscx.repeats) else {
        // Not a bound metavariable: emit `$` followed by the identifier.
        tscx.marker.mark_span(&mut sp);
        tscx.marker.mark_span(&mut original_ident.span);
        tscx.result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
        tscx.result.push(TokenTree::Token(Token::from_ast_ident(original_ident), Spacing::Alone));
        return Ok(());
    };

    // The variable is used outside enough repetition layers, so it is still
    // a sequence rather than a single match.
    let MatchedSingle(pnr) = cur_matched else {
        return Err(dcx.create_err(MacroVarStillRepeating { span: sp, ident }));
    };

    transcribe_pnr(tscx, sp, pnr)
}
449
450fn transcribe_pnr<'tx>(
451 tscx: &mut TranscrCtx<'tx, '_>,
452 mut sp: Span,
453 pnr: &ParseNtResult,
454) -> PResult<'tx, ()> {
455 let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
460 if stream.len() == 1 {
461 let tree = stream.iter().next().unwrap();
462 if let TokenTree::Delimited(_, _, delim, inner) = tree
463 && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim
464 && mv_kind == *mvk
465 {
466 stream = inner.clone();
467 }
468 }
469
470 tscx.marker.mark_span(&mut sp);
473 with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
474 TokenTree::Delimited(
477 DelimSpan::from_single(sp),
478 DelimSpacing::new(Spacing::Alone, Spacing::Alone),
479 Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)),
480 stream,
481 )
482 };
483
484 let tt = match pnr {
485 ParseNtResult::Tt(tt) => {
486 maybe_use_metavar_location(tscx.psess, &tscx.stack, sp, tt, &mut tscx.marker)
491 }
492 ParseNtResult::Ident(ident, is_raw) => {
493 tscx.marker.mark_span(&mut sp);
494 with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
495 let kind = token::NtIdent(*ident, *is_raw);
496 TokenTree::token_alone(kind, sp)
497 }
498 ParseNtResult::Lifetime(ident, is_raw) => {
499 tscx.marker.mark_span(&mut sp);
500 with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
501 let kind = token::NtLifetime(*ident, *is_raw);
502 TokenTree::token_alone(kind, sp)
503 }
504 ParseNtResult::Item(item) => {
505 mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
506 }
507 ParseNtResult::Block(block) => {
508 mk_delimited(block.span, MetaVarKind::Block, TokenStream::from_ast(block))
509 }
510 ParseNtResult::Stmt(stmt) => {
511 let stream = if let StmtKind::Empty = stmt.kind {
512 TokenStream::token_alone(token::Semi, stmt.span)
514 } else {
515 TokenStream::from_ast(stmt)
516 };
517 mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
518 }
519 ParseNtResult::Pat(pat, pat_kind) => {
520 mk_delimited(pat.span, MetaVarKind::Pat(*pat_kind), TokenStream::from_ast(pat))
521 }
522 ParseNtResult::Expr(expr, kind) => {
523 let (can_begin_literal_maybe_minus, can_begin_string_literal) = match &expr.kind {
524 ExprKind::Lit(_) => (true, true),
525 ExprKind::Unary(UnOp::Neg, e) if #[allow(non_exhaustive_omitted_patterns)] match &e.kind {
ExprKind::Lit(_) => true,
_ => false,
}matches!(&e.kind, ExprKind::Lit(_)) => {
526 (true, false)
527 }
528 _ => (false, false),
529 };
530 mk_delimited(
531 expr.span,
532 MetaVarKind::Expr {
533 kind: *kind,
534 can_begin_literal_maybe_minus,
535 can_begin_string_literal,
536 },
537 TokenStream::from_ast(expr),
538 )
539 }
540 ParseNtResult::Literal(lit) => {
541 mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
542 }
543 ParseNtResult::Ty(ty) => {
544 let is_path = #[allow(non_exhaustive_omitted_patterns)] match &ty.kind {
TyKind::Path(None, _path) => true,
_ => false,
}matches!(&ty.kind, TyKind::Path(None, _path));
545 mk_delimited(ty.span, MetaVarKind::Ty { is_path }, TokenStream::from_ast(ty))
546 }
547 ParseNtResult::Meta(attr_item) => {
548 let has_meta_form = attr_item.meta_kind().is_some();
549 mk_delimited(
550 attr_item.span(),
551 MetaVarKind::Meta { has_meta_form },
552 TokenStream::from_ast(attr_item),
553 )
554 }
555 ParseNtResult::Path(path) => {
556 mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
557 }
558 ParseNtResult::Vis(vis) => {
559 mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
560 }
561 ParseNtResult::Guard(guard) => {
562 let leading_if_span =
567 guard.span_with_leading_if.with_hi(guard.span_with_leading_if.lo() + BytePos(2));
568 let mut ts =
569 TokenStream::token_alone(token::Ident(kw::If, IdentIsRaw::No), leading_if_span);
570 ts.push_stream(TokenStream::from_ast(&guard.cond));
571
572 mk_delimited(guard.span_with_leading_if, MetaVarKind::Guard, ts)
573 }
574 };
575
576 tscx.result.push(tt);
577 Ok(())
578}
579
/// Transcribes a metavariable expression such as `${count(x)}`, `${index()}`,
/// `${len()}`, `${ignore(x)}`, or `${concat(..)}`, pushing the resulting
/// token (if any) onto the output.
fn transcribe_metavar_expr<'tx>(
    tscx: &mut TranscrCtx<'tx, '_>,
    dspan: DelimSpan,
    expr: &MetaVarExpr,
) -> PResult<'tx, ()> {
    let dcx = tscx.psess.dcx();
    let tt = match *expr {
        MetaVarExpr::Concat(ref elements) => metavar_expr_concat(tscx, dspan, elements)?,
        MetaVarExpr::Count(original_ident, depth) => {
            // `${count(ident, depth)}`: emit the repetition count of `ident`
            // at `depth` as an integer literal token.
            let matched = matched_from_ident(dcx, original_ident, tscx.interp)?;
            let count = count_repetitions(dcx, depth, matched, &tscx.repeats, &dspan)?;
            TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(count), None),
                tscx.visited_dspan(dspan),
            )
        }
        MetaVarExpr::Ignore(original_ident) => {
            // `${ignore(ident)}`: validate the variable exists but emit
            // nothing.
            let _ = matched_from_ident(dcx, original_ident, tscx.interp)?;
            return Ok(());
        }
        // `${index(depth)}`: the current iteration index of the repetition
        // `depth` levels out from the innermost one.
        MetaVarExpr::Index(depth) => match tscx.repeats.iter().nth_back(depth) {
            Some((index, _)) => TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(*index), None),
                tscx.visited_dspan(dspan),
            ),
            None => {
                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "index"));
            }
        },
        // `${len(depth)}`: the total length of that repetition.
        MetaVarExpr::Len(depth) => match tscx.repeats.iter().nth_back(depth) {
            Some((_, length)) => TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(*length), None),
                tscx.visited_dspan(dspan),
            ),
            None => {
                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "len"));
            }
        },
    };
    tscx.result.push(tt);
    Ok(())
}
624
/// Evaluates `${concat(..)}`: concatenates the textual value of each element
/// (literal identifiers, literals, or bound metavariables), NFC-normalizes the
/// result, and produces a single identifier token.
///
/// Errors if a variable element is unknown or still repeating, or if the
/// concatenated string is not a valid identifier.
fn metavar_expr_concat<'tx>(
    tscx: &mut TranscrCtx<'tx, '_>,
    dspan: DelimSpan,
    elements: &[MetaVarExprConcatElem],
) -> PResult<'tx, TokenTree> {
    let dcx = tscx.psess.dcx();
    let mut concatenated = String::new();
    for element in elements {
        let symbol = match element {
            MetaVarExprConcatElem::Ident(elem) => elem.name,
            MetaVarExprConcatElem::Literal(elem) => *elem,
            MetaVarExprConcatElem::Var(ident) => {
                // A `$var` element: resolve it at the current repetition
                // depth and extract a usable symbol from the match.
                let key = MacroRulesNormalizedIdent::new(*ident);
                match lookup_cur_matched(key, tscx.interp, &tscx.repeats) {
                    Some(NamedMatch::MatchedSingle(pnr)) => {
                        extract_symbol_from_pnr(dcx, pnr, ident.span)?
                    }
                    Some(NamedMatch::MatchedSeq(..)) => {
                        return Err(dcx.struct_span_err(
                            ident.span,
                            "`${concat(...)}` variable is still repeating at this depth",
                        ));
                    }
                    None => {
                        return Err(dcx.create_err(MveUnrecognizedVar { span: ident.span, key }));
                    }
                }
            }
        };
        concatenated.push_str(symbol.as_str());
    }
    // Normalize so the result compares like any other identifier.
    let symbol = nfc_normalize(&concatenated);
    let concatenated_span = tscx.visited_dspan(dspan);
    if !rustc_lexer::is_ident(symbol.as_str()) {
        return Err(dcx.create_err(ConcatInvalidIdent {
            span: concatenated_span,
            reason: InvalidIdentReason::new(symbol),
        }));
    }
    tscx.psess.symbol_gallery.insert(symbol, concatenated_span);

    Ok(TokenTree::Token(
        Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
        Spacing::Alone,
    ))
}
675
676fn maybe_use_metavar_location(
707 psess: &ParseSess,
708 stack: &[Frame<'_>],
709 mut metavar_span: Span,
710 orig_tt: &TokenTree,
711 marker: &mut Marker,
712) -> TokenTree {
713 let undelimited_seq = #[allow(non_exhaustive_omitted_patterns)] match stack.last() {
Some(Frame {
tts: [_],
kind: FrameKind::Sequence {
sep: None, kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
..
}, .. }) => true,
_ => false,
}matches!(
714 stack.last(),
715 Some(Frame {
716 tts: [_],
717 kind: FrameKind::Sequence {
718 sep: None,
719 kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
720 ..
721 },
722 ..
723 })
724 );
725 if undelimited_seq {
726 return orig_tt.clone();
728 }
729
730 marker.mark_span(&mut metavar_span);
731 let no_collision = match orig_tt {
732 TokenTree::Token(token, ..) => {
733 with_metavar_spans(|mspans| mspans.insert(token.span, metavar_span))
734 }
735 TokenTree::Delimited(dspan, ..) => with_metavar_spans(|mspans| {
736 mspans.insert(dspan.open, metavar_span)
737 && mspans.insert(dspan.close, metavar_span)
738 && mspans.insert(dspan.entire(), metavar_span)
739 }),
740 };
741 if no_collision || psess.source_map().is_imported(metavar_span) {
742 return orig_tt.clone();
743 }
744
745 match orig_tt {
748 TokenTree::Token(Token { kind, span }, spacing) => {
749 let span = metavar_span.with_ctxt(span.ctxt());
750 with_metavar_spans(|mspans| mspans.insert(span, metavar_span));
751 TokenTree::Token(Token { kind: *kind, span }, *spacing)
752 }
753 TokenTree::Delimited(dspan, dspacing, delimiter, tts) => {
754 let open = metavar_span.with_ctxt(dspan.open.ctxt());
755 let close = metavar_span.with_ctxt(dspan.close.ctxt());
756 with_metavar_spans(|mspans| {
757 mspans.insert(open, metavar_span) && mspans.insert(close, metavar_span)
758 });
759 let dspan = DelimSpan::from_pair(open, close);
760 TokenTree::Delimited(dspan, *dspacing, *delimiter, tts.clone())
761 }
762 }
763}
764
765fn lookup_cur_matched<'a>(
772 ident: MacroRulesNormalizedIdent,
773 interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
774 repeats: &[(usize, usize)],
775) -> Option<&'a NamedMatch> {
776 interpolations.get(&ident).map(|mut matched| {
777 for &(idx, _) in repeats {
778 match matched {
779 MatchedSingle(_) => break,
780 MatchedSeq(ads) => matched = ads.get(idx).unwrap(),
781 }
782 }
783
784 matched
785 })
786}
787
788#[derive(#[automatically_derived]
impl ::core::clone::Clone for LockstepIterSize {
#[inline]
fn clone(&self) -> LockstepIterSize {
match self {
LockstepIterSize::Unconstrained =>
LockstepIterSize::Unconstrained,
LockstepIterSize::Constraint(__self_0, __self_1) =>
LockstepIterSize::Constraint(::core::clone::Clone::clone(__self_0),
::core::clone::Clone::clone(__self_1)),
LockstepIterSize::Contradiction(__self_0) =>
LockstepIterSize::Contradiction(::core::clone::Clone::clone(__self_0)),
}
}
}Clone)]
793enum LockstepIterSize {
794 Unconstrained,
797
798 Constraint(usize, MacroRulesNormalizedIdent),
801
802 Contradiction(String),
804}
805
806impl LockstepIterSize {
807 fn with(self, other: LockstepIterSize) -> LockstepIterSize {
812 match self {
813 LockstepIterSize::Unconstrained => other,
814 LockstepIterSize::Contradiction(_) => self,
815 LockstepIterSize::Constraint(l_len, l_id) => match other {
816 LockstepIterSize::Unconstrained => self,
817 LockstepIterSize::Contradiction(_) => other,
818 LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
819 LockstepIterSize::Constraint(r_len, r_id) => {
820 let msg = ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("meta-variable `{0}` repeats {1} time{2}, but `{3}` repeats {4} time{5}",
l_id, l_len, if l_len == 1 { "" } else { "s" }, r_id, r_len,
if r_len == 1 { "" } else { "s" }))
})format!(
821 "meta-variable `{}` repeats {} time{}, but `{}` repeats {} time{}",
822 l_id,
823 l_len,
824 pluralize!(l_len),
825 r_id,
826 r_len,
827 pluralize!(r_len),
828 );
829 LockstepIterSize::Contradiction(msg)
830 }
831 },
832 }
833 }
834}
835
/// Computes how many times `tree` must repeat, by folding together the
/// repetition constraints of every metavariable it contains (at the current
/// repetition depth given by `repeats`).
fn lockstep_iter_size(
    tree: &mbe::TokenTree,
    interpolations: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    repeats: &[(usize, usize)],
) -> LockstepIterSize {
    use mbe::TokenTree;
    match tree {
        TokenTree::Delimited(.., delimited) => {
            delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size.with(lockstep_iter_size(tt, interpolations, repeats))
            })
        }
        TokenTree::Sequence(_, seq) => {
            seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size.with(lockstep_iter_size(tt, interpolations, repeats))
            })
        }
        TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl { name, .. } => {
            // A variable constrains the count only if it is still a sequence
            // at this depth; a single match (or unknown name) does not.
            let name = MacroRulesNormalizedIdent::new(*name);
            match lookup_cur_matched(name, interpolations, repeats) {
                Some(matched) => match matched {
                    MatchedSingle(_) => LockstepIterSize::Unconstrained,
                    MatchedSeq(ads) => LockstepIterSize::Constraint(ads.len(), name),
                },
                _ => LockstepIterSize::Unconstrained,
            }
        }
        TokenTree::MetaVarExpr(_, expr) => {
            // Each variable referenced by the expression contributes as if it
            // appeared directly as a metavariable.
            expr.for_each_metavar(LockstepIterSize::Unconstrained, |lis, ident| {
                lis.with(lockstep_iter_size(
                    &TokenTree::MetaVar(ident.span, *ident),
                    interpolations,
                    repeats,
                ))
            })
        }
        TokenTree::Token(..) => LockstepIterSize::Unconstrained,
    }
}
887
/// Implements `${count(ident, depth_user)}`: counts the matches of a
/// metavariable at the requested depth, relative to the current repetition
/// position (`repeats`).
///
/// Errors if `depth_user` exceeds the variable's remaining nesting depth, or
/// if `count` is used where the variable is no longer repeating.
fn count_repetitions<'dx>(
    dcx: DiagCtxtHandle<'dx>,
    depth_user: usize,
    mut matched: &NamedMatch,
    repeats: &[(usize, usize)],
    sp: &DelimSpan,
) -> PResult<'dx, usize> {
    // Sums the number of matches at `depth_max`, starting from `depth_curr`.
    fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> {
        match matched {
            MatchedSingle(_) => Ok(1),
            MatchedSeq(named_matches) => {
                if depth_curr == depth_max {
                    Ok(named_matches.len())
                } else {
                    named_matches.iter().map(|elem| count(depth_curr + 1, depth_max, elem)).sum()
                }
            }
        }
    }

    // Measures how deeply `matched` is nested, following first elements only.
    fn depth(counter: usize, matched: &NamedMatch) -> usize {
        match matched {
            MatchedSingle(_) => counter,
            MatchedSeq(named_matches) => {
                let rslt = counter + 1;
                if let Some(elem) = named_matches.first() { depth(rslt, elem) } else { rslt }
            }
        }
    }

    // Remaining depth after discounting the repetition layers already
    // entered; saturates to 0 rather than underflowing.
    let depth_max = depth(0, matched)
        .checked_sub(1)
        .and_then(|el| el.checked_sub(repeats.len()))
        .unwrap_or_default();
    if depth_user > depth_max {
        return Err(out_of_bounds_err(dcx, depth_max + 1, sp.entire(), "count"));
    }

    // Descend to the current repetition position before counting.
    for &(idx, _) in repeats {
        if let MatchedSeq(ads) = matched {
            matched = &ads[idx];
        }
    }

    if let MatchedSingle(_) = matched {
        return Err(dcx.create_err(CountRepetitionMisplaced { span: sp.entire() }));
    }

    count(depth_user, depth_max, matched)
}
956
957fn matched_from_ident<'ctx, 'interp, 'rslt>(
959 dcx: DiagCtxtHandle<'ctx>,
960 ident: Ident,
961 interp: &'interp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
962) -> PResult<'ctx, &'rslt NamedMatch>
963where
964 'interp: 'rslt,
965{
966 let span = ident.span;
967 let key = MacroRulesNormalizedIdent::new(ident);
968 interp.get(&key).ok_or_else(|| dcx.create_err(MveUnrecognizedVar { span, key }))
969}
970
971fn out_of_bounds_err<'a>(dcx: DiagCtxtHandle<'a>, max: usize, span: Span, ty: &str) -> Diag<'a> {
974 let msg = if max == 0 {
975 ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("meta-variable expression `{0}` with depth parameter must be called inside of a macro repetition",
ty))
})format!(
976 "meta-variable expression `{ty}` with depth parameter \
977 must be called inside of a macro repetition"
978 )
979 } else {
980 ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("depth parameter of meta-variable expression `{0}` must be less than {1}",
ty, max))
})format!(
981 "depth parameter of meta-variable expression `{ty}` \
982 must be less than {max}"
983 )
984 };
985 dcx.struct_span_err(span, msg)
986}
987
/// Extracts the `Symbol` that a `${concat(..)}` element contributes from a
/// matched fragment: a non-raw identifier, an unsuffixed string literal, or
/// an unsuffixed integer literal. Everything else is an error.
fn extract_symbol_from_pnr<'a>(
    dcx: DiagCtxtHandle<'a>,
    pnr: &ParseNtResult,
    span_err: Span,
) -> PResult<'a, Symbol> {
    match pnr {
        // `$x:ident` — raw identifiers are rejected.
        ParseNtResult::Ident(nt_ident, is_raw) => {
            if let IdentIsRaw::Yes = is_raw {
                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
            } else {
                Ok(nt_ident.name)
            }
        }
        // `$x:tt` holding a plain identifier token.
        ParseNtResult::Tt(TokenTree::Token(
            Token { kind: TokenKind::Ident(symbol, is_raw), .. },
            _,
        )) => {
            if let IdentIsRaw::Yes = is_raw {
                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
            } else {
                Ok(*symbol)
            }
        }
        // `$x:tt` holding an unsuffixed string literal token.
        ParseNtResult::Tt(TokenTree::Token(
            Token {
                kind: TokenKind::Literal(Lit { kind: LitKind::Str, symbol, suffix: None }),
                ..
            },
            _,
        )) => Ok(*symbol),
        // `$x:literal` that is an unsuffixed string literal.
        ParseNtResult::Literal(expr)
            if let ExprKind::Lit(Lit { kind: LitKind::Str, symbol, suffix: None }) = &expr.kind =>
        {
            Ok(*symbol)
        }
        // `$x:literal` that is an integer literal: floats (e.g. `1e3`) and
        // suffixed integers are rejected.
        ParseNtResult::Literal(expr)
            if let ExprKind::Lit(lit @ Lit { kind: LitKind::Integer, symbol, suffix }) =
                &expr.kind =>
        {
            if lit.is_semantic_float() {
                Err(dcx
                    .struct_err("floats are not supported as metavariables of `${concat(..)}`")
                    .with_span(span_err))
            } else if suffix.is_none() {
                Ok(*symbol)
            } else {
                Err(dcx
                    .struct_err("integer metavariables of `${concat(..)}` must not be suffixed")
                    .with_span(span_err))
            }
        }
        _ => Err(dcx
            .struct_err(
                "metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`",
            )
            .with_note("currently only string and integer literals are supported")
            .with_span(span_err)),
    }
}