//! This is an NFA-based parser, which calls out to the main Rust parser for named non-terminals
//! (which it commits to fully when it hits one in a grammar). There's a set of current NFA threads
//! and a set of next ones. Instead of NTs, we have a special case for Kleene star. The big-O, in
//! pathological cases, is worse than traditional use of NFA or Earley parsing, but it's an easier
//! fit for Macro-by-Example-style rules.
//!
//! (In order to prevent the pathological case, we'd need to lazily construct the resulting
//! `NamedMatch`es at the very end. It'd be a pain, and require more memory to keep around old
//! matcher positions, but it would also save overhead.)
//!
//! We don't say this parser uses the Earley algorithm, because that would be unnecessarily
//! inaccurate. The macro parser restricts itself to the features of finite state automata. Earley
//! parsers can be described as an extension of NFAs with completion rules, prediction rules, and
//! recursion.
//!
//! Quick intro to how the parser works:
//!
//! A "matcher position" (a.k.a. "position" or "mp") is a dot in the middle of a matcher, usually
//! written as a `·`. For example `· a $( a )* a b` is one, as is `a $( · a )* a b`.
//!
//! The parser walks through the input a token at a time, maintaining a list
//! of threads consistent with the current position in the input string: `cur_mps`.
//!
//! As it processes them, it fills up `eof_mps` with threads that would be valid if
//! the macro invocation is now over, `bb_mps` with threads that are waiting on
//! a Rust non-terminal like `$e:expr`, and `next_mps` with threads that are waiting
//! on a particular token. Most of the logic concerns moving the · through the
//! repetitions indicated by Kleene stars. The rules for moving the · without
//! consuming any input are called epsilon transitions. It only advances or calls
//! out to the real Rust parser when no `cur_mps` threads remain.
//!
//! Example:
//!
//! ```text, ignore
//! Start parsing a a a a b against [· a $( a )* a b].
//!
//! Remaining input: a a a a b
//! next: [· a $( a )* a b]
//!
//! - - - Advance over an a. - - -
//!
//! Remaining input: a a a b
//! cur: [a · $( a )* a b]
//! Descend/Skip (first position).
//! next: [a $( · a )* a b] [a $( a )* · a b].
//!
//! - - - Advance over an a. - - -
//!
//! Remaining input: a a b
//! cur: [a $( a · )* a b] [a $( a )* a · b]
//! Follow epsilon transition: Finish/Repeat (first position)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
//! - - - Advance over an a. - - - (this looks exactly like the last step)
//!
//! Remaining input: a b
//! cur: [a $( a · )* a b] [a $( a )* a · b]
//! Follow epsilon transition: Finish/Repeat (first position)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
//! - - - Advance over an a. - - - (this looks exactly like the last step)
//!
//! Remaining input: b
//! cur: [a $( a · )* a b] [a $( a )* a · b]
//! Follow epsilon transition: Finish/Repeat (first position)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
//! - - - Advance over a b. - - -
//!
//! Remaining input: ''
//! eof: [a $( a )* a b ·]
//! ```

pub(crate) use NamedMatch::*;
pub(crate) use ParseResult::*;

use crate::mbe::{macro_rules::Tracker, KleeneOp, TokenTree};

use rustc_ast::token::{self, DocComment, Nonterminal, NonterminalKind, Token};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use rustc_errors::ErrorGuaranteed;
use rustc_lint_defs::pluralize;
use rustc_parse::parser::{NtOrTt, Parser};
use rustc_span::symbol::Ident;
use rustc_span::symbol::MacroRulesNormalizedIdent;
use rustc_span::Span;
use std::borrow::Cow;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::fmt::Display;
use std::rc::Rc;

/// A unit within a matcher that a `MatcherPos` can refer to. Similar to (and derived from)
/// `mbe::TokenTree`, but designed specifically for fast and easy traversal during matching.
/// Notable differences from `mbe::TokenTree`:
/// - It is non-recursive, i.e. there is no nesting.
/// - The end pieces of each sequence (the separator, if present, and the Kleene op) are
///   represented explicitly, as is the very end of the matcher.
///
/// This means a matcher can be represented by `&[MatcherLoc]`, and traversal mostly involves
/// simply incrementing the current matcher position index by one.
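///
/// As an illustrative sketch (worked out by hand from `compute_locs`, with some fields omitted),
/// the matcher `$( $x:ident ),+ ; $y:expr` would compile to roughly this `MatcherLoc` slice:
///
/// ```text, ignore
/// 0: Sequence { op: OneOrMore, num_metavar_decls: 1, idx_first_after: 4, next_metavar: 0 }
/// 1: MetaVarDecl { bind: x, kind: ident, next_metavar: 0, seq_depth: 1 }
/// 2: SequenceSep { separator: `,` }
/// 3: SequenceKleeneOpAfterSep { idx_first: 1 }
/// 4: Token { token: `;` }
/// 5: MetaVarDecl { bind: y, kind: expr, next_metavar: 1, seq_depth: 0 }
/// 6: Eof
/// ```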
#[derive(Debug, PartialEq, Clone)]
pub(crate) enum MatcherLoc {
    Token {
        token: Token,
    },
    Delimited,
    Sequence {
        op: KleeneOp,
        num_metavar_decls: usize,
        idx_first_after: usize,
        next_metavar: usize,
        seq_depth: usize,
    },
    SequenceKleeneOpNoSep {
        op: KleeneOp,
        idx_first: usize,
    },
    SequenceSep {
        separator: Token,
    },
    SequenceKleeneOpAfterSep {
        idx_first: usize,
    },
    MetaVarDecl {
        span: Span,
        bind: Ident,
        kind: Option<NonterminalKind>,
        next_metavar: usize,
        seq_depth: usize,
    },
    Eof,
}

impl MatcherLoc {
    pub(super) fn span(&self) -> Option<Span> {
        match self {
            MatcherLoc::Token { token } => Some(token.span),
            MatcherLoc::Delimited => None,
            MatcherLoc::Sequence { .. } => None,
            MatcherLoc::SequenceKleeneOpNoSep { .. } => None,
            MatcherLoc::SequenceSep { .. } => None,
            MatcherLoc::SequenceKleeneOpAfterSep { .. } => None,
            MatcherLoc::MetaVarDecl { span, .. } => Some(*span),
            MatcherLoc::Eof => None,
        }
    }
}

impl Display for MatcherLoc {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            MatcherLoc::Token { token } | MatcherLoc::SequenceSep { separator: token } => {
                write!(f, "`{}`", pprust::token_to_string(token))
            }
            MatcherLoc::MetaVarDecl { bind, kind, .. } => {
                write!(f, "meta-variable `${bind}")?;
                if let Some(kind) = kind {
                    write!(f, ":{}", kind)?;
                }
                write!(f, "`")?;
                Ok(())
            }
            MatcherLoc::Eof => f.write_str("end of macro"),

            // These are not printed in the diagnostic
            MatcherLoc::Delimited => f.write_str("delimiter"),
            MatcherLoc::Sequence { .. } => f.write_str("sequence start"),
            MatcherLoc::SequenceKleeneOpNoSep { .. } => f.write_str("sequence end"),
            MatcherLoc::SequenceKleeneOpAfterSep { .. } => f.write_str("sequence end"),
        }
    }
}

pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
    fn inner(
        tts: &[TokenTree],
        locs: &mut Vec<MatcherLoc>,
        next_metavar: &mut usize,
        seq_depth: usize,
    ) {
        for tt in tts {
            match tt {
                TokenTree::Token(token) => {
                    locs.push(MatcherLoc::Token { token: token.clone() });
                }
                TokenTree::Delimited(span, delimited) => {
                    let open_token = Token::new(token::OpenDelim(delimited.delim), span.open);
                    let close_token = Token::new(token::CloseDelim(delimited.delim), span.close);

                    locs.push(MatcherLoc::Delimited);
                    locs.push(MatcherLoc::Token { token: open_token });
                    inner(&delimited.tts, locs, next_metavar, seq_depth);
                    locs.push(MatcherLoc::Token { token: close_token });
                }
                TokenTree::Sequence(_, seq) => {
                    // We can't determine `idx_first_after` and construct the final
                    // `MatcherLoc::Sequence` until after `inner()` is called and the sequence end
                    // pieces are processed. So we push a dummy value (`Eof` is cheapest to
                    // construct) now, and overwrite it with the proper value below.
                    let dummy = MatcherLoc::Eof;
                    locs.push(dummy);

                    let next_metavar_orig = *next_metavar;
                    let op = seq.kleene.op;
                    let idx_first = locs.len();
                    let idx_seq = idx_first - 1;
                    inner(&seq.tts, locs, next_metavar, seq_depth + 1);

                    if let Some(separator) = &seq.separator {
                        locs.push(MatcherLoc::SequenceSep { separator: separator.clone() });
                        locs.push(MatcherLoc::SequenceKleeneOpAfterSep { idx_first });
                    } else {
                        locs.push(MatcherLoc::SequenceKleeneOpNoSep { op, idx_first });
                    }

                    // Overwrite the dummy value pushed above with the proper value.
                    locs[idx_seq] = MatcherLoc::Sequence {
                        op,
                        num_metavar_decls: seq.num_captures,
                        idx_first_after: locs.len(),
                        next_metavar: next_metavar_orig,
                        seq_depth,
                    };
                }
                &TokenTree::MetaVarDecl(span, bind, kind) => {
                    locs.push(MatcherLoc::MetaVarDecl {
                        span,
                        bind,
                        kind,
                        next_metavar: *next_metavar,
                        seq_depth,
                    });
                    *next_metavar += 1;
                }
                TokenTree::MetaVar(..) | TokenTree::MetaVarExpr(..) => unreachable!(),
            }
        }
    }

    let mut locs = vec![];
    let mut next_metavar = 0;
    inner(matcher, &mut locs, &mut next_metavar, /* seq_depth */ 0);

    // A final entry is needed for eof.
    locs.push(MatcherLoc::Eof);

    locs
}

/// A single matcher position, representing the state of matching.
#[derive(Debug)]
struct MatcherPos {
    /// The index into `TtParser::locs`, which represents the "dot".
    idx: usize,

    /// The matches made against metavar decls so far. On a successful match, this vector ends up
    /// with one element per metavar decl in the matcher. Each element records token trees matched
    /// against the relevant metavar by the black box parser. An element will be a `MatchedSeq` if
    /// the corresponding metavar decl is within a sequence.
    ///
    /// It is critical to performance that this is an `Rc`, because it gets cloned frequently when
    /// processing sequences, mostly for sequence-ending possibilities that must be tried but end
    /// up failing.
    matches: Rc<Vec<NamedMatch>>,
}

// This type is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(MatcherPos, 16);

impl MatcherPos {
    /// Adds `m` as a named match for the `metavar_idx`-th metavar. There are only two call sites,
    /// and both are hot enough to be always worth inlining.
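    ///
    /// An illustrative sketch (not taken from a real trace) of how nested sequences are handled:
    /// when matching `$( $( $x:ident )* )*`, a new match `m` for `$x` (metavar index 0) is pushed
    /// at `seq_depth` 2, i.e. onto the innermost `MatchedSeq`:
    ///
    /// ```text, ignore
    /// matches before: [MatchedSeq([MatchedSeq([a])])]
    /// push_match(0, 2, m)
    /// matches after:  [MatchedSeq([MatchedSeq([a, m])])]
    /// ```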
    #[inline(always)]
    fn push_match(&mut self, metavar_idx: usize, seq_depth: usize, m: NamedMatch) {
        let matches = Rc::make_mut(&mut self.matches);
        match seq_depth {
            0 => {
                // We are not within a sequence. Just append `m`.
                assert_eq!(metavar_idx, matches.len());
                matches.push(m);
            }
            _ => {
                // We are within a sequence. Find the final `MatchedSeq` at the appropriate depth
                // and append `m` to its vector.
                let mut curr = &mut matches[metavar_idx];
                for _ in 0..seq_depth - 1 {
                    match curr {
                        MatchedSeq(seq) => curr = seq.last_mut().unwrap(),
                        _ => unreachable!(),
                    }
                }
                match curr {
                    MatchedSeq(seq) => seq.push(m),
                    _ => unreachable!(),
                }
            }
        }
    }
}

enum EofMatcherPositions {
    None,
    One(MatcherPos),
    Multiple,
}

/// Represents the possible results of an attempted parse.
pub(crate) enum ParseResult<T, F> {
    /// Parsed successfully.
    Success(T),
    /// Arm failed to match. The `F` value is built by the `Tracker` from the offending token
    /// (`token::Eof` indicates an unexpected end of macro invocation; any other token means no
    /// rule expected it) and that token's approximate position in the input token stream.
    Failure(F),
    /// Fatal error (malformed macro?). Abort compilation.
    Error(rustc_span::Span, String),
    ErrorReported(ErrorGuaranteed),
}

/// A `ParseResult` where the `Success` variant contains a mapping of
/// `MacroRulesNormalizedIdent`s to `NamedMatch`es. This represents the mapping
/// of metavars to the token trees they bind to.
pub(crate) type NamedParseResult<F> = ParseResult<NamedMatches, F>;

/// Contains a mapping of `MacroRulesNormalizedIdent`s to `NamedMatch`es.
/// This represents the mapping of metavars to the token trees they bind to.
pub(crate) type NamedMatches = FxHashMap<MacroRulesNormalizedIdent, NamedMatch>;

/// Count how many metavar declarations are in `matcher`.
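/// For example, `$( $x:ident ),* ; $y:expr` contains two: `$x` (counted via the enclosing
/// sequence's `num_captures`) and `$y`.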
pub(super) fn count_metavar_decls(matcher: &[TokenTree]) -> usize {
    matcher
        .iter()
        .map(|tt| match tt {
            TokenTree::MetaVarDecl(..) => 1,
            TokenTree::Sequence(_, seq) => seq.num_captures,
            TokenTree::Delimited(_, delim) => count_metavar_decls(&delim.tts),
            TokenTree::Token(..) => 0,
            TokenTree::MetaVar(..) | TokenTree::MetaVarExpr(..) => unreachable!(),
        })
        .sum()
}

/// `NamedMatch` is a pattern-match result for a single metavar. All
/// `MatchedNonterminal`s in the `NamedMatch` have the same non-terminal type
/// (expr, item, etc).
///
/// The in-memory structure of a particular `NamedMatch` represents the match
/// that occurred when a particular subset of a matcher was applied to a
/// particular token tree.
///
/// The width of each `MatchedSeq` in the `NamedMatch`, and the identity of
/// the `MatchedNonterminal`s, will depend on the token tree it was applied
/// to: each `MatchedSeq` corresponds to a single repetition in the originating
/// token tree. The depth of the `NamedMatch` structure will therefore depend
/// only on the nesting depth of repetitions in the originating token tree it
/// was derived from.
///
/// In layperson's terms: `NamedMatch` will form a tree representing nested matches of a particular
/// meta variable. For example, if we are matching the following macro against the following
/// invocation...
///
/// ```rust
/// macro_rules! foo {
///     ($($($x:ident),+);+) => {}
/// }
///
/// foo!(a, b, c, d; a, b, c, d, e);
/// ```
///
/// Then, the tree will have the following shape:
///
/// ```ignore (private-internal)
/// # use NamedMatch::*;
/// MatchedSeq([
///     MatchedSeq([
///         MatchedNonterminal(a),
///         MatchedNonterminal(b),
///         MatchedNonterminal(c),
///         MatchedNonterminal(d),
///     ]),
///     MatchedSeq([
///         MatchedNonterminal(a),
///         MatchedNonterminal(b),
///         MatchedNonterminal(c),
///         MatchedNonterminal(d),
///         MatchedNonterminal(e),
///     ])
/// ])
/// ```
#[derive(Debug, Clone)]
pub(crate) enum NamedMatch {
    MatchedSeq(Vec<NamedMatch>),

    // A metavar match of type `tt`.
    MatchedTokenTree(rustc_ast::tokenstream::TokenTree),

    // A metavar match of any type other than `tt`.
    MatchedNonterminal(Lrc<Nonterminal>),
}

/// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison).
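/// For example, two `foo` identifiers coming from different macro expansions (and hence with
/// different syntax contexts) compare equal here, whereas `foo` and `r#foo` do not.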
fn token_name_eq(t1: &Token, t2: &Token) -> bool {
    if let (Some((ident1, is_raw1)), Some((ident2, is_raw2))) = (t1.ident(), t2.ident()) {
        ident1.name == ident2.name && is_raw1 == is_raw2
    } else if let (Some(ident1), Some(ident2)) = (t1.lifetime(), t2.lifetime()) {
        ident1.name == ident2.name
    } else {
        t1.kind == t2.kind
    }
}

// Note: the vectors could be created and dropped within `parse_tt`, but to avoid excess
// allocations we have a single vector for each kind that is cleared and reused repeatedly.
pub struct TtParser {
    macro_name: Ident,

    /// The set of current mps to be processed. This should be empty by the end of a successful
    /// execution of `parse_tt_inner`.
    cur_mps: Vec<MatcherPos>,

    /// The set of newly generated mps. These are used to replenish `cur_mps` in the function
    /// `parse_tt`.
    next_mps: Vec<MatcherPos>,

    /// The set of mps that are waiting for the black-box parser.
    bb_mps: Vec<MatcherPos>,

    /// Pre-allocate an empty match array, so it can be cloned cheaply for macros with many rules
    /// that have no metavars.
    empty_matches: Rc<Vec<NamedMatch>>,
}

impl TtParser {
    pub(super) fn new(macro_name: Ident) -> TtParser {
        TtParser {
            macro_name,
            cur_mps: vec![],
            next_mps: vec![],
            bb_mps: vec![],
            empty_matches: Rc::new(vec![]),
        }
    }

    pub(super) fn has_no_remaining_items_for_step(&self) -> bool {
        self.cur_mps.is_empty()
    }

    /// Process the matcher positions of `cur_mps` until it is empty. In the process, this will
    /// produce more mps in `next_mps` and `bb_mps`.
    ///
    /// # Returns
    ///
    /// `Some(result)` if everything is finished, `None` otherwise. Note that matches are kept
    /// track of through the mps generated.
    fn parse_tt_inner<'matcher, T: Tracker<'matcher>>(
        &mut self,
        matcher: &'matcher [MatcherLoc],
        token: &Token,
        approx_position: usize,
        track: &mut T,
    ) -> Option<NamedParseResult<T::Failure>> {
        // Matcher positions that would be valid if the macro invocation was over now. Only
        // modified if `token == Eof`.
        let mut eof_mps = EofMatcherPositions::None;

        while let Some(mut mp) = self.cur_mps.pop() {
            let matcher_loc = &matcher[mp.idx];
            track.before_match_loc(self, matcher_loc);

            match matcher_loc {
                MatcherLoc::Token { token: t } => {
                    // If it's a doc comment, we just ignore it and move on to the next tt in the
                    // matcher. This is a bug, but #95267 showed that existing programs rely on
                    // this behaviour, and changing it would require some care and a transition
                    // period.
                    //
                    // If the token matches, we can just advance the parser.
                    //
                    // Otherwise, this match has failed, there is nothing to do, and hopefully
                    // another mp in `cur_mps` will match.
                    if matches!(t, Token { kind: DocComment(..), .. }) {
                        mp.idx += 1;
                        self.cur_mps.push(mp);
                    } else if token_name_eq(&t, token) {
                        mp.idx += 1;
                        self.next_mps.push(mp);
                    }
                }
                MatcherLoc::Delimited => {
                    // Entering the delimiter is trivial.
                    mp.idx += 1;
                    self.cur_mps.push(mp);
                }
                &MatcherLoc::Sequence {
                    op,
                    num_metavar_decls,
                    idx_first_after,
                    next_metavar,
                    seq_depth,
                } => {
                    // Install an empty vec for each metavar within the sequence.
                    for metavar_idx in next_metavar..next_metavar + num_metavar_decls {
                        mp.push_match(metavar_idx, seq_depth, MatchedSeq(vec![]));
                    }

                    if matches!(op, KleeneOp::ZeroOrMore | KleeneOp::ZeroOrOne) {
                        // Try zero matches of this sequence, by skipping over it.
                        self.cur_mps.push(MatcherPos {
                            idx: idx_first_after,
                            matches: Rc::clone(&mp.matches),
                        });
                    }

                    // Try one or more matches of this sequence, by entering it.
                    mp.idx += 1;
                    self.cur_mps.push(mp);
                }
                &MatcherLoc::SequenceKleeneOpNoSep { op, idx_first } => {
                    // We are past the end of a sequence with no separator. Try ending the
                    // sequence. If that's not possible, `ending_mp` will fail quietly when it is
                    // processed next time around the loop.
                    let ending_mp = MatcherPos {
                        idx: mp.idx + 1, // +1 skips the Kleene op
                        matches: Rc::clone(&mp.matches),
                    };
                    self.cur_mps.push(ending_mp);

                    if op != KleeneOp::ZeroOrOne {
                        // Try another repetition.
                        mp.idx = idx_first;
                        self.cur_mps.push(mp);
                    }
                }
                MatcherLoc::SequenceSep { separator } => {
                    // We are past the end of a sequence with a separator but we haven't seen the
                    // separator yet. Try ending the sequence. If that's not possible, `ending_mp`
                    // will fail quietly when it is processed next time around the loop.
                    let ending_mp = MatcherPos {
                        idx: mp.idx + 2, // +2 skips the separator and the Kleene op
                        matches: Rc::clone(&mp.matches),
                    };
                    self.cur_mps.push(ending_mp);

                    if token_name_eq(token, separator) {
                        // The separator matches the current token. Advance past it.
                        mp.idx += 1;
                        self.next_mps.push(mp);
                    }
                }
                &MatcherLoc::SequenceKleeneOpAfterSep { idx_first } => {
                    // We are past the sequence separator. This can't be a `?` Kleene op, because
                    // they don't permit separators. Try another repetition.
                    mp.idx = idx_first;
                    self.cur_mps.push(mp);
                }
                &MatcherLoc::MetaVarDecl { span, kind, .. } => {
                    // Built-in nonterminals never start with these tokens, so we can eliminate
                    // them from consideration. We use the span of the metavariable declaration
                    // to determine any edition-specific matching behavior for non-terminals.
                    if let Some(kind) = kind {
                        if Parser::nonterminal_may_begin_with(kind, token) {
                            self.bb_mps.push(mp);
                        }
                    } else {
                        // E.g. `$e` instead of `$e:expr`, reported as a hard error if actually used.
                        // Both this check and the one in `nameize` are necessary, surprisingly.
                        return Some(Error(span, "missing fragment specifier".to_string()));
                    }
                }
                MatcherLoc::Eof => {
                    // We are past the matcher's end, and not in a sequence. Try to end things.
                    debug_assert_eq!(mp.idx, matcher.len() - 1);
                    if *token == token::Eof {
                        eof_mps = match eof_mps {
                            EofMatcherPositions::None => EofMatcherPositions::One(mp),
                            EofMatcherPositions::One(_) | EofMatcherPositions::Multiple => {
                                EofMatcherPositions::Multiple
                            }
                        }
                    }
                }
            }
        }

        // If we reached the end of input, check that there is EXACTLY ONE possible matcher.
        // Otherwise, either the parse is ambiguous (which is an error) or there is a syntax error.
        if *token == token::Eof {
            Some(match eof_mps {
                EofMatcherPositions::One(mut eof_mp) => {
                    // Need to take ownership of the matches from within the `Rc`.
                    Rc::make_mut(&mut eof_mp.matches);
                    let matches = Rc::try_unwrap(eof_mp.matches).unwrap().into_iter();
                    self.nameize(matcher, matches)
                }
                EofMatcherPositions::Multiple => {
                    Error(token.span, "ambiguity: multiple successful parses".to_string())
                }
                EofMatcherPositions::None => Failure(T::build_failure(
                    Token::new(
                        token::Eof,
                        if token.span.is_dummy() { token.span } else { token.span.shrink_to_hi() },
                    ),
                    approx_position,
                    "missing tokens in macro arguments",
                )),
            })
        } else {
            None
        }
    }

    /// Match the token stream from `parser` against `matcher`.
    pub(super) fn parse_tt<'matcher, T: Tracker<'matcher>>(
        &mut self,
        parser: &mut Cow<'_, Parser<'_>>,
        matcher: &'matcher [MatcherLoc],
        track: &mut T,
    ) -> NamedParseResult<T::Failure> {
        // A queue of possible matcher positions. We initialize it with the matcher position in
        // which the "dot" is before the first token of the first token tree in `matcher`.
        // `parse_tt_inner` then processes all of these possible matcher positions and produces
        // possible next positions into `next_mps`. After some post-processing, the contents of
        // `next_mps` replenish `cur_mps` and we start over again.
        self.cur_mps.clear();
        self.cur_mps.push(MatcherPos { idx: 0, matches: self.empty_matches.clone() });

        loop {
            self.next_mps.clear();
            self.bb_mps.clear();

            // Process `cur_mps` until either we have finished the input or we need to get some
            // parsing from the black-box parser done.
            let res = self.parse_tt_inner(
                matcher,
                &parser.token,
                parser.approx_token_stream_pos(),
                track,
            );
            if let Some(res) = res {
                return res;
            }

            // `parse_tt_inner` handled all of `cur_mps`, so it's empty.
            assert!(self.cur_mps.is_empty());

            // Error messages here could be improved with links to original rules.
            match (self.next_mps.len(), self.bb_mps.len()) {
                (0, 0) => {
                    // There are no possible next positions AND we aren't waiting for the black-box
                    // parser: syntax error.
                    return Failure(T::build_failure(
                        parser.token.clone(),
                        parser.approx_token_stream_pos(),
                        "no rules expected this token in macro call",
                    ));
                }

                (_, 0) => {
                    // Dump all possible `next_mps` into `cur_mps` for the next iteration. Then
                    // process the next token.
                    self.cur_mps.append(&mut self.next_mps);
                    parser.to_mut().bump();
                }

                (0, 1) => {
                    // We need to call the black-box parser to get some nonterminal.
                    let mut mp = self.bb_mps.pop().unwrap();
                    let loc = &matcher[mp.idx];
                    if let &MatcherLoc::MetaVarDecl {
                        span,
                        kind: Some(kind),
                        next_metavar,
                        seq_depth,
                        ..
                    } = loc
                    {
                        // We use the span of the metavariable declaration to determine any
                        // edition-specific matching behavior for non-terminals.
                        let nt = match parser.to_mut().parse_nonterminal(kind) {
                            Err(mut err) => {
                                let guarantee = err.span_label(
                                    span,
                                    format!(
                                        "while parsing argument for this `{kind}` macro fragment"
                                    ),
                                )
                                .emit();
                                return ErrorReported(guarantee);
                            }
                            Ok(nt) => nt,
                        };
                        let m = match nt {
                            NtOrTt::Nt(nt) => MatchedNonterminal(Lrc::new(nt)),
                            NtOrTt::Tt(tt) => MatchedTokenTree(tt),
                        };
                        mp.push_match(next_metavar, seq_depth, m);
                        mp.idx += 1;
                    } else {
                        unreachable!()
                    }
                    self.cur_mps.push(mp);
                }

                (_, _) => {
                    // Too many possibilities!
                    return self.ambiguity_error(matcher, parser.token.span);
                }
            }

            assert!(!self.cur_mps.is_empty());
        }
    }

    fn ambiguity_error<F>(
        &self,
        matcher: &[MatcherLoc],
        token_span: rustc_span::Span,
    ) -> NamedParseResult<F> {
        let nts = self
            .bb_mps
            .iter()
            .map(|mp| match &matcher[mp.idx] {
                MatcherLoc::MetaVarDecl { bind, kind: Some(kind), .. } => {
                    format!("{} ('{}')", kind, bind)
                }
                _ => unreachable!(),
            })
            .collect::<Vec<String>>()
            .join(" or ");

        Error(
            token_span,
            format!(
                "local ambiguity when calling macro `{}`: multiple parsing options: {}",
                self.macro_name,
                match self.next_mps.len() {
                    0 => format!("built-in NTs {}.", nts),
                    n => format!("built-in NTs {} or {n} other option{s}.", nts, s = pluralize!(n)),
                }
            ),
        )
    }

    fn nameize<I: Iterator<Item = NamedMatch>, F>(
        &self,
        matcher: &[MatcherLoc],
        mut res: I,
    ) -> NamedParseResult<F> {
        // Check that each metavar has _exactly one_ binding. If so, insert the binding into the
        // `NamedParseResult`. Otherwise, it's an error.
        let mut ret_val = FxHashMap::default();
        for loc in matcher {
            if let &MatcherLoc::MetaVarDecl { span, bind, kind, .. } = loc {
                if kind.is_some() {
                    match ret_val.entry(MacroRulesNormalizedIdent::new(bind)) {
                        Vacant(spot) => spot.insert(res.next().unwrap()),
                        Occupied(..) => {
                            return Error(span, format!("duplicated bind name: {}", bind));
                        }
                    };
                } else {
                    // E.g. `$e` instead of `$e:expr`, reported as a hard error if actually used.
                    // Both this check and the one in `parse_tt_inner` are necessary, surprisingly.
                    return Error(span, "missing fragment specifier".to_string());
                }
            }
        }
        Success(ret_val)
    }
}