#![allow(
    clippy::assertions_on_result_states,
    clippy::items_after_statements,
    clippy::non_ascii_literal
)]

use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
use std::iter;
use std::panic;
use std::str::{self, FromStr};

11 
#[test]
fn idents() {
    // Ordinary identifiers, keywords, and the underscore are all accepted
    // by `Ident::new` and print back verbatim.
    for &(name, expected) in &[("String", "String"), ("fn", "fn"), ("_", "_")] {
        assert_eq!(Ident::new(name, Span::call_site()).to_string(), expected);
    }
}

#[test]
fn raw_idents() {
    // `Ident::new_raw` renders with the `r#` prefix attached.
    for &(name, expected) in &[("String", "r#String"), ("fn", "r#fn")] {
        assert_eq!(Ident::new_raw(name, Span::call_site()).to_string(), expected);
    }
}

#[test]
#[should_panic(expected = "`r#_` cannot be a raw identifier")]
fn ident_raw_underscore() {
    // The wildcard `_` is a pattern, not an identifier; raw-prefixing it
    // is rejected with a panic.
    let _ = Ident::new_raw("_", Span::call_site());
}

#[test]
#[should_panic(expected = "`r#super` cannot be a raw identifier")]
fn ident_raw_reserved() {
    // Path keywords such as `super` may never be used as raw identifiers.
    let _ = Ident::new_raw("super", Span::call_site());
}

#[test]
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
fn ident_empty() {
    // The empty string is not a legal identifier.
    let _ = Ident::new("", Span::call_site());
}

#[test]
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
fn ident_number() {
    // A numeric token must be constructed through `Literal`, not `Ident`.
    let _ = Ident::new("255", Span::call_site());
}

#[test]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn ident_invalid() {
    // `#` is not a valid identifier character.
    let _ = Ident::new("a#", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_empty() {
    // `r#` with nothing after it is rejected.
    let _ = Ident::new("r#", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_number() {
    // A raw prefix does not make a number into an identifier.
    let _ = Ident::new("r#255", Span::call_site());
}

#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
fn raw_ident_invalid() {
    // Invalid characters are still rejected behind a raw prefix.
    let _ = Ident::new("r#a#", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_empty() {
    // A lone apostrophe is not an identifier.
    let _ = Ident::new("'", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_number() {
    // A lifetime-like token whose body is numeric is rejected.
    let _ = Ident::new("'255", Span::call_site());
}

#[test]
fn lifetime_invalid() {
    // The panic message quotes the input differently across compiler
    // versions (the apostrophe was escaped before 1.53), so this test
    // inspects the caught panic payload rather than using `should_panic`.
    let result = panic::catch_unwind(|| Ident::new("'a#", Span::call_site()));
    let box_any = match result {
        Err(payload) => payload,
        Ok(_) => panic!("test did not panic as expected"),
    };
    let message = box_any.downcast_ref::<String>().unwrap();
    let expected1 = r#""\'a#" is not a valid Ident"#; // 1.31.0 .. 1.53.0
    let expected2 = r#""'a#" is not a valid Ident"#; // 1.53.0 ..
    assert!(
        message == expected1 || message == expected2,
        "panic message does not match expected string\n\
         \x20   panic message: `{:?}`\n\
         \x20expected message: `{:?}`",
        message,
        expected2,
    );
}

#[test]
fn literal_string() {
    // String literals are printed with surrounding quotes; only `"` needs
    // escaping, apostrophes pass through untouched.
    let repr = |s: &str| Literal::string(s).to_string();
    assert_eq!(repr("foo"), "\"foo\"");
    assert_eq!(repr("\""), "\"\\\"\"");
    assert_eq!(repr("didn't"), "\"didn't\"");
}

#[test]
fn literal_raw_string() {
    // A raw string literal may contain a bare CR as part of a CRLF pair.
    "r\"\r\n\"".parse::<TokenStream>().unwrap();

    // Builds `r##…#""##…#`: an empty raw string delimited by `n` hashes.
    fn raw_string_literal_with_hashes(n: usize) -> String {
        let hashes: String = iter::repeat('#').take(n).collect();
        format!("r{hashes}\"\"{hashes}", hashes = hashes)
    }

    // 255 is the largest hash count the lexer accepts.
    raw_string_literal_with_hashes(255)
        .parse::<TokenStream>()
        .unwrap();

    // https://github.com/rust-lang/rust/pull/95251
    raw_string_literal_with_hashes(256)
        .parse::<TokenStream>()
        .unwrap_err();
}

#[test]
fn literal_byte_string() {
    // Byte strings print with a `b` prefix; control bytes, quotes, and
    // backslashes are escaped, other bytes use `\xNN` as needed.
    let repr = |bytes: &[u8]| Literal::byte_string(bytes).to_string();
    assert_eq!(repr(b""), "b\"\"");
    assert_eq!(
        repr(b"\0\t\n\r\"\\2\x10"),
        "b\"\\0\\t\\n\\r\\\"\\\\2\\x10\"",
    );
}

#[test]
fn literal_character() {
    // Char literals escape the apostrophe but not the double quote.
    let repr = |ch: char| Literal::character(ch).to_string();
    assert_eq!(repr('x'), "'x'");
    assert_eq!(repr('\''), "'\\''");
    assert_eq!(repr('"'), "'\"'");
}

#[test]
fn literal_integer() {
    // Suffixed constructors append the type name; unsuffixed ones print the
    // bare number regardless of which integer type produced it.
    macro_rules! check {
        ($($ctor:ident => $repr:expr,)*) => {
            $(assert_eq!(Literal::$ctor(10).to_string(), $repr);)*
        };
    }

    check! {
        u8_suffixed => "10u8",
        u16_suffixed => "10u16",
        u32_suffixed => "10u32",
        u64_suffixed => "10u64",
        u128_suffixed => "10u128",
        usize_suffixed => "10usize",

        i8_suffixed => "10i8",
        i16_suffixed => "10i16",
        i32_suffixed => "10i32",
        i64_suffixed => "10i64",
        i128_suffixed => "10i128",
        isize_suffixed => "10isize",

        u8_unsuffixed => "10",
        u16_unsuffixed => "10",
        u32_unsuffixed => "10",
        u64_unsuffixed => "10",
        u128_unsuffixed => "10",
        usize_unsuffixed => "10",

        i8_unsuffixed => "10",
        i16_unsuffixed => "10",
        i32_unsuffixed => "10",
        i64_unsuffixed => "10",
        i128_unsuffixed => "10",
        isize_unsuffixed => "10",
    }
}

#[test]
fn literal_float() {
    // Suffixed floats with an integral value drop the fraction (`10f32`);
    // unsuffixed ones keep a `.0` so they still read as floats.
    assert_eq!(Literal::f32_suffixed(10.0).to_string(), "10f32");
    assert_eq!(Literal::f64_suffixed(10.0).to_string(), "10f64");

    assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
    assert_eq!(Literal::f64_unsuffixed(10.0).to_string(), "10.0");
}

#[test]
fn literal_suffix() {
    // Counts top-level tokens: a valid literal suffix keeps the whole thing
    // one token, while an invalid suffix splits it into several.
    let token_count = |p: &str| p.parse::<TokenStream>().unwrap().into_iter().count();

    for &(input, expected) in &[
        ("999u256", 1),
        ("999r#u256", 3),
        ("1.", 1),
        ("1.f32", 3),
        ("1.0_0", 1),
        ("1._0", 3),
        ("1._m", 3),
        ("\"\"s", 1),
        ("r\"\"r", 1),
        ("b\"\"b", 1),
        ("br\"\"br", 1),
        ("r#\"\"#r", 1),
        ("'c'c", 1),
        ("b'b'b", 1),
        ("0E", 1),
        ("0o0A", 1),
        ("0E--0", 4),
        ("0.0ECMA", 1),
    ] {
        assert_eq!(token_count(input), expected);
    }
}

#[test]
fn literal_iter_negative() {
    // A negative literal iterates as two trees: a standalone `-` punct
    // followed by the literal's absolute value.
    let stream = TokenStream::from(TokenTree::Literal(Literal::i32_suffixed(-3)));
    let tokens: Vec<TokenTree> = stream.into_iter().collect();
    assert_eq!(tokens.len(), 2);
    match &tokens[0] {
        TokenTree::Punct(punct) => {
            assert_eq!(punct.as_char(), '-');
            assert_eq!(punct.spacing(), Spacing::Alone);
        }
        unexpected => panic!("unexpected token {:?}", unexpected),
    }
    match &tokens[1] {
        TokenTree::Literal(literal) => assert_eq!(literal.to_string(), "3i32"),
        unexpected => panic!("unexpected token {:?}", unexpected),
    }
}

#[test]
fn literal_parse() {
    // Exactly one literal token, with an optional leading minus, parses.
    for input in &["1", "-1", "-1u12", "1.0", "-1.0", "-1.0f12", "'a'", "\"\n\""] {
        assert!(input.parse::<Literal>().is_ok());
    }

    // Surrounding whitespace, comments, extra tokens, a detached minus, or
    // a minus applied to a non-numeric literal must all be rejected.
    for input in &[
        "0 1",
        " 0",
        "0 ",
        "/* comment */0",
        "0/* comment */",
        "0// comment",
        "- 1",
        "- 1.0",
        "-\"\"",
    ] {
        assert!(input.parse::<Literal>().is_err());
    }
}

#[test]
fn roundtrip() {
    // Printing a parsed stream and reparsing it must be a fixed point:
    // the second printout equals the first.
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let first = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", first);
        let second = first.parse::<TokenStream>().unwrap().to_string();
        assert_eq!(first, second);
    }

    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    roundtrip(
        "
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
        1x
        1u80
        1f320
    ",
    );
    roundtrip("'a");
    roundtrip("'_");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}

#[test]
fn fail() {
    // Each input must fail to parse; a successful parse is reported along
    // with the stream it produced.
    fn fail(p: &str) {
        match p.parse::<TokenStream>() {
            Ok(s) => panic!("should have failed to parse: {}\n{:#?}", p, s),
            Err(_) => {}
        }
    }

    fail("' static");
    fail("r#1");
    fail("r#_");
    fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits)
    fail("\"\\u{999999}\""); // outside of valid range of char
    fail("\"\\u{_0}\""); // leading underscore
    fail("\"\\u{}\""); // empty
    fail("b\"\r\""); // bare carriage return in byte string
    fail("r\"\r\""); // bare carriage return in raw string
    fail("\"\\\r  \""); // backslash carriage return
    fail("'aa'aa");
    fail("br##\"\"#");
    fail("\"\\\n\u{85}\r\"");
}

#[cfg(span_locations)]
#[test]
fn span_test() {
    // A doc comment expands to `#[doc = "..."]`, so the first five expected
    // spans all cover the full comment line.
    let source = "\
/// This is a document comment
testing 123
{
  testing 234
}";
    check_spans(
        source,
        &[
            (1, 0, 1, 30),  // #
            (1, 0, 1, 30),  // [ ... ]
            (1, 0, 1, 30),  // doc
            (1, 0, 1, 30),  // =
            (1, 0, 1, 30),  // "This is..."
            (2, 0, 2, 7),   // testing
            (2, 8, 2, 11),  // 123
            (3, 0, 5, 1),   // { ... }
            (4, 2, 4, 9),   // testing
            (4, 10, 4, 13), // 234
        ],
    );
}

#[cfg(procmacro2_semver_exempt)]
#[cfg(not(nightly))]
#[test]
fn default_span() {
    // The call-site span defaults to line 1 column 0 at both ends and is
    // attributed to an unreal "<unspecified>" source file.
    let span = Span::call_site();
    let start = span.start();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    let end = span.end();
    assert_eq!(end.line, 1);
    assert_eq!(end.column, 0);
    let source_file = span.source_file();
    assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
    assert!(!source_file.is_real());
}

#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    fn tokens(source: &str) -> Vec<TokenTree> {
        source.parse::<TokenStream>().unwrap().into_iter().collect()
    }

    // Two separate parses are attributed to two distinct source files.
    let source1 = tokens("aaa\nbbb");
    let source2 = tokens("ccc\nddd");

    assert!(source1[0].span().source_file() != source2[0].span().source_file());
    assert_eq!(
        source1[0].span().source_file(),
        source1[1].span().source_file()
    );

    // Spans join only within a single source file.
    let joined1 = source1[0].span().join(source1[1].span());
    let joined2 = source1[0].span().join(source2[0].span());
    assert!(joined1.is_some());
    assert!(joined2.is_none());

    // The joined span covers from the start of the first token to the end
    // of the second, and keeps the original file attribution.
    let joined1 = joined1.unwrap();
    let start = joined1.start();
    let end = joined1.end();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    assert_eq!(end.line, 2);
    assert_eq!(end.column, 3);

    assert_eq!(joined1.source_file(), source1[0].span().source_file());
}

#[test]
fn no_panic() {
    // Regression test: this malformed byte-char input must surface as a
    // parse Err instead of panicking inside the lexer.
    let input = str::from_utf8(b"b\'\xc2\x86  \x00\x00\x00^\"").unwrap();
    assert!(input.parse::<TokenStream>().is_err());
}

#[test]
fn punct_before_comment() {
    // A punct immediately followed by a line comment must come out with
    // Alone spacing, not Joint.
    let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
    let first = tts.next().unwrap();
    match first {
        TokenTree::Punct(tt) => {
            assert_eq!('~', tt.as_char());
            assert_eq!(Spacing::Alone, tt.spacing());
        }
        wrong => panic!("wrong token {:?}", wrong),
    }
}

#[test]
fn joint_last_token() {
    // This test verifies that we match the behavior of libproc_macro *not* in
    // the range nightly-2020-09-06 through nightly-2020-09-10, in which this
    // behavior was temporarily broken.
    // See https://github.com/rust-lang/rust/issues/76399

    // Joint spacing must survive a round trip through a TokenStream even
    // when the punct is the final token.
    let stream: TokenStream = iter::once(TokenTree::Punct(Punct::new(':', Spacing::Joint))).collect();
    match stream.into_iter().next().unwrap() {
        TokenTree::Punct(punct) => assert_eq!(punct.spacing(), Spacing::Joint),
        _ => unreachable!(),
    }
}

#[test]
fn raw_identifier() {
    // `r#dyn` parses as a single raw identifier and prints back unchanged.
    let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
    let first = tts.next().unwrap();
    match first {
        TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()),
        wrong => panic!("wrong token {:?}", wrong),
    }
    assert!(tts.next().is_none());
}

#[test]
fn test_debug_ident() {
    let ident = Ident::new("proc_macro", Span::call_site());

    // The Debug representation differs depending on whether span locations
    // are compiled into the crate.
    #[cfg(not(span_locations))]
    let expected = "Ident(proc_macro)";

    #[cfg(span_locations)]
    let expected = "Ident { sym: proc_macro }";

    assert_eq!(format!("{:?}", ident), expected);
}

#[test]
fn test_debug_tokenstream() {
    let tts = TokenStream::from_str("[a + 1]").unwrap();

    // Four expected renderings: with/without span locations, crossed with
    // whether the running std's pretty Debug emits trailing commas (older
    // standard libraries did not).
    #[cfg(not(span_locations))]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
            },
            Punct {
                char: '+',
                spacing: Alone,
            },
            Literal {
                lit: 1,
            },
        ],
    },
]\
    ";

    #[cfg(not(span_locations))]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a
            },
            Punct {
                char: '+',
                spacing: Alone
            },
            Literal {
                lit: 1
            }
        ]
    }
]\
    ";

    #[cfg(span_locations)]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3),
            },
            Punct {
                char: '+',
                spacing: Alone,
                span: bytes(4..5),
            },
            Literal {
                lit: 1,
                span: bytes(6..7),
            },
        ],
        span: bytes(1..8),
    },
]\
    ";

    #[cfg(span_locations)]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3)
            },
            Punct {
                char: '+',
                spacing: Alone,
                span: bytes(4..5)
            },
            Literal {
                lit: 1,
                span: bytes(6..7)
            }
        ],
        span: bytes(1..8)
    }
]\
    ";

    let actual = format!("{:#?}", tts);
    let expected = if actual.ends_with(",\n]") {
        expected
    } else {
        expected_before_trailing_commas
    };
    assert_eq!(expected, actual);
}

#[test]
fn default_tokenstream_is_empty() {
    // A Default-constructed stream must contain no tokens.
    let default_token_stream: TokenStream = Default::default();
    assert!(default_token_stream.is_empty());
}

#[test]
fn tokenstream_size_hint() {
    // `(c d)` collapses to a single group token, so the top level holds
    // exactly 4 trees and the iterator reports an exact size hint.
    let iter = "a b (c d) e".parse::<TokenStream>().unwrap().into_iter();
    assert_eq!(iter.size_hint(), (4, Some(4)));
}

#[test]
fn tuple_indexing() {
    // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
    // `tuple.0.0` lexes as ident, dot, then the float-looking literal `0.0`.
    let tokens: Vec<String> = "tuple.0.0"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .map(|tt| tt.to_string())
        .collect();
    assert_eq!(tokens, ["tuple", ".", "0.0"]);
}

#[cfg(span_locations)]
#[test]
fn non_ascii_tokens() {
    // Each pair of calls compares an ASCII input against a variant with
    // multi-byte characters: span columns are counted in characters, so
    // both members of a pair must yield identical ranges.
    check_spans("// abc", &[]);
    check_spans("// ábc", &[]);
    check_spans("// abc x", &[]);
    check_spans("// ábc x", &[]);
    check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
    check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
    check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
    check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
    check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
    check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
    check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
    check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
    check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
    check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
    check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
    check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
    check_spans("'a'", &[(1, 0, 1, 3)]);
    check_spans("'á'", &[(1, 0, 1, 3)]);
    check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
    check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
    check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
    check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
    check_spans("abc", &[(1, 0, 1, 3)]);
    check_spans("ábc", &[(1, 0, 1, 3)]);
    check_spans("ábć", &[(1, 0, 1, 3)]);
    check_spans("abc// foo", &[(1, 0, 1, 3)]);
    check_spans("ábc// foo", &[(1, 0, 1, 3)]);
    check_spans("ábć// foo", &[(1, 0, 1, 3)]);
    check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
    check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
}

#[cfg(span_locations)]
fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
    // Parses `p` and matches each token tree against one expected
    // (start_line, start_col, end_line, end_col) range; every range must
    // be consumed by the end of the walk.
    let ts = p.parse::<TokenStream>().unwrap();
    check_spans_internal(ts, &mut lines);
    assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
}

#[cfg(span_locations)]
fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
    // Walks the stream depth-first, popping one expected
    // (start_line, start_col, end_line, end_col) tuple per token tree and
    // asserting that the tree's span matches; recurses into groups.
    // Extra tokens beyond the expected list are silently ignored — the
    // caller (`check_spans`) only asserts that the list is fully consumed.
    for i in ts {
        if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
            *lines = rest;

            let start = i.span().start();
            assert_eq!(start.line, sline, "sline did not match for {}", i);
            assert_eq!(start.column, scol, "scol did not match for {}", i);

            let end = i.span().end();
            assert_eq!(end.line, eline, "eline did not match for {}", i);
            assert_eq!(end.column, ecol, "ecol did not match for {}", i);

            if let TokenTree::Group(g) = i {
                // `Group::stream` already returns an owned TokenStream, so
                // the previous `.clone()` on it was redundant.
                check_spans_internal(g.stream(), lines);
            }
        }
    }
}

#[test]
fn byte_order_mark() {
    // A BOM at the very start of the input is skipped by the lexer.
    let tokens = "\u{feff}foo".parse::<TokenStream>().unwrap();
    match tokens.into_iter().next().unwrap() {
        TokenTree::Ident(ident) => assert_eq!(ident, "foo"),
        _ => unreachable!(),
    }

    // Anywhere else, the BOM character is a lex error.
    "foo\u{feff}".parse::<TokenStream>().unwrap_err();
}
