• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
2 use std::panic;
3 use std::str::{self, FromStr};
4 
#[test]
fn idents() {
    // An Ident's Display output is exactly the text it was constructed from,
    // including keywords (`fn`) and the underscore identifier.
    for &name in &["String", "fn", "_"] {
        assert_eq!(Ident::new(name, Span::call_site()).to_string(), name);
    }
}
14 
#[test]
#[cfg(procmacro2_semver_exempt)]
fn raw_idents() {
    // new_raw produces an identifier whose Display output carries the `r#`
    // raw-identifier prefix in front of the original text.
    for &name in &["String", "fn", "_"] {
        assert_eq!(
            Ident::new_raw(name, Span::call_site()).to_string(),
            format!("r#{}", name)
        );
    }
}
25 
#[test]
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
fn ident_empty() {
    // Constructing an Ident from the empty string must panic.
    let _ = Ident::new("", Span::call_site());
}
31 
#[test]
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
fn ident_number() {
    // A purely numeric string is not a valid identifier and must panic.
    let _ = Ident::new("255", Span::call_site());
}
37 
#[test]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn ident_invalid() {
    // `#` is not a valid identifier character; construction must panic.
    let _ = Ident::new("a#", Span::call_site());
}
43 
#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_empty() {
    // A bare `r#` prefix with no identifier behind it must panic.
    let _ = Ident::new("r#", Span::call_site());
}
49 
#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_number() {
    // The raw prefix does not make a numeric string a valid identifier.
    let _ = Ident::new("r#255", Span::call_site());
}
55 
#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
fn raw_ident_invalid() {
    // Invalid characters after the raw prefix must still be rejected.
    let _ = Ident::new("r#a#", Span::call_site());
}
61 
#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_empty() {
    // A lone apostrophe (an empty lifetime) is not a valid Ident.
    let _ = Ident::new("'", Span::call_site());
}
67 
#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_number() {
    // A lifetime whose body is numeric is not a valid Ident.
    let _ = Ident::new("'255", Span::call_site());
}
73 
#[test]
fn lifetime_invalid() {
    // `'a#` must panic, but the panic message is checked by hand instead of
    // with #[should_panic] because the way the input is quoted in the message
    // differs across rustc versions (see the version ranges below).
    let result = panic::catch_unwind(|| Ident::new("'a#", Span::call_site()));
    match result {
        Err(box_any) => {
            // The payload of a formatted panic!() is a String.
            let message = box_any.downcast_ref::<String>().unwrap();
            let expected1 = r#""\'a#" is not a valid Ident"#; // 1.31.0 .. 1.53.0
            let expected2 = r#""'a#" is not a valid Ident"#; // 1.53.0 ..
            assert!(
                message == expected1 || message == expected2,
                "panic message does not match expected string\n\
                 \x20   panic message: `{:?}`\n\
                 \x20expected message: `{:?}`",
                message,
                expected2,
            );
        }
        Ok(_) => panic!("test did not panic as expected"),
    }
}
94 
#[test]
fn literal_string() {
    // Display wraps the text in double quotes and escapes embedded double
    // quotes; a single quote passes through unescaped.
    let cases: &[(&str, &str)] = &[
        ("foo", "\"foo\""),
        ("\"", "\"\\\"\""),
        ("didn't", "\"didn't\""),
    ];
    for &(input, expected) in cases {
        assert_eq!(Literal::string(input).to_string(), expected);
    }
}
101 
#[test]
fn literal_raw_string() {
    // A raw string literal containing a CR+LF sequence must parse cleanly.
    let _: TokenStream = "r\"\r\n\"".parse().unwrap();
}
106 
#[test]
fn literal_character() {
    // Display wraps the char in single quotes and escapes an embedded single
    // quote; a double quote passes through unescaped.
    let cases: &[(char, &str)] = &[('x', "'x'"), ('\'', "'\\''"), ('"', "'\"'")];
    for &(input, expected) in cases {
        assert_eq!(Literal::character(input).to_string(), expected);
    }
}
113 
#[test]
fn literal_float() {
    // An unsuffixed f32 literal keeps a decimal point in its printed form
    // even when the value is integral.
    let lit = Literal::f32_unsuffixed(10.0);
    assert_eq!(lit.to_string(), "10.0");
}
118 
#[test]
fn literal_suffix() {
    // How many tokens each snippet lexes into: a suffix glued onto a literal
    // stays part of that single literal token, while forms the lexer does not
    // treat as a suffix split into multiple tokens.
    fn token_count(p: &str) -> usize {
        p.parse::<TokenStream>().unwrap().into_iter().count()
    }

    let cases: &[(&str, usize)] = &[
        ("999u256", 1),
        ("999r#u256", 3),
        ("1.", 1),
        ("1.f32", 3),
        ("1.0_0", 1),
        ("1._0", 3),
        ("1._m", 3),
        ("\"\"s", 1),
        ("r\"\"r", 1),
        ("b\"\"b", 1),
        ("br\"\"br", 1),
        ("r#\"\"#r", 1),
        ("'c'c", 1),
        ("b'b'b", 1),
        ("0E", 1),
        ("0o0A", 1),
        ("0E--0", 4),
        ("0.0ECMA", 1),
    ];
    for &(input, expected) in cases {
        assert_eq!(token_count(input), expected, "{}", input);
    }
}
144 
#[test]
fn literal_iter_negative() {
    // Iterating a stream built from a negative suffixed literal yields two
    // tokens: a standalone `-` punct followed by the magnitude with suffix.
    let negative_literal = Literal::i32_suffixed(-3);
    let tokens: Vec<TokenTree> = TokenStream::from(TokenTree::Literal(negative_literal))
        .into_iter()
        .collect();
    assert_eq!(tokens.len(), 2);
    match &tokens[0] {
        TokenTree::Punct(punct) => {
            assert_eq!(punct.as_char(), '-');
            assert_eq!(punct.spacing(), Spacing::Alone);
        }
        unexpected => panic!("unexpected token {:?}", unexpected),
    }
    match &tokens[1] {
        TokenTree::Literal(literal) => assert_eq!(literal.to_string(), "3i32"),
        unexpected => panic!("unexpected token {:?}", unexpected),
    }
}
165 
#[test]
fn roundtrip() {
    // Printing a parsed TokenStream and re-parsing the printed text must be a
    // fixed point: the second printing equals the first.
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        // `s` is already a String; parse it directly instead of cloning it
        // through a redundant `to_string()` call.
        let s2 = s.parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }
    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    roundtrip(
        "
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
        1x
        1u80
        1f320
    ",
    );
    roundtrip("'a");
    roundtrip("'_");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}
204 
#[test]
fn fail() {
    // Every snippet below must be rejected by the TokenStream parser.
    fn fail(p: &str) {
        if let Ok(s) = p.parse::<TokenStream>() {
            panic!("should have failed to parse: {}\n{:#?}", p, s);
        }
    }
    let cases: &[&str] = &[
        "' static",
        "r#1",
        "r#_",
        "\"\\u{0000000}\"", // overlong unicode escape (rust allows at most 6 hex digits)
        "\"\\u{999999}\"",  // outside of valid range of char
        "\"\\u{_0}\"",      // leading underscore
        "\"\\u{}\"",        // empty
        "b\"\r\"",          // bare carriage return in byte string
        "r\"\r\"",          // bare carriage return in raw string
        "\"\\\r  \"",       // backslash carriage return
        "'aa'aa",
        "br##\"\"#",
        "\"\\\n\u{85}\r\"",
    ];
    for case in cases {
        fail(case);
    }
}
226 
#[cfg(span_locations)]
#[test]
fn span_test() {
    // Each expected tuple is (start_line, start_col, end_line, end_col) for
    // one token in order; check_spans asserts 1-based lines and 0-based
    // columns as reported by Span::start()/end(). The doc comment expands to
    // an attribute, hence the five identical spans covering line 1.
    check_spans(
        "\
/// This is a document comment
testing 123
{
  testing 234
}",
        &[
            (1, 0, 1, 30),  // #
            (1, 0, 1, 30),  // [ ... ]
            (1, 0, 1, 30),  // doc
            (1, 0, 1, 30),  // =
            (1, 0, 1, 30),  // "This is..."
            (2, 0, 2, 7),   // testing
            (2, 8, 2, 11),  // 123
            (3, 0, 5, 1),   // { ... }
            (4, 2, 4, 9),   // testing
            (4, 10, 4, 13), // 234
        ],
    );
}
251 
#[cfg(procmacro2_semver_exempt)]
#[cfg(not(nightly))]
#[test]
fn default_span() {
    // Outside of any real macro expansion, a call-site span reports line 1,
    // column 0 for both endpoints and an unspecified, non-real source file.
    let span = Span::call_site();
    let start = span.start();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    let end = span.end();
    assert_eq!(end.line, 1);
    assert_eq!(end.column, 0);
    let source_file = span.source_file();
    assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
    assert!(!source_file.is_real());
}
266 
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    // Two separate parses produce two distinct source files; joining spans is
    // only possible within a single source file.
    let source1 = "aaa\nbbb"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();
    let source2 = "ccc\nddd"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();

    // Tokens from different parses live in different files; tokens from the
    // same parse share one.
    assert!(source1[0].span().source_file() != source2[0].span().source_file());
    assert_eq!(
        source1[0].span().source_file(),
        source1[1].span().source_file()
    );

    // Same file -> Some(joined span); different files -> None.
    let joined1 = source1[0].span().join(source1[1].span());
    let joined2 = source1[0].span().join(source2[0].span());
    assert!(joined1.is_some());
    assert!(joined2.is_none());

    // The joined span covers from the start of `aaa` (line 1, col 0) to the
    // end of `bbb` (line 2, col 3).
    let start = joined1.unwrap().start();
    let end = joined1.unwrap().end();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    assert_eq!(end.line, 2);
    assert_eq!(end.column, 3);

    // Joining preserves the source file of the operands.
    assert_eq!(
        joined1.unwrap().source_file(),
        source1[0].span().source_file()
    );
}
304 
#[test]
fn no_panic() {
    // This malformed byte-char input must produce a parse Err rather than a
    // panic inside the lexer.
    let bytes: &[u8] = b"b\'\xc2\x86  \x00\x00\x00^\"";
    let s = str::from_utf8(bytes).unwrap();
    assert!(s.parse::<TokenStream>().is_err());
}
310 
#[test]
fn punct_before_comment() {
    // A punct immediately followed by a line comment must have Alone spacing,
    // since the comment separates it from any following token.
    let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
    let first = tts.next().unwrap();
    if let TokenTree::Punct(tt) = first {
        assert_eq!(tt.as_char(), '~');
        assert_eq!(tt.spacing(), Spacing::Alone);
    } else {
        panic!("wrong token {:?}", first);
    }
}
322 
#[test]
fn joint_last_token() {
    // This test verifies that we match the behavior of libproc_macro *not* in
    // the range nightly-2020-09-06 through nightly-2020-09-10, in which this
    // behavior was temporarily broken.
    // See https://github.com/rust-lang/rust/issues/76399
    //
    // A Joint punct at the very end of a stream must keep Joint spacing when
    // read back out.
    let stream = TokenStream::from(TokenTree::Punct(Punct::new(':', Spacing::Joint)));
    match stream.into_iter().next().unwrap() {
        TokenTree::Punct(punct) => assert_eq!(punct.spacing(), Spacing::Joint),
        _ => unreachable!(),
    }
}
338 
#[test]
fn raw_identifier() {
    // `r#dyn` lexes as a single raw-identifier token whose Display output
    // retains the `r#` prefix.
    let mut tts = "r#dyn".parse::<TokenStream>().unwrap().into_iter();
    match tts.next().unwrap() {
        TokenTree::Ident(raw) => assert_eq!(raw.to_string(), "r#dyn"),
        wrong => panic!("wrong token {:?}", wrong),
    }
    assert!(tts.next().is_none());
}
348 
#[test]
fn test_debug_ident() {
    // The Debug representation of Ident depends on whether span locations are
    // compiled in (cfg(span_locations)).
    let ident = Ident::new("proc_macro", Span::call_site());

    #[cfg(not(span_locations))]
    let expected = "Ident(proc_macro)";

    #[cfg(span_locations)]
    let expected = "Ident { sym: proc_macro }";

    assert_eq!(format!("{:?}", ident), expected);
}
361 
#[test]
fn test_debug_tokenstream() {
    // Pretty Debug output for a TokenStream. Four expected strings cover the
    // cfg(span_locations) on/off axis crossed with whether the running
    // standard library emits trailing commas in {:#?} output (older stdlib
    // versions did not); the trailing-comma probe at the bottom picks the
    // matching pair.
    let tts = TokenStream::from_str("[a + 1]").unwrap();

    #[cfg(not(span_locations))]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
            },
            Punct {
                char: '+',
                spacing: Alone,
            },
            Literal {
                lit: 1,
            },
        ],
    },
]\
    ";

    #[cfg(not(span_locations))]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a
            },
            Punct {
                char: '+',
                spacing: Alone
            },
            Literal {
                lit: 1
            }
        ]
    }
]\
    ";

    #[cfg(span_locations)]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3),
            },
            Punct {
                char: '+',
                spacing: Alone,
                span: bytes(4..5),
            },
            Literal {
                lit: 1,
                span: bytes(6..7),
            },
        ],
        span: bytes(1..8),
    },
]\
    ";

    #[cfg(span_locations)]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3)
            },
            Punct {
                char: '+',
                spacing: Alone,
                span: bytes(4..5)
            },
            Literal {
                lit: 1,
                span: bytes(6..7)
            }
        ],
        span: bytes(1..8)
    }
]\
    ";

    // Detect from the actual output whether this stdlib emits trailing commas
    // and compare against the corresponding expectation.
    let actual = format!("{:#?}", tts);
    if actual.ends_with(",\n]") {
        assert_eq!(expected, actual);
    } else {
        assert_eq!(expected_before_trailing_commas, actual);
    }
}
465 
#[test]
fn default_tokenstream_is_empty() {
    // The Default impl must yield a stream with no tokens.
    let stream = TokenStream::default();
    assert!(stream.is_empty());
}
472 
#[test]
fn tuple_indexing() {
    // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
    //
    // `tuple.0.0` currently lexes as the ident, a dot, and then a single
    // `0.0` float literal.
    let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
    for &expected in &["tuple", ".", "0.0"] {
        assert_eq!(tokens.next().unwrap().to_string(), expected);
    }
    assert!(tokens.next().is_none());
}
482 
#[cfg(span_locations)]
#[test]
fn non_ascii_tokens() {
    // Span columns must be counted the same way for ASCII and non-ASCII
    // input: each ASCII fixture is paired with a variant containing
    // multi-byte characters, and both must report identical
    // (start_line, start_col, end_line, end_col) spans.
    check_spans("// abc", &[]);
    check_spans("// ábc", &[]);
    check_spans("// abc x", &[]);
    check_spans("// ábc x", &[]);
    check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
    check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
    check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
    check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
    check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
    check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
    check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
    check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
    check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
    check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
    check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
    check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
    check_spans("'a'", &[(1, 0, 1, 3)]);
    check_spans("'á'", &[(1, 0, 1, 3)]);
    // Inner doc comments expand to several tokens sharing one span.
    check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
    check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
    check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
    check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
    check_spans("abc", &[(1, 0, 1, 3)]);
    check_spans("ábc", &[(1, 0, 1, 3)]);
    check_spans("ábć", &[(1, 0, 1, 3)]);
    check_spans("abc// foo", &[(1, 0, 1, 3)]);
    check_spans("ábc// foo", &[(1, 0, 1, 3)]);
    check_spans("ábć// foo", &[(1, 0, 1, 3)]);
    check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
    check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
}
521 
#[cfg(span_locations)]
fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
    // Parse `p` and assert that the tokens' spans match `lines`, a list of
    // (start_line, start_col, end_line, end_col) tuples consumed in
    // depth-first token order. Every expected tuple must be used up.
    let ts = p.parse::<TokenStream>().unwrap();
    check_spans_internal(ts, &mut lines);
    assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
}
528 
#[cfg(span_locations)]
fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
    // Walk the stream depth-first, consuming one expected
    // (start_line, start_col, end_line, end_col) tuple per token and
    // asserting the token's span against it. Tokens beyond the end of the
    // expected list are ignored here; unconsumed tuples are caught by the
    // caller's is_empty check.
    for i in ts {
        if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
            *lines = rest;

            let start = i.span().start();
            assert_eq!(start.line, sline, "sline did not match for {}", i);
            assert_eq!(start.column, scol, "scol did not match for {}", i);

            let end = i.span().end();
            assert_eq!(end.line, eline, "eline did not match for {}", i);
            assert_eq!(end.column, ecol, "ecol did not match for {}", i);

            if let TokenTree::Group(g) = i {
                // `stream()` returns an owned TokenStream; the previous
                // `.clone()` on it was a redundant extra copy.
                check_spans_internal(g.stream(), lines);
            }
        }
    }
}
549