1 use std::str::{self, FromStr};
2
3 use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
4
#[test]
fn idents() {
    // An Ident must print back exactly the text it was constructed from,
    // whether it is an ordinary name, a keyword, or the wildcard `_`.
    for name in &["String", "fn", "_"] {
        let ident = Ident::new(name, Span::call_site());
        assert_eq!(ident.to_string(), *name);
    }
}
14
#[test]
#[cfg(procmacro2_semver_exempt)]
fn raw_idents() {
    // `Ident::new_raw` prefixes `r#` onto whatever identifier text it is given,
    // including keywords and the wildcard.
    for name in &["String", "fn", "_"] {
        let raw = Ident::new_raw(name, Span::call_site());
        assert_eq!(raw.to_string(), format!("r#{}", name));
    }
}
25
#[test]
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
fn ident_empty() {
    // Constructing an Ident from the empty string must panic with a
    // message steering the caller toward Option<Ident>.
    Ident::new("", Span::call_site());
}
31
#[test]
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
fn ident_number() {
    // A purely numeric string is not a valid identifier; the panic message
    // points the caller at Literal.
    Ident::new("255", Span::call_site());
}
37
#[test]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn ident_invalid() {
    // `#` is not a legal identifier character, so construction must panic.
    Ident::new("a#", Span::call_site());
}
43
#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_empty() {
    // A raw-identifier prefix with nothing after it is rejected.
    Ident::new("r#", Span::call_site());
}
49
#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_number() {
    // A raw identifier cannot be purely numeric either.
    Ident::new("r#255", Span::call_site());
}
55
#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
fn raw_ident_invalid() {
    // Invalid characters are rejected even behind the `r#` prefix.
    Ident::new("r#a#", Span::call_site());
}
61
#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_empty() {
    // A bare apostrophe (lifetime sigil with no name) is not an Ident.
    Ident::new("'", Span::call_site());
}
67
#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_number() {
    // A lifetime whose name is purely numeric is rejected.
    Ident::new("'255", Span::call_site());
}
73
#[test]
#[should_panic(expected = r#""\'a#" is not a valid Ident"#)]
fn lifetime_invalid() {
    // Invalid characters after a lifetime name are rejected.
    Ident::new("'a#", Span::call_site());
}
79
#[test]
fn literal_string() {
    // `Literal::string` wraps its input in double quotes, escaping embedded
    // double quotes but leaving single quotes untouched.
    let cases = [
        ("foo", "\"foo\""),
        ("\"", "\"\\\"\""),
        ("didn't", "\"didn't\""),
    ];
    for &(input, rendered) in &cases {
        assert_eq!(Literal::string(input).to_string(), rendered);
    }
}
86
#[test]
fn literal_character() {
    // `Literal::character` escapes an embedded single quote but not a
    // double quote.
    let cases = [('x', "'x'"), ('\'', "'\\''"), ('"', "'\"'")];
    for &(ch, rendered) in &cases {
        assert_eq!(Literal::character(ch).to_string(), rendered);
    }
}
93
#[test]
fn literal_float() {
    // An unsuffixed float literal always renders with a fractional part,
    // so it cannot be mistaken for an integer when reparsed.
    let lit = Literal::f32_unsuffixed(10.0);
    assert_eq!(lit.to_string(), "10.0");
}
98
#[test]
fn literal_suffix() {
    // Count the top-level tokens produced by parsing `input`. A valid
    // suffixed literal is a single token; an invalid suffix splits the
    // input into several tokens.
    fn token_count(input: &str) -> usize {
        let stream: TokenStream = input.parse().unwrap();
        stream.into_iter().count()
    }

    let cases: &[(&str, usize)] = &[
        ("999u256", 1),   // arbitrary integer suffix stays one literal
        ("999r#u256", 3), // raw-ident suffix is not allowed
        ("1.", 1),
        ("1.f32", 3),     // `1.f32` is int, dot, ident — not a float literal
        ("1.0_0", 1),
        ("1._0", 3),
        ("1._m", 3),
        ("\"\"s", 1),     // string literal with suffix
    ];
    for &(input, expected) in cases {
        assert_eq!(token_count(input), expected);
    }
}
114
#[test]
fn roundtrip() {
    // Parse `p`, print the stream, reparse the printed form, and require the
    // two printed forms to agree — i.e. printing is a fixed point after one
    // parse/print cycle.
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        // `s` is already a String; calling `.to_string()` on it again (as the
        // previous version did) only cloned it for no benefit.
        let s2 = s.parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }
    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    roundtrip(
        "
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
        1x
        1u80
        1f320
        ",
    );
    roundtrip("'a");
    roundtrip("'_");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}
153
#[test]
fn fail() {
    // Each input below is malformed and must be rejected by the parser;
    // a successful parse is reported as a failure with the parsed tree.
    fn fail(p: &str) {
        match p.parse::<TokenStream>() {
            Ok(s) => panic!("should have failed to parse: {}\n{:#?}", p, s),
            Err(_) => {}
        }
    }
    fail("' static"); // space between sigil and lifetime name
    fail("r#1");      // raw prefix on a number
    fail("r#_");      // raw prefix on the wildcard
}
165
#[cfg(span_locations)]
#[test]
fn span_test() {
    // Verify line/column spans for every token produced by parsing `p`.
    // Each `lines` entry is (start_line, start_col, end_line, end_col);
    // entries are consumed depth-first as group contents are visited.
    fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
        let ts = p.parse::<TokenStream>().unwrap();
        check_spans_internal(ts, &mut lines);
    }

    fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
        for i in ts {
            if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
                *lines = rest;

                let start = i.span().start();
                assert_eq!(start.line, sline, "sline did not match for {}", i);
                assert_eq!(start.column, scol, "scol did not match for {}", i);

                let end = i.span().end();
                assert_eq!(end.line, eline, "eline did not match for {}", i);
                assert_eq!(end.column, ecol, "ecol did not match for {}", i);

                // Recurse into groups. `Group::stream()` already returns an
                // owned TokenStream, so the extra `.clone()` the previous
                // version made was redundant. (`TokenTree` is imported at the
                // top of the file; the duplicate local `use` is gone too.)
                if let TokenTree::Group(ref g) = i {
                    check_spans_internal(g.stream(), lines);
                }
            }
        }
    }

    check_spans(
        "\
/// This is a document comment
testing 123
{
  testing 234
}",
        &[
            (1, 0, 1, 30), // #
            (1, 0, 1, 30), // [ ... ]
            (1, 0, 1, 30), // doc
            (1, 0, 1, 30), // =
            (1, 0, 1, 30), // "This is..."
            (2, 0, 2, 7),  // testing
            (2, 8, 2, 11), // 123
            (3, 0, 5, 1),  // { ... }
            (4, 2, 4, 9),  // testing
            (4, 10, 4, 13), // 234
        ],
    );
}
220
#[cfg(procmacro2_semver_exempt)]
#[cfg(not(nightly))]
#[test]
fn default_span() {
    // A call-site span with no real source behind it collapses to line 1,
    // column 0 at both ends.
    let start = Span::call_site().start();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    let end = Span::call_site().end();
    assert_eq!(end.line, 1);
    assert_eq!(end.column, 0);
    // Its source file is the "<unspecified>" placeholder and is not real.
    let source_file = Span::call_site().source_file();
    assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
    assert!(!source_file.is_real());
}
235
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    let source1 = "aaa\nbbb"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();
    let source2 = "ccc\nddd"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();

    // Each parse gets its own virtual source file; tokens from the same
    // parse share one.
    assert!(source1[0].span().source_file() != source2[0].span().source_file());
    assert_eq!(
        source1[0].span().source_file(),
        source1[1].span().source_file()
    );

    // join() succeeds only for spans that come from the same source file.
    let joined1 = source1[0].span().join(source1[1].span());
    let joined2 = source1[0].span().join(source2[0].span());
    assert!(joined1.is_some());
    assert!(joined2.is_none());

    // The joined span covers from the start of `aaa` (1:0) through the end
    // of `bbb` (2:3).
    let start = joined1.unwrap().start();
    let end = joined1.unwrap().end();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    assert_eq!(end.line, 2);
    assert_eq!(end.column, 3);

    // Joining preserves the source file of its inputs.
    assert_eq!(
        joined1.unwrap().source_file(),
        source1[0].span().source_file()
    );
}
273
#[test]
fn no_panic() {
    // Regression test: this byte sequence (a byte-char literal followed by
    // garbage, including NULs) once caused trouble in the parser; it must
    // yield a clean parse error, never a panic.
    let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
    assert!(s.parse::<proc_macro2::TokenStream>().is_err());
}
279
#[test]
fn tricky_doc_comment() {
    // An empty block comment produces no tokens at all.
    let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.is_empty(), "not empty -- {:?}", tokens);

    // An outer doc comment expands to `# [ doc = " doc" ]`: two top-level
    // tokens, the `#` punct followed by a bracketed group.
    let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
    match tokens[0] {
        proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
        _ => panic!("wrong token {:?}", tokens[0]),
    }
    let mut tokens = match tokens[1] {
        proc_macro2::TokenTree::Group(ref tt) => {
            assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
            tt.stream().into_iter()
        }
        // Bug fix: this arm matches tokens[1], so report tokens[1] (the
        // previous version printed tokens[0], misreporting the failure).
        _ => panic!("wrong token {:?}", tokens[1]),
    };

    // The group's contents are `doc`, `=`, and the string literal, which
    // keeps the space that followed `///`.
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
        t => panic!("wrong token {:?}", t),
    }
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
        t => panic!("wrong token {:?}", t),
    }
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Literal(ref tt) => {
            assert_eq!(tt.to_string(), "\" doc\"");
        }
        t => panic!("wrong token {:?}", t),
    }
    assert!(tokens.next().is_none());

    // An inner doc comment adds a leading `!`: `# ! [ ... ]` is 3 tokens.
    let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
}
321
#[test]
fn op_before_comment() {
    // A line comment immediately following a punct must not make the punct
    // "Joint" — there is no adjacent operator character, only a comment.
    let stream = TokenStream::from_str("~// comment").unwrap();
    let first = stream.into_iter().next().unwrap();
    match first {
        TokenTree::Punct(tt) => {
            assert_eq!('~', tt.as_char());
            assert_eq!(Spacing::Alone, tt.spacing());
        }
        wrong => panic!("wrong token {:?}", wrong),
    }
}
333
#[test]
fn raw_identifier() {
    // A raw identifier parses as a single Ident token that prints with its
    // `r#` prefix intact, and nothing follows it in the stream.
    let mut tokens = TokenStream::from_str("r#dyn").unwrap().into_iter();
    let first = tokens.next().unwrap();
    match first {
        TokenTree::Ident(raw) => assert_eq!(raw.to_string(), "r#dyn"),
        wrong => panic!("wrong token {:?}", wrong),
    }
    assert!(tokens.next().is_none());
}
343
#[test]
fn test_debug_ident() {
    let ident = Ident::new("proc_macro", Span::call_site());

    // Without the semver-exempt cfg, Debug is the compact tuple-like form.
    #[cfg(not(procmacro2_semver_exempt))]
    let expected = "Ident(proc_macro)";

    // With it, Debug also exposes the (empty, call-site) byte span.
    #[cfg(procmacro2_semver_exempt)]
    let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";

    assert_eq!(expected, format!("{:?}", ident));
}
356
#[test]
fn test_debug_tokenstream() {
    let tts = TokenStream::from_str("[a + 1]").unwrap();

    // Expected pretty `{:#?}` output. Two variants per cfg because the
    // standard library's pretty-Debug gained trailing commas at some point;
    // this test accepts either, selecting on whether the actual output ends
    // with ",\n]" (see the branch at the bottom).

    // Modern formatter (trailing commas), without span info.
    #[cfg(not(procmacro2_semver_exempt))]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
            },
            Punct {
                op: '+',
                spacing: Alone,
            },
            Literal {
                lit: 1,
            },
        ],
    },
]\
";

    // Older formatter (no trailing commas), without span info.
    #[cfg(not(procmacro2_semver_exempt))]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a
            },
            Punct {
                op: '+',
                spacing: Alone
            },
            Literal {
                lit: 1
            }
        ]
    }
]\
";

    // Modern formatter, with byte spans (semver-exempt builds).
    #[cfg(procmacro2_semver_exempt)]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3),
            },
            Punct {
                op: '+',
                spacing: Alone,
                span: bytes(4..5),
            },
            Literal {
                lit: 1,
                span: bytes(6..7),
            },
        ],
        span: bytes(1..8),
    },
]\
";

    // Older formatter, with byte spans.
    #[cfg(procmacro2_semver_exempt)]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3)
            },
            Punct {
                op: '+',
                spacing: Alone,
                span: bytes(4..5)
            },
            Literal {
                lit: 1,
                span: bytes(6..7)
            }
        ],
        span: bytes(1..8)
    }
]\
";

    // Pick the expected string based on the running formatter's style.
    let actual = format!("{:#?}", tts);
    if actual.ends_with(",\n]") {
        assert_eq!(expected, actual);
    } else {
        assert_eq!(expected_before_trailing_commas, actual);
    }
}
460
#[test]
fn default_tokenstream_is_empty() {
    // `TokenStream`'s Default impl must produce an empty stream.
    let stream = TokenStream::default();
    assert!(stream.is_empty());
}
467