1 #![allow(clippy::float_cmp, clippy::non_ascii_literal)]
2
3 #[macro_use]
4 mod macros;
5
6 use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
7 use quote::ToTokens;
8 use std::iter::FromIterator;
9 use std::str::FromStr;
10 use syn::{Lit, LitFloat, LitInt, LitStr};
11
lit(s: &str) -> Lit12 fn lit(s: &str) -> Lit {
13 match TokenStream::from_str(s)
14 .unwrap()
15 .into_iter()
16 .next()
17 .unwrap()
18 {
19 TokenTree::Literal(lit) => Lit::new(lit),
20 _ => panic!(),
21 }
22 }
23
#[test]
fn strings() {
    // Asserts that `s` parses as a string literal with the expected value,
    // then re-prints the parsed literal and (if the printed form differs)
    // recurses until the representation reaches a fixed point.
    fn test_string(s: &str, value: &str) {
        match lit(s) {
            Lit::Str(lit) => {
                assert_eq!(lit.value(), value);
                let again = lit.into_token_stream().to_string();
                if again != s {
                    test_string(&again, value);
                }
            }
            wrong => panic!("{:?}", wrong),
        }
    }

    test_string("\"a\"", "a");
    test_string("\"\\n\"", "\n");
    test_string("\"\\r\"", "\r");
    test_string("\"\\t\"", "\t");
    test_string("\"🐕\"", "🐕"); // NOTE: This is an emoji
    test_string("\"\\\"\"", "\"");
    test_string("\"'\"", "'");
    test_string("\"\"", "");
    test_string("\"\\u{1F415}\"", "\u{1F415}");
    test_string("\"\\u{1_2__3_}\"", "\u{123}");
    test_string(
        "\"contains\nnewlines\\\nescaped newlines\"",
        "contains\nnewlinesescaped newlines",
    );
    test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere");
    test_string("\"...\"q", "...");
    test_string("r\"...\"q", "...");
    test_string("r##\"...\"##q", "...");
}
58
59 #[test]
byte_strings()60 fn byte_strings() {
61 fn test_byte_string(s: &str, value: &[u8]) {
62 match lit(s) {
63 Lit::ByteStr(lit) => {
64 assert_eq!(lit.value(), value);
65 let again = lit.into_token_stream().to_string();
66 if again != s {
67 test_byte_string(&again, value);
68 }
69 }
70 wrong => panic!("{:?}", wrong),
71 }
72 }
73
74 test_byte_string("b\"a\"", b"a");
75 test_byte_string("b\"\\n\"", b"\n");
76 test_byte_string("b\"\\r\"", b"\r");
77 test_byte_string("b\"\\t\"", b"\t");
78 test_byte_string("b\"\\\"\"", b"\"");
79 test_byte_string("b\"'\"", b"'");
80 test_byte_string("b\"\"", b"");
81 test_byte_string(
82 "b\"contains\nnewlines\\\nescaped newlines\"",
83 b"contains\nnewlinesescaped newlines",
84 );
85 test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere");
86 test_byte_string("b\"...\"q", b"...");
87 test_byte_string("br\"...\"q", b"...");
88 test_byte_string("br##\"...\"##q", b"...");
89 }
90
#[test]
fn bytes() {
    // Parse `s` as a byte literal; its value must match and its printed form
    // must round-trip exactly (byte literals are expected to be canonical).
    fn test_byte(s: &str, value: u8) {
        let byte = match lit(s) {
            Lit::Byte(byte) => byte,
            wrong => panic!("{:?}", wrong),
        };
        assert_eq!(byte.value(), value);
        assert_eq!(byte.into_token_stream().to_string(), s);
    }

    test_byte("b'a'", b'a');
    test_byte("b'\\n'", b'\n');
    test_byte("b'\\r'", b'\r');
    test_byte("b'\\t'", b'\t');
    test_byte("b'\\''", b'\'');
    test_byte("b'\"'", b'"');
    test_byte("b'a'q", b'a');
}
112
#[test]
fn chars() {
    // Asserts that `s` parses as a char literal with the expected value,
    // recursing on the re-printed form until it reaches a fixed point.
    fn test_char(s: &str, value: char) {
        match lit(s) {
            Lit::Char(lit) => {
                assert_eq!(lit.value(), value);
                let again = lit.into_token_stream().to_string();
                if again != s {
                    test_char(&again, value);
                }
            }
            wrong => panic!("{:?}", wrong),
        }
    }

    test_char("'a'", 'a');
    test_char("'\\n'", '\n');
    test_char("'\\r'", '\r');
    test_char("'\\t'", '\t');
    test_char("'🐕'", '🐕'); // NOTE: This is an emoji
    test_char("'\\''", '\'');
    test_char("'\"'", '"');
    test_char("'\\u{1F415}'", '\u{1F415}');
    test_char("'a'q", 'a');
}
138
#[test]
fn ints() {
    // Parse `s` as an integer literal, check its base-10 value and suffix,
    // then re-print and re-parse until the printed form reaches a fixed point.
    fn test_int(s: &str, value: u64, suffix: &str) {
        let mut repr = s.to_owned();
        loop {
            let int = match lit(&repr) {
                Lit::Int(int) => int,
                wrong => panic!("{:?}", wrong),
            };
            assert_eq!(int.base10_digits().parse::<u64>().unwrap(), value);
            assert_eq!(int.suffix(), suffix);
            let again = int.into_token_stream().to_string();
            if again == repr {
                return;
            }
            repr = again;
        }
    }

    test_int("5", 5, "");
    test_int("5u32", 5, "u32");
    test_int("0E", 0, "E");
    test_int("0ECMA", 0, "ECMA");
    test_int("0o0A", 0, "A");
    test_int("5_0", 50, "");
    test_int("5_____0_____", 50, "");
    test_int("0x7f", 127, "");
    test_int("0x7F", 127, "");
    test_int("0b1001", 9, "");
    test_int("0o73", 59, "");
    test_int("0x7Fu8", 127, "u8");
    test_int("0b1001i8", 9, "i8");
    test_int("0o73u32", 59, "u32");
    test_int("0x__7___f_", 127, "");
    test_int("0x__7___F_", 127, "");
    test_int("0b_1_0__01", 9, "");
    test_int("0o_7__3", 59, "");
    test_int("0x_7F__u8", 127, "u8");
    test_int("0b__10__0_1i8", 9, "i8");
    test_int("0o__7__________________3u32", 59, "u32");
    test_int("0e1\u{5c5}", 0, "e1\u{5c5}");
}
178
#[test]
fn floats() {
    // Parse `s` as a float literal, check its base-10 value and suffix,
    // then re-print and re-parse until the printed form reaches a fixed point.
    fn test_float(s: &str, value: f64, suffix: &str) {
        let mut repr = s.to_owned();
        loop {
            let float = match lit(&repr) {
                Lit::Float(float) => float,
                wrong => panic!("{:?}", wrong),
            };
            assert_eq!(float.base10_digits().parse::<f64>().unwrap(), value);
            assert_eq!(float.suffix(), suffix);
            let again = float.into_token_stream().to_string();
            if again == repr {
                return;
            }
            repr = again;
        }
    }

    test_float("5.5", 5.5, "");
    test_float("5.5E12", 5.5e12, "");
    test_float("5.5e12", 5.5e12, "");
    test_float("1.0__3e-12", 1.03e-12, "");
    test_float("1.03e+12", 1.03e12, "");
    test_float("9e99e99", 9e99, "e99");
    test_float("1e_0", 1.0, "");
    test_float("0.0ECMA", 0.0, "ECMA");
}
204
#[test]
fn negative() {
    // Negative int/float reprs passed to LitInt::new / LitFloat::new must
    // round-trip unchanged through Display.
    let span = Span::call_site();
    for &repr in ["-1", "-1i8", "-1i16", "-1i32", "-1i64"].iter() {
        assert_eq!(repr, LitInt::new(repr, span).to_string());
    }
    for &repr in ["-1.5", "-1.5f32", "-1.5f64"].iter() {
        assert_eq!(repr, LitFloat::new(repr, span).to_string());
    }
}
217
#[test]
fn suffix() {
    // Extract the suffix of any literal kind as an owned String.
    fn get_suffix(token: &str) -> String {
        let lit = syn::parse_str::<Lit>(token).unwrap();
        let suffix = match &lit {
            Lit::Str(lit) => lit.suffix(),
            Lit::ByteStr(lit) => lit.suffix(),
            Lit::Byte(lit) => lit.suffix(),
            Lit::Char(lit) => lit.suffix(),
            Lit::Int(lit) => lit.suffix(),
            Lit::Float(lit) => lit.suffix(),
            _ => unimplemented!(),
        };
        suffix.to_owned()
    }

    let cases = [
        ("\"\"s", "s"),
        ("r\"\"r", "r"),
        ("b\"\"b", "b"),
        ("br\"\"br", "br"),
        ("r#\"\"#r", "r"),
        ("'c'c", "c"),
        ("b'b'b", "b"),
        ("1i32", "i32"),
        ("1_i32", "i32"),
        ("1.0f32", "f32"),
        ("1.0_f32", "f32"),
    ];
    for &(token, expected) in cases.iter() {
        assert_eq!(get_suffix(token), expected);
    }
}
245
#[test]
fn test_deep_group_empty() {
    // A literal wrapped in two levels of transparent (None-delimited) groups
    // should still parse as that literal.
    let innermost = TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]);
    let middle = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
        Delimiter::None,
        innermost,
    ))]);
    let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
        Delimiter::None,
        middle,
    ))]);

    snapshot!(tokens as Lit, @r#""hi""# );
}
258
#[test]
fn test_error() {
    // Neither a non-literal token nor a non-string literal parses as LitStr,
    // and both produce the same "expected string literal" error message.
    for &input in ["...", "5"].iter() {
        let err = syn::parse_str::<LitStr>(input).unwrap_err();
        assert_eq!("expected string literal", err.to_string());
    }
}
267