#![allow(clippy::float_cmp, clippy::non_ascii_literal)]

#[macro_use]
mod macros;

use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
use quote::ToTokens;
use std::iter::FromIterator;
use std::str::FromStr;
use syn::{Lit, LitFloat, LitInt, LitStr};

// Tokenize `s` and convert its first token into a `Lit`, panicking if that
// token is not a literal.
fn lit(s: &str) -> Lit {
    match TokenStream::from_str(s)
        .unwrap()
        .into_iter()
        .next()
        .unwrap()
    {
        TokenTree::Literal(lit) => Lit::new(lit),
        _ => panic!(),
    }
}

#[test]
fn strings() {
    fn test_string(s: &str, value: &str) {
        match lit(s) {
            Lit::Str(lit) => {
                assert_eq!(lit.value(), value);
                let again = lit.into_token_stream().to_string();
                if again != s {
                    test_string(&again, value);
                }
            }
            wrong => panic!("{:?}", wrong),
        }
    }

    test_string("\"a\"", "a");
    test_string("\"\\n\"", "\n");
    test_string("\"\\r\"", "\r");
    test_string("\"\\t\"", "\t");
    test_string("\"🐕\"", "🐕"); // NOTE: This is an emoji
    test_string("\"\\\"\"", "\"");
    test_string("\"'\"", "'");
    test_string("\"\"", "");
    test_string("\"\\u{1F415}\"", "\u{1F415}");
    test_string("\"\\u{1_2__3_}\"", "\u{123}");
    test_string(
        "\"contains\nnewlines\\\nescaped newlines\"",
        "contains\nnewlinesescaped newlines",
    );
    test_string(
        "\"escaped newline\\\n \x0C unsupported whitespace\"",
        "escaped newline\x0C unsupported whitespace",
    );
    test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere");
    test_string("\"...\"q", "...");
    test_string("r\"...\"q", "...");
    test_string("r##\"...\"##q", "...");
}

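// Added sketch (not part of the original suite): `LitStr::new` should escape
// its input so that `value` recovers the exact original string. The test name
// `strings_constructed` is our own.
#[test]
fn strings_constructed() {
    let lit = LitStr::new("contains \"quotes\" and \\ backslashes", Span::call_site());
    assert_eq!(lit.value(), "contains \"quotes\" and \\ backslashes");
}
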
#[test]
fn byte_strings() {
    fn test_byte_string(s: &str, value: &[u8]) {
        match lit(s) {
            Lit::ByteStr(lit) => {
                assert_eq!(lit.value(), value);
                let again = lit.into_token_stream().to_string();
                if again != s {
                    test_byte_string(&again, value);
                }
            }
            wrong => panic!("{:?}", wrong),
        }
    }

    test_byte_string("b\"a\"", b"a");
    test_byte_string("b\"\\n\"", b"\n");
    test_byte_string("b\"\\r\"", b"\r");
    test_byte_string("b\"\\t\"", b"\t");
    test_byte_string("b\"\\\"\"", b"\"");
    test_byte_string("b\"'\"", b"'");
    test_byte_string("b\"\"", b"");
    test_byte_string(
        "b\"contains\nnewlines\\\nescaped newlines\"",
        b"contains\nnewlinesescaped newlines",
    );
    test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere");
    test_byte_string("b\"...\"q", b"...");
    test_byte_string("br\"...\"q", b"...");
    test_byte_string("br##\"...\"##q", b"...");
}

#[test]
fn bytes() {
    fn test_byte(s: &str, value: u8) {
        match lit(s) {
            Lit::Byte(lit) => {
                assert_eq!(lit.value(), value);
                let again = lit.into_token_stream().to_string();
                assert_eq!(again, s);
            }
            wrong => panic!("{:?}", wrong),
        }
    }

    test_byte("b'a'", b'a');
    test_byte("b'\\n'", b'\n');
    test_byte("b'\\r'", b'\r');
    test_byte("b'\\t'", b'\t');
    test_byte("b'\\''", b'\'');
    test_byte("b'\"'", b'"');
    test_byte("b'a'q", b'a');
}

#[test]
fn chars() {
    fn test_char(s: &str, value: char) {
        match lit(s) {
            Lit::Char(lit) => {
                assert_eq!(lit.value(), value);
                let again = lit.into_token_stream().to_string();
                if again != s {
                    test_char(&again, value);
                }
            }
            wrong => panic!("{:?}", wrong),
        }
    }

    test_char("'a'", 'a');
    test_char("'\\n'", '\n');
    test_char("'\\r'", '\r');
    test_char("'\\t'", '\t');
    test_char("'🐕'", '🐕'); // NOTE: This is an emoji
    test_char("'\\''", '\'');
    test_char("'\"'", '"');
    test_char("'\\u{1F415}'", '\u{1F415}');
    test_char("'a'q", 'a');
}

#[test]
fn ints() {
    fn test_int(s: &str, value: u64, suffix: &str) {
        match lit(s) {
            Lit::Int(lit) => {
                assert_eq!(lit.base10_digits().parse::<u64>().unwrap(), value);
                assert_eq!(lit.suffix(), suffix);
                let again = lit.into_token_stream().to_string();
                if again != s {
                    test_int(&again, value, suffix);
                }
            }
            wrong => panic!("{:?}", wrong),
        }
    }

    test_int("5", 5, "");
    test_int("5u32", 5, "u32");
    test_int("0E", 0, "E");
    test_int("0ECMA", 0, "ECMA");
    test_int("0o0A", 0, "A");
    test_int("5_0", 50, "");
    test_int("5_____0_____", 50, "");
    test_int("0x7f", 127, "");
    test_int("0x7F", 127, "");
    test_int("0b1001", 9, "");
    test_int("0o73", 59, "");
    test_int("0x7Fu8", 127, "u8");
    test_int("0b1001i8", 9, "i8");
    test_int("0o73u32", 59, "u32");
    test_int("0x__7___f_", 127, "");
    test_int("0x__7___F_", 127, "");
    test_int("0b_1_0__01", 9, "");
    test_int("0o_7__3", 59, "");
    test_int("0x_7F__u8", 127, "u8");
    test_int("0b__10__0_1i8", 9, "i8");
    test_int("0o__7__________________3u32", 59, "u32");
    test_int("0e1\u{5c5}", 0, "e1\u{5c5}");
}

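// Added sketch (not part of the original suite): `base10_parse` should agree
// with parsing `base10_digits` by hand, as done in `ints` above. The test
// name `ints_base10_parse` is our own.
#[test]
fn ints_base10_parse() {
    let lit = syn::parse_str::<LitInt>("0x7Fu8").unwrap();
    assert_eq!(lit.base10_parse::<u8>().unwrap(), 127);
    assert_eq!(lit.suffix(), "u8");
}
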
#[test]
fn floats() {
    fn test_float(s: &str, value: f64, suffix: &str) {
        match lit(s) {
            Lit::Float(lit) => {
                assert_eq!(lit.base10_digits().parse::<f64>().unwrap(), value);
                assert_eq!(lit.suffix(), suffix);
                let again = lit.into_token_stream().to_string();
                if again != s {
                    test_float(&again, value, suffix);
                }
            }
            wrong => panic!("{:?}", wrong),
        }
    }

    test_float("5.5", 5.5, "");
    test_float("5.5E12", 5.5e12, "");
    test_float("5.5e12", 5.5e12, "");
    test_float("1.0__3e-12", 1.03e-12, "");
    test_float("1.03e+12", 1.03e12, "");
    test_float("9e99e99", 9e99, "e99");
    test_float("1e_0", 1.0, "");
    test_float("0.0ECMA", 0.0, "ECMA");
}

#[test]
fn negative() {
    let span = Span::call_site();
    assert_eq!("-1", LitInt::new("-1", span).to_string());
    assert_eq!("-1i8", LitInt::new("-1i8", span).to_string());
    assert_eq!("-1i16", LitInt::new("-1i16", span).to_string());
    assert_eq!("-1i32", LitInt::new("-1i32", span).to_string());
    assert_eq!("-1i64", LitInt::new("-1i64", span).to_string());
    assert_eq!("-1.5", LitFloat::new("-1.5", span).to_string());
    assert_eq!("-1.5f32", LitFloat::new("-1.5f32", span).to_string());
    assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string());
}

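// Added sketch (not part of the original suite), assuming syn keeps the
// leading `-` in `base10_digits` for negative literals built with
// `LitInt::new`/`LitFloat::new`. The test name `negative_value` is our own.
#[test]
fn negative_value() {
    let span = Span::call_site();
    assert_eq!(LitInt::new("-1i8", span).base10_parse::<i8>().unwrap(), -1);
    assert_eq!(
        LitFloat::new("-1.5f64", span).base10_parse::<f64>().unwrap(),
        -1.5,
    );
}
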
#[test]
fn suffix() {
    fn get_suffix(token: &str) -> String {
        let lit = syn::parse_str::<Lit>(token).unwrap();
        match lit {
            Lit::Str(lit) => lit.suffix().to_owned(),
            Lit::ByteStr(lit) => lit.suffix().to_owned(),
            Lit::Byte(lit) => lit.suffix().to_owned(),
            Lit::Char(lit) => lit.suffix().to_owned(),
            Lit::Int(lit) => lit.suffix().to_owned(),
            Lit::Float(lit) => lit.suffix().to_owned(),
            _ => unimplemented!(),
        }
    }

    assert_eq!(get_suffix("\"\"s"), "s");
    assert_eq!(get_suffix("r\"\"r"), "r");
    assert_eq!(get_suffix("b\"\"b"), "b");
    assert_eq!(get_suffix("br\"\"br"), "br");
    assert_eq!(get_suffix("r#\"\"#r"), "r");
    assert_eq!(get_suffix("'c'c"), "c");
    assert_eq!(get_suffix("b'b'b"), "b");
    assert_eq!(get_suffix("1i32"), "i32");
    assert_eq!(get_suffix("1_i32"), "i32");
    assert_eq!(get_suffix("1.0f32"), "f32");
    assert_eq!(get_suffix("1.0_f32"), "f32");
}

#[test]
fn test_deep_group_empty() {
    let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
        Delimiter::None,
        TokenStream::from_iter(vec![TokenTree::Group(Group::new(
            Delimiter::None,
            TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]),
        ))]),
    ))]);

    snapshot!(tokens as Lit, @r#""hi""#);
}

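// Added sketch (not part of the original suite): a literal wrapped in a single
// None-delimited group should parse the same way as the deeply nested case
// above. The test name `test_shallow_group` is our own.
#[test]
fn test_shallow_group() {
    let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
        Delimiter::None,
        TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]),
    ))]);

    snapshot!(tokens as Lit, @r#""hi""#);
}
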
#[test]
fn test_error() {
    let err = syn::parse_str::<LitStr>("...").unwrap_err();
    assert_eq!("expected string literal", err.to_string());

    let err = syn::parse_str::<LitStr>("5").unwrap_err();
    assert_eq!("expected string literal", err.to_string());
}