#![allow(clippy::assertions_on_result_states)]

use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};

// #[doc = "..."] -> "..."
fn lit_of_outer_doc_comment(tokens: &TokenStream) -> Literal {
    lit_of_doc_comment(tokens, false)
}

// #![doc = "..."] -> "..."
fn lit_of_inner_doc_comment(tokens: &TokenStream) -> Literal {
    lit_of_doc_comment(tokens, true)
}

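// A doc comment lowers to an attribute, so the stream is expected to be:
// Punct('#'), then Punct('!') for the inner form, then a bracketed Group
// containing Ident("doc"), Punct('='), and the string Literal holding the
// comment text. Anything else panics with the offending stream.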
fn lit_of_doc_comment(tokens: &TokenStream, inner: bool) -> Literal {
    let mut iter = tokens.clone().into_iter();
    match iter.next().unwrap() {
        TokenTree::Punct(punct) => {
            assert_eq!(punct.as_char(), '#');
            assert_eq!(punct.spacing(), Spacing::Alone);
        }
        _ => panic!("wrong token {:?}", tokens),
    }
    if inner {
        match iter.next().unwrap() {
            TokenTree::Punct(punct) => {
                assert_eq!(punct.as_char(), '!');
                assert_eq!(punct.spacing(), Spacing::Alone);
            }
            _ => panic!("wrong token {:?}", tokens),
        }
    }
    iter = match iter.next().unwrap() {
        TokenTree::Group(group) => {
            assert_eq!(group.delimiter(), Delimiter::Bracket);
            assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
            group.stream().into_iter()
        }
        _ => panic!("wrong token {:?}", tokens),
    };
    match iter.next().unwrap() {
        TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"),
        _ => panic!("wrong token {:?}", tokens),
    }
    match iter.next().unwrap() {
        TokenTree::Punct(punct) => {
            assert_eq!(punct.as_char(), '=');
            assert_eq!(punct.spacing(), Spacing::Alone);
        }
        _ => panic!("wrong token {:?}", tokens),
    }
    match iter.next().unwrap() {
        TokenTree::Literal(literal) => {
            assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
            literal
        }
        _ => panic!("wrong token {:?}", tokens),
    }
}

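// An empty block comment is not a doc comment, so it contributes no tokens
// at all to the stream.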
#[test]
fn closed_immediately() {
    let stream = "/**/".parse::<TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
}

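// In `/*/` the trailing `*/` overlaps the opening `/*`, so the block
// comment is never terminated and parsing must fail.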
#[test]
fn incomplete() {
    assert!("/*/".parse::<TokenStream>().is_err());
}

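// Line and block doc comments lower to #[doc = "..."] (outer) or
// #![doc = "..."] (inner) attributes. Interior whitespace is preserved:
// `/** doc */` keeps the space before `*/`, while `/// doc` ends at the
// end of input, so its literal has no trailing space.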
#[test]
fn lit() {
    let stream = "/// doc".parse::<TokenStream>().unwrap();
    let lit = lit_of_outer_doc_comment(&stream);
    assert_eq!(lit.to_string(), "\" doc\"");

    let stream = "//! doc".parse::<TokenStream>().unwrap();
    let lit = lit_of_inner_doc_comment(&stream);
    assert_eq!(lit.to_string(), "\" doc\"");

    let stream = "/** doc */".parse::<TokenStream>().unwrap();
    let lit = lit_of_outer_doc_comment(&stream);
    assert_eq!(lit.to_string(), "\" doc \"");

    let stream = "/*! doc */".parse::<TokenStream>().unwrap();
    let lit = lit_of_inner_doc_comment(&stream);
    assert_eq!(lit.to_string(), "\" doc \"");
}

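// A carriage return is only accepted when immediately followed by a line
// feed. A line doc comment ends at the `\r\n`, leaving an empty literal,
// while a block doc comment keeps the `\r\n` in its text. A bare `\r`, or
// one separated from the `\n` by another character, is a parse error.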
#[test]
fn carriage_return() {
    let stream = "///\r\n".parse::<TokenStream>().unwrap();
    let lit = lit_of_outer_doc_comment(&stream);
    assert_eq!(lit.to_string(), "\"\"");

    let stream = "/**\r\n*/".parse::<TokenStream>().unwrap();
    let lit = lit_of_outer_doc_comment(&stream);
    assert_eq!(lit.to_string(), "\"\\r\\n\"");

    "///\r".parse::<TokenStream>().unwrap_err();
    "///\r \n".parse::<TokenStream>().unwrap_err();
    "/**\r \n*/".parse::<TokenStream>().unwrap_err();
}

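// A hedged addition, not part of the original suite: by the same rules
// exercised in `lit` and `carriage_return` above, a line doc comment's text
// should stop at the terminating newline, so a trailing "\n" is expected to
// yield the same literal as no newline at all.
#[test]
fn lit_trailing_newline() {
    let stream = "/// doc\n".parse::<TokenStream>().unwrap();
    let lit = lit_of_outer_doc_comment(&stream);
    assert_eq!(lit.to_string(), "\" doc\"");
}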