//! Private implementation details of `xshell`.
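//!
//! The `__cmd!` proc macro receives two arguments: an expression that is
//! applied to the program name, and the command-line string literal written in
//! `cmd!`. It re-lexes the literal at compile time and expands into a chain of
//! `.arg(..)`, `.args(..)` and `.__extend_arg(..)` calls.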

#![deny(missing_debug_implementations)]
#![deny(rust_2018_idioms)]

use std::iter;

use proc_macro::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};

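// Expands to a chain of command-building method calls, or to a
// `compile_error!(..)` invocation if the command line cannot be parsed.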
#[doc(hidden)]
#[proc_macro]
pub fn __cmd(macro_arg: TokenStream) -> TokenStream {
    try_cmd(macro_arg).unwrap_or_else(|msg| parse_ts(&format!("compile_error!({:?})", msg)))
}

type Result<T> = std::result::Result<T, String>;

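// The macro input is exactly two token trees: a callable that is invoked with
// the program name to start the command builder, followed by the command-line
// string literal.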
fn try_cmd(macro_arg: TokenStream) -> Result<TokenStream> {
    let (cmd, literal) = {
        let mut iter = macro_arg.into_iter();
        let cmd = iter.next().unwrap();
        let literal = iter.next().unwrap();
        assert!(iter.next().is_none());
        (cmd, literal)
    };

    let literal = match into_literal(&literal) {
        Some(it) => it,
        None => return Err("expected a plain string literal".to_string()),
    };

    let literal_text = literal.to_string();
    if !literal_text.starts_with('"') {
        return Err("expected a plain string literal".to_string());
    }

    let mut args = shell_lex(literal_text.as_str(), literal.span());

    let mut res = TokenStream::new();

    {
        let (_joined_to_prev, splat, program) =
            args.next().ok_or_else(|| "command can't be empty".to_string())??;
        if splat {
            return Err("can't splat program name".to_string());
        }
        res.extend(Some(cmd));
        res.extend(program);
    }

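    // Each lexed argument carries two flags: `joined_to_prev` means there was
    // no whitespace before this fragment, so it concatenates onto the previous
    // argument; `splat` means it was a `{var...}` interpolation that expands
    // to any number of arguments.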
    let mut prev_spat = false;
    for arg in args {
        let (joined_to_prev, splat, arg) = arg?;
        if prev_spat && joined_to_prev {
            return Err(format!(
                "can't combine splat with concatenation, add spaces around `{{{}...}}`",
                trim_decorations(&res.into_iter().last().unwrap().to_string()),
            ));
        }
        prev_spat = splat;

        let method = match (joined_to_prev, splat) {
            (false, false) => ".arg",
            (false, true) => ".args",
            (true, false) => ".__extend_arg",
            (true, true) => {
                return Err(format!(
                    "can't combine splat with concatenation, add spaces around `{{{}...}}`",
                    trim_decorations(&arg.to_string()),
                ))
            }
        };

        res.extend(parse_ts(method));
        res.extend(arg);
    }

    Ok(res)
}

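// The literal may arrive wrapped in a transparent (`Delimiter::None`) group
// when it has been forwarded through `macro_rules!`; unwrap that case too.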
fn into_literal(ts: &TokenTree) -> Option<Literal> {
    match ts {
        TokenTree::Literal(l) => Some(l.clone()),
        TokenTree::Group(g) => match g.delimiter() {
            Delimiter::None => match g.stream().into_iter().collect::<Vec<_>>().as_slice() {
                [TokenTree::Literal(l)] => Some(l.clone()),
                _ => None,
            },
            Delimiter::Parenthesis | Delimiter::Brace | Delimiter::Bracket => None,
        },
        _ => None,
    }
}

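// Strips the surrounding pair of delimiters (quotes, or the braces of an
// interpolation) from a token's text.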
fn trim_decorations(s: &str) -> &str {
    &s[1..s.len() - 1]
}

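// Turns shell tokens into `(joined_to_prev, splat, tokens)` triples, where
// `tokens` is the parenthesized argument list for the generated method call.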
fn shell_lex(
    cmd: &str,
    call_site: Span,
) -> impl Iterator<Item = Result<(bool, bool, TokenStream)>> + '_ {
    tokenize(cmd).map(move |token| {
        let token = token?;
        let mut splat = false;
        let ts = match token.kind {
            TokenKind::Word => parse_ts(&format!("(\"{}\")", token.text)),
            TokenKind::String => parse_ts(&format!("(\"{}\")", trim_decorations(token.text))),
            TokenKind::Interpolation { splat: s } => {
                splat = s;
                let text = trim_decorations(token.text);
                let text = &text[..text.len() - (if splat { "...".len() } else { 0 })];
                if !(text.chars().all(|c| c.is_ascii_alphanumeric() || c == '_')) {
                    return Err(format!(
                        "can only interpolate simple variables, got this expression instead: `{}`",
                        text
                    ));
                }
                let ts = if splat { format!("({})", text) } else { format!("(&({}))", text) };
                respan(parse_ts(&ts), call_site)
            }
        };
        Ok((token.joined_to_prev, splat, ts))
    })
}

/// Like `str::trim_matches`, except it removes at most one occurrence of
/// `pattern` from each end.
fn strip_matches<'a>(s: &'a str, pattern: &str) -> &'a str {
    s.strip_prefix(pattern).unwrap_or(s).strip_suffix(pattern).unwrap_or(s)
}

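// Splits the command line into whitespace-separated words, single-quoted
// strings, and `{interpolation}` fragments, recording whether each token was
// glued to the previous one (no intervening whitespace).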
fn tokenize(cmd: &str) -> impl Iterator<Item = Result<Token<'_>>> + '_ {
    let mut cmd = strip_matches(cmd, "\"");

    iter::from_fn(move || {
        let old_len = cmd.len();
        cmd = cmd.trim_start();
        let joined_to_prev = old_len == cmd.len();
        if cmd.is_empty() {
            return None;
        }
        let (len, kind) = match next_token(cmd) {
            Ok(it) => it,
            Err(err) => {
                cmd = "";
                return Some(Err(err));
            }
        };
        let token = Token { joined_to_prev, text: &cmd[..len], kind };
        cmd = &cmd[len..];
        Some(Ok(token))
    })
}

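// A single lexed fragment of the command line. `joined_to_prev` is true when
// there was no whitespace between this token and the one before it.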
#[derive(Debug)]
struct Token<'a> {
    joined_to_prev: bool,
    text: &'a str,
    kind: TokenKind,
}
#[derive(Debug)]
enum TokenKind {
    Word,
    String,
    Interpolation { splat: bool },
}

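// Returns the byte length and kind of the token at the start of `s`.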
fn next_token(s: &str) -> Result<(usize, TokenKind)> {
    if s.starts_with('{') {
        let len = s.find('}').ok_or_else(|| "unclosed `{` in command".to_string())? + 1;
        let splat = s[..len].ends_with("...}");
        return Ok((len, TokenKind::Interpolation { splat }));
    }
    if s.starts_with('\'') {
        let len = s[1..].find('\'').ok_or_else(|| "unclosed `'` in command".to_string())? + 2;
        return Ok((len, TokenKind::String));
    }
    let len =
        s.find(|it: char| it.is_ascii_whitespace() || it == '\'' || it == '{').unwrap_or(s.len());
    Ok((len, TokenKind::Word))
}

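// Gives every identifier in `ts` the provided span, for both name resolution
// and location, so interpolated variables refer to the caller's bindings and
// diagnostics point at the command string literal.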
fn respan(ts: TokenStream, span: Span) -> TokenStream {
    let mut res = TokenStream::new();
    for tt in ts {
        let tt = match tt {
            TokenTree::Ident(mut ident) => {
                ident.set_span(ident.span().resolved_at(span).located_at(span));
                TokenTree::Ident(ident)
            }
            TokenTree::Group(group) => {
                TokenTree::Group(Group::new(group.delimiter(), respan(group.stream(), span)))
            }
            _ => tt,
        };
        res.extend(Some(tt))
    }
    res
}

fn parse_ts(s: &str) -> TokenStream {
    s.parse().unwrap()
}