// Copyright © SixtyFPS GmbH <info@slint.dev>
// SPDX-License-Identifier: GPL-3.0-only OR LicenseRef-Slint-Royalty-free-1.1 OR LicenseRef-Slint-commercial

// cSpell:ignore punct

#![doc = include_str!("README.md")]
#![doc(html_logo_url = "https://slint.dev/logo/slint-logo-square-light.svg")]

extern crate proc_macro;

use i_slint_compiler::diagnostics::BuildDiagnostics;
use i_slint_compiler::parser::SyntaxKind;
use i_slint_compiler::*;
use proc_macro::{Spacing, TokenStream, TokenTree};
use quote::quote;

/// Returns true if the two tokens are touching. For example the tokens `foo` and `-` are touching
/// if the source code was written `foo-`, but not when it was written `foo -`.
///
/// Returns None if we couldn't detect whether they are touching (e.g., our heuristics don't work with rust-analyzer)
fn are_token_touching(token1: proc_macro::Span, token2: proc_macro::Span) -> Option<bool> {
    // There is no way with the stable API to find out if the tokens are touching, so do it by
    // extracting the range from the debug representation of the span
    are_token_touching_impl(&format!("{token1:?}"), &format!("{token2:?}"))
}

fn are_token_touching_impl(token1_debug: &str, token2_debug: &str) -> Option<bool> {
    // The debug representation of a span looks like this: "#0 bytes(6662789..6662794)"
    // We just have to find out whether the first number of the second span's range
    // is the same as the second number of the first span's range.
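    // For example, given "#0 bytes(123..456)" and "#0 bytes(456..789)", the end of the
    // first range (456) equals the start of the second, so the tokens touch.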
    let is_byte_char = |c: char| c.is_numeric() || c == ':';
    let not_is_byte_char = |c: char| !is_byte_char(c);
    let end_of_token1 = token1_debug
        .trim_end_matches(not_is_byte_char)
        .rsplit(not_is_byte_char)
        .next()?
        .trim_matches(':');
    let begin_of_token2 = token2_debug
        .trim_end_matches(not_is_byte_char)
        .strip_suffix(is_byte_char)?
        .trim_end_matches(is_byte_char)
        .trim_end_matches(not_is_byte_char)
        .rsplit(not_is_byte_char)
        .next()?
        .trim_matches(':');
    (!begin_of_token2.is_empty()).then_some(end_of_token1 == begin_of_token2)
}

#[test]
fn are_token_touching_impl_test() {
    assert!(are_token_touching_impl("#0 bytes(6662788..6662789)", "#0 bytes(6662789..6662794)")
        .unwrap());
    assert!(!are_token_touching_impl("#0 bytes(6662788..6662789)", "#0 bytes(6662790..6662794)")
        .unwrap());
    assert!(!are_token_touching_impl("#0 bytes(6662789..6662794)", "#0 bytes(6662788..6662789)")
        .unwrap());
    assert!(
        !are_token_touching_impl("#0 bytes(6662788..6662789)", "#0 bytes(662789..662794)").unwrap()
    );
    assert!(are_token_touching_impl("#0 bytes(123..456)", "#0 bytes(456..789)").unwrap());

    // Alternative representation on nightly with a special flag
    assert!(are_token_touching_impl("/foo/bar.rs:12:7: 12:18", "/foo/bar.rs:12:18: 12:19").unwrap());
    assert!(are_token_touching_impl("/foo/bar.rs:2:7: 13:18", "/foo/bar.rs:13:18: 14:29").unwrap());
    assert!(!are_token_touching_impl("/foo/bar.rs:2:7: 13:18", "/foo/bar.rs:14:18: 14:29").unwrap());
    assert!(!are_token_touching_impl("/foo/bar.rs:2:7: 2:8", "/foo/bar.rs:2:18: 2:29").unwrap());

    // What happens if the representation changes
    assert!(are_token_touching_impl("hello", "hello").is_none());
    assert!(are_token_touching_impl("hello42", "hello42").is_none());

    // rust-analyzer just has indices that mean nothing
    assert!(are_token_touching_impl("55", "56").is_none());
}

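/// Convert a stream of Rust `proc_macro` tokens into the flat list of Slint tokens
/// expected by the Slint parser.
///
/// Along the way, constructs that Rust tokenizes differently from Slint are re-joined:
/// dashed identifiers (`foo-bar`), color literals (`#abc`), compound operators
/// (`:=`, `<=>`, `=>`, ...) and percentage units (`42%`).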
fn fill_token_vec(stream: impl Iterator<Item = TokenTree>, vec: &mut Vec<parser::Token>) {
    let mut prev_spacing = Spacing::Alone;
    let mut prev_span = proc_macro::Span::call_site();
    for t in stream {
        let span = t.span();
        match t {
            TokenTree::Ident(i) => {
                if let Some(last) = vec.last_mut() {
                    if (last.kind == SyntaxKind::ColorLiteral && last.text.len() == 1)
                        || (last.kind == SyntaxKind::Identifier
                            && are_token_touching(prev_span, span)
                                .unwrap_or_else(|| last.text.ends_with('-')))
                    {
                        last.text = format!("{}{}", last.text, i).into();
                        prev_span = span;
                        continue;
                    }
                }
                vec.push(parser::Token {
                    kind: SyntaxKind::Identifier,
                    text: i.to_string().into(),
                    span: Some(i.span()),
                    ..Default::default()
                });
            }
            TokenTree::Punct(p) => {
                let kind = match p.as_char() {
                    ':' => SyntaxKind::Colon,
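                    // An '=' that directly follows another operator (with `Spacing::Joint`)
                    // forms a compound token such as `:=`, `+=`, `<=` or `==`; merge it into
                    // the previous token instead of emitting a separate Equal.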
                    '=' => {
                        if let Some(last) = vec.last_mut() {
                            let kt = match last.kind {
                                SyntaxKind::Star => Some((SyntaxKind::StarEqual, "*=")),
                                SyntaxKind::Colon => Some((SyntaxKind::ColonEqual, ":=")),
                                SyntaxKind::Plus => Some((SyntaxKind::PlusEqual, "+=")),
                                SyntaxKind::Minus => Some((SyntaxKind::MinusEqual, "-=")),
                                SyntaxKind::Div => Some((SyntaxKind::DivEqual, "/=")),
                                SyntaxKind::LAngle => Some((SyntaxKind::LessEqual, "<=")),
                                SyntaxKind::RAngle => Some((SyntaxKind::GreaterEqual, ">=")),
                                SyntaxKind::Equal => Some((SyntaxKind::EqualEqual, "==")),
                                SyntaxKind::Bang => Some((SyntaxKind::NotEqual, "!=")),
                                _ => None,
                            };
                            if let Some((k, t)) = kt {
                                if prev_spacing == Spacing::Joint {
                                    last.kind = k;
                                    last.text = t.into();
                                    continue;
                                }
                            }
                        }
                        SyntaxKind::Equal
                    }
                    ';' => SyntaxKind::Semicolon,
                    '!' => SyntaxKind::Bang,
                    '.' => SyntaxKind::Dot,
                    '+' => SyntaxKind::Plus,
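                    // A '-' that directly touches the preceding identifier is part of a dashed
                    // Slint identifier (e.g. `min-width`), so it is appended to that token
                    // instead of becoming a Minus.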
                    '-' => {
                        if let Some(last) = vec.last_mut() {
                            if last.kind == SyntaxKind::Identifier
                                && are_token_touching(prev_span, p.span()).unwrap_or(true)
                            {
                                last.text = format!("{}-", last.text).into();
                                prev_span = span;
                                continue;
                            }
                        }
                        SyntaxKind::Minus
                    }
                    '*' => SyntaxKind::Star,
                    '/' => SyntaxKind::Div,
                    '<' => SyntaxKind::LAngle,
                    '>' => {
                        if let Some(last) = vec.last_mut() {
                            if last.kind == SyntaxKind::LessEqual && prev_spacing == Spacing::Joint
                            {
                                last.kind = SyntaxKind::DoubleArrow;
                                last.text = "<=>".into();
                                continue;
                            } else if last.kind == SyntaxKind::Equal
                                && prev_spacing == Spacing::Joint
                            {
                                last.kind = SyntaxKind::FatArrow;
                                last.text = "=>".into();
                                continue;
                            } else if last.kind == SyntaxKind::Minus
                                && prev_spacing == Spacing::Joint
                            {
                                last.kind = SyntaxKind::Arrow;
                                last.text = "->".into();
                                continue;
                            }
                        }
                        SyntaxKind::RAngle
                    }
                    '#' => SyntaxKind::ColorLiteral,
                    '?' => SyntaxKind::Question,
                    ',' => SyntaxKind::Comma,
                    '&' => {
                        // A lone '&' is not a valid Slint token and can only be part of '&&',
                        // so treat every '&' as '&&' and skip the second half of a joint pair.
                        // FIXME: do this properly.
                        if let Some(last) = vec.last_mut() {
                            if last.kind == SyntaxKind::AndAnd && prev_spacing == Spacing::Joint {
                                continue;
                            }
                        }
                        SyntaxKind::AndAnd
                    }
                    '|' => {
                        // Merge two joint '|' into a single '||' (OrOr) token;
                        // otherwise emit a Pipe token.
                        if let Some(last) = vec.last_mut() {
                            if last.kind == SyntaxKind::Pipe && prev_spacing == Spacing::Joint {
                                last.kind = SyntaxKind::OrOr;
                                continue;
                            }
                        }
                        SyntaxKind::Pipe
                    }
                    '%' => {
                        // Handle '%' as a unit attached to a number literal (e.g. `42%`)
                        if let Some(last) = vec.last_mut() {
                            if last.kind == SyntaxKind::NumberLiteral {
                                last.text = format!("{}%", last.text).into();
                                continue;
                            }
                        }
                        SyntaxKind::Percent
                    }
                    '$' => SyntaxKind::Dollar,
                    '@' => SyntaxKind::At,
                    _ => SyntaxKind::Error,
                };
                prev_spacing = p.spacing();
                vec.push(parser::Token {
                    kind,
                    text: p.to_string().into(),
                    span: Some(p.span()),
                    ..Default::default()
                });
            }
            TokenTree::Literal(l) => {
                let s = l.to_string();
                // The proc_macro API doesn't tell us the type of the literal,
                // so sniff it from the first character of its textual representation.
                let f = s.chars().next().unwrap();
                let kind = if f == '"' {
                    SyntaxKind::StringLiteral
                } else if f.is_ascii_digit() {
                    if let Some(last) = vec.last_mut() {
                        if (last.kind == SyntaxKind::ColorLiteral && last.text.len() == 1)
                            || (last.kind == SyntaxKind::Identifier
                                && are_token_touching(prev_span, span)
                                    .unwrap_or_else(|| last.text.ends_with('-')))
                        {
                            last.text = format!("{}{}", last.text, s).into();
                            prev_span = span;
                            continue;
                        }
                    }
                    SyntaxKind::NumberLiteral
                } else {
                    SyntaxKind::Error
                };
                vec.push(parser::Token {
                    kind,
                    text: s.into(),
                    span: Some(l.span()),
                    ..Default::default()
                });
            }
            TokenTree::Group(g) => {
                use proc_macro::Delimiter::*;
                use SyntaxKind::*;
                let (l, r, sl, sr) = match g.delimiter() {
                    Parenthesis => (LParent, RParent, "(", ")"),
                    Brace => (LBrace, RBrace, "{", "}"),
                    Bracket => (LBracket, RBracket, "[", "]"),
                    None => todo!(),
                };
                vec.push(parser::Token {
                    kind: l,
                    text: sl.into(),
                    span: Some(g.span()), // span_open is not stable
                    ..Default::default()
                });
                fill_token_vec(g.stream().into_iter(), vec);
                vec.push(parser::Token {
                    kind: r,
                    text: sr.into(),
                    span: Some(g.span()), // span_close is not stable
                    ..Default::default()
                });
            }
        }
        prev_span = span;
    }
}

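/// Extract a path from a string literal token, stripping the surrounding quotes
/// (and the `r`/`#` markers of raw string literals such as `r"..."` or `r#"..."#`).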
fn extract_path(literal: proc_macro::Literal) -> std::path::PathBuf {
    let path_with_quotes = literal.to_string();
    let path_with_quotes_stripped = if let Some(p) = path_with_quotes.strip_prefix('r') {
        let hash_removed = p.trim_matches('#');
        hash_removed.strip_prefix('\"').unwrap().strip_suffix('\"').unwrap()
    } else {
        // FIXME: unescape
        path_with_quotes.trim_matches('\"')
    };
    path_with_quotes_stripped.into()
}

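/// Consume the leading `#[...]` attributes that may precede the Slint source inside the
/// macro and record them in `compiler_config`, returning the rest of the token stream.
///
/// A sketch of the forms this function recognizes (the paths and values below are
/// illustrative):
///
/// ```ignore
/// slint::slint! {
///     #[include_path = "ui/includes"]
///     #[library_path(my_lib) = "ui/my_lib.slint"]
///     #[style = "fluent"]
///     // ... Slint code follows ...
/// }
/// ```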
fn extract_compiler_config(
    mut stream: proc_macro::token_stream::IntoIter,
    compiler_config: &mut CompilerConfiguration,
) -> impl Iterator<Item = TokenTree> {
    let mut remaining_stream;
    loop {
        remaining_stream = stream.clone();
        match (stream.next(), stream.next()) {
            (Some(TokenTree::Punct(p)), Some(TokenTree::Group(group)))
                if p.as_char() == '#' && group.delimiter() == proc_macro::Delimiter::Bracket =>
            {
                let mut attr_stream = group.stream().into_iter();
                match attr_stream.next() {
                    Some(TokenTree::Ident(include_ident))
                        if include_ident.to_string() == "include_path" =>
                    {
                        match (attr_stream.next(), attr_stream.next()) {
                            (
                                Some(TokenTree::Punct(equal_punct)),
                                Some(TokenTree::Literal(path)),
                            ) if equal_punct.as_char() == '=' => {
                                compiler_config.include_paths.push(extract_path(path));
                            }
                            _ => break,
                        }
                    }
                    Some(TokenTree::Ident(library_ident))
                        if library_ident.to_string() == "library_path" =>
                    {
                        match (attr_stream.next(), attr_stream.next(), attr_stream.next()) {
                            (
                                Some(TokenTree::Group(group)),
                                Some(TokenTree::Punct(equal_punct)),
                                Some(TokenTree::Literal(path)),
                            ) if group.delimiter() == proc_macro::Delimiter::Parenthesis
                                && equal_punct.as_char() == '=' =>
                            {
                                let library_name = group.stream().into_iter().next().unwrap();
                                compiler_config
                                    .library_paths
                                    .insert(library_name.to_string(), extract_path(path));
                            }
                            _ => break,
                        }
                    }
                    Some(TokenTree::Ident(style_ident)) if style_ident.to_string() == "style" => {
                        match (attr_stream.next(), attr_stream.next()) {
                            (
                                Some(TokenTree::Punct(equal_punct)),
                                Some(TokenTree::Literal(requested_style)),
                            ) if equal_punct.as_char() == '=' => {
                                compiler_config.style = requested_style
                                    .to_string()
                                    .strip_prefix('\"')
                                    .unwrap()
                                    .strip_suffix('\"')
                                    .unwrap()
                                    .to_string()
                                    .into();
                            }
                            _ => break,
                        }
                    }
                    _ => break,
                }
            }
            _ => break,
        }
    }
    remaining_stream
}

/// This macro allows you to use the Slint design markup language inline in Rust code. Within the braces of the macro
/// you can place Slint code, and the named exported components will be available for instantiation.
///
/// For the documentation about the syntax of the language, see
#[doc = concat!("[The Slint Language Documentation](https://slint.dev/releases/", env!("CARGO_PKG_VERSION"), "/docs/slint)")]
///
/// When `import`ing `.slint` files or loading images with `@image-url`, the specified paths are relative to
/// the directory that contains Cargo.toml.
///
/// ### Limitations
///
/// Within `.slint` files, you can interpolate string literals using the `\{...}` syntax.
/// This is not possible in this macro, as it wouldn't parse as a Rust string.
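///
/// ### Example
///
/// A minimal usage sketch (the component and its contents are illustrative; not compiled
/// as a doctest here):
///
/// ```ignore
/// slint::slint! {
///     export component HelloWorld inherits Window {
///         Text { text: "hello world"; }
///     }
/// }
/// let app = HelloWorld::new().unwrap();
/// app.run().unwrap();
/// ```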
#[proc_macro]
pub fn slint(stream: TokenStream) -> TokenStream {
    let token_iter = stream.into_iter();

    let mut compiler_config =
        CompilerConfiguration::new(i_slint_compiler::generator::OutputFormat::Rust);

    let token_iter = extract_compiler_config(token_iter, &mut compiler_config);

    let mut tokens = vec![];
    fill_token_vec(token_iter, &mut tokens);

    let source_file = if let Some(cargo_manifest) = std::env::var_os("CARGO_MANIFEST_DIR") {
        let mut path: std::path::PathBuf = cargo_manifest.into();
        path.push("Cargo.toml");
        diagnostics::SourceFileInner::from_path_only(path)
    } else {
        diagnostics::SourceFileInner::from_path_only(Default::default())
    };
    let mut diag = BuildDiagnostics::default();
    let syntax_node = parser::parse_tokens(tokens.clone(), source_file, &mut diag);
    if diag.has_error() {
        return diag.report_macro_diagnostic(&tokens);
    }

    //println!("{:#?}", syntax_node);
    compiler_config.translation_domain = std::env::var("CARGO_PKG_NAME").ok();
    let (root_component, diag, _) =
        spin_on::spin_on(compile_syntax_node(syntax_node, diag, compiler_config));
    //println!("{:#?}", tree);
    if diag.has_error() {
        return diag.report_macro_diagnostic(&tokens);
    }

    let mut result = generator::rust::generate(&root_component);

    // Make sure to recompile if any of the external files change
    let reload = diag
        .all_loaded_files
        .iter()
        .filter(|path| path.is_absolute() && !path.ends_with("Cargo.toml"))
        .filter_map(|p| p.to_str())
        .map(|p| quote! {const _ : &'static [u8] = ::core::include_bytes!(#p);});

    result.extend(reload);
    result.extend(quote! {const _ : ::core::option::Option<&'static str> = ::core::option_env!("SLINT_STYLE");});

    let mut result = TokenStream::from(result);
    if !diag.is_empty() {
        result.extend(diag.report_macro_diagnostic(&tokens));
    }
    result
}