1 | // Copyright © SixtyFPS GmbH <info@sixtyfps.io> |
2 | // SPDX-License-Identifier: MIT OR Apache-2.0 |
3 | |
4 | /*! |
5 | Document your crate's feature flags. |
6 | |
This crate provides a macro that extracts "documentation" comments from Cargo.toml.
8 | |
9 | To use this crate, add `#![doc = document_features::document_features!()]` in your crate documentation. |
10 | The `document_features!()` macro reads your `Cargo.toml` file, extracts feature comments and generates |
11 | a markdown string for your documentation. |
12 | |
13 | Basic example: |
14 | |
15 | ```rust |
16 | //! Normal crate documentation goes here. |
17 | //! |
18 | //! ## Feature flags |
19 | #![doc = document_features::document_features!()] |
20 | |
21 | // rest of the crate goes here. |
22 | ``` |
23 | |
24 | ## Documentation format: |
25 | |
The documentation of your crate's features goes into `Cargo.toml`, where the features are defined.
27 | |
28 | The `document_features!()` macro analyzes the contents of `Cargo.toml`. |
29 | Similar to Rust's documentation comments `///` and `//!`, the macro understands |
30 | comments that start with `## ` and `#! `. Note the required trailing space. |
Lines starting with `###` are not treated as doc comments.
32 | |
33 | `## ` comments are meant to be *above* the feature they document. |
34 | There can be several `## ` comments, but they must always be followed by a |
35 | feature name or an optional dependency. |
There must not be any `#! ` comments between a `## ` comment and the feature it documents.
37 | |
`#! ` comments are not associated with a particular feature and are printed
where they occur. Use them to group features, for example.
40 | |
41 | ## Examples: |
42 | |
43 | */ |
44 | #![doc = self_test!(/** |
45 | [package] |
46 | name = "..." |
47 | # ... |
48 | |
49 | [features] |
50 | default = ["foo"] |
#! This comment goes on top
52 | |
53 | ## The foo feature enables the `foo` functions |
54 | foo = [] |
55 | |
56 | ## The bar feature enables the bar module |
57 | bar = [] |
58 | |
59 | #! ### Experimental features |
60 | #! The following features are experimental |
61 | |
62 | ## Enable the fusion reactor |
63 | ## |
64 | ## ⚠️ Can lead to explosions |
65 | fusion = [] |
66 | |
67 | [dependencies] |
68 | document-features = "0.2" |
69 | |
70 | #! ### Optional dependencies |
71 | |
72 | ## Enable this feature to implement the trait for the types from the genial crate |
73 | genial = { version = "0.2", optional = true } |
74 | |
75 | ## This awesome dependency is specified in its own table |
76 | [dependencies.awesome] |
77 | version = "1.3.5" |
78 | optional = true |
79 | */ |
80 | => |
81 | /** |
This comment goes on top
83 | * **`foo`** *(enabled by default)* — The foo feature enables the `foo` functions |
84 | * **`bar`** — The bar feature enables the bar module |
85 | |
86 | #### Experimental features |
87 | The following features are experimental |
88 | * **`fusion`** — Enable the fusion reactor |
89 | |
90 | ⚠️ Can lead to explosions |
91 | |
92 | #### Optional dependencies |
93 | * **`genial`** — Enable this feature to implement the trait for the types from the genial crate |
94 | * **`awesome`** — This awesome dependency is specified in its own table |
95 | */ |
96 | )] |
97 | /*! |
98 | |
99 | ## Customization |
100 | |
101 | You can customize the formatting of the features in the generated documentation by setting |
102 | the key **`feature_label=`** to a given format string. This format string must be either |
103 | a [string literal](https://doc.rust-lang.org/reference/tokens.html#string-literals) or |
104 | a [raw string literal](https://doc.rust-lang.org/reference/tokens.html#raw-string-literals). |
105 | Every occurrence of `{feature}` inside the format string will be substituted with the name of the feature. |
106 | |
107 | For instance, to emulate the HTML formatting used by `rustdoc` one can use the following: |
108 | |
109 | ```rust |
110 | #![doc = document_features::document_features!(feature_label = r#"<span class="stab portability"><code>{feature}</code></span>"# )] |
111 | ``` |
112 | |
113 | The default formatting is equivalent to: |
114 | |
115 | ```rust |
116 | #![doc = document_features::document_features!(feature_label = "**`{feature}`**" )] |
117 | ``` |
118 | |
119 | ## Compatibility |
120 | |
The minimum Rust version required to use this crate is Rust 1.54, because it relies on
the ability to invoke macros from doc comments. You can make this crate optional and use
`#[cfg_attr()]` attributes to enable it only when building the documentation.
You need two levels of `cfg_attr` because Rust < 1.54 doesn't parse the attribute
otherwise.
126 | |
127 | ```rust,ignore |
128 | #![cfg_attr( |
129 | feature = "document-features" , |
130 | cfg_attr(doc, doc = ::document_features::document_features!()) |
131 | )] |
132 | ``` |
133 | |
134 | In your Cargo.toml, enable this feature while generating the documentation on docs.rs: |
135 | |
136 | ```toml |
137 | [dependencies] |
138 | document-features = { version = "0.2", optional = true } |
139 | |
140 | [package.metadata.docs.rs] |
141 | features = ["document-features"] |
142 | ## Alternative: enable all features so they are all documented |
143 | ## all-features = true |
144 | ``` |
145 | */ |
146 | |
#[cfg(not(feature = "default"))]
compile_error!(
"The feature `default` must be enabled to ensure \
forward compatibility with future versions of this crate"
);
152 | |
153 | extern crate proc_macro; |
154 | |
155 | use proc_macro::{TokenStream, TokenTree}; |
156 | use std::borrow::Cow; |
157 | use std::collections::{HashMap, HashSet}; |
158 | use std::convert::TryFrom; |
159 | use std::fmt::Write; |
160 | use std::path::Path; |
161 | use std::str::FromStr; |
162 | |
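/// Build a `compile_error!` token stream carrying `e` as the message, spanned at the call site.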
163 | fn error(e: &str) -> TokenStream { |
TokenStream::from_str(&format!("::core::compile_error!{{\"{}\"}}", e.escape_default())).unwrap()
165 | } |
166 | |
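/// Like [`error`], but the `compile_error!` invocation is spanned at `tt` (or at the call
/// site when `tt` is `None`) so the diagnostic points at the offending token.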
167 | fn compile_error(msg: &str, tt: Option<TokenTree>) -> TokenStream { |
let span = tt.as_ref().map_or_else(proc_macro::Span::call_site, TokenTree::span);
169 | use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing}; |
170 | use std::iter::FromIterator; |
171 | TokenStream::from_iter(vec![ |
172 | TokenTree::Ident(Ident::new("compile_error" , span)), |
173 | TokenTree::Punct({ |
174 | let mut punct = Punct::new('!' , Spacing::Alone); |
175 | punct.set_span(span); |
176 | punct |
177 | }), |
178 | TokenTree::Group({ |
179 | let mut group = Group::new(Delimiter::Brace, { |
180 | TokenStream::from_iter([TokenTree::Literal({ |
181 | let mut string = Literal::string(msg); |
182 | string.set_span(span); |
183 | string |
184 | })]) |
185 | }); |
186 | group.set_span(span); |
187 | group |
188 | }), |
189 | ]) |
190 | } |
191 | |
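/// Parsed arguments of the `document_features!` macro.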
192 | #[derive (Default)] |
193 | struct Args { |
194 | feature_label: Option<String>, |
195 | } |
196 | |
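/// Parse the macro arguments, accepting either nothing or `feature_label = "<format string>"`.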
197 | fn parse_args(input: TokenStream) -> Result<Args, TokenStream> { |
198 | let mut token_trees = input.into_iter().fuse(); |
199 | |
200 | // parse the key, ensuring that it is the identifier `feature_label` |
201 | match token_trees.next() { |
202 | None => return Ok(Args::default()), |
203 | Some(TokenTree::Ident(ident)) if ident.to_string() == "feature_label" => (), |
204 | tt => return Err(compile_error("expected `feature_label`" , tt)), |
205 | } |
206 | |
207 | // parse a single equal sign `=` |
208 | match token_trees.next() { |
209 | Some(TokenTree::Punct(p)) if p.as_char() == '=' => (), |
210 | tt => return Err(compile_error("expected `=`" , tt)), |
211 | } |
212 | |
213 | // parse the value, ensuring that it is a string literal containing the substring `"{feature}"` |
214 | let feature_label; |
215 | if let Some(tt) = token_trees.next() { |
216 | match litrs::StringLit::<String>::try_from(&tt) { |
217 | Ok(string_lit) if string_lit.value().contains("{feature}" ) => { |
218 | feature_label = string_lit.value().to_string() |
219 | } |
220 | _ => { |
221 | return Err(compile_error( |
222 | "expected a string literal containing the substring \"{feature} \"" , |
223 | Some(tt), |
224 | )) |
225 | } |
226 | } |
227 | } else { |
228 | return Err(compile_error( |
229 | "expected a string literal containing the substring \"{feature} \"" , |
230 | None, |
231 | )); |
232 | } |
233 | |
234 | // ensure there is nothing left after the format string |
235 | if let tt @ Some(_) = token_trees.next() { |
236 | return Err(compile_error("unexpected token after the format string" , tt)); |
237 | } |
238 | |
239 | Ok(Args { feature_label: Some(feature_label) }) |
240 | } |
241 | |
242 | /// Produce a literal string containing documentation extracted from Cargo.toml |
243 | /// |
244 | /// See the [crate] documentation for details |
245 | #[proc_macro ] |
246 | pub fn document_features(tokens: TokenStream) -> TokenStream { |
247 | parse_args(tokens) |
248 | .and_then(|args| document_features_impl(&args)) |
.unwrap_or_else(std::convert::identity)
250 | } |
251 | |
252 | fn document_features_impl(args: &Args) -> Result<TokenStream, TokenStream> { |
let path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
let mut cargo_toml = std::fs::read_to_string(Path::new(&path).join("Cargo.toml"))
.map_err(|e| error(&format!("Can't open Cargo.toml: {:?}", e)))?;
256 | |
257 | if !has_doc_comments(&cargo_toml) { |
258 | // On crates.io, Cargo.toml is usually "normalized" and stripped of all comments. |
259 | // The original Cargo.toml has been renamed Cargo.toml.orig |
if let Ok(orig) = std::fs::read_to_string(Path::new(&path).join("Cargo.toml.orig")) {
261 | if has_doc_comments(&orig) { |
262 | cargo_toml = orig; |
263 | } |
264 | } |
265 | } |
266 | |
let result = process_toml(&cargo_toml, args).map_err(|e| error(&e))?;
268 | Ok(std::iter::once(proc_macro::TokenTree::from(proc_macro::Literal::string(&result))).collect()) |
269 | } |
270 | |
/// Check if the Cargo.toml has comments that look like doc comments.
272 | fn has_doc_comments(cargo_toml: &str) -> bool { |
273 | let mut lines = cargo_toml.lines().map(str::trim); |
274 | while let Some(line) = lines.next() { |
275 | if line.starts_with("## " ) || line.starts_with("#! " ) { |
276 | return true; |
277 | } |
let before_comment = line.split_once('#').map_or(line, |(before, _)| before);
if line.starts_with("#") {
continue;
}
if let Some((_, mut quote)) = before_comment.split_once("\"\"\"") {
283 | loop { |
284 | // skip slashes. |
if let Some((_, s)) = quote.split_once('\\') {
quote = s.strip_prefix('\\').or_else(|| s.strip_prefix('"')).unwrap_or(s);
287 | continue; |
288 | } |
289 | // skip quotes. |
if let Some((_, out_quote)) = quote.split_once("\"\"\"") {
291 | let out_quote = out_quote.trim_start_matches('"' ); |
292 | let out_quote = |
293 | out_quote.split_once('#' ).map_or(out_quote, |(before, _)| before); |
if let Some((_, q)) = out_quote.split_once("\"\"\"") {
295 | quote = q; |
296 | continue; |
297 | } |
298 | break; |
299 | }; |
300 | match lines.next() { |
301 | Some(l) => quote = l, |
302 | None => return false, |
303 | } |
304 | } |
305 | } |
306 | } |
307 | false |
308 | } |
309 | |
#[test]
fn test_has_doc_comment() {
assert!(has_doc_comments("foo\nbar\n## comment\nddd"));
assert!(!has_doc_comments("foo\nbar\n#comment\nddd"));
314 | assert!(!has_doc_comments( |
315 | r#" |
316 | [[package.metadata.release.pre-release-replacements]] |
317 | exactly = 1 # not a doc comment |
318 | file = "CHANGELOG.md" |
319 | replace = """ |
320 | <!-- next-header --> |
321 | ## [Unreleased] - ReleaseDate |
322 | """ |
323 | search = "<!-- next-header -->" |
324 | array = ["""foo""", """ |
325 | bar""", """eee |
326 | ## not a comment |
327 | """] |
328 | "# |
329 | )); |
330 | assert!(has_doc_comments( |
331 | r#" |
332 | [[package.metadata.release.pre-release-replacements]] |
333 | exactly = 1 # """ |
334 | file = "CHANGELOG.md" |
335 | replace = """ |
336 | <!-- next-header --> |
337 | ## [Unreleased] - ReleaseDate |
338 | """ |
339 | search = "<!-- next-header -->" |
340 | array = ["""foo""", """ |
341 | bar""", """eee |
342 | ## not a comment |
343 | """] |
344 | ## This is a comment |
345 | feature = "45" |
346 | "# |
347 | )); |
348 | |
349 | assert!(!has_doc_comments( |
350 | r#" |
351 | [[package.metadata.release.pre-release-replacements]] |
352 | value = """" string \""" |
353 | ## within the string |
354 | \"""" |
355 | another_string = """"" # """ |
356 | ## also within""" |
357 | "# |
358 | )); |
359 | |
360 | assert!(has_doc_comments( |
361 | r#" |
362 | [[package.metadata.release.pre-release-replacements]] |
363 | value = """" string \""" |
364 | ## within the string |
365 | \"""" |
366 | another_string = """"" # """ |
367 | ## also within""" |
368 | ## out of the string |
369 | foo = bar |
370 | "# |
371 | )); |
372 | } |
373 | |
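/// Recursively collect into `collected` the features transitively enabled by `feature`,
/// following the edges in `feature_dependencies`.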
374 | fn dependents( |
375 | feature_dependencies: &HashMap<String, Vec<String>>, |
376 | feature: &str, |
377 | collected: &mut HashSet<String>, |
378 | ) { |
379 | if collected.contains(feature) { |
380 | return; |
381 | } |
382 | collected.insert(feature.to_string()); |
if let Some(dependencies) = feature_dependencies.get(feature) {
for dependency in dependencies {
dependents(feature_dependencies, dependency, collected);
386 | } |
387 | } |
388 | } |
389 | |
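/// Parse the bracketed dependency list of a feature (e.g. `["foo", "bar"]`) and iterate
/// over the unquoted names it contains.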
390 | fn parse_feature_deps<'a>( |
391 | s: &'a str, |
392 | dep: &str, |
393 | ) -> Result<impl Iterator<Item = String> + 'a, String> { |
Ok(s.trim()
.strip_prefix('[')
.and_then(|r| r.strip_suffix(']'))
.ok_or_else(|| format!("Parse error while parsing dependency {}", dep))?
.split(',')
.map(|d| d.trim().trim_matches(|c| c == '"' || c == '\'').trim().to_string())
.filter(|d| !d.is_empty()))
401 | } |
402 | |
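/// Extract the feature documentation from the Cargo.toml source in `cargo_toml` and
/// render it as a markdown string.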
403 | fn process_toml(cargo_toml: &str, args: &Args) -> Result<String, String> { |
404 | // Get all lines between the "[features]" and the next block |
405 | let mut lines = cargo_toml |
406 | .lines() |
407 | .map(str::trim) |
408 | // and skip empty lines and comments that are not docs comments |
409 | .filter(|l| { |
410 | !l.is_empty() && (!l.starts_with('#' ) || l.starts_with("##" ) || l.starts_with("#!" )) |
411 | }); |
412 | let mut top_comment = String::new(); |
413 | let mut current_comment = String::new(); |
414 | let mut features = vec![]; |
415 | let mut default_features = HashSet::new(); |
416 | let mut current_table = "" ; |
417 | let mut dependencies = HashMap::new(); |
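// Walk the manifest line by line, tracking the current table and accumulating
// `#!` comments in `top_comment` and `##` comments in `current_comment`.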
418 | while let Some(line) = lines.next() { |
419 | if let Some(x) = line.strip_prefix("#!" ) { |
420 | if !x.is_empty() && !x.starts_with(' ' ) { |
421 | continue; // it's not a doc comment |
422 | } |
423 | if !current_comment.is_empty() { |
424 | return Err("Cannot mix ## and #! comments between features." .into()); |
425 | } |
426 | if top_comment.is_empty() && !features.is_empty() { |
top_comment = "\n".into();
}
writeln!(top_comment, "{}", x).unwrap();
430 | } else if let Some(x) = line.strip_prefix("##" ) { |
431 | if !x.is_empty() && !x.starts_with(' ' ) { |
432 | continue; // it's not a doc comment |
433 | } |
writeln!(current_comment, "{}", x).unwrap();
435 | } else if let Some(table) = line.strip_prefix('[' ) { |
436 | current_table = table |
437 | .split_once(']' ) |
438 | .map(|(t, _)| t.trim()) |
439 | .ok_or_else(|| format!("Parse error while parsing line: {}" , line))?; |
440 | if !current_comment.is_empty() { |
441 | #[allow (clippy::unnecessary_lazy_evaluations)] |
442 | let dep = current_table |
443 | .rsplit_once('.' ) |
444 | .and_then(|(table, dep)| table.trim().ends_with("dependencies" ).then(|| dep)) |
.ok_or_else(|| format!("Not a feature: `{}`", line))?;
446 | features.push(( |
447 | dep.trim(), |
448 | std::mem::take(&mut top_comment), |
449 | std::mem::take(&mut current_comment), |
450 | )); |
451 | } |
452 | } else if let Some((dep, rest)) = line.split_once('=' ) { |
453 | let dep = dep.trim().trim_matches('"' ); |
454 | let rest = get_balanced(rest, &mut lines) |
455 | .map_err(|e| format!("Parse error while parsing value {}: {}" , dep, e))?; |
456 | if current_table == "features" { |
457 | if dep == "default" { |
458 | default_features.extend(parse_feature_deps(&rest, dep)?); |
459 | } else { |
460 | for d in parse_feature_deps(&rest, dep)? { |
461 | dependencies |
462 | .entry(dep.to_string()) |
463 | .or_insert_with(Vec::new) |
464 | .push(d.clone()); |
465 | } |
466 | } |
467 | } |
468 | if !current_comment.is_empty() { |
469 | if current_table.ends_with("dependencies" ) { |
470 | if !rest |
471 | .split_once("optional" ) |
472 | .and_then(|(_, r)| r.trim().strip_prefix('=' )) |
473 | .map_or(false, |r| r.trim().starts_with("true" )) |
474 | { |
475 | return Err(format!("Dependency {} is not an optional dependency" , dep)); |
476 | } |
477 | } else if current_table != "features" { |
478 | return Err(format!( |
479 | r#"Comment cannot be associated with a feature: " {}""# , |
480 | current_comment.trim() |
481 | )); |
482 | } |
483 | features.push(( |
484 | dep, |
485 | std::mem::take(&mut top_comment), |
486 | std::mem::take(&mut current_comment), |
487 | )); |
488 | } |
489 | } |
490 | } |
491 | let df = default_features.iter().cloned().collect::<Vec<_>>(); |
492 | for feature in df { |
493 | let mut resolved = HashSet::new(); |
494 | dependents(&dependencies, &feature, &mut resolved); |
495 | default_features.extend(resolved.into_iter()); |
496 | } |
497 | if !current_comment.is_empty() { |
498 | return Err("Found comment not associated with a feature" .into()); |
499 | } |
500 | if features.is_empty() { |
501 | return Ok("*No documented features in Cargo.toml*" .into()); |
502 | } |
503 | let mut result = String::new(); |
504 | for (f, top, comment) in features { |
505 | let default = if default_features.contains(f) { " *(enabled by default)*" } else { "" }; |
506 | let feature_label = args.feature_label.as_deref().unwrap_or("**`{feature}`**" ); |
507 | let comment = if comment.trim().is_empty() { |
508 | String::new() |
509 | } else { |
510 | format!(" — {}" , comment.trim_end()) |
511 | }; |
512 | |
513 | writeln!( |
514 | result, |
515 | " {}* {}{}{}" , |
516 | top, |
517 | feature_label.replace("{feature}" , f), |
518 | default, |
519 | comment, |
520 | ) |
521 | .unwrap(); |
522 | } |
523 | result += &top_comment; |
524 | Ok(result) |
525 | } |
526 | |
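/// Collect a TOML value that may span several lines, until all brackets and braces are
/// balanced, ignoring `#` comments and bracket characters inside quoted strings.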
527 | fn get_balanced<'a>( |
528 | first_line: &'a str, |
529 | lines: &mut impl Iterator<Item = &'a str>, |
530 | ) -> Result<Cow<'a, str>, String> { |
531 | let mut line = first_line; |
532 | let mut result = Cow::from("" ); |
533 | |
534 | let mut in_quote = false; |
535 | let mut level = 0; |
536 | loop { |
537 | let mut last_slash = false; |
538 | for (idx, b) in line.as_bytes().iter().enumerate() { |
539 | if last_slash { |
540 | last_slash = false |
541 | } else if in_quote { |
542 | match b { |
b'\\' => last_slash = true,
b'"' | b'\'' => in_quote = false,
545 | _ => (), |
546 | } |
547 | } else { |
548 | match b { |
b'\\' => last_slash = true,
550 | b'"' => in_quote = true, |
551 | b'{' | b'[' => level += 1, |
552 | b'}' | b']' if level == 0 => return Err("unbalanced source" .into()), |
553 | b'}' | b']' => level -= 1, |
554 | b'#' => { |
555 | line = &line[..idx]; |
556 | break; |
557 | } |
558 | _ => (), |
559 | } |
560 | } |
561 | } |
562 | if result.len() == 0 { |
563 | result = Cow::from(line); |
564 | } else { |
565 | *result.to_mut() += line; |
566 | } |
567 | if level == 0 { |
568 | return Ok(result); |
569 | } |
570 | line = if let Some(l) = lines.next() { |
571 | l |
572 | } else { |
573 | return Err("unbalanced source" .into()); |
574 | }; |
575 | } |
576 | } |
577 | |
578 | #[test ] |
579 | fn test_get_balanced() { |
580 | assert_eq!( |
581 | get_balanced( |
582 | "{" , |
583 | &mut IntoIterator::into_iter(["a" , "{ abc[], #ignore" , " def }" , "}" , "xxx" ]) |
584 | ), |
585 | Ok("{a{ abc[], def }}" .into()) |
586 | ); |
587 | assert_eq!( |
588 | get_balanced("{ foo = \"{# \" } #ignore" , &mut IntoIterator::into_iter(["xxx" ])), |
589 | Ok("{ foo = \"{# \" } " .into()) |
590 | ); |
591 | assert_eq!( |
592 | get_balanced("]" , &mut IntoIterator::into_iter(["[" ])), |
593 | Err("unbalanced source" .into()) |
594 | ); |
595 | } |
596 | |
597 | #[cfg (feature = "self-test" )] |
598 | #[proc_macro ] |
599 | #[doc (hidden)] |
600 | /// Helper macro for the tests. Do not use |
601 | pub fn self_test_helper(input: TokenStream) -> TokenStream { |
602 | let mut code = String::new(); |
603 | for line in (&input).to_string().trim_matches(|c| c == '"' || c == '#' ).lines() { |
// Rustdoc removes the lines that start with `# ` and removes one `#` from lines that start with `#` followed by a space.
605 | // We need to re-add the `#` that was removed by rustdoc to get the original. |
606 | if line.strip_prefix('#' ).map_or(false, |x| x.is_empty() || x.starts_with(' ' )) { |
607 | code += "#" ; |
608 | } |
609 | code += line; |
610 | code += " \n" ; |
611 | } |
612 | process_toml(&code, &Args::default()).map_or_else( |
613 | |e| error(&e), |
614 | |r| std::iter::once(proc_macro::TokenTree::from(proc_macro::Literal::string(&r))).collect(), |
615 | ) |
616 | } |
617 | |
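// With the `self-test` feature enabled, `self_test!` turns the example in the crate
// documentation into a doc-test asserting that `self_test_helper!` produces the expected
// markdown; without it, the example and its expected output are simply rendered side by side.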
618 | #[cfg (feature = "self-test" )] |
619 | macro_rules! self_test { |
620 | (#[doc = $toml:literal] => #[doc = $md:literal]) => { |
621 | concat!( |
622 | " \n`````rust \n\ |
623 | fn normalize_md(md : &str) -> String { |
624 | md.lines().skip_while(|l| l.is_empty()).map(|l| l.trim()) |
625 | .collect::<Vec<_>>().join( \"\\n \") |
626 | } |
627 | assert_eq!(normalize_md(document_features::self_test_helper!(" , |
628 | stringify!($toml), |
629 | ")), normalize_md(" , |
630 | stringify!($md), |
631 | ")); \n````` \n\n" |
632 | ) |
633 | }; |
634 | } |
635 | |
636 | #[cfg (not(feature = "self-test" ))] |
637 | macro_rules! self_test { |
638 | (#[doc = $toml:literal] => #[doc = $md:literal]) => { |
639 | concat!( |
640 | "This contents in Cargo.toml: \n`````toml" , |
641 | $toml, |
642 | " \n````` \n Generates the following: \n\ |
643 | <table><tr><th>Preview</th></tr><tr><td> \n\n" , |
644 | $md, |
645 | " \n</td></tr></table> \n\n \n" , |
646 | ) |
647 | }; |
648 | } |
649 | |
650 | use self_test; |
651 | |
652 | // The following struct is inserted only during generation of the documentation in order to exploit doc-tests. |
653 | // These doc-tests are used to check that invalid arguments to the `document_features!` macro cause a compile time error. |
// For a more principled way of testing compilation errors, maybe investigate <https://docs.rs/trybuild>.
655 | // |
656 | /// ```rust |
657 | /// #![doc = document_features::document_features!()] |
658 | /// #![doc = document_features::document_features!(feature_label = "**`{feature}`**")] |
659 | /// #![doc = document_features::document_features!(feature_label = r"**`{feature}`**")] |
660 | /// #![doc = document_features::document_features!(feature_label = r#"**`{feature}`**"#)] |
661 | /// #![doc = document_features::document_features!(feature_label = "<span class=\"stab portability\"><code>{feature}</code></span>")] |
662 | /// #![doc = document_features::document_features!(feature_label = r#"<span class="stab portability"><code>{feature}</code></span>"#)] |
663 | /// ``` |
664 | /// ```compile_fail |
665 | /// #![doc = document_features::document_features!(feature_label > "<span>{feature}</span>")] |
666 | /// ``` |
667 | /// ```compile_fail |
668 | /// #![doc = document_features::document_features!(label = "<span>{feature}</span>")] |
669 | /// ``` |
670 | /// ```compile_fail |
671 | /// #![doc = document_features::document_features!(feature_label = "{feat}")] |
672 | /// ``` |
673 | /// ```compile_fail |
674 | /// #![doc = document_features::document_features!(feature_label = 3.14)] |
675 | /// ``` |
676 | /// ```compile_fail |
677 | /// #![doc = document_features::document_features!(feature_label = )] |
678 | /// ``` |
679 | /// ```compile_fail |
680 | /// #![doc = document_features::document_features!(feature_label = "**`{feature}`**" extra)] |
681 | /// ``` |
682 | #[cfg (doc)] |
683 | struct FeatureLabelCompilationTest; |
684 | |
685 | #[cfg (test)] |
686 | mod tests { |
687 | use super::{process_toml, Args}; |
688 | |
689 | #[track_caller ] |
690 | fn test_error(toml: &str, expected: &str) { |
691 | let err = process_toml(toml, &Args::default()).unwrap_err(); |
692 | assert!(err.contains(expected), "{:?} does not contain {:?}" , err, expected) |
693 | } |
694 | |
695 | #[test ] |
696 | fn only_get_balanced_in_correct_table() { |
697 | process_toml( |
698 | r#" |
699 | |
700 | [package.metadata.release] |
701 | pre-release-replacements = [ |
702 | {test=\"\#\# \"}, |
703 | ] |
704 | [abcd] |
705 | [features]#xyz |
706 | #! abc |
707 | # |
708 | ### |
709 | #! def |
710 | #! |
711 | ## 123 |
712 | ## 456 |
713 | feat1 = ["plop"] |
714 | #! ghi |
715 | no_doc = [] |
716 | ## |
717 | feat2 = ["momo"] |
718 | #! klm |
719 | default = ["feat1", "something_else"] |
720 | #! end |
721 | "# , |
722 | &Args::default(), |
723 | ) |
724 | .unwrap(); |
725 | } |
726 | |
727 | #[test ] |
728 | fn no_features() { |
729 | let r = process_toml( |
730 | r#" |
731 | [features] |
732 | [dependencies] |
733 | foo = 4; |
734 | "# , |
735 | &Args::default(), |
736 | ) |
737 | .unwrap(); |
738 | assert_eq!(r, "*No documented features in Cargo.toml*" ); |
739 | } |
740 | |
741 | #[test ] |
742 | fn no_features2() { |
743 | let r = process_toml( |
744 | r#" |
745 | [packages] |
746 | [dependencies] |
747 | "# , |
748 | &Args::default(), |
749 | ) |
750 | .unwrap(); |
751 | assert_eq!(r, "*No documented features in Cargo.toml*" ); |
752 | } |
753 | |
754 | #[test ] |
755 | fn parse_error3() { |
756 | test_error( |
757 | r#" |
758 | [features] |
759 | ff = [] |
760 | [abcd |
761 | efgh |
762 | [dependencies] |
763 | "# , |
764 | "Parse error while parsing line: [abcd" , |
765 | ); |
766 | } |
767 | |
768 | #[test ] |
769 | fn parse_error4() { |
770 | test_error( |
771 | r#" |
772 | [features] |
773 | ## dd |
774 | ## ff |
775 | #! ee |
776 | ## ff |
777 | "# , |
778 | "Cannot mix" , |
779 | ); |
780 | } |
781 | |
782 | #[test ] |
783 | fn parse_error5() { |
784 | test_error( |
785 | r#" |
786 | [features] |
787 | ## dd |
788 | "# , |
789 | "not associated with a feature" , |
790 | ); |
791 | } |
792 | |
793 | #[test ] |
794 | fn parse_error6() { |
795 | test_error( |
796 | r#" |
797 | [features] |
798 | # ff |
799 | foo = [] |
800 | default = [ |
801 | #ffff |
802 | # ff |
803 | "# , |
804 | "Parse error while parsing value default" , |
805 | ); |
806 | } |
807 | |
808 | #[test ] |
809 | fn parse_error7() { |
810 | test_error( |
811 | r#" |
812 | [features] |
813 | # f |
814 | foo = [ x = { ] |
815 | bar = [] |
816 | "# , |
817 | "Parse error while parsing value foo" , |
818 | ); |
819 | } |
820 | |
821 | #[test ] |
822 | fn not_a_feature1() { |
823 | test_error( |
824 | r#" |
825 | ## hallo |
826 | [features] |
827 | "# , |
828 | "Not a feature: `[features]`" , |
829 | ); |
830 | } |
831 | |
832 | #[test ] |
833 | fn not_a_feature2() { |
834 | test_error( |
835 | r#" |
836 | [package] |
837 | ## hallo |
838 | foo = [] |
839 | "# , |
840 | "Comment cannot be associated with a feature: \"hallo \"" , |
841 | ); |
842 | } |
843 | |
844 | #[test ] |
845 | fn non_optional_dep1() { |
846 | test_error( |
847 | r#" |
848 | [dev-dependencies] |
849 | ## Not optional |
850 | foo = { version = "1.2", optional = false } |
851 | "# , |
852 | "Dependency foo is not an optional dependency" , |
853 | ); |
854 | } |
855 | |
856 | #[test ] |
857 | fn non_optional_dep2() { |
858 | test_error( |
859 | r#" |
860 | [dev-dependencies] |
861 | ## Not optional |
862 | foo = { version = "1.2" } |
863 | "# , |
864 | "Dependency foo is not an optional dependency" , |
865 | ); |
866 | } |
867 | |
868 | #[test ] |
869 | fn basic() { |
870 | let toml = r#" |
871 | [abcd] |
872 | [features]#xyz |
873 | #! abc |
874 | # |
875 | ### |
876 | #! def |
877 | #! |
878 | ## 123 |
879 | ## 456 |
880 | feat1 = ["plop"] |
881 | #! ghi |
882 | no_doc = [] |
883 | ## |
884 | feat2 = ["momo"] |
885 | #! klm |
886 | default = ["feat1", "something_else"] |
887 | #! end |
888 | "# ; |
889 | let parsed = process_toml(toml, &Args::default()).unwrap(); |
890 | assert_eq!( |
891 | parsed, |
892 | " abc \n def \n\n* **`feat1`** *(enabled by default)* — 123 \n 456 \n\n ghi \n* **`feat2`** \n\n klm \n end \n" |
893 | ); |
894 | let parsed = process_toml( |
895 | toml, |
896 | &Args { |
897 | feature_label: Some( |
898 | "<span class= \"stab portability \"><code>{feature}</code></span>" .into(), |
899 | ), |
900 | }, |
901 | ) |
902 | .unwrap(); |
903 | assert_eq!( |
904 | parsed, |
905 | " abc \n def \n\n* <span class= \"stab portability \"><code>feat1</code></span> *(enabled by default)* — 123 \n 456 \n\n ghi \n* <span class= \"stab portability \"><code>feat2</code></span> \n\n klm \n end \n" |
906 | ); |
907 | } |
908 | |
909 | #[test ] |
910 | fn dependencies() { |
911 | let toml = r#" |
912 | #! top |
913 | [dev-dependencies] #yo |
914 | ## dep1 |
915 | dep1 = { version="1.2", optional=true} |
916 | #! yo |
917 | dep2 = "1.3" |
918 | ## dep3 |
919 | [target.'cfg(unix)'.build-dependencies.dep3] |
920 | version = "42" |
921 | optional = true |
922 | "# ; |
923 | let parsed = process_toml(toml, &Args::default()).unwrap(); |
924 | assert_eq!(parsed, " top \n* **`dep1`** — dep1 \n\n yo \n* **`dep3`** — dep3 \n" ); |
925 | let parsed = process_toml( |
926 | toml, |
927 | &Args { |
928 | feature_label: Some( |
929 | "<span class= \"stab portability \"><code>{feature}</code></span>" .into(), |
930 | ), |
931 | }, |
932 | ) |
933 | .unwrap(); |
934 | assert_eq!(parsed, " top \n* <span class= \"stab portability \"><code>dep1</code></span> — dep1 \n\n yo \n* <span class= \"stab portability \"><code>dep3</code></span> — dep3 \n" ); |
935 | } |
936 | |
937 | #[test ] |
938 | fn multi_lines() { |
939 | let toml = r#" |
940 | [package.metadata.foo] |
941 | ixyz = [ |
942 | ["array"], |
943 | [ |
944 | "of", |
945 | "arrays" |
946 | ] |
947 | ] |
948 | [dev-dependencies] |
949 | ## dep1 |
950 | dep1 = { |
951 | version="1.2-}", |
952 | optional=true |
953 | } |
954 | [features] |
955 | default = [ |
956 | "goo", |
957 | "\"]", |
958 | "bar", |
959 | ] |
960 | ## foo |
961 | foo = [ |
962 | "bar" |
963 | ] |
964 | ## bar |
965 | bar = [ |
966 | |
967 | ] |
968 | "# ; |
969 | let parsed = process_toml(toml, &Args::default()).unwrap(); |
970 | assert_eq!( |
971 | parsed, |
972 | "* **`dep1`** — dep1 \n* **`foo`** — foo \n* **`bar`** *(enabled by default)* — bar \n" |
973 | ); |
974 | let parsed = process_toml( |
975 | toml, |
976 | &Args { |
977 | feature_label: Some( |
978 | "<span class= \"stab portability \"><code>{feature}</code></span>" .into(), |
979 | ), |
980 | }, |
981 | ) |
982 | .unwrap(); |
983 | assert_eq!( |
984 | parsed, |
985 | "* <span class= \"stab portability \"><code>dep1</code></span> — dep1 \n* <span class= \"stab portability \"><code>foo</code></span> — foo \n* <span class= \"stab portability \"><code>bar</code></span> *(enabled by default)* — bar \n" |
986 | ); |
987 | } |
988 | |
989 | #[test ] |
990 | fn dots_in_feature() { |
991 | let toml = r#" |
992 | [features] |
993 | ## This is a test |
994 | "teßt." = [] |
995 | default = ["teßt."] |
996 | [dependencies] |
997 | ## A dep |
998 | "dep" = { version = "123", optional = true } |
999 | "# ; |
1000 | let parsed = process_toml(toml, &Args::default()).unwrap(); |
1001 | assert_eq!( |
1002 | parsed, |
1003 | "* **`teßt.`** *(enabled by default)* — This is a test \n* **`dep`** — A dep \n" |
1004 | ); |
1005 | let parsed = process_toml( |
1006 | toml, |
1007 | &Args { |
1008 | feature_label: Some( |
1009 | "<span class= \"stab portability \"><code>{feature}</code></span>" .into(), |
1010 | ), |
1011 | }, |
1012 | ) |
1013 | .unwrap(); |
1014 | assert_eq!( |
1015 | parsed, |
1016 | "* <span class= \"stab portability \"><code>teßt.</code></span> *(enabled by default)* — This is a test \n* <span class= \"stab portability \"><code>dep</code></span> — A dep \n" |
1017 | ); |
1018 | } |
1019 | |
1020 | #[test ] |
1021 | fn recursive_default() { |
1022 | let toml = r#" |
1023 | [features] |
1024 | default=["qqq"] |
1025 | |
1026 | ## Qqq |
1027 | qqq=["www"] |
1028 | |
1029 | ## Www |
1030 | www=[] |
1031 | "# ; |
1032 | let parsed = process_toml(toml, &Args::default()).unwrap(); |
1033 | assert_eq!(parsed, "* **`qqq`** *(enabled by default)* — Qqq \n* **`www`** *(enabled by default)* — Www \n" ); |
1034 | } |
1035 | } |
1036 | |