1use proc_macro2::TokenStream;
2use quote::ToTokens;
3use syn::{parse2, ItemMod};
4
5use crate::BindgenOptions;
6
7mod merge_extern_blocks;
8mod sort_semantically;
9
10use merge_extern_blocks::merge_extern_blocks;
11use sort_semantically::sort_semantically;
12
/// A single post-processing transformation over the generated bindings.
///
/// Passes operate on the whole output re-parsed as a `syn::ItemMod` so they
/// can restructure items freely (see `postprocessing` below).
struct PostProcessingPass {
    // Predicate deciding, from the user's options, whether this pass runs.
    should_run: fn(&BindgenOptions) -> bool,
    // The transformation itself, applied in place to the wrapper module.
    run: fn(&mut ItemMod),
}
17
// TODO: This can be a const fn when mutable references are allowed in const
// context.
/// Builds a `PostProcessingPass` from a single identifier that names both
/// the `BindgenOptions` boolean field enabling the pass and the function
/// implementing it (e.g. `pass!(sort_semantically)`).
macro_rules! pass {
    ($pass:ident) => {
        PostProcessingPass {
            should_run: |options| options.$pass,
            run: |item_mod| $pass(item_mod),
        }
    };
}
28
/// All available post-processing passes, applied in this order when enabled.
/// Note: `merge_extern_blocks` runs before `sort_semantically` so sorting
/// sees the merged blocks.
const PASSES: &[PostProcessingPass] =
    &[pass!(merge_extern_blocks), pass!(sort_semantically)];
31
32pub(crate) fn postprocessing(
33 items: Vec<TokenStream>,
34 options: &BindgenOptions,
35) -> TokenStream {
36 let require_syn = PASSES.iter().any(|pass| (pass.should_run)(options));
37 if !require_syn {
38 return items.into_iter().collect();
39 }
40 let module_wrapped_tokens =
41 quote!(mod wrapper_for_postprocessing_hack { #( #items )* });
42
43 // This syn business is a hack, for now. This means that we are re-parsing already
44 // generated code using `syn` (as opposed to `quote`) because `syn` provides us more
45 // control over the elements.
46 // One caveat is that some of the items coming from `quote`d output might have
47 // multiple items within them. Hence, we have to wrap the incoming in a `mod`.
48 // The `unwrap` here is deliberate because bindgen should generate valid rust items at all
49 // times.
50 let mut item_mod = parse2::<ItemMod>(module_wrapped_tokens).unwrap();
51
52 for pass in PASSES {
53 if (pass.should_run)(options) {
54 (pass.run)(&mut item_mod);
55 }
56 }
57
58 let synful_items = item_mod
59 .content
60 .map(|(_, items)| items)
61 .unwrap_or_default()
62 .into_iter()
63 .map(|item| item.into_token_stream());
64
65 quote! { #( #synful_items )* }
66}
67