1 | // Take a look at the license at the top of the repository in the LICENSE file. |
2 | |
3 | use std::{fmt, io}; |
4 | |
5 | use crate::js::token::{self, Keyword, ReservedChar, Token, Tokens}; |
6 | use crate::js::utils::{get_array, get_variable_name_and_value_positions, VariableNameGenerator}; |
7 | |
8 | use std::collections::{HashMap, HashSet}; |
9 | |
10 | /*#[derive(Debug, Clone, PartialEq, Eq)] |
11 | enum Elem<'a> { |
12 | Function(Function<'a>), |
13 | Block(Block<'a>), |
14 | Variable(Variable<'a>), |
15 | Condition(token::Condition), |
16 | Loop(Loop<'a>), |
17 | Operation(Operation<'a>), |
18 | } |
19 | |
20 | impl<'a> Elem<'a> { |
21 | fn is_condition(&self) -> bool { |
22 | match *self { |
23 | Elem::Condition(_) => true, |
24 | _ => false, |
25 | } |
26 | } |
27 | } |
28 | |
29 | #[derive(Clone, Copy, PartialEq, Eq, Debug)] |
30 | enum ConditionType { |
31 | If, |
32 | ElseIf, |
33 | Else, |
34 | Ternary, |
35 | } |
36 | |
37 | #[derive(Clone, PartialEq, Eq, Debug)] |
38 | struct Block<'a> { |
39 | elems: Vec<Elem<'a>>, |
40 | } |
41 | |
42 | #[derive(Clone, PartialEq, Eq, Debug)] |
43 | struct Argument<'a> { |
44 | name: &'a str, |
45 | } |
46 | |
47 | #[derive(Clone, PartialEq, Eq, Debug)] |
48 | struct Function<'a> { |
49 | name: Option<&'a str>, |
50 | args: Vec<Argument<'a>>, |
51 | block: Block<'a>, |
52 | } |
53 | |
54 | #[derive(Clone, PartialEq, Eq, Debug)] |
55 | struct Variable<'a> { |
56 | name: &'a str, |
57 | value: Option<&'a str>, |
58 | } |
59 | |
60 | /*struct Condition<'a> { |
61 | ty_: ConditionType, |
62 | condition: &'a str, |
63 | block: Block<'a>, |
64 | }*/ |
65 | |
66 | #[derive(Clone, Copy, PartialEq, Eq, Debug)] |
67 | enum LoopType { |
68 | Do, |
69 | For, |
70 | While, |
71 | } |
72 | |
73 | #[derive(Clone, PartialEq, Eq, Debug)] |
74 | struct Loop<'a> { |
75 | ty_: LoopType, |
76 | condition: Vec<Elem<'a>>, |
77 | block: Block<'a>, |
78 | } |
79 | |
80 | #[derive(Clone, PartialEq, Eq, Debug)] |
81 | struct Operation<'a> { |
82 | content: &'a str, |
83 | } |
84 | |
85 | fn get_while_condition<'a>(tokens: &[token::Token<'a>], pos: &mut usize) -> Result<Vec<Elem<'a>>, String> { |
86 | let tmp = *pos; |
87 | *pos += 1; |
88 | if let Err(e) = match tokens.get(tmp) { |
89 | Some(token::Token::Char(token::ReservedChar::OpenParenthese)) => Ok(()), |
90 | Some(e) => Err(format!("Expected \"(\", found \"{:?}\"", e)), |
91 | None => Err("Expected \"(\", found nothing...".to_owned()), |
92 | } { |
93 | return Err(e); |
94 | } |
95 | let mut elems: Vec<Elem<'a>> = Vec::with_capacity(1); |
96 | |
97 | while let Some(e) = tokens.get(*pos) { |
98 | *pos += 1; |
99 | match e { |
100 | token::Token::Char(token::ReservedChar::CloseParenthese) => return Ok(elems), |
101 | token::Token::Condition(e) => { |
102 | if let Some(cond) = elems.last() { |
103 | if cond.is_condition() { |
104 | return Err(format!("\"{:?}\" cannot follow \"{:?}\"", e, cond)); |
105 | } |
106 | } |
107 | } |
108 | _ => {} |
109 | } |
110 | } |
111 | Err("Expected \")\", found nothing...".to_owned()) |
112 | } |
113 | |
114 | fn get_do<'a>(tokens: &[token::Token<'a>], pos: &mut usize) -> Result<Elem<'a>, String> { |
115 | let tmp = *pos; |
116 | *pos += 1; |
117 | let block = match tokens.get(tmp) { |
118 | Some(token::Token::Char(token::ReservedChar::OpenCurlyBrace)) => get_block(tokens, pos, true), |
119 | Some(e) => Err(format!("Expected \"{{\", found \"{:?}\"", e)), |
120 | None => Err("Expected \"{\", found nothing...".to_owned()), |
121 | }?; |
122 | let tmp = *pos; |
123 | *pos += 1; |
124 | let condition = match tokens.get(tmp) { |
125 | Some(token::Token::Keyword(token::Keyword::While)) => get_while_condition(tokens, pos), |
126 | Some(e) => Err(format!("Expected \"while\", found \"{:?}\"", e)), |
127 | None => Err("Expected \"while\", found nothing...".to_owned()), |
128 | }?; |
129 | let mut loop_ = Loop { |
130 | ty_: LoopType::Do, |
131 | condition: condition, |
132 | block, |
133 | }; |
134 | Ok(Elem::Loop(loop_)) |
135 | } |
136 | |
137 | fn get_block<'a>(tokens: &[token::Token<'a>], pos: &mut usize, |
138 | start_with_paren: bool) -> Result<Block<'a>, String> { |
139 | let mut block = Block { elems: Vec::with_capacity(2) }; |
140 | while let Some(e) = tokens.get(*pos) { |
141 | *pos += 1; |
142 | block.elems.push(match e { |
143 | token::Token::Keyword(token::Keyword::Do) => get_do(tokens, pos), |
144 | token::Token::Char(token::ReservedChar::CloseCurlyBrace) => { |
145 | if start_with_paren { |
146 | return Ok(block); |
147 | } |
148 | return Err("Unexpected \"}\"".to_owned()); |
149 | } |
150 | }?); |
151 | } |
152 | if !start_with_paren { |
153 | Ok(block) |
154 | } else { |
155 | Err("Expected \"}\" at the end of the block but didn't find one...".to_owned()) |
156 | } |
157 | } |
158 | |
159 | fn build_ast<'a>(v: &[token::Token<'a>]) -> Result<Elem<'a>, String> { |
160 | let mut pos = 0; |
161 | |
162 | match get_block(v, &mut pos, false) { |
163 | Ok(ast) => Ok(Elem::Block(ast)), |
164 | Err(e) => Err(e), |
165 | } |
166 | }*/ |
167 | |
168 | /// Minifies a given JS source code. |
169 | /// |
170 | /// # Example |
171 | /// |
172 | /// ```rust |
173 | /// use minifier::js::minify; |
174 | /// |
175 | /// let js = r#" |
176 | /// function forEach(data, func) { |
177 | /// for (var i = 0; i < data.length; ++i) { |
178 | /// func(data[i]); |
179 | /// } |
180 | /// }"# .into(); |
181 | /// let js_minified = minify(js); |
182 | /// assert_eq!( |
183 | /// &js_minified.to_string(), |
184 | /// "function forEach(data,func){for(var i=0;i<data.length;++i){func(data[i])}}" , |
185 | /// ); |
186 | /// ``` |
187 | #[inline ] |
188 | pub fn minify(source: &str) -> Minified<'_> { |
189 | Minified(token::tokenize(source).apply(func:crate::js::clean_tokens)) |
190 | } |
191 | |
/// Result of a call to [`minify`]: a cleaned-up token stream that can be
/// written out or converted to a `String`.
pub struct Minified<'a>(token::Tokens<'a>);
193 | |
impl<'a> Minified<'a> {
    /// Writes the minified output directly to `w`, without building an
    /// intermediate `String`.
    pub fn write<W: io::Write>(self, w: W) -> io::Result<()> {
        self.0.write(w)
    }
}
199 | |
impl<'a> fmt::Display for Minified<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegates to the token list's own `Display` implementation.
        self.0.fmt(f)
    }
}
205 | |
// TODO: No scope handling or anything. Might be nice as a second step to add it...
/// Scans the token list for variable declarations.
///
/// Returns:
/// * a set of the names of variables prefixed with `"r_"` (the prefix used by
///   the generated aggregation variables, so collisions can be avoided), and
/// * a map from a string value to the `(name position, value position)` of the
///   variable it is assigned to, so existing variables can be reused.
fn get_variables_name<'a>(
    tokens: &'a Tokens<'a>,
) -> (HashSet<&'a str>, HashMap<&'a str, (usize, usize)>) {
    let mut ret = HashSet::new();
    let mut variables = HashMap::new();
    let mut pos = 0;

    while pos < tokens.len() {
        if tokens[pos].is_keyword() || tokens[pos].is_other() {
            if let Some((var_pos, Some(value_pos))) =
                get_variable_name_and_value_positions(tokens, pos)
            {
                // Jump straight to the value; the trailing `pos += 1` below
                // then moves past it.
                pos = value_pos;
                if let Some(var_name) = tokens[var_pos].get_other() {
                    // Only names that could clash with generated `r_*`
                    // variables are recorded.
                    if !var_name.starts_with("r_" ) {
                        pos += 1;
                        continue;
                    }
                    ret.insert(var_name);
                }
                if let Some(s) = tokens[value_pos].get_string() {
                    // Remember which variable already holds this string value.
                    variables.insert(s, (var_pos, value_pos));
                }
            }
        }
        pos += 1;
    }
    (ret, variables)
}
236 | |
237 | fn aggregate_strings_inner<'a, 'b: 'a>( |
238 | mut tokens: Tokens<'a>, |
239 | separation_token: Option<Token<'b>>, |
240 | ) -> Tokens<'a> { |
241 | let mut new_vars = Vec::with_capacity(50); |
242 | let mut to_replace: Vec<(usize, usize)> = Vec::new(); |
243 | |
244 | for (var_name, positions) in { |
245 | let mut strs: HashMap<&Token<'_>, Vec<usize>> = HashMap::with_capacity(1000); |
246 | let mut validated: HashMap<&Token<'_>, String> = HashMap::with_capacity(100); |
247 | |
248 | let mut var_gen = VariableNameGenerator::new(Some("r_" ), 2); |
249 | let mut next_name = var_gen.to_string(); |
250 | |
251 | let (all_variables, values) = get_variables_name(&tokens); |
252 | while all_variables.contains(&next_name.as_str()) { |
253 | var_gen.next(); |
254 | next_name = var_gen.to_string(); |
255 | } |
256 | |
257 | for pos in 0..tokens.len() { |
258 | let token = &tokens[pos]; |
259 | if let Some(str_token) = token.get_string() { |
260 | if let Some((var_pos, string_pos)) = values.get(&str_token) { |
261 | if pos != *string_pos { |
262 | to_replace.push((pos, *var_pos)); |
263 | } |
264 | continue; |
265 | } |
266 | let x = strs.entry(token).or_insert_with(|| Vec::with_capacity(1)); |
267 | x.push(pos); |
268 | if x.len() > 1 && validated.get(token).is_none() { |
269 | let len = str_token.len(); |
270 | // Computation here is simple, we declare new variables when creating this so |
271 | // the total of characters must be shorter than: |
272 | // `var r_aa=...;` -> 10 + `r_aa` -> 14 |
273 | if (x.len() + 2/* quotes */) * len |
274 | > next_name.len() + str_token.len() + 6 /* var _=_;*/ + x.len() * next_name.len() |
275 | { |
276 | validated.insert(token, next_name.clone()); |
277 | var_gen.next(); |
278 | next_name = var_gen.to_string(); |
279 | while all_variables.contains(&next_name.as_str()) { |
280 | var_gen.next(); |
281 | next_name = var_gen.to_string(); |
282 | } |
283 | } |
284 | } |
285 | } |
286 | } |
287 | let mut ret = Vec::with_capacity(validated.len()); |
288 | |
289 | // We need this macro to avoid having to sort the set when not testing the crate. |
290 | //#[cfg(test)] |
291 | macro_rules! inner_loop { |
292 | ($x:ident) => {{ |
293 | let mut $x = $x.into_iter().collect::<Vec<_>>(); |
294 | $x.sort_unstable_by(|a, b| a.1.cmp(&b.1)); |
295 | $x |
296 | }}; |
297 | } |
298 | /*#[cfg(not(test))] |
299 | macro_rules! inner_loop { |
300 | ($x:ident) => { |
301 | $x.into_iter() |
302 | } |
303 | }*/ |
304 | |
305 | for (token, var_name) in inner_loop!(validated) { |
306 | ret.push((var_name, strs.remove(&token).unwrap())); |
307 | var_gen.next(); |
308 | } |
309 | ret |
310 | } { |
311 | if new_vars.is_empty() { |
312 | new_vars.push(Token::Keyword(Keyword::Var)); |
313 | } else { |
314 | new_vars.push(Token::Char(ReservedChar::Comma)); |
315 | } |
316 | new_vars.push(Token::CreatedVarDecl(format!( |
317 | " {}= {}" , |
318 | var_name, tokens[positions[0]] |
319 | ))); |
320 | for pos in positions { |
321 | tokens.0[pos] = Token::CreatedVar(var_name.clone()); |
322 | } |
323 | } |
324 | if !new_vars.is_empty() { |
325 | new_vars.push(Token::Char(ReservedChar::SemiColon)); |
326 | } |
327 | for (to_replace_pos, variable_pos) in to_replace { |
328 | tokens.0[to_replace_pos] = tokens.0[variable_pos].clone(); |
329 | } |
330 | if let Some(token) = separation_token { |
331 | new_vars.push(token); |
332 | } |
333 | new_vars.append(&mut tokens.0); |
334 | Tokens(new_vars) |
335 | } |
336 | |
/// Aggregate literal strings. For instance, if the string literal "Oh look over there!"
/// appears more than once, a variable will be created with this value and used everywhere the
/// string appears. Of course, this replacement is only performed when it results in
/// less space.
341 | /// |
342 | /// # Example |
343 | /// |
344 | /// ```rust,no_run |
345 | /// extern crate minifier; |
346 | /// use minifier::js::{aggregate_strings, clean_tokens, simple_minify}; |
347 | /// use std::fs; |
348 | /// |
349 | /// fn main() { |
350 | /// let content = fs::read("some_file.js" ).expect("file not found" ); |
351 | /// let source = String::from_utf8_lossy(&content); |
352 | /// let s = simple_minify(&source); // First we get the tokens list. |
/// let s = s.apply(aggregate_strings) // This `apply` aggregates string literals.
354 | /// .apply(clean_tokens) // This one is used to remove useless chars. |
355 | /// .to_string(); // And we finally convert to string. |
356 | /// println!("result: {}" , s); |
357 | /// } |
358 | /// ``` |
359 | #[inline ] |
360 | pub fn aggregate_strings(tokens: Tokens<'_>) -> Tokens<'_> { |
361 | aggregate_strings_inner(tokens, separation_token:None) |
362 | } |
363 | |
364 | /// Exactly like `aggregate_strings` except this one expects a separation token |
365 | /// to be passed. This token will be placed between the created variables for the |
366 | /// strings aggregation and the rest. |
367 | /// |
368 | /// # Example |
369 | /// |
370 | /// Let's add a backline between the created variables and the rest of the code: |
371 | /// |
372 | /// ```rust,no_run |
373 | /// extern crate minifier; |
374 | /// use minifier::js::{ |
375 | /// aggregate_strings_with_separation, |
376 | /// clean_tokens, |
377 | /// simple_minify, |
378 | /// Token, |
379 | /// ReservedChar, |
380 | /// }; |
381 | /// use std::fs; |
382 | /// |
383 | /// fn main() { |
384 | /// let content = fs::read("some_file.js" ).expect("file not found" ); |
385 | /// let source = String::from_utf8_lossy(&content); |
386 | /// let s = simple_minify(&source); // First we get the tokens list. |
387 | /// let s = s.apply(|f| { |
388 | /// aggregate_strings_with_separation(f, Token::Char(ReservedChar::Backline)) |
389 | /// }) // We add a backline between the variable and the rest. |
390 | /// .apply(clean_tokens) // We clean the tokens. |
391 | /// .to_string(); // And we finally convert to string. |
392 | /// println!("result: {}" , s); |
393 | /// } |
394 | /// ``` |
395 | #[inline ] |
396 | pub fn aggregate_strings_with_separation<'a, 'b: 'a>( |
397 | tokens: Tokens<'a>, |
398 | separation_token: Token<'b>, |
399 | ) -> Tokens<'a> { |
400 | aggregate_strings_inner(tokens, separation_token:Some(separation_token)) |
401 | } |
402 | |
403 | fn aggregate_strings_into_array_inner<'a, 'b: 'a, T: Fn(&Tokens<'a>, usize) -> bool>( |
404 | mut tokens: Tokens<'a>, |
405 | array_name: &str, |
406 | separation_token: Option<Token<'b>>, |
407 | filter: T, |
408 | ) -> Tokens<'a> { |
409 | let mut to_insert = Vec::with_capacity(100); |
410 | let mut to_replace = Vec::with_capacity(100); |
411 | |
412 | { |
413 | let mut to_ignore = HashSet::new(); |
414 | // key: the token string |
415 | // value: (position in the array, positions in the tokens list, need creation) |
416 | let mut strs: HashMap<&str, (usize, Vec<usize>, bool)> = HashMap::with_capacity(1000); |
417 | let (current_array_values, need_recreate, mut end_bracket) = |
418 | match get_array(&tokens, array_name) { |
419 | Some((s, p)) => (s, false, p), |
420 | None => (Vec::new(), true, 0), |
421 | }; |
422 | let mut validated: HashSet<&str> = HashSet::new(); |
423 | |
424 | let mut array_pos = 0; |
425 | for s in current_array_values.iter() { |
426 | if let Some(st) = tokens.0[*s].get_string() { |
427 | strs.insert(&st[1..st.len() - 1], (array_pos, vec![], false)); |
428 | array_pos += 1; |
429 | validated.insert(&st[1..st.len() - 1]); |
430 | to_ignore.insert(*s); |
431 | } |
432 | } |
433 | |
434 | let mut array_pos_str = array_pos.to_string(); |
435 | for pos in 0..tokens.len() { |
436 | if to_ignore.contains(&pos) { |
437 | continue; |
438 | } |
439 | let token = &tokens[pos]; |
440 | if let Some(str_token) = token.get_string() { |
441 | if !filter(&tokens, pos) { |
442 | continue; |
443 | } |
444 | let s = &str_token[1..str_token.len() - 1]; |
445 | let x = strs |
446 | .entry(s) |
447 | .or_insert_with(|| (0, Vec::with_capacity(1), true)); |
448 | x.1.push(pos); |
449 | if x.1.len() > 1 && !validated.contains(s) { |
450 | let len = s.len(); |
451 | if len * x.1.len() |
452 | > (array_name.len() + array_pos_str.len() + 2) * x.1.len() |
453 | + array_pos_str.len() |
454 | + 2 |
455 | { |
456 | validated.insert(&str_token[1..str_token.len() - 1]); |
457 | x.0 = array_pos; |
458 | array_pos += 1; |
459 | array_pos_str = array_pos.to_string(); |
460 | } |
461 | } |
462 | } |
463 | } |
464 | |
465 | // TODO: |
466 | // 1. Sort strings by length (the smallest should take the smallest numbers |
467 | // for bigger gains). |
468 | // 2. Compute "score" for all strings of the same length and sort the strings |
469 | // of the same length with this score. |
470 | // 3. Loop again over strings and remove those who shouldn't be there anymore. |
471 | // 4. Repeat. |
472 | // |
473 | // ALTERNATIVE: |
474 | // |
475 | // Compute the score based on: |
476 | // current number of digits * str length * str occurence |
477 | // |
478 | // ^ This second solution should bring even better results. |
479 | // |
480 | // ALSO: if an array with such strings already exists, it'd be worth it to recompute |
481 | // everything again. |
482 | let mut validated = validated.iter().map(|v| (strs[v].0, v)).collect::<Vec<_>>(); |
483 | validated.sort_unstable_by(|(p1, _), (p2, _)| p2.cmp(p1)); |
484 | |
485 | if need_recreate && !validated.is_empty() { |
486 | if let Some(token) = separation_token { |
487 | to_insert.push((0, token)); |
488 | } |
489 | to_insert.push((0, Token::Char(ReservedChar::SemiColon))); |
490 | to_insert.push((0, Token::Char(ReservedChar::CloseBracket))); |
491 | to_insert.push((0, Token::Char(ReservedChar::OpenBracket))); |
492 | to_insert.push((0, Token::CreatedVarDecl(format!("var {}=" , array_name)))); |
493 | |
494 | end_bracket = 2; |
495 | } |
496 | |
497 | let mut iter = validated.iter().peekable(); |
498 | while let Some((array_pos, s)) = iter.next() { |
499 | let (_, ref tokens_pos, create_array_entry) = strs[*s]; |
500 | let array_index = Token::CreatedVar(format!(" {}[ {}]" , array_name, array_pos)); |
501 | for token in tokens_pos.iter() { |
502 | to_replace.push((*token, array_index.clone())); |
503 | } |
504 | if !create_array_entry { |
505 | continue; |
506 | } |
507 | to_insert.push((end_bracket, Token::CreatedVar(format!(" \"{}\"" , *s)))); |
508 | if iter.peek().is_none() && current_array_values.is_empty() { |
509 | continue; |
510 | } |
511 | to_insert.push((end_bracket, Token::Char(ReservedChar::Comma))); |
512 | } |
513 | } |
514 | for (pos, rep) in to_replace.into_iter() { |
515 | tokens.0[pos] = rep; |
516 | } |
517 | for (pos, rep) in to_insert.into_iter() { |
518 | tokens.0.insert(pos, rep); |
519 | } |
520 | tokens |
521 | } |
522 | |
523 | /// Exactly like `aggregate_strings_into_array` except this one expects a separation token |
524 | /// to be passed. This token will be placed between the created array for the |
525 | /// strings aggregation and the rest. |
526 | /// |
527 | /// # Example |
528 | /// |
529 | /// Let's add a backline between the created variables and the rest of the code: |
530 | /// |
531 | /// ```rust,no_run |
532 | /// extern crate minifier; |
533 | /// use minifier::js::{ |
534 | /// aggregate_strings_into_array_with_separation, |
535 | /// clean_tokens, |
536 | /// simple_minify, |
537 | /// Token, |
538 | /// ReservedChar, |
539 | /// }; |
540 | /// use std::fs; |
541 | /// |
542 | /// fn main() { |
543 | /// let content = fs::read("some_file.js" ).expect("file not found" ); |
544 | /// let source = String::from_utf8_lossy(&content); |
545 | /// let s = simple_minify(&source); // First we get the tokens list. |
546 | /// let s = s.apply(|f| { |
547 | /// aggregate_strings_into_array_with_separation(f, "R" , Token::Char(ReservedChar::Backline)) |
548 | /// }) // We add a backline between the variable and the rest. |
549 | /// .apply(clean_tokens) // We clean the tokens. |
550 | /// .to_string(); // And we finally convert to string. |
551 | /// println!("result: {}" , s); |
552 | /// } |
553 | /// ``` |
554 | #[inline ] |
555 | pub fn aggregate_strings_into_array_with_separation<'a, 'b: 'a>( |
556 | tokens: Tokens<'a>, |
557 | array_name: &str, |
558 | separation_token: Token<'b>, |
559 | ) -> Tokens<'a> { |
560 | aggregate_strings_into_array_inner(tokens, array_name, separation_token:Some(separation_token), |_, _| true) |
561 | } |
562 | |
563 | /// Same as [`aggregate_strings_into_array_with_separation`] except it allows certain strings to |
564 | /// not be aggregated thanks to the `filter` parameter. If it returns `false`, then the string will |
565 | /// be ignored. |
566 | #[inline ] |
567 | pub fn aggregate_strings_into_array_with_separation_filter<'a, 'b: 'a, T>( |
568 | tokens: Tokens<'a>, |
569 | array_name: &str, |
570 | separation_token: Token<'b>, |
571 | filter: T, |
572 | ) -> Tokens<'a> |
573 | where |
574 | T: Fn(&Tokens<'a>, usize) -> bool, |
575 | { |
576 | aggregate_strings_into_array_inner(tokens, array_name, separation_token:Some(separation_token), filter) |
577 | } |
578 | |
/// Aggregate literal strings. For instance, if the string literal "Oh look over there!"
/// appears more than once, it will be added to the generated array and used everywhere the
/// string appears. Of course, this replacement is only performed when it results in
/// less space.
583 | /// |
584 | /// # Example |
585 | /// |
586 | /// ```rust,no_run |
587 | /// extern crate minifier; |
588 | /// use minifier::js::{aggregate_strings_into_array, clean_tokens, simple_minify}; |
589 | /// use std::fs; |
590 | /// |
591 | /// fn main() { |
592 | /// let content = fs::read("some_file.js" ).expect("file not found" ); |
593 | /// let source = String::from_utf8_lossy(&content); |
594 | /// let s = simple_minify(&source); // First we get the tokens list. |
/// let s = s.apply(|f| aggregate_strings_into_array(f, "R" )) // This `apply` aggregates string literals.
596 | /// .apply(clean_tokens) // This one is used to remove useless chars. |
597 | /// .to_string(); // And we finally convert to string. |
598 | /// println!("result: {}" , s); |
599 | /// } |
600 | /// ``` |
601 | #[inline ] |
602 | pub fn aggregate_strings_into_array<'a>(tokens: Tokens<'a>, array_name: &str) -> Tokens<'a> { |
603 | aggregate_strings_into_array_inner(tokens, array_name, separation_token:None, |_, _| true) |
604 | } |
605 | |
606 | /// Same as [`aggregate_strings_into_array`] except it allows certain strings to not be aggregated |
607 | /// thanks to the `filter` parameter. If it returns `false`, then the string will be ignored. |
608 | #[inline ] |
609 | pub fn aggregate_strings_into_array_filter<'a, T>( |
610 | tokens: Tokens<'a>, |
611 | array_name: &str, |
612 | filter: T, |
613 | ) -> Tokens<'a> |
614 | where |
615 | T: Fn(&Tokens<'a>, usize) -> bool, |
616 | { |
617 | aggregate_strings_into_array_inner(tokens, array_name, separation_token:None, filter) |
618 | } |
619 | |
620 | /// Simple function to get the untouched token list. Useful in case you want to perform some |
621 | /// actions directly on it. |
622 | /// |
623 | /// # Example |
624 | /// |
625 | /// ```rust,no_run |
626 | /// extern crate minifier; |
627 | /// use minifier::js::simple_minify; |
628 | /// use std::fs; |
629 | /// |
630 | /// fn main() { |
631 | /// let content = fs::read("some_file.js" ).expect("file not found" ); |
632 | /// let source = String::from_utf8_lossy(&content); |
633 | /// let s = simple_minify(&source); |
634 | /// println!("result: {:?}" , s); // We now have the tokens list. |
635 | /// } |
636 | /// ``` |
#[inline ]
pub fn simple_minify(source: &str) -> Tokens<'_> {
    // Tokenization only: no cleaning or minification pass is applied here.
    token::tokenize(source)
}
641 | |
#[test]
fn aggregate_strings_in_array() {
    // Repeated strings are moved into the generated `R` array; the string
    // appearing only once stays inline.
    let source = r#"var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
    let expected_result = "var R=[\"a nice string\",\"cake!\"];var x=[R[0],R[0],\
                           \"another nice string\",R[1],R[1],R[0],R[1],R[1],R[1]]";

    let result = simple_minify(source)
        .apply(crate::js::clean_tokens)
        .apply(|c| aggregate_strings_into_array(c, "R"))
        .to_string();
    assert_eq!(result, expected_result);

    // Same input, but with a backline separator between the array and the rest.
    let source = r#"var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
    let expected_result = "var R=[\"a nice string\",\"cake!\"];\nvar x=[R[0],R[0],\
                           \"another nice string\",R[1],R[1],R[0],R[1],R[1],R[1]]";

    let result = simple_minify(source)
        .apply(crate::js::clean_tokens)
        .apply(|c| {
            aggregate_strings_into_array_with_separation(
                c,
                "R",
                Token::Char(ReservedChar::Backline),
            )
        })
        .to_string();
    assert_eq!(result, expected_result);

    // Three distinct repeated strings -> three array entries.
    let source = r#"var x = ["a nice string", "a nice string", "another nice string", "another nice string", "another nice string", "another nice string","cake!","cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
    let expected_result = "var R=[\"a nice string\",\"another nice string\",\"cake!\"];\n\
                           var x=[R[0],R[0],R[1],R[1],R[1],R[1],R[2],R[2],R[0],R[2],\
                           R[2],R[2]]";

    let result = simple_minify(source)
        .apply(crate::js::clean_tokens)
        .apply(|c| {
            aggregate_strings_into_array_with_separation(
                c,
                "R",
                Token::Char(ReservedChar::Backline),
            )
        })
        .to_string();
    assert_eq!(result, expected_result);
}
689 | |
#[test]
fn aggregate_strings_in_array_filter() {
    // Strings used as `searchIndex[...]` keys are excluded by the filter.
    let source = r#"var searchIndex = {};searchIndex['duplicate_paths'] = {'aaaaaaaa': 'bbbbbbbb', 'bbbbbbbb': 'aaaaaaaa', 'duplicate_paths': 'aaaaaaaa'};"#;
    let expected_result = "var R=[\"bbbbbbbb\",\"aaaaaaaa\"];\nvar searchIndex={};searchIndex['duplicate_paths']={R[1]:R[0],R[0]:R[1],'duplicate_paths':R[1]}";

    let result = simple_minify(source)
        .apply(crate::js::clean_tokens)
        .apply(|c| {
            aggregate_strings_into_array_with_separation_filter(
                c,
                "R",
                Token::Char(ReservedChar::Backline),
                |tokens, pos| {
                    pos < 2
                        || !tokens[pos - 1].eq_char(ReservedChar::OpenBracket)
                        || tokens[pos - 2].get_other() != Some("searchIndex")
                },
            )
        })
        .to_string();
    assert_eq!(result, expected_result);

    // 'duplicate_paths' now repeats outside the key position too, so it gets
    // aggregated as well.
    let source = r#"var searchIndex = {};searchIndex['duplicate_paths'] = {'aaaaaaaa': 'bbbbbbbb', 'bbbbbbbb': 'aaaaaaaa', 'duplicate_paths': 'aaaaaaaa', 'x': 'duplicate_paths'};"#;
    let expected_result = "var R=[\"bbbbbbbb\",\"aaaaaaaa\",\"duplicate_paths\"];\nvar searchIndex={};searchIndex['duplicate_paths']={R[1]:R[0],R[0]:R[1],R[2]:R[1],'x':R[2]}";

    let result = simple_minify(source)
        .apply(crate::js::clean_tokens)
        .apply(|c| {
            aggregate_strings_into_array_with_separation_filter(
                c,
                "R",
                Token::Char(ReservedChar::Backline),
                |tokens, pos| {
                    pos < 2
                        || !tokens[pos - 1].eq_char(ReservedChar::OpenBracket)
                        || tokens[pos - 2].get_other() != Some("searchIndex")
                },
            )
        })
        .to_string();
    assert_eq!(result, expected_result);
}
732 | |
#[test]
fn aggregate_strings_in_array_existing() {
    // An empty `R` array already exists: it is reused instead of recreated.
    let source = r#"var R=[];var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
    let expected_result = "var R=[\"a nice string\",\"cake!\"];var x=[R[0],R[0],\
                           \"another nice string\",R[1],R[1],R[0],R[1],R[1],R[1]]";

    let result = simple_minify(source)
        .apply(crate::js::clean_tokens)
        .apply(|c| aggregate_strings_into_array(c, "R"))
        .to_string();
    assert_eq!(result, expected_result);

    // The existing entry keeps its index; new entries are appended.
    let source = r#"var R=["a nice string"];var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
    let expected_result = "var R=[\"a nice string\",\"cake!\"];var x=[R[0],R[0],\
                           \"another nice string\",R[1],R[1],R[0],R[1],R[1],R[1]]";

    let result = simple_minify(source)
        .apply(crate::js::clean_tokens)
        .apply(|c| aggregate_strings_into_array(c, "R"))
        .to_string();
    assert_eq!(result, expected_result);

    // Code preceding the existing array is left untouched.
    let source = r#"var y = 12;var R=["a nice string"];var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
    let expected_result = "var y=12;var R=[\"a nice string\",\"cake!\"];var x=[R[0],R[0],\
                           \"another nice string\",R[1],R[1],R[0],R[1],R[1],R[1]]";

    let result = simple_minify(source)
        .apply(crate::js::clean_tokens)
        .apply(|c| aggregate_strings_into_array(c, "R"))
        .to_string();
    assert_eq!(result, expected_result);

    // Unrelated existing entries keep their positions; new entries start after them.
    let source = r#"var R=["osef1", "o2", "damn"];
var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
    let expected_result = "var R=[\"osef1\",\"o2\",\"damn\",\"a nice string\",\"cake!\"];\
                           var x=[R[3],R[3],\"another nice string\",R[4],R[4],R[3],R[4],R[4],R[4]]";

    let result = simple_minify(source)
        .apply(crate::js::clean_tokens)
        .apply(|c| aggregate_strings_into_array(c, "R"))
        .to_string();
    assert_eq!(result, expected_result);
}
780 | |
#[test]
fn string_duplicates() {
    // Repeated strings become `r_*` variables; the unique one stays inline.
    let source = r#"var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
    let expected_result = "var r_aa=\"a nice string\",r_ba=\"cake!\";var x=[r_aa,r_aa,\
                           \"another nice string\",r_ba,r_ba,r_aa,r_ba,r_ba,r_ba]";

    let result = simple_minify(source)
        .apply(aggregate_strings)
        .apply(crate::js::clean_tokens)
        .to_string();
    assert_eq!(result, expected_result);
}
794 | |
#[test]
fn already_existing_var() {
    // `r_aa` already holds "a nice string", so it is reused and only `r_ba`
    // is generated for "cake!".
    let source = r#"var r_aa = "a nice string"; var x = ["a nice string", "a nice string",
"another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
    let expected_result = "var r_ba=\"cake!\";var r_aa=\"a nice string\";var x=[r_aa,r_aa,\
                           \"another nice string\",r_ba,r_ba,r_aa,r_ba,r_ba,r_ba]";

    let result = simple_minify(source)
        .apply(aggregate_strings)
        .apply(crate::js::clean_tokens)
        .to_string();
    assert_eq!(result, expected_result);
}
809 | |
#[test]
fn string_duplicates_variables_already_exist() {
    // `r_aa` is taken by the user, so the generator starts at `r_ba`.
    let source = r#"var r_aa=1;var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
    let expected_result = "var r_ba=\"a nice string\",r_ca=\"cake!\";\
                           var r_aa=1;var x=[r_ba,r_ba,\
                           \"another nice string\",r_ca,r_ca,r_ba,r_ca,r_ca,r_ca]";

    let result = simple_minify(source)
        .apply(aggregate_strings)
        .apply(crate::js::clean_tokens)
        .to_string();
    assert_eq!(result, expected_result);
}
824 | |
#[test]
fn string_duplicates_with_separator() {
    use self::token::ReservedChar;

    // A backline is inserted between the generated declarations and the rest.
    let source = r#"var x = ["a nice string", "a nice string", "another nice string", "cake!",
"cake!", "a nice string", "cake!", "cake!", "cake!"];"#;
    let expected_result = "var r_aa=\"a nice string\",r_ba=\"cake!\";\nvar x=[r_aa,r_aa,\
                           \"another nice string\",r_ba,r_ba,r_aa,r_ba,r_ba,r_ba]";
    let result = simple_minify(source)
        .apply(crate::js::clean_tokens)
        .apply(|f| aggregate_strings_with_separation(f, Token::Char(ReservedChar::Backline)))
        .to_string();
    assert_eq!(result, expected_result);
}
839 | |
#[test]
fn clean_except() {
    use self::token::ReservedChar;

    // Backlines are preserved by the filter; everything else is cleaned.
    let source = r#"var x = [1, 2, 3];
var y = "salut";
var z = "ok!";"#;
    let expected = r#"var x=[1,2,3];
var y="salut";
var z="ok!""#;

    let result = simple_minify(source)
        .apply(|f| {
            crate::js::clean_tokens_except(f, |c| c.get_char() != Some(ReservedChar::Backline))
        })
        .to_string();
    assert_eq!(result, expected);
}
858 | |
#[test]
fn clean_except2() {
    use self::token::ReservedChar;

    // Spaces and semi-colons are exempted from cleaning, so only the tab is
    // removed from the output.
    let source: &str = "let x = [ 1, 2, \t3];";
    let expected: &str = "let x = [ 1, 2, 3];";

    let result: String = simple_minify(source)
        .apply(|f| {
            crate::js::clean_tokens_except(f, |c| {
                c.get_char() != Some(ReservedChar::Space)
                    && c.get_char() != Some(ReservedChar::SemiColon)
            })
        })
        .to_string();
    assert_eq!(result, expected);
}
876 | |
#[test]
fn clean_except3() {
    use self::token::ReservedChar;

    // Tabs and semi-colons are exempted from cleaning, so the tab stays while
    // the useless spaces are removed.
    let source: &str = "let x = [ 1, 2, \t3];";
    let expected: &str = "let x=[1,2,\t3];";

    let result: String = simple_minify(source)
        .apply(|f| {
            crate::js::clean_tokens_except(f, |c| {
                c.get_char() != Some(ReservedChar::Tab)
                    && c.get_char() != Some(ReservedChar::SemiColon)
            })
        })
        .to_string();
    assert_eq!(result, expected);
}
894 | |
#[test]
fn name_generator() {
    // A 36-character filler so every generated string is long enough to be
    // worth aggregating.
    let s = std::iter::repeat('a').take(36).collect::<String>();
    // We need to generate enough long strings to reach the point that the name generator
    // generates names with 3 characters.
    let s = std::iter::repeat(s)
        .take(20000)
        .enumerate()
        .map(|(pos, s)| format!("{}{}", s, pos))
        .collect::<Vec<_>>();
    let source = format!(
        "var x = [{}];",
        s.iter()
            .map(|s| format!("\"{0}\",\"{0}\"", s))
            .collect::<Vec<_>>()
            .join(",")
    );
    let result: String = simple_minify(&source)
        .apply(crate::js::clean_tokens)
        .apply(aggregate_strings)
        .to_string();
    // Three-character names must have been reached...
    assert!(result.find(",r_aaa=").is_some());
    // ...and names must be generated in order (`r_ab` before `r_ba`).
    assert!(result.find(",r_ab=").unwrap() < result.find(",r_ba=").unwrap());
}
919 | |
#[test]
fn simple_quote() {
    // An escaped backslash inside a string literal must be preserved verbatim.
    let source: &str = r#"var x = "\\";"#;
    let expected_result: &str = r#"var x="\\""#;
    assert_eq!(minify(source).to_string(), expected_result);
}
926 | |
#[test]
fn js_minify_test() {
    // End-to-end check: comments stripped, whitespace collapsed, statements kept.
    let source = r##"
var foo = "something";

var another_var = 2348323;

// who doesn't like comments?
/* and even longer comments?

like
on
a
lot
of
lines!

Fun!
*/
function far_away(x, y) {
    var x2 = x + 4;
    return x * x2 + y;
}

// this call is useless
far_away(another_var, 12);
// this call is useless too
far_away(another_var, 12);
"##;

    let expected_result = "var foo=\"something\";var another_var=2348323;function far_away(x,y){\
                           var x2=x+4;return x*x2+y}far_away(another_var,12);far_away(another_var,\
                           12)";
    assert_eq!(minify(source).to_string(), expected_result);
}
962 | |
#[test]
fn another_js_test() {
    // `/*!` license comments must be kept verbatim; everything else is minified.
    let source = r#"
/*! let's keep this license
*
* because everyone likes licenses!
*
* right?
*/

function forEach(data, func) {
    for (var i = 0; i < data.length; ++i) {
        func(data[i]);
    }
}

forEach([0, 1, 2, 3, 4,
         5, 6, 7, 8, 9], function (x) {
    console.log(x);
});
// I think we're done?
console.log('done!');
"#;

    let expected_result = r#"/*! let's keep this license
*
* because everyone likes licenses!
*
* right?
*/function forEach(data,func){for(var i=0;i<data.length;++i){func(data[i])}}forEach([0,1,2,3,4,5,6,7,8,9],function(x){console.log(x)});console.log('done!')"#;
    assert_eq!(minify(source).to_string(), expected_result);
}
995 | |
#[test]
fn comment_issue() {
    // Comments inside a function body must be removed without breaking the
    // surrounding statements.
    let source: &str = r#"
search_input.onchange = function(e) {
    // Do NOT e.preventDefault() here. It will prevent pasting.
    clearTimeout(searchTimeout);
    // zero-timeout necessary here because at the time of event handler execution the
    // pasted content is not in the input field yet. Shouldn’t make any difference for
    // change, though.
    setTimeout(search, 0);
};
"#;
    let expected_result: &str = "search_input.onchange=function(e){clearTimeout(searchTimeout);\
                                 setTimeout(search,0)}";
    assert_eq!(minify(source).to_string(), expected_result);
}
1012 | |
#[test]
fn missing_whitespace() {
    // The space in `var entry in results` is significant and must be kept.
    let source: &str = r#"
for (var entry in results) {
    if (results.hasOwnProperty(entry)) {
        ar.push(results[entry]);
    }
}"#;
    let expected_result: &str = "for(var entry in results){if(results.hasOwnProperty(entry)){\
                                 ar.push(results[entry])}}";
    assert_eq!(minify(source).to_string(), expected_result);
}
1025 | |
#[test]
fn weird_regex_issue() {
    // The regex literal `/\_/g` must not be mangled by the minifier.
    let source: &str = r#"
val = val.replace(/\_/g, "");

var valGenerics = extractGenerics(val);"#;
    let expected_result: &str = "val=val.replace(/\\_/g,\"\");var valGenerics=extractGenerics(val)";
    assert_eq!(minify(source).to_string(), expected_result);
}
1035 | |
#[test]
fn keep_space() {
    // Checks each case through both output paths: `to_string` and `write`.
    fn inner_double_checks(source: &str, expected: &str) {
        assert_eq!(minify(source).to_string(), expected);
        let s = minify(source);
        let mut out: Vec<u8> = Vec::new();
        s.write(&mut out).unwrap();
        assert_eq!(String::from_utf8(out).unwrap(), expected);
    }

    inner_double_checks("return 12;return x;", "return 12;return x");
    // A space is required before `in` after identifiers/numbers/regexes, but
    // not after a string literal.
    inner_double_checks("t in e", "t in e");
    inner_double_checks("t + 1 in e", "t+1 in e");
    inner_double_checks("t - 1 in e", "t-1 in e");
    inner_double_checks("'a' in e", "'a'in e");
    inner_double_checks("/a/g in e", "/a/g in e");
    inner_double_checks("/a/i in e", "/a/i in e");

    // Same rules apply to `instanceof`.
    inner_double_checks("t instanceof e", "t instanceof e");
    inner_double_checks("t + 1 instanceof e", "t+1 instanceof e");
    inner_double_checks("t - 1 instanceof e", "t-1 instanceof e");
    inner_double_checks("'a' instanceof e", "'a'instanceof e");
    inner_double_checks("/a/g instanceof e", "/a/g instanceof e");
    inner_double_checks("/a/i instanceof e", "/a/i instanceof e");

    inner_double_checks("function foo() { let x = 12; }", "function foo(){let x=12}");
    inner_double_checks(
        r#""use strict";

(function() {
    const itemTypes = [
        "mod",
        "externcrate",
        "import",
        "struct",
    ];
    const TY_PRIMITIVE = itemTypes;
    function hasOwnPropertyRustdoc() {}
})();"#,
        "\"use strict\";(function(){const itemTypes=[\"mod\",\"externcrate\",\"import\",\"struct\",]\
         ;const TY_PRIMITIVE=itemTypes;function hasOwnPropertyRustdoc(){}})()",
    );
}
1079 | |
#[test]
fn test_remove_extra_whitespace_before_typeof() {
    // No space is needed between `typeof` and a following string literal.
    let source: &str = "var x = typeof 'foo';var y = typeof x;case typeof 'foo': 'bla'";

    let expected_result: &str = "var x=typeof'foo';var y=typeof x;case typeof'foo':'bla'";
    assert_eq!(minify(source).to_string(), expected_result);
}
1087 | |
#[test]
fn test_remove_extra_whitespace_before_in() {
    // No space is needed between a string literal and `in`, but identifiers
    // and keywords (`x`, `true`) still require one.
    let source: &str = r#"if ("key" in ev && typeof ev) { return true; }
if (x in ev && typeof ev) { return true; }
if (true in ev) { return true; }"#;

    let expected_result: &str = r#"if("key"in ev&&typeof ev){return true}if(x in ev&&typeof ev){return true}if(true in ev){return true}"#;
    assert_eq!(minify(source).to_string(), expected_result);
}
1097 | |
#[test]
fn test_remove_extra_whitespace_before_operator() {
    // `/` after `)` or an identifier is division, not a regex start, so the
    // surrounding whitespace can be removed safely.
    let source: &str = "( x ) / 2; x / y;x /= y";

    let expected_result: &str = "(x)/2;x/y;x/=y";
    assert_eq!(minify(source).to_string(), expected_result);
}
1105 | |
#[test]
fn check_regex_syntax() {
    // The `|` alternation inside a regex literal must survive minification.
    let source: &str = "console.log(/MSIE|Trident|Edge/.test(window.navigator.userAgent));";
    let expected: &str = "console.log(/MSIE|Trident|Edge/.test(window.navigator.userAgent))";
    assert_eq!(minify(source).to_string(), expected);
}
1112 | |
#[test]
fn minify_minified() {
    // Minifying already-minified code must be stable: only the remaining
    // removable whitespace goes away; the regex literal is untouched.
    let source: &str = "function (i, n, a) { i[n].type.replace(/ *;(.|\\s)*/, \"\")===t&&a.push(i[n].MathJax.elementJax);return a}";
    let expected: &str = "function(i,n,a){i[n].type.replace(/ *;(.|\\s)*/,\"\")===t&&a.push(i[n].MathJax.elementJax);return a}";
    assert_eq!(minify(source).to_string(), expected);
}
1119 | |
#[test]
fn check_string() {
    // Whitespace inside template literals is significant and must be kept.
    let source: &str = r###"
const a = 123;
const b = "123";
const c = `the number is ${a} <-- note the spaces here`;
const d = ` ${a} ${b} `;
"###;
    let expected: &str = "const a=123;const b=\"123\";const c=`the number is ${a} <-- note the spaces \
                          here`;const d=` ${a} ${b} `";
    assert_eq!(minify(source).to_string(), expected);
}
1132 | |
1133 | // TODO: requires AST to fix this issue! |
1134 | /*#[test] |
1135 | fn no_semi_colon() { |
1136 | let source = r#" |
1137 | console.log(1) |
1138 | console.log(2) |
1139 | var x = 12; |
1140 | "#; |
1141 | let expected_result = r#"console.log(1);console.log(2);var x=12;"#; |
1142 | assert_eq!(minify(source).to_string(), expected_result); |
1143 | }*/ |
1144 | |
1145 | // TODO: requires AST to fix this issue! |
1146 | /*#[test] |
1147 | fn correct_replace_for_backline() { |
1148 | let source = r#" |
1149 | function foo() { |
1150 | return |
1151 | 12; |
1152 | } |
1153 | "#; |
1154 | let expected_result = r#"function foo(){return 12;}"#; |
1155 | assert_eq!(minify(source).to_string(), expected_result); |
1156 | }*/ |
1157 | |