1#[cfg(feature = "plotters")]
2use criterion::SamplingMode;
3use criterion::{
4 criterion_group, criterion_main, profiler::Profiler, BatchSize, BenchmarkId, Criterion,
5};
6use serde_json::value::Value;
7use std::cell::{Cell, RefCell};
8use std::cmp::max;
9use std::fs::File;
10use std::path::{Path, PathBuf};
11use std::rc::Rc;
12use std::time::{Duration, SystemTime};
13use tempfile::{tempdir, TempDir};
14use walkdir::WalkDir;
15
16/*
17 * Please note that these tests are not complete examples of how to use
18 * Criterion.rs. See the benches folder for actual examples.
19 */
/// Creates a fresh temporary directory to hold one test's Criterion output.
/// The directory and its contents are removed when the returned `TempDir`
/// guard is dropped at the end of the test.
fn temp_dir() -> TempDir {
    tempdir().unwrap()
}
23
24// Configure a Criterion struct to perform really fast benchmarks. This is not
25// recommended for real benchmarking, only for testing.
fn short_benchmark(dir: &TempDir) -> Criterion {
    Criterion::default()
        // Redirect all report/stat output into the per-test temp directory.
        .output_directory(dir.path())
        // Tiny warmup/measurement windows so the whole suite stays fast.
        .warm_up_time(Duration::from_millis(250))
        .measurement_time(Duration::from_millis(500))
        // Far fewer bootstrap resamples than the default, again for speed.
        .nresamples(2000)
}
33
/// Shared call counter used to observe how many times benchmark closures run.
/// Cloning produces another handle to the same underlying count.
#[derive(Clone)]
struct Counter {
    // Rc<RefCell<..>> lets clones share one mutable count; benchmarks here
    // run single-threaded, so no Arc/Mutex is needed.
    counter: Rc<RefCell<usize>>,
}
38impl Counter {
39 fn count(&self) {
40 *(*self.counter).borrow_mut() += 1;
41 }
42
43 fn read(&self) -> usize {
44 *(*self.counter).borrow()
45 }
46}
47impl Default for Counter {
48 fn default() -> Counter {
49 Counter {
50 counter: Rc::new(RefCell::new(0)),
51 }
52 }
53}
54
/// Asserts that `dir/path` exists, is a regular file, and is non-empty,
/// returning the joined path for further inspection.
///
/// Takes `&Path` rather than `&PathBuf` (clippy `ptr_arg`); all existing
/// `&PathBuf` call sites continue to work through deref coercion.
fn verify_file(dir: &Path, path: &str) -> PathBuf {
    let full_path = dir.join(path);
    assert!(
        full_path.is_file(),
        "File {:?} does not exist or is not a file",
        full_path
    );
    let metadata = full_path.metadata().unwrap();
    // A zero-length output file would indicate a failed/aborted write.
    assert!(metadata.len() > 0, "File {:?} is empty", full_path);
    full_path
}
66
67fn verify_json(dir: &PathBuf, path: &str) {
68 let full_path = verify_file(dir, path);
69 let f = File::open(full_path).unwrap();
70 serde_json::from_reader::<File, Value>(f).unwrap();
71}
72
/// Asserts that the SVG plot at `dir/path` was written and is non-empty.
/// Only existence/size is checked, not the SVG contents.
#[cfg(feature = "html_reports")]
fn verify_svg(dir: &PathBuf, path: &str) {
    verify_file(dir, path);
}
77
/// Asserts that the HTML report at `dir/path` was written and is non-empty.
/// Only existence/size is checked, not the HTML contents.
#[cfg(feature = "html_reports")]
fn verify_html(dir: &PathBuf, path: &str) {
    verify_file(dir, path);
}
82
83fn verify_stats(dir: &PathBuf, baseline: &str) {
84 verify_json(dir, &format!("{}/estimates.json", baseline));
85 verify_json(dir, &format!("{}/sample.json", baseline));
86 verify_json(dir, &format!("{}/tukey.json", baseline));
87 verify_json(dir, &format!("{}/benchmark.json", baseline));
88 #[cfg(feature = "csv_output")]
89 verify_file(&dir, &format!("{}/raw.csv", baseline));
90}
91
/// Asserts that no file or directory named `path` exists under `dir`.
fn verify_not_exists(dir: &PathBuf, path: &str) {
    let target = dir.join(path);
    assert!(!target.exists());
}
95
96fn latest_modified(dir: &PathBuf) -> SystemTime {
97 let mut newest_update: Option<SystemTime> = None;
98 for entry in WalkDir::new(dir) {
99 let entry = entry.unwrap();
100 let modified = entry.metadata().unwrap().modified().unwrap();
101 newest_update = match newest_update {
102 Some(latest) => Some(max(latest, modified)),
103 None => Some(modified),
104 };
105 }
106
107 newest_update.expect("failed to find a single time in directory")
108}
109
#[test]
fn test_creates_directory() {
    let output_dir = temp_dir();
    let mut criterion = short_benchmark(&output_dir);
    criterion.bench_function("test_creates_directory", |b| b.iter(|| 10));
    // Running a benchmark must create a directory named after it.
    assert!(output_dir.path().join("test_creates_directory").is_dir());
}
116
#[test]
fn test_without_plots() {
    let dir = temp_dir();
    // Run with plotting disabled; no SVG output should be produced.
    short_benchmark(&dir)
        .without_plots()
        .bench_function("test_without_plots", |b| b.iter(|| 10));

    // Walk the entire output tree and fail on any entry with an .svg extension.
    for entry in WalkDir::new(dir.path().join("test_without_plots")) {
        let entry = entry.ok();
        // Option chain: unreadable entries or extension-less paths count as
        // "not SVG" rather than failing the walk.
        let is_svg = entry
            .as_ref()
            .and_then(|entry| entry.path().extension())
            .and_then(|ext| ext.to_str())
            .map(|ext| ext == "svg")
            .unwrap_or(false);
        assert!(
            !is_svg,
            "Found SVG file ({:?}) in output directory with plots disabled",
            entry.unwrap().file_name()
        );
    }
}
139
#[test]
fn test_save_baseline() {
    let dir = temp_dir();
    println!("tmp directory is {:?}", dir.path());
    short_benchmark(&dir)
        .save_baseline("some-baseline".to_owned())
        .bench_function("test_save_baseline", |b| b.iter(|| 10));

    // Stats must land under the named baseline, and nothing should have been
    // written to the default "base" baseline.
    let bench_dir = dir.path().join("test_save_baseline");
    verify_stats(&bench_dir, "some-baseline");
    verify_not_exists(&bench_dir, "base");
}
153
#[test]
fn test_retain_baseline() {
    // Initial benchmark to populate
    let dir = temp_dir();
    short_benchmark(&dir)
        .save_baseline("some-baseline".to_owned())
        .bench_function("test_retain_baseline", |b| b.iter(|| 10));

    // Snapshot the newest mtime anywhere inside the saved baseline.
    let pre_modified = latest_modified(&dir.path().join("test_retain_baseline/some-baseline"));

    // Re-run comparing against the retained baseline; retaining must be
    // read-only with respect to the baseline's files.
    short_benchmark(&dir)
        .retain_baseline("some-baseline".to_owned(), true)
        .bench_function("test_retain_baseline", |b| b.iter(|| 10));

    let post_modified = latest_modified(&dir.path().join("test_retain_baseline/some-baseline"));

    // If any baseline file had been rewritten, its mtime would have advanced.
    assert_eq!(pre_modified, post_modified, "baseline modified by retain");
}
172
#[test]
#[should_panic(expected = "Baseline 'some-baseline' must exist before comparison is allowed")]
fn test_compare_baseline_strict_panics_when_missing_baseline() {
    // strict = true: comparing against a baseline that was never saved
    // must abort with a panic.
    let dir = temp_dir();
    let mut criterion = short_benchmark(&dir).retain_baseline("some-baseline".to_owned(), true);
    criterion.bench_function("test_compare_baseline", |b| b.iter(|| 10));
}
181
#[test]
fn test_compare_baseline_lenient_when_missing_baseline() {
    // strict = false: a missing comparison baseline is tolerated and the
    // benchmark simply runs without a comparison.
    let dir = temp_dir();
    let mut criterion = short_benchmark(&dir).retain_baseline("some-baseline".to_owned(), false);
    criterion.bench_function("test_compare_baseline", |b| b.iter(|| 10));
}
189
#[test]
fn test_sample_size() {
    let dir = temp_dir();
    let counter = Counter::default();

    // One handle moves into the benchmark closure; the original handle is
    // used to read the shared count afterwards.
    let clone = counter.clone();
    short_benchmark(&dir)
        .sample_size(50)
        .bench_function("test_sample_size", move |b| {
            clone.count();
            b.iter(|| 10)
        });

    // This function will be called more than sample_size times because of the
    // warmup.
    assert!(counter.read() > 50);
}
207
#[test]
fn test_warmup_time() {
    let dir = temp_dir();
    let counter1 = Counter::default();

    // Run once with a short (100ms) warmup, counting closure invocations.
    let clone = counter1.clone();
    short_benchmark(&dir)
        .warm_up_time(Duration::from_millis(100))
        .bench_function("test_warmup_time_1", move |b| {
            clone.count();
            b.iter(|| 10)
        });

    // And again with a 20x longer warmup.
    let counter2 = Counter::default();
    let clone = counter2.clone();
    short_benchmark(&dir)
        .warm_up_time(Duration::from_millis(2000))
        .bench_function("test_warmup_time_2", move |b| {
            clone.count();
            b.iter(|| 10)
        });

    // A longer warmup should drive strictly more invocations.
    assert!(counter1.read() < counter2.read());
}
232
#[test]
fn test_measurement_time() {
    let dir = temp_dir();
    let counter1 = Counter::default();

    // Here count() runs inside b.iter, so it counts measured iterations
    // rather than closure invocations.
    let clone = counter1.clone();
    short_benchmark(&dir)
        .measurement_time(Duration::from_millis(100))
        .bench_function("test_meas_time_1", move |b| b.iter(|| clone.count()));

    let counter2 = Counter::default();
    let clone = counter2.clone();
    short_benchmark(&dir)
        .measurement_time(Duration::from_millis(2000))
        .bench_function("test_meas_time_2", move |b| b.iter(|| clone.count()));

    // A 20x longer measurement window should execute strictly more iterations.
    assert!(counter1.read() < counter2.read());
}
251
#[test]
fn test_bench_function() {
    // Smoke test: a plain closure benchmark runs to completion.
    let dir = temp_dir();
    let mut criterion = short_benchmark(&dir);
    criterion.bench_function("test_bench_function", |b| b.iter(|| 10));
}
257
#[test]
fn test_filtering() {
    let dir = temp_dir();
    let counter = Counter::default();
    let clone = counter.clone();

    // The filter "Foo" does not match "test_filtering", so the benchmark
    // closure must never run.
    short_benchmark(&dir)
        .with_filter("Foo")
        .bench_function("test_filtering", move |b| b.iter(|| clone.count()));

    assert_eq!(counter.read(), 0);
    // A filtered-out benchmark should also produce no output directory.
    assert!(!dir.path().join("test_filtering").is_dir());
}
271
#[test]
fn test_timing_loops() {
    // Smoke-test every Bencher timing-loop variant with a trivial setup
    // (build vec![10]) and routine (read its first element).
    let dir = temp_dir();
    let mut c = short_benchmark(&dir);
    let mut group = c.benchmark_group("test_timing_loops");
    group.bench_function("iter_with_setup", |b| {
        b.iter_with_setup(|| vec![10], |v| v[0])
    });
    // The old iter_with_large_setup behavior is expressed via iter_batched
    // with a single batch.
    group.bench_function("iter_with_large_setup", |b| {
        b.iter_batched(|| vec![10], |v| v[0], BatchSize::NumBatches(1))
    });
    group.bench_function("iter_with_large_drop", |b| {
        b.iter_with_large_drop(|| vec![10; 100])
    });
    // iter_batched across each BatchSize strategy.
    group.bench_function("iter_batched_small", |b| {
        b.iter_batched(|| vec![10], |v| v[0], BatchSize::SmallInput)
    });
    group.bench_function("iter_batched_large", |b| {
        b.iter_batched(|| vec![10], |v| v[0], BatchSize::LargeInput)
    });
    group.bench_function("iter_batched_per_iteration", |b| {
        b.iter_batched(|| vec![10], |v| v[0], BatchSize::PerIteration)
    });
    group.bench_function("iter_batched_one_batch", |b| {
        b.iter_batched(|| vec![10], |v| v[0], BatchSize::NumBatches(1))
    });
    group.bench_function("iter_batched_10_iterations", |b| {
        b.iter_batched(|| vec![10], |v| v[0], BatchSize::NumIterations(10))
    });
    // And the by-reference variants over the same strategies.
    group.bench_function("iter_batched_ref_small", |b| {
        b.iter_batched_ref(|| vec![10], |v| v[0], BatchSize::SmallInput)
    });
    group.bench_function("iter_batched_ref_large", |b| {
        b.iter_batched_ref(|| vec![10], |v| v[0], BatchSize::LargeInput)
    });
    group.bench_function("iter_batched_ref_per_iteration", |b| {
        b.iter_batched_ref(|| vec![10], |v| v[0], BatchSize::PerIteration)
    });
    group.bench_function("iter_batched_ref_one_batch", |b| {
        b.iter_batched_ref(|| vec![10], |v| v[0], BatchSize::NumBatches(1))
    });
    group.bench_function("iter_batched_ref_10_iterations", |b| {
        b.iter_batched_ref(|| vec![10], |v| v[0], BatchSize::NumIterations(10))
    });
}
317
// Verify that all expected output files are present
#[cfg(feature = "plotters")]
#[test]
fn test_output_files() {
    let tempdir = temp_dir();
    // Run benchmarks twice to produce comparisons
    for _ in 0..2 {
        let mut c = short_benchmark(&tempdir);
        let mut group = c.benchmark_group("test_output");
        group.sampling_mode(SamplingMode::Linear);
        group.bench_function("output_1", |b| b.iter(|| 10));
        group.bench_function("output_2", |b| b.iter(|| 20));
        // Name containing filesystem-hostile characters (\ / * " ?).
        group.bench_function("output_\\/*\"?", |b| b.iter(|| 30));
    }

    // For each benchmark, assert that the expected files are present.
    for x in 0..3 {
        let dir = if x == 2 {
            // Check that certain special characters are replaced with underscores
            tempdir.path().join("test_output/output______")
        } else {
            tempdir.path().join(format!("test_output/output_{}", x + 1))
        };

        // Second run makes "new" the current stats, "base" the previous run,
        // and "change" the comparison between them.
        verify_stats(&dir, "new");
        verify_stats(&dir, "base");
        verify_json(&dir, "change/estimates.json");

        #[cfg(feature = "html_reports")]
        {
            // Per-benchmark plots...
            verify_svg(&dir, "report/MAD.svg");
            verify_svg(&dir, "report/mean.svg");
            verify_svg(&dir, "report/median.svg");
            verify_svg(&dir, "report/pdf.svg");
            verify_svg(&dir, "report/regression.svg");
            verify_svg(&dir, "report/SD.svg");
            verify_svg(&dir, "report/slope.svg");
            verify_svg(&dir, "report/typical.svg");
            // ...comparison plots against the previous run...
            verify_svg(&dir, "report/both/pdf.svg");
            verify_svg(&dir, "report/both/regression.svg");
            verify_svg(&dir, "report/change/mean.svg");
            verify_svg(&dir, "report/change/median.svg");
            verify_svg(&dir, "report/change/t-test.svg");

            // ...and the thumbnail variants embedded in the HTML report.
            verify_svg(&dir, "report/pdf_small.svg");
            verify_svg(&dir, "report/regression_small.svg");
            verify_svg(&dir, "report/relative_pdf_small.svg");
            verify_svg(&dir, "report/relative_regression_small.svg");
            verify_html(&dir, "report/index.html");
        }
    }

    #[cfg(feature = "html_reports")]
    {
        // Check for overall report files
        let dir = tempdir.path().join("test_output");

        verify_svg(&dir, "report/violin.svg");
        verify_html(&dir, "report/index.html");
    }

    // Run the final summary process and check for the report that produces
    short_benchmark(&tempdir).final_summary();

    #[cfg(feature = "html_reports")]
    {
        let dir = tempdir.path().to_owned();
        verify_html(&dir, "report/index.html");
    }
}
388
#[cfg(feature = "plotters")]
#[test]
fn test_output_files_flat_sampling() {
    let tempdir = temp_dir();
    // Run benchmark twice to produce comparisons
    for _ in 0..2 {
        let mut c = short_benchmark(&tempdir);
        let mut group = c.benchmark_group("test_output");
        group.sampling_mode(SamplingMode::Flat);
        group.bench_function("output_flat", |b| b.iter(|| 10));
    }

    let dir = tempdir.path().join("test_output/output_flat");

    verify_stats(&dir, "new");
    verify_stats(&dir, "base");
    verify_json(&dir, "change/estimates.json");

    #[cfg(feature = "html_reports")]
    {
        // Flat sampling produces iteration_times plots where linear sampling
        // produces regression/slope plots; the rest matches the linear case.
        verify_svg(&dir, "report/MAD.svg");
        verify_svg(&dir, "report/mean.svg");
        verify_svg(&dir, "report/median.svg");
        verify_svg(&dir, "report/pdf.svg");
        verify_svg(&dir, "report/iteration_times.svg");
        verify_svg(&dir, "report/SD.svg");
        verify_svg(&dir, "report/typical.svg");
        verify_svg(&dir, "report/both/pdf.svg");
        verify_svg(&dir, "report/both/iteration_times.svg");
        verify_svg(&dir, "report/change/mean.svg");
        verify_svg(&dir, "report/change/median.svg");
        verify_svg(&dir, "report/change/t-test.svg");

        verify_svg(&dir, "report/pdf_small.svg");
        verify_svg(&dir, "report/iteration_times_small.svg");
        verify_svg(&dir, "report/relative_pdf_small.svg");
        verify_svg(&dir, "report/relative_iteration_times_small.svg");
        verify_html(&dir, "report/index.html");
    }
}
429
#[test]
#[should_panic(expected = "Benchmark function must call Bencher::iter or related method.")]
fn test_bench_with_no_iteration_panics() {
    // A benchmark closure that never invokes an iteration method is a bug
    // in the benchmark and must be reported via a panic.
    let dir = temp_dir();
    let mut criterion = short_benchmark(&dir);
    criterion.bench_function("no_iter", |_b| {});
}
436
#[test]
fn test_benchmark_group_with_input() {
    // Parameterized benchmarks: two functions, each run over two inputs.
    let dir = temp_dir();
    let mut criterion = short_benchmark(&dir);
    let mut group = criterion.benchmark_group("Test Group");
    for input in 0..2 {
        group.bench_with_input(BenchmarkId::new("Test 1", input), &input, |b, i| b.iter(|| i));
        group.bench_with_input(BenchmarkId::new("Test 2", input), &input, |b, i| b.iter(|| i));
    }
    group.finish();
}
448
#[test]
fn test_benchmark_group_without_input() {
    // A group may also hold plain (non-parameterized) benchmarks.
    let dir = temp_dir();
    let mut criterion = short_benchmark(&dir);
    let mut group = criterion.benchmark_group("Test Group 2");
    group.bench_function("Test 1", |b| b.iter(|| 30));
    group.bench_function("Test 2", |b| b.iter(|| 20));
    group.finish();
}
458
#[test]
fn test_criterion_doesnt_panic_if_measured_time_is_zero() {
    // iter_custom reporting a zero duration must not crash the analysis.
    let dir = temp_dir();
    let mut c = short_benchmark(&dir);
    c.bench_function("zero_time", |bencher| {
        bencher.iter_custom(|_iters| Duration::from_secs(0))
    });
}
467
// Tests for the criterion_group!/criterion_main! macros themselves, invoked
// directly (outside a bench harness) so expansion and execution can be
// observed via panics.
mod macros {
    use super::{criterion_group, criterion_main, Criterion};

    #[test]
    #[should_panic(expected = "group executed")]
    fn criterion_main() {
        fn group() {}
        fn group2() {
            panic!("group executed");
        }

        // Expands to a `main` function that calls each listed group in order.
        criterion_main!(group, group2);

        main();
    }

    #[test]
    fn criterion_main_trailing_comma() {
        // make this a compile-only check
        // as the second logger initialization causes panic
        #[allow(dead_code)]
        fn group() {}
        #[allow(dead_code)]
        fn group2() {}

        criterion_main!(group, group2,);

        // silence dead_code warning
        if false {
            main()
        }
    }

    #[test]
    #[should_panic(expected = "group executed")]
    fn criterion_group() {
        fn group(_crit: &mut Criterion) {}
        fn group2(_crit: &mut Criterion) {
            panic!("group executed");
        }

        // Expands to a `test_group` function that runs each member with a
        // shared Criterion instance.
        criterion_group!(test_group, group, group2);

        test_group();
    }

    #[test]
    #[should_panic(expected = "group executed")]
    fn criterion_group_trailing_comma() {
        fn group(_crit: &mut Criterion) {}
        fn group2(_crit: &mut Criterion) {
            panic!("group executed");
        }

        // Same as above, but checks the trailing-comma form is accepted.
        criterion_group!(test_group, group, group2,);

        test_group();
    }
}
527
/// Profiler stub that records how many times Criterion starts and stops
/// profiling. The Rc<Cell<..>> handles let the test keep reading the counts
/// after the profiler itself has been moved into Criterion.
struct TestProfiler {
    started: Rc<Cell<u32>>,
    stopped: Rc<Cell<u32>>,
}
532impl Profiler for TestProfiler {
533 fn start_profiling(&mut self, benchmark_id: &str, _benchmark_path: &Path) {
534 assert!(benchmark_id.contains("profile_test"));
535 self.started.set(self.started.get() + 1);
536 }
537 fn stop_profiling(&mut self, benchmark_id: &str, _benchmark_path: &Path) {
538 assert!(benchmark_id.contains("profile_test"));
539 self.stopped.set(self.stopped.get() + 1);
540 }
541}
542
// Verify that profilers are started and stopped as expected
#[test]
fn test_profiler_called() {
    let start_count = Rc::new(Cell::new(0u32));
    let stop_count = Rc::new(Cell::new(0u32));
    let profiler = TestProfiler {
        started: Rc::clone(&start_count),
        stopped: Rc::clone(&stop_count),
    };
    let dir = temp_dir();
    // profile_time switches Criterion into profiling mode, which invokes
    // the attached profiler around the benchmark.
    let mut criterion = short_benchmark(&dir)
        .with_profiler(profiler)
        .profile_time(Some(Duration::from_secs(1)));
    criterion.bench_function("profile_test", |b| b.iter(|| 10));
    // Exactly one start/stop pair for the single profiled benchmark.
    assert_eq!(1, start_count.get());
    assert_eq!(1, stop_count.get());
}
560