//! Module providing interface for running tests in the console.

3use std::fs::File;
4use std::io;
5use std::io::prelude::Write;
6use std::time::Instant;
7
8use super::{
9 bench::fmt_bench_samples,
10 cli::TestOpts,
11 event::{CompletedTest, TestEvent},
12 filter_tests,
13 formatters::{JsonFormatter, JunitFormatter, OutputFormatter, PrettyFormatter, TerseFormatter},
14 helpers::{concurrency::get_concurrency, metrics::MetricMap},
15 options::{Options, OutputFormat},
16 run_tests, term,
17 test_result::TestResult,
18 time::{TestExecTime, TestSuiteExecTime},
19 types::{NamePadding, TestDesc, TestDescAndFn},
20};
21
22/// Generic wrapper over stdout.
/// Generic wrapper over stdout.
///
/// Allows formatters to write either to a color-capable terminal or to any
/// plain `Write` sink (e.g. a piped/redirected stdout) through one type.
pub enum OutputLocation<T> {
    /// A terminal handle obtained from `term::stdout()`, capable of styled output.
    Pretty(Box<term::StdoutTerminal>),
    /// A raw `Write` sink used when no terminal is available.
    Raw(T),
}
27
28impl<T: Write> Write for OutputLocation<T> {
29 fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
30 match *self {
31 OutputLocation::Pretty(ref mut term: &mut {unknown}) => term.write(buf),
32 OutputLocation::Raw(ref mut stdout: &mut T) => stdout.write(buf),
33 }
34 }
35
36 fn flush(&mut self) -> io::Result<()> {
37 match *self {
38 OutputLocation::Pretty(ref mut term: &mut {unknown}) => term.flush(),
39 OutputLocation::Raw(ref mut stdout: &mut T) => stdout.flush(),
40 }
41 }
42}
43
/// Mutable state accumulated while *listing* (discovering) tests, as opposed
/// to running them. Updated by `list_tests_console`.
pub struct ConsoleTestDiscoveryState {
    // Optional log file (from `TestOpts::logfile`) mirroring discovery output.
    pub log_out: Option<File>,
    // Number of discovered items that are tests (incl. benches run as tests).
    pub tests: usize,
    // Number of discovered items that are benchmarks.
    pub benchmarks: usize,
    // Number of discovered items marked `#[ignore]`.
    pub ignored: usize,
    // Copy of the run options, taken from `TestOpts::options`.
    pub options: Options,
}
51
52impl ConsoleTestDiscoveryState {
53 pub fn new(opts: &TestOpts) -> io::Result<ConsoleTestDiscoveryState> {
54 let log_out = match opts.logfile {
55 Some(ref path) => Some(File::create(path)?),
56 None => None,
57 };
58
59 Ok(ConsoleTestDiscoveryState {
60 log_out,
61 tests: 0,
62 benchmarks: 0,
63 ignored: 0,
64 options: opts.options,
65 })
66 }
67
68 pub fn write_log<F, S>(&mut self, msg: F) -> io::Result<()>
69 where
70 S: AsRef<str>,
71 F: FnOnce() -> S,
72 {
73 match self.log_out {
74 None => Ok(()),
75 Some(ref mut o) => {
76 let msg = msg();
77 let msg = msg.as_ref();
78 o.write_all(msg.as_bytes())
79 }
80 }
81 }
82}
83
/// Mutable state accumulated while *running* tests; fed to the output
/// formatters and updated per test result by `handle_test_result`.
pub struct ConsoleTestState {
    // Optional log file (from `TestOpts::logfile`) receiving per-test results.
    pub log_out: Option<File>,
    // Total number of tests selected to run (set from `TeFiltered`).
    pub total: usize,
    // Count of tests that passed.
    pub passed: usize,
    // Count of tests that failed (incl. failed-with-message and timed-out).
    pub failed: usize,
    // Count of ignored tests.
    pub ignored: usize,
    // Number of tests excluded by filters (set from `TeFilteredOut`).
    pub filtered_out: usize,
    // Count of benchmarks measured.
    pub measured: usize,
    // Wall-clock duration of the whole suite, when `Instant` is available.
    pub exec_time: Option<TestSuiteExecTime>,
    // Benchmark metrics keyed by test name.
    pub metrics: MetricMap,
    // (desc, captured stdout) for each failed test.
    pub failures: Vec<(TestDesc, Vec<u8>)>,
    // (desc, captured stdout) for each passed test.
    pub not_failures: Vec<(TestDesc, Vec<u8>)>,
    // (desc, captured stdout) for each ignored test.
    pub ignores: Vec<(TestDesc, Vec<u8>)>,
    // (desc, captured stdout) for tests that failed by exceeding a time limit.
    pub time_failures: Vec<(TestDesc, Vec<u8>)>,
    // Copy of the run options, taken from `TestOpts::options`.
    pub options: Options,
}
100
101impl ConsoleTestState {
102 pub fn new(opts: &TestOpts) -> io::Result<ConsoleTestState> {
103 let log_out = match opts.logfile {
104 Some(ref path) => Some(File::create(path)?),
105 None => None,
106 };
107
108 Ok(ConsoleTestState {
109 log_out,
110 total: 0,
111 passed: 0,
112 failed: 0,
113 ignored: 0,
114 filtered_out: 0,
115 measured: 0,
116 exec_time: None,
117 metrics: MetricMap::new(),
118 failures: Vec::new(),
119 not_failures: Vec::new(),
120 ignores: Vec::new(),
121 time_failures: Vec::new(),
122 options: opts.options,
123 })
124 }
125
126 pub fn write_log<F, S>(&mut self, msg: F) -> io::Result<()>
127 where
128 S: AsRef<str>,
129 F: FnOnce() -> S,
130 {
131 match self.log_out {
132 None => Ok(()),
133 Some(ref mut o) => {
134 let msg = msg();
135 let msg = msg.as_ref();
136 o.write_all(msg.as_bytes())
137 }
138 }
139 }
140
141 pub fn write_log_result(
142 &mut self,
143 test: &TestDesc,
144 result: &TestResult,
145 exec_time: Option<&TestExecTime>,
146 ) -> io::Result<()> {
147 self.write_log(|| {
148 let TestDesc { name, ignore_message, .. } = test;
149 format!(
150 "{} {}",
151 match *result {
152 TestResult::TrOk => "ok".to_owned(),
153 TestResult::TrFailed => "failed".to_owned(),
154 TestResult::TrFailedMsg(ref msg) => format!("failed: {msg}"),
155 TestResult::TrIgnored => {
156 if let Some(msg) = ignore_message {
157 format!("ignored: {msg}")
158 } else {
159 "ignored".to_owned()
160 }
161 }
162 TestResult::TrBench(ref bs) => fmt_bench_samples(bs),
163 TestResult::TrTimedFail => "failed (time limit exceeded)".to_owned(),
164 },
165 name,
166 )
167 })?;
168 if let Some(exec_time) = exec_time {
169 self.write_log(|| format!(" <{exec_time}>"))?;
170 }
171 self.write_log(|| "\n")
172 }
173
174 fn current_test_count(&self) -> usize {
175 self.passed + self.failed + self.ignored + self.measured
176 }
177}
178
179// List the tests to console, and optionally to logfile. Filters are honored.
180pub fn list_tests_console(opts: &TestOpts, tests: Vec<TestDescAndFn>) -> io::Result<()> {
181 let output = match term::stdout() {
182 None => OutputLocation::Raw(io::stdout().lock()),
183 Some(t) => OutputLocation::Pretty(t),
184 };
185
186 let mut out: Box<dyn OutputFormatter> = match opts.format {
187 OutputFormat::Pretty | OutputFormat::Junit => {
188 Box::new(PrettyFormatter::new(output, false, 0, false, None))
189 }
190 OutputFormat::Terse => Box::new(TerseFormatter::new(output, false, 0, false)),
191 OutputFormat::Json => Box::new(JsonFormatter::new(output)),
192 };
193 let mut st = ConsoleTestDiscoveryState::new(opts)?;
194
195 out.write_discovery_start()?;
196 for test in filter_tests(opts, tests).into_iter() {
197 use crate::TestFn::*;
198
199 let TestDescAndFn { desc, testfn } = test;
200
201 let fntype = match testfn {
202 StaticTestFn(..) | DynTestFn(..) | StaticBenchAsTestFn(..) | DynBenchAsTestFn(..) => {
203 st.tests += 1;
204 "test"
205 }
206 StaticBenchFn(..) | DynBenchFn(..) => {
207 st.benchmarks += 1;
208 "benchmark"
209 }
210 };
211
212 st.ignored += if desc.ignore { 1 } else { 0 };
213
214 out.write_test_discovered(&desc, fntype)?;
215 st.write_log(|| format!("{fntype} {}\n", desc.name))?;
216 }
217
218 out.write_discovery_finish(&st)
219}
220
221// Updates `ConsoleTestState` depending on result of the test execution.
222fn handle_test_result(st: &mut ConsoleTestState, completed_test: CompletedTest) {
223 let test = completed_test.desc;
224 let stdout = completed_test.stdout;
225 match completed_test.result {
226 TestResult::TrOk => {
227 st.passed += 1;
228 st.not_failures.push((test, stdout));
229 }
230 TestResult::TrIgnored => {
231 st.ignored += 1;
232 st.ignores.push((test, stdout));
233 }
234 TestResult::TrBench(bs) => {
235 st.metrics.insert_metric(
236 test.name.as_slice(),
237 bs.ns_iter_summ.median,
238 bs.ns_iter_summ.max - bs.ns_iter_summ.min,
239 );
240 st.measured += 1
241 }
242 TestResult::TrFailed => {
243 st.failed += 1;
244 st.failures.push((test, stdout));
245 }
246 TestResult::TrFailedMsg(msg) => {
247 st.failed += 1;
248 let mut stdout = stdout;
249 stdout.extend_from_slice(format!("note: {msg}").as_bytes());
250 st.failures.push((test, stdout));
251 }
252 TestResult::TrTimedFail => {
253 st.failed += 1;
254 st.time_failures.push((test, stdout));
255 }
256 }
257}
258
259// Handler for events that occur during test execution.
260// It is provided as a callback to the `run_tests` function.
261fn on_test_event(
262 event: &TestEvent,
263 st: &mut ConsoleTestState,
264 out: &mut dyn OutputFormatter,
265) -> io::Result<()> {
266 match (*event).clone() {
267 TestEvent::TeFiltered(filtered_tests, shuffle_seed) => {
268 st.total = filtered_tests;
269 out.write_run_start(filtered_tests, shuffle_seed)?;
270 }
271 TestEvent::TeFilteredOut(filtered_out) => {
272 st.filtered_out = filtered_out;
273 }
274 TestEvent::TeWait(ref test) => out.write_test_start(test)?,
275 TestEvent::TeTimeout(ref test) => out.write_timeout(test)?,
276 TestEvent::TeResult(completed_test) => {
277 let test = &completed_test.desc;
278 let result = &completed_test.result;
279 let exec_time = &completed_test.exec_time;
280 let stdout = &completed_test.stdout;
281
282 st.write_log_result(test, result, exec_time.as_ref())?;
283 out.write_result(test, result, exec_time.as_ref(), stdout, st)?;
284 handle_test_result(st, completed_test);
285 }
286 }
287
288 Ok(())
289}
290
291/// A simple console test runner.
292/// Runs provided tests reporting process and results to the stdout.
293pub fn run_tests_console(opts: &TestOpts, tests: Vec<TestDescAndFn>) -> io::Result<bool> {
294 let output = match term::stdout() {
295 None => OutputLocation::Raw(io::stdout()),
296 Some(t) => OutputLocation::Pretty(t),
297 };
298
299 let max_name_len = tests
300 .iter()
301 .max_by_key(|t| len_if_padded(t))
302 .map(|t| t.desc.name.as_slice().len())
303 .unwrap_or(0);
304
305 let is_multithreaded = opts.test_threads.unwrap_or_else(get_concurrency) > 1;
306
307 let mut out: Box<dyn OutputFormatter> = match opts.format {
308 OutputFormat::Pretty => Box::new(PrettyFormatter::new(
309 output,
310 opts.use_color(),
311 max_name_len,
312 is_multithreaded,
313 opts.time_options,
314 )),
315 OutputFormat::Terse => {
316 Box::new(TerseFormatter::new(output, opts.use_color(), max_name_len, is_multithreaded))
317 }
318 OutputFormat::Json => Box::new(JsonFormatter::new(output)),
319 OutputFormat::Junit => Box::new(JunitFormatter::new(output)),
320 };
321 let mut st = ConsoleTestState::new(opts)?;
322
323 // Prevent the usage of `Instant` in some cases:
324 // - It's currently not supported for wasm targets.
325 // - We disable it for miri because it's not available when isolation is enabled.
326 let is_instant_supported =
327 !cfg!(target_family = "wasm") && !cfg!(target_os = "zkvm") && !cfg!(miri);
328
329 let start_time = is_instant_supported.then(Instant::now);
330 run_tests(opts, tests, |x| on_test_event(&x, &mut st, &mut *out))?;
331 st.exec_time = start_time.map(|t| TestSuiteExecTime(t.elapsed()));
332
333 assert!(opts.fail_fast || st.current_test_count() == st.total);
334
335 out.write_run_finish(&st)
336}
337
338// Calculates padding for given test description.
339fn len_if_padded(t: &TestDescAndFn) -> usize {
340 match t.testfn.padding() {
341 NamePadding::PadNone => 0,
342 NamePadding::PadOnRight => t.desc.name.as_slice().len(),
343 }
344}
345