#undef NDEBUG
#include <cassert>
#include <initializer_list>
#include <string>
#include <utility>
#include <vector>

#include "../src/check.h"  // NOTE: check.h is for internal use only!
#include "benchmark/benchmark.h"

namespace {

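// Reporter that forwards to ConsoleReporter but also records every Run it
// receives in all_runs_, so main() can compare the reported runs against the
// expectations registered below.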
class TestReporter : public benchmark::ConsoleReporter {
 public:
  bool ReportContext(const Context& context) override {
    return ConsoleReporter::ReportContext(context);
  }

  void ReportRuns(const std::vector<Run>& report) override {
    all_runs_.insert(all_runs_.end(), begin(report), end(report));
    ConsoleReporter::ReportRuns(report);
  }

  TestReporter() {}
  ~TestReporter() override {}

  mutable std::vector<Run> all_runs_;
};

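// Expected outcome of a single benchmark run: its name, whether it should
// have been skipped with an error, and the expected skip message.
// CheckRun() verifies a reported Run against these expectations.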
struct TestCase {
  std::string name;
  bool error_occurred;
  std::string error_message;

  typedef benchmark::BenchmarkReporter::Run Run;

  void CheckRun(Run const& run) const {
    BM_CHECK(name == run.benchmark_name())
        << "expected " << name << " got " << run.benchmark_name();
    BM_CHECK_EQ(error_occurred,
                benchmark::internal::SkippedWithError == run.skipped);
    BM_CHECK(error_message == run.skip_message);
    if (error_occurred) {
      // BM_CHECK(run.iterations == 0);
    } else {
      BM_CHECK(run.iterations != 0);
    }
  }
};

std::vector<TestCase> ExpectedResults;

int AddCases(const std::string& base_name,
             std::initializer_list<TestCase> const& v) {
  for (auto TC : v) {
    TC.name = base_name + TC.name;
    ExpectedResults.push_back(std::move(TC));
  }
  return 0;
}

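// ADD_CASES registers expected results at static-initialization time by
// defining a uniquely named dummy int whose initializer calls AddCases().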
#define CONCAT(x, y) CONCAT2(x, y)
#define CONCAT2(x, y) x##y
#define ADD_CASES(...) int CONCAT(dummy, __LINE__) = AddCases(__VA_ARGS__)

}  // end namespace

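// SkipWithError is called without ever entering a measurement loop; the run
// must still be reported as skipped with the given message.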
void BM_error_no_running(benchmark::State& state) {
  state.SkipWithError("error message");
}
BENCHMARK(BM_error_no_running);
ADD_CASES("BM_error_no_running", {{"", true, "error message"}});

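// SkipWithError before the KeepRunning() loop: the loop body must never
// execute.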
void BM_error_before_running(benchmark::State& state) {
  state.SkipWithError("error message");
  while (state.KeepRunning()) {
    assert(false);
  }
}
BENCHMARK(BM_error_before_running);
ADD_CASES("BM_error_before_running", {{"", true, "error message"}});

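// Same as above, but using KeepRunningBatch().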
void BM_error_before_running_batch(benchmark::State& state) {
  state.SkipWithError("error message");
  while (state.KeepRunningBatch(17)) {
    assert(false);
  }
}
BENCHMARK(BM_error_before_running_batch);
ADD_CASES("BM_error_before_running_batch", {{"", true, "error message"}});

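// Same as above, but using the ranged-for loop.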
void BM_error_before_running_range_for(benchmark::State& state) {
  state.SkipWithError("error message");
  for (auto _ : state) {
    assert(false);
  }
}
BENCHMARK(BM_error_before_running_range_for);
ADD_CASES("BM_error_before_running_range_for", {{"", true, "error message"}});

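// SkipWithError from inside the KeepRunning() loop. Only the lower half of
// the thread indices raise the error, and only for Arg(1), but that is
// enough to mark every /1 run as skipped; the /2 runs complete normally.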
void BM_error_during_running(benchmark::State& state) {
  bool first_iter = true;
  while (state.KeepRunning()) {
    if (state.range(0) == 1 && state.thread_index() <= (state.threads() / 2)) {
      assert(first_iter);
      first_iter = false;
      state.SkipWithError("error message");
    } else {
      state.PauseTiming();
      state.ResumeTiming();
    }
  }
}
BENCHMARK(BM_error_during_running)->Arg(1)->Arg(2)->ThreadRange(1, 8);
ADD_CASES("BM_error_during_running", {{"/1/threads:1", true, "error message"},
                                      {"/1/threads:2", true, "error message"},
                                      {"/1/threads:4", true, "error message"},
                                      {"/1/threads:8", true, "error message"},
                                      {"/2/threads:1", false, ""},
                                      {"/2/threads:2", false, ""},
                                      {"/2/threads:4", false, ""},
                                      {"/2/threads:8", false, ""}});

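// SkipWithError from inside a ranged-for loop, written out in iterator form
// to verify the documented behavior that the loop does not terminate
// automatically once the error is set.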
void BM_error_during_running_ranged_for(benchmark::State& state) {
  assert(state.max_iterations > 3 && "test requires at least a few iterations");
  bool first_iter = true;
  // NOTE: Users should not write the for loop explicitly.
  for (auto It = state.begin(), End = state.end(); It != End; ++It) {
    if (state.range(0) == 1) {
      assert(first_iter);
      first_iter = false;
      (void)first_iter;
      state.SkipWithError("error message");
      // Test the unfortunate but documented behavior that the ranged-for loop
      // doesn't automatically terminate when SkipWithError is set.
      assert(++It != End);
      break;  // Required behavior
    }
  }
}
BENCHMARK(BM_error_during_running_ranged_for)->Arg(1)->Arg(2)->Iterations(5);
ADD_CASES("BM_error_during_running_ranged_for",
          {{"/1/iterations:5", true, "error message"},
           {"/2/iterations:5", false, ""}});

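// SkipWithError after the measurement loop has completed; the lower half of
// the thread indices raise the error, which marks every thread count's run
// as skipped.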
void BM_error_after_running(benchmark::State& state) {
  for (auto _ : state) {
    auto iterations = double(state.iterations()) * double(state.iterations());
    benchmark::DoNotOptimize(iterations);
  }
  if (state.thread_index() <= (state.threads() / 2))
    state.SkipWithError("error message");
}
BENCHMARK(BM_error_after_running)->ThreadRange(1, 8);
ADD_CASES("BM_error_after_running", {{"/threads:1", true, "error message"},
                                     {"/threads:2", true, "error message"},
                                     {"/threads:4", true, "error message"},
                                     {"/threads:8", true, "error message"}});

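// SkipWithError while timing is paused, again only for Arg(1) on the lower
// half of the thread indices; the /2 runs complete normally.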
void BM_error_while_paused(benchmark::State& state) {
  bool first_iter = true;
  while (state.KeepRunning()) {
    if (state.range(0) == 1 && state.thread_index() <= (state.threads() / 2)) {
      assert(first_iter);
      first_iter = false;
      state.PauseTiming();
      state.SkipWithError("error message");
    } else {
      state.PauseTiming();
      state.ResumeTiming();
    }
  }
}
BENCHMARK(BM_error_while_paused)->Arg(1)->Arg(2)->ThreadRange(1, 8);
ADD_CASES("BM_error_while_paused", {{"/1/threads:1", true, "error message"},
                                    {"/1/threads:2", true, "error message"},
                                    {"/1/threads:4", true, "error message"},
                                    {"/1/threads:8", true, "error message"},
                                    {"/2/threads:1", false, ""},
                                    {"/2/threads:2", false, ""},
                                    {"/2/threads:4", false, ""},
                                    {"/2/threads:8", false, ""}});

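// Run all benchmarks through TestReporter and check that the recorded runs
// match the registered expectations, in order and without any extras.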
int main(int argc, char* argv[]) {
  benchmark::Initialize(&argc, argv);

  TestReporter test_reporter;
  benchmark::RunSpecifiedBenchmarks(&test_reporter);

  typedef benchmark::BenchmarkReporter::Run Run;
  auto EB = ExpectedResults.begin();

  for (Run const& run : test_reporter.all_runs_) {
    assert(EB != ExpectedResults.end());
    EB->CheckRun(run);
    ++EB;
  }
  assert(EB == ExpectedResults.end());

  return 0;
}