// Copyright 2005, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

//
// The Google C++ Testing and Mocking Framework (Google Test)

#include "gtest/gtest.h"

#include <ctype.h>
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <wchar.h>
#include <wctype.h>

#include <algorithm>
#include <chrono>  // NOLINT
#include <cmath>
#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <initializer_list>
#include <iomanip>
#include <ios>
#include <iostream>
#include <iterator>
#include <limits>
#include <list>
#include <map>
#include <ostream>  // NOLINT
#include <set>
#include <sstream>
#include <unordered_set>
#include <utility>
#include <vector>

#include "gtest/gtest-assertion-result.h"
#include "gtest/gtest-spi.h"
#include "gtest/internal/custom/gtest.h"
#include "gtest/internal/gtest-port.h"

#ifdef GTEST_OS_LINUX

#include <fcntl.h>   // NOLINT
#include <limits.h>  // NOLINT
#include <sched.h>   // NOLINT
// Declares vsnprintf(). This header is not available on Windows.
#include <strings.h>   // NOLINT
#include <sys/mman.h>  // NOLINT
#include <sys/time.h>  // NOLINT
#include <unistd.h>    // NOLINT

#include <string>

#elif defined(GTEST_OS_ZOS)
#include <sys/time.h>  // NOLINT

// On z/OS we additionally need strings.h for strcasecmp.
#include <strings.h>  // NOLINT

#elif defined(GTEST_OS_WINDOWS_MOBILE)  // We are on Windows CE.

#include <windows.h>  // NOLINT
#undef min

#elif defined(GTEST_OS_WINDOWS)  // We are on Windows proper.

#include <windows.h>  // NOLINT
#undef min

#ifdef _MSC_VER
#include <crtdbg.h>  // NOLINT
#endif

#include <io.h>         // NOLINT
#include <sys/stat.h>   // NOLINT
#include <sys/timeb.h>  // NOLINT
#include <sys/types.h>  // NOLINT

#ifdef GTEST_OS_WINDOWS_MINGW
#include <sys/time.h>  // NOLINT
#endif  // GTEST_OS_WINDOWS_MINGW

#else

// cpplint thinks that the header is already included, so we want to
// silence it.
#include <sys/time.h>  // NOLINT
#include <unistd.h>    // NOLINT

#endif  // GTEST_OS_LINUX

#if GTEST_HAS_EXCEPTIONS
#include <stdexcept>
#endif

#if GTEST_CAN_STREAM_RESULTS_
#include <arpa/inet.h>   // NOLINT
#include <netdb.h>       // NOLINT
#include <sys/socket.h>  // NOLINT
#include <sys/types.h>   // NOLINT
#endif

#include "src/gtest-internal-inl.h"

#ifdef GTEST_OS_WINDOWS
#define vsnprintf _vsnprintf
#endif  // GTEST_OS_WINDOWS

#ifdef GTEST_OS_MAC
#ifndef GTEST_OS_IOS
#include <crt_externs.h>
#endif
#endif

#ifdef GTEST_HAS_ABSL
#include "absl/container/flat_hash_set.h"
#include "absl/debugging/failure_signal_handler.h"
#include "absl/debugging/stacktrace.h"
#include "absl/debugging/symbolize.h"
#include "absl/flags/parse.h"
#include "absl/flags/usage.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_replace.h"
#include "absl/strings/string_view.h"
#include "absl/strings/strip.h"
#endif  // GTEST_HAS_ABSL

// Checks builtin compiler feature |x| while avoiding an extra layer of #ifdefs
// at the callsite.
#if defined(__has_builtin)
#define GTEST_HAS_BUILTIN(x) __has_builtin(x)
#else
#define GTEST_HAS_BUILTIN(x) 0
#endif  // defined(__has_builtin)

namespace testing {

using internal::CountIf;
using internal::ForEach;
using internal::GetElementOr;
using internal::Shuffle;

// Constants.

// A test whose test suite name or test name matches this filter is
// disabled and not run.
static const char kDisableTestFilter[] = "DISABLED_*:*/DISABLED_*";

// A test suite whose name matches this filter is considered a death
// test suite and will be run before test suites whose name doesn't
// match this filter.
static const char kDeathTestSuiteFilter[] = "*DeathTest:*DeathTest/*";

// A test filter that matches everything.
static const char kUniversalFilter[] = "*";

// The default output format.
static const char kDefaultOutputFormat[] = "xml";
// The default output file.
static const char kDefaultOutputFile[] = "test_detail";

// The environment variable name for the test shard index.
static const char kTestShardIndex[] = "GTEST_SHARD_INDEX";
// The environment variable name for the total number of test shards.
static const char kTestTotalShards[] = "GTEST_TOTAL_SHARDS";
// The environment variable name for the test shard status file.
static const char kTestShardStatusFile[] = "GTEST_SHARD_STATUS_FILE";

namespace internal {

// The text used in failure messages to indicate the start of the
// stack trace.
const char kStackTraceMarker[] = "\nStack trace:\n";

// g_help_flag is true if and only if the --help flag or an equivalent form
// is specified on the command line.
bool g_help_flag = false;

#if GTEST_HAS_FILE_SYSTEM
// Utility function to Open File for Writing
static FILE* OpenFileForWriting(const std::string& output_file) {
  FILE* fileout = nullptr;
  FilePath output_file_path(output_file);
  FilePath output_dir(output_file_path.RemoveFileName());

  if (output_dir.CreateDirectoriesRecursively()) {
    fileout = posix::FOpen(output_file.c_str(), "w");
  }
  if (fileout == nullptr) {
    GTEST_LOG_(FATAL) << "Unable to open file \"" << output_file << "\"";
  }
  return fileout;
}
#endif  // GTEST_HAS_FILE_SYSTEM

}  // namespace internal

// Bazel passes in the argument to '--test_filter' via the TESTBRIDGE_TEST_ONLY
// environment variable.
static const char* GetDefaultFilter() {
  const char* const testbridge_test_only =
      internal::posix::GetEnv("TESTBRIDGE_TEST_ONLY");
  if (testbridge_test_only != nullptr) {
    return testbridge_test_only;
  }
  return kUniversalFilter;
}

// Bazel passes in the argument to '--test_runner_fail_fast' via the
// TESTBRIDGE_TEST_RUNNER_FAIL_FAST environment variable.
static bool GetDefaultFailFast() {
  const char* const testbridge_test_runner_fail_fast =
      internal::posix::GetEnv("TESTBRIDGE_TEST_RUNNER_FAIL_FAST");
  if (testbridge_test_runner_fail_fast != nullptr) {
    return strcmp(testbridge_test_runner_fail_fast, "1") == 0;
  }
  return false;
}

}  // namespace testing

GTEST_DEFINE_bool_(
    fail_fast,
    testing::internal::BoolFromGTestEnv("fail_fast",
                                        testing::GetDefaultFailFast()),
    "True if and only if a test failure should stop further test execution.");

GTEST_DEFINE_bool_(
    also_run_disabled_tests,
    testing::internal::BoolFromGTestEnv("also_run_disabled_tests", false),
    "Run disabled tests too, in addition to the tests normally being run.");

GTEST_DEFINE_bool_(
    break_on_failure,
    testing::internal::BoolFromGTestEnv("break_on_failure", false),
    "True if and only if a failed assertion should be a debugger "
    "break-point.");

GTEST_DEFINE_bool_(catch_exceptions,
                   testing::internal::BoolFromGTestEnv("catch_exceptions",
                                                       true),
                   "True if and only if " GTEST_NAME_
                   " should catch exceptions and treat them as test failures.");

GTEST_DEFINE_string_(
    color, testing::internal::StringFromGTestEnv("color", "auto"),
    "Whether to use colors in the output. Valid values: yes, no, "
    "and auto. 'auto' means to use colors if the output is "
    "being sent to a terminal and the TERM environment variable "
    "is set to a terminal type that supports colors.");

GTEST_DEFINE_string_(
    filter,
    testing::internal::StringFromGTestEnv("filter",
                                          testing::GetDefaultFilter()),
    "A colon-separated list of glob (not regex) patterns "
    "for filtering the tests to run, optionally followed by a "
    "'-' and a : separated list of negative patterns (tests to "
    "exclude). A test is run if it matches one of the positive "
    "patterns and does not match any of the negative patterns.");

GTEST_DEFINE_bool_(
    install_failure_signal_handler,
    testing::internal::BoolFromGTestEnv("install_failure_signal_handler",
                                        false),
    "If true and supported on the current platform, " GTEST_NAME_
    " should "
    "install a signal handler that dumps debugging information when fatal "
    "signals are raised.");

GTEST_DEFINE_bool_(list_tests, false, "List all tests without running them.");

// The net priority order after flag processing is thus:
//   --gtest_output command line flag
//   GTEST_OUTPUT environment variable
//   XML_OUTPUT_FILE environment variable
//   ''
GTEST_DEFINE_string_(
    output,
    testing::internal::StringFromGTestEnv(
        "output", testing::internal::OutputFlagAlsoCheckEnvVar().c_str()),
    "A format (defaults to \"xml\" but can be specified to be \"json\"), "
    "optionally followed by a colon and an output file name or directory. "
    "A directory is indicated by a trailing pathname separator. "
    "Examples: \"xml:filename.xml\", \"xml::directoryname/\". "
    "If a directory is specified, output files will be created "
    "within that directory, with file-names based on the test "
    "executable's name and, if necessary, made unique by adding "
    "digits.");

GTEST_DEFINE_bool_(
    brief, testing::internal::BoolFromGTestEnv("brief", false),
    "True if only test failures should be displayed in text output.");

GTEST_DEFINE_bool_(print_time,
                   testing::internal::BoolFromGTestEnv("print_time", true),
                   "True if and only if " GTEST_NAME_
                   " should display elapsed time in text output.");

GTEST_DEFINE_bool_(print_utf8,
                   testing::internal::BoolFromGTestEnv("print_utf8", true),
                   "True if and only if " GTEST_NAME_
                   " prints UTF8 characters as text.");

GTEST_DEFINE_int32_(
    random_seed, testing::internal::Int32FromGTestEnv("random_seed", 0),
    "Random number seed to use when shuffling test orders. Must be in range "
    "[1, 99999], or 0 to use a seed based on the current time.");

GTEST_DEFINE_int32_(
    repeat, testing::internal::Int32FromGTestEnv("repeat", 1),
    "How many times to repeat each test. Specify a negative number "
    "for repeating forever. Useful for shaking out flaky tests.");

GTEST_DEFINE_bool_(
    recreate_environments_when_repeating,
    testing::internal::BoolFromGTestEnv("recreate_environments_when_repeating",
                                        false),
    "Controls whether global test environments are recreated for each repeat "
    "of the tests. If set to false the global test environments are only set "
    "up once, for the first iteration, and only torn down once, for the last. "
    "Useful for shaking out flaky tests with stable, expensive test "
    "environments. If --gtest_repeat is set to a negative number, meaning "
    "there is no last run, the environments will always be recreated to avoid "
    "leaks.");

GTEST_DEFINE_bool_(show_internal_stack_frames, false,
                   "True if and only if " GTEST_NAME_
                   " should include internal stack frames when "
                   "printing test failure stack traces.");

GTEST_DEFINE_bool_(shuffle,
                   testing::internal::BoolFromGTestEnv("shuffle", false),
                   "True if and only if " GTEST_NAME_
                   " should randomize tests' order on every run.");

GTEST_DEFINE_int32_(
    stack_trace_depth,
    testing::internal::Int32FromGTestEnv("stack_trace_depth",
                                         testing::kMaxStackTraceDepth),
    "The maximum number of stack frames to print when an "
    "assertion fails. The valid range is 0 through 100, inclusive.");

GTEST_DEFINE_string_(
    stream_result_to,
    testing::internal::StringFromGTestEnv("stream_result_to", ""),
    "This flag specifies the host name and the port number on which to stream "
    "test results. Example: \"localhost:555\". The flag is effective only on "
    "Linux.");

GTEST_DEFINE_bool_(
    throw_on_failure,
    testing::internal::BoolFromGTestEnv("throw_on_failure", false),
    "When this flag is specified, a failed assertion will throw an exception "
    "if exceptions are enabled or exit the program with a non-zero code "
    "otherwise. For use with an external test framework.");

#if GTEST_USE_OWN_FLAGFILE_FLAG_
GTEST_DEFINE_string_(
    flagfile, testing::internal::StringFromGTestEnv("flagfile", ""),
    "This flag specifies the flagfile to read command-line flags from.");
#endif  // GTEST_USE_OWN_FLAGFILE_FLAG_

namespace testing {
namespace internal {

const uint32_t Random::kMaxRange;

// Generates a random number from [0, range), using a Linear
// Congruential Generator (LCG). Crashes if 'range' is 0 or greater
// than kMaxRange.
uint32_t Random::Generate(uint32_t range) {
  // These constants are the same as are used in glibc's rand(3).
  // Use wider types than necessary to prevent unsigned overflow diagnostics.
  state_ = static_cast<uint32_t>(1103515245ULL * state_ + 12345U) % kMaxRange;

  GTEST_CHECK_(range > 0) << "Cannot generate a number in the range [0, 0).";
  GTEST_CHECK_(range <= kMaxRange)
      << "Generation of a number in [0, " << range << ") was requested, "
      << "but this can only generate numbers in [0, " << kMaxRange << ").";

  // Converting via modulus introduces a bit of downward bias, but
  // it's simple, and a linear congruential generator isn't too good
  // to begin with.
  return state_ % range;
}
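
// Note on the modulus conversion above (illustrative numbers only): if
// kMaxRange were 10 and range were 3, the states 0..9 would map to
// 0,1,2,0,1,2,0,1,2,0, so 0 would come up slightly more often than 1 or 2.
// The same effect, scaled to the real kMaxRange, is the downward bias
// mentioned in the comment above.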

// GTestIsInitialized() returns true if and only if the user has initialized
// Google Test. Useful for catching the user mistake of not initializing
// Google Test before calling RUN_ALL_TESTS().
static bool GTestIsInitialized() { return !GetArgvs().empty(); }

// Iterates over a vector of TestSuites, keeping a running sum of the
// results of calling a given int-returning method on each.
// Returns the sum.
static int SumOverTestSuiteList(const std::vector<TestSuite*>& case_list,
                                int (TestSuite::*method)() const) {
  int sum = 0;
  for (size_t i = 0; i < case_list.size(); i++) {
    sum += (case_list[i]->*method)();
  }
  return sum;
}

// Returns true if and only if the test suite passed.
static bool TestSuitePassed(const TestSuite* test_suite) {
  return test_suite->should_run() && test_suite->Passed();
}

// Returns true if and only if the test suite failed.
static bool TestSuiteFailed(const TestSuite* test_suite) {
  return test_suite->should_run() && test_suite->Failed();
}

// Returns true if and only if test_suite contains at least one test that
// should run.
static bool ShouldRunTestSuite(const TestSuite* test_suite) {
  return test_suite->should_run();
}

// AssertHelper constructor.
AssertHelper::AssertHelper(TestPartResult::Type type, const char* file,
                           int line, const char* message)
    : data_(new AssertHelperData(type, file, line, message)) {}

AssertHelper::~AssertHelper() { delete data_; }

// Message assignment, for assertion streaming support.
void AssertHelper::operator=(const Message& message) const {
  UnitTest::GetInstance()->AddTestPartResult(
      data_->type, data_->file, data_->line,
      AppendUserMessage(data_->message, message),
      UnitTest::GetInstance()->impl()->CurrentOsStackTraceExceptTop(1)
      // Skips the stack frame for this function itself.
  );  // NOLINT
}

namespace {

// When TEST_P is found without a matching INSTANTIATE_TEST_SUITE_P
// to create test cases for it, a synthetic test case is
// inserted to report either an error or a log message.
//
// This configuration bit will likely be removed at some point.
constexpr bool kErrorOnUninstantiatedParameterizedTest = true;
constexpr bool kErrorOnUninstantiatedTypeParameterizedTest = true;

// A test that fails at a given file/line location with a given message.
class FailureTest : public Test {
 public:
  explicit FailureTest(const CodeLocation& loc, std::string error_message,
                       bool as_error)
      : loc_(loc),
        error_message_(std::move(error_message)),
        as_error_(as_error) {}

  void TestBody() override {
    if (as_error_) {
      AssertHelper(TestPartResult::kNonFatalFailure, loc_.file.c_str(),
                   loc_.line, "") = Message() << error_message_;
    } else {
      std::cout << error_message_ << std::endl;
    }
  }

 private:
  const CodeLocation loc_;
  const std::string error_message_;
  const bool as_error_;
};

}  // namespace

std::set<std::string>* GetIgnoredParameterizedTestSuites() {
  return UnitTest::GetInstance()->impl()->ignored_parameterized_test_suites();
}

// Adds a given test suite to the list of those allowed to go uninstantiated.
MarkAsIgnored::MarkAsIgnored(const char* test_suite) {
  GetIgnoredParameterizedTestSuites()->insert(test_suite);
}

// If this parameterized test suite has no instantiations (and that
// has not been marked as okay), emit a test case reporting that.
void InsertSyntheticTestCase(const std::string& name, CodeLocation location,
                             bool has_test_p) {
  const auto& ignored = *GetIgnoredParameterizedTestSuites();
  if (ignored.find(name) != ignored.end()) return;

  const char kMissingInstantiation[] =  //
      " is defined via TEST_P, but never instantiated. None of the test cases "
      "will run. Either no INSTANTIATE_TEST_SUITE_P is provided or the only "
      "ones provided expand to nothing."
      "\n\n"
      "Ideally, TEST_P definitions should only ever be included as part of "
      "binaries that intend to use them. (As opposed to, for example, being "
      "placed in a library that may be linked in to get other utilities.)";

  const char kMissingTestCase[] =  //
      " is instantiated via INSTANTIATE_TEST_SUITE_P, but no tests are "
      "defined via TEST_P. No test cases will run."
      "\n\n"
      "Ideally, INSTANTIATE_TEST_SUITE_P should only ever be invoked from "
      "code that always depends on code that provides TEST_P. Failing to do "
      "so is often an indication of dead code, e.g. the last TEST_P was "
      "removed but the rest got left behind.";

  std::string message =
      "Parameterized test suite " + name +
      (has_test_p ? kMissingInstantiation : kMissingTestCase) +
      "\n\n"
      "To suppress this error for this test suite, insert the following line "
      "(in a non-header) in the namespace it is defined in:"
      "\n\n"
      "GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(" +
      name + ");";

  std::string full_name = "UninstantiatedParameterizedTestSuite<" + name + ">";
  RegisterTest(  //
      "GoogleTestVerification", full_name.c_str(),
      nullptr,  // No type parameter.
      nullptr,  // No value parameter.
      location.file.c_str(), location.line, [message, location] {
        return new FailureTest(location, message,
                               kErrorOnUninstantiatedParameterizedTest);
      });
}

void RegisterTypeParameterizedTestSuite(const char* test_suite_name,
                                        CodeLocation code_location) {
  GetUnitTestImpl()->type_parameterized_test_registry().RegisterTestSuite(
      test_suite_name, code_location);
}

void RegisterTypeParameterizedTestSuiteInstantiation(const char* case_name) {
  GetUnitTestImpl()->type_parameterized_test_registry().RegisterInstantiation(
      case_name);
}

void TypeParameterizedTestSuiteRegistry::RegisterTestSuite(
    const char* test_suite_name, CodeLocation code_location) {
  suites_.emplace(std::string(test_suite_name),
                  TypeParameterizedTestSuiteInfo(code_location));
}

void TypeParameterizedTestSuiteRegistry::RegisterInstantiation(
    const char* test_suite_name) {
  auto it = suites_.find(std::string(test_suite_name));
  if (it != suites_.end()) {
    it->second.instantiated = true;
  } else {
    GTEST_LOG_(ERROR) << "Unknown type parameterized test suite '"
                      << test_suite_name << "'";
  }
}

void TypeParameterizedTestSuiteRegistry::CheckForInstantiations() {
  const auto& ignored = *GetIgnoredParameterizedTestSuites();
  for (const auto& testcase : suites_) {
    if (testcase.second.instantiated) continue;
    if (ignored.find(testcase.first) != ignored.end()) continue;

    std::string message =
        "Type parameterized test suite " + testcase.first +
        " is defined via REGISTER_TYPED_TEST_SUITE_P, but never instantiated "
        "via INSTANTIATE_TYPED_TEST_SUITE_P. None of the test cases will run."
        "\n\n"
        "Ideally, TYPED_TEST_P definitions should only ever be included as "
        "part of binaries that intend to use them. (As opposed to, for "
        "example, being placed in a library that may be linked in to get "
        "other utilities.)"
        "\n\n"
        "To suppress this error for this test suite, insert the following "
        "line (in a non-header) in the namespace it is defined in:"
        "\n\n"
        "GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(" +
        testcase.first + ");";

    std::string full_name =
        "UninstantiatedTypeParameterizedTestSuite<" + testcase.first + ">";
    RegisterTest(  //
        "GoogleTestVerification", full_name.c_str(),
        nullptr,  // No type parameter.
        nullptr,  // No value parameter.
        testcase.second.code_location.file.c_str(),
        testcase.second.code_location.line, [message, testcase] {
          return new FailureTest(testcase.second.code_location, message,
                                 kErrorOnUninstantiatedTypeParameterizedTest);
        });
  }
}

// A copy of all command line arguments. Set by InitGoogleTest().
static ::std::vector<std::string> g_argvs;

::std::vector<std::string> GetArgvs() {
#if defined(GTEST_CUSTOM_GET_ARGVS_)
  // GTEST_CUSTOM_GET_ARGVS_() may return a container of std::string or
  // ::string. This code converts it to the appropriate type.
  const auto& custom = GTEST_CUSTOM_GET_ARGVS_();
  return ::std::vector<std::string>(custom.begin(), custom.end());
#else   // defined(GTEST_CUSTOM_GET_ARGVS_)
  return g_argvs;
#endif  // defined(GTEST_CUSTOM_GET_ARGVS_)
}

#if GTEST_HAS_FILE_SYSTEM
// Returns the current application's name, removing directory path if that
// is present.
FilePath GetCurrentExecutableName() {
  FilePath result;

#if defined(GTEST_OS_WINDOWS) || defined(GTEST_OS_OS2)
  result.Set(FilePath(GetArgvs()[0]).RemoveExtension("exe"));
#else
  result.Set(FilePath(GetArgvs()[0]));
#endif  // GTEST_OS_WINDOWS

  return result.RemoveDirectoryName();
}
#endif  // GTEST_HAS_FILE_SYSTEM

// Functions for processing the gtest_output flag.

// Returns the output format, or "" for normal printed output.
std::string UnitTestOptions::GetOutputFormat() {
  std::string s = GTEST_FLAG_GET(output);
  const char* const gtest_output_flag = s.c_str();
  const char* const colon = strchr(gtest_output_flag, ':');
  return (colon == nullptr)
             ? std::string(gtest_output_flag)
             : std::string(gtest_output_flag,
                           static_cast<size_t>(colon - gtest_output_flag));
}
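
// For example (illustrative only): --gtest_output="json:report.json" yields
// "json", --gtest_output="xml" yields "xml", and an empty flag yields "".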

#if GTEST_HAS_FILE_SYSTEM
// Returns the name of the requested output file, or the default if none
// was explicitly specified.
std::string UnitTestOptions::GetAbsolutePathToOutputFile() {
  std::string s = GTEST_FLAG_GET(output);
  const char* const gtest_output_flag = s.c_str();

  std::string format = GetOutputFormat();
  if (format.empty()) format = std::string(kDefaultOutputFormat);

  const char* const colon = strchr(gtest_output_flag, ':');
  if (colon == nullptr)
    return internal::FilePath::MakeFileName(
               internal::FilePath(
                   UnitTest::GetInstance()->original_working_dir()),
               internal::FilePath(kDefaultOutputFile), 0, format.c_str())
        .string();

  internal::FilePath output_name(colon + 1);
  if (!output_name.IsAbsolutePath())
    output_name = internal::FilePath::ConcatPaths(
        internal::FilePath(UnitTest::GetInstance()->original_working_dir()),
        internal::FilePath(colon + 1));

  if (!output_name.IsDirectory()) return output_name.string();

  internal::FilePath result(internal::FilePath::GenerateUniqueFileName(
      output_name, internal::GetCurrentExecutableName(),
      GetOutputFormat().c_str()));
  return result.string();
}
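
// For example (illustrative only): "xml:reports/" names a directory, so a
// unique file such as reports/<test executable name>.xml is generated inside
// it, while "xml:report.xml" is used as-is, made absolute relative to the
// original working directory if needed.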
#endif  // GTEST_HAS_FILE_SYSTEM

// Returns true if and only if the wildcard pattern matches the string. Each
// pattern consists of regular characters, single-character wildcards (?), and
// multi-character wildcards (*).
//
// This function implements a linear-time string globbing algorithm based on
// https://research.swtch.com/glob.
static bool PatternMatchesString(const std::string& name_str,
                                 const char* pattern,
                                 const char* pattern_end) {
  const char* name = name_str.c_str();
  const char* const name_begin = name;
  const char* const name_end = name + name_str.size();

  const char* pattern_next = pattern;
  const char* name_next = name;

  while (pattern < pattern_end || name < name_end) {
    if (pattern < pattern_end) {
      switch (*pattern) {
        default:  // Match an ordinary character.
          if (name < name_end && *name == *pattern) {
            ++pattern;
            ++name;
            continue;
          }
          break;
        case '?':  // Match any single character.
          if (name < name_end) {
            ++pattern;
            ++name;
            continue;
          }
          break;
        case '*':
          // Match zero or more characters. Start by skipping over the wildcard
          // and matching zero characters from name. If that fails, restart and
          // match one more character than the last attempt.
          pattern_next = pattern;
          name_next = name + 1;
          ++pattern;
          continue;
      }
    }
    // Failed to match a character. Restart if possible.
    if (name_begin < name_next && name_next <= name_end) {
      pattern = pattern_next;
      name = name_next;
      continue;
    }
    return false;
  }
  return true;
}
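
// Illustrative walk-through (not part of the library): matching the pattern
// "*Test" against "FooTest" first lets '*' consume zero characters, which
// fails at 'T' vs 'F'; each failure restarts from the saved positions with
// '*' consuming one more character, so after three restarts '*' has consumed
// "Foo" and the remaining "Test" matches literally.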

namespace {

bool IsGlobPattern(const std::string& pattern) {
  return std::any_of(pattern.begin(), pattern.end(),
                     [](const char c) { return c == '?' || c == '*'; });
}

class UnitTestFilter {
 public:
  UnitTestFilter() = default;

  // Constructs a filter from a string of patterns separated by `:`.
  explicit UnitTestFilter(const std::string& filter) {
    // By design "" filter matches "" string.
    std::vector<std::string> all_patterns;
    SplitString(filter, ':', &all_patterns);
    const auto exact_match_patterns_begin = std::partition(
        all_patterns.begin(), all_patterns.end(), &IsGlobPattern);

    glob_patterns_.reserve(static_cast<size_t>(
        std::distance(all_patterns.begin(), exact_match_patterns_begin)));
    std::move(all_patterns.begin(), exact_match_patterns_begin,
              std::inserter(glob_patterns_, glob_patterns_.begin()));
    std::move(
        exact_match_patterns_begin, all_patterns.end(),
        std::inserter(exact_match_patterns_, exact_match_patterns_.begin()));
  }

  // Returns true if and only if name matches at least one of the patterns in
  // the filter.
  bool MatchesName(const std::string& name) const {
    return exact_match_patterns_.count(name) > 0 ||
           std::any_of(glob_patterns_.begin(), glob_patterns_.end(),
                       [&name](const std::string& pattern) {
                         return PatternMatchesString(
                             name, pattern.c_str(),
                             pattern.c_str() + pattern.size());
                       });
  }

 private:
  std::vector<std::string> glob_patterns_;
  std::unordered_set<std::string> exact_match_patterns_;
};
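
// Usage sketch (illustrative only):
//   UnitTestFilter filter("FooTest.*:BarTest.Baz");
//   filter.MatchesName("FooTest.Works");  // true, via the glob patterns
//   filter.MatchesName("BarTest.Baz");    // true, via the exact-match set
//   filter.MatchesName("BazTest.Qux");    // false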

class PositiveAndNegativeUnitTestFilter {
 public:
  // Constructs a positive and a negative filter from a string. The string
  // contains a positive filter optionally followed by a '-' character and a
  // negative filter. In case only a negative filter is provided the positive
  // filter will be assumed "*".
  // A filter is a list of patterns separated by ':'.
  explicit PositiveAndNegativeUnitTestFilter(const std::string& filter) {
    std::vector<std::string> positive_and_negative_filters;

    // NOTE: `SplitString` always returns a non-empty container.
    SplitString(filter, '-', &positive_and_negative_filters);
    const auto& positive_filter = positive_and_negative_filters.front();

    if (positive_and_negative_filters.size() > 1) {
      positive_filter_ = UnitTestFilter(
          positive_filter.empty() ? kUniversalFilter : positive_filter);

      // TODO(b/214626361): Fail on multiple '-' characters
      // For the moment to preserve old behavior we concatenate the rest of the
      // string parts with `-` as separator to generate the negative filter.
      auto negative_filter_string = positive_and_negative_filters[1];
      for (std::size_t i = 2; i < positive_and_negative_filters.size(); i++)
        negative_filter_string =
            negative_filter_string + '-' + positive_and_negative_filters[i];
      negative_filter_ = UnitTestFilter(negative_filter_string);
    } else {
      // In case we don't have a negative filter and positive filter is ""
      // we do not use kUniversalFilter by design as opposed to when we have a
      // negative filter.
      positive_filter_ = UnitTestFilter(positive_filter);
    }
  }

  // Returns true if and only if the full test name (generated by joining the
  // test suite name and the test name with a '.' character) matches the
  // positive filter and does not match the negative filter.
  bool MatchesTest(const std::string& test_suite_name,
                   const std::string& test_name) const {
    return MatchesName(test_suite_name + "." + test_name);
  }

  // Returns true if and only if name matches the positive filter and does not
  // match the negative filter.
  bool MatchesName(const std::string& name) const {
    return positive_filter_.MatchesName(name) &&
           !negative_filter_.MatchesName(name);
  }

 private:
  UnitTestFilter positive_filter_;
  UnitTestFilter negative_filter_;
};
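
// Usage sketch (illustrative only):
//   PositiveAndNegativeUnitTestFilter f("FooTest.*-FooTest.Flaky");
//   f.MatchesTest("FooTest", "Works");  // true
//   f.MatchesTest("FooTest", "Flaky");  // false, matches the negative filter
//   // With only a negative filter, the positive part defaults to "*":
//   PositiveAndNegativeUnitTestFilter g("-*Slow*");
//   g.MatchesName("FooTest.Works");  // true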
}  // namespace

bool UnitTestOptions::MatchesFilter(const std::string& name_str,
                                    const char* filter) {
  return UnitTestFilter(filter).MatchesName(name_str);
}

// Returns true if and only if the user-specified filter matches the test
// suite name and the test name.
bool UnitTestOptions::FilterMatchesTest(const std::string& test_suite_name,
                                        const std::string& test_name) {
  // Split --gtest_filter at '-', if there is one, to separate into
  // positive filter and negative filter portions
  return PositiveAndNegativeUnitTestFilter(GTEST_FLAG_GET(filter))
      .MatchesTest(test_suite_name, test_name);
}

#if GTEST_HAS_SEH
static std::string FormatSehExceptionMessage(DWORD exception_code,
                                             const char* location) {
  Message message;
  message << "SEH exception with code 0x" << std::setbase(16) << exception_code
          << std::setbase(10) << " thrown in " << location << ".";
  return message.GetString();
}

int UnitTestOptions::GTestProcessSEH(DWORD seh_code, const char* location) {
  // Google Test should handle a SEH exception if:
  //   1. the user wants it to, AND
  //   2. this is not a breakpoint exception or stack overflow, AND
  //   3. this is not a C++ exception (VC++ implements them via SEH,
  //      apparently).
  //
  // SEH exception code for C++ exceptions.
  // (see http://support.microsoft.com/kb/185294 for more information).
  const DWORD kCxxExceptionCode = 0xe06d7363;

  if (!GTEST_FLAG_GET(catch_exceptions) || seh_code == kCxxExceptionCode ||
      seh_code == EXCEPTION_BREAKPOINT ||
      seh_code == EXCEPTION_STACK_OVERFLOW) {
    return EXCEPTION_CONTINUE_SEARCH;  // Don't handle these exceptions
  }

  internal::ReportFailureInUnknownLocation(
      TestPartResult::kFatalFailure,
      FormatSehExceptionMessage(seh_code, location) +
          "\n"
          "Stack trace:\n" +
          ::testing::internal::GetCurrentOsStackTraceExceptTop(1));

  return EXCEPTION_EXECUTE_HANDLER;
}
#endif  // GTEST_HAS_SEH

}  // namespace internal

// The c'tor sets this object as the test part result reporter used by
// Google Test. The 'result' parameter specifies where to report the
// results. Intercepts only failures from the current thread.
ScopedFakeTestPartResultReporter::ScopedFakeTestPartResultReporter(
    TestPartResultArray* result)
    : intercept_mode_(INTERCEPT_ONLY_CURRENT_THREAD), result_(result) {
  Init();
}

// The c'tor sets this object as the test part result reporter used by
// Google Test. The 'result' parameter specifies where to report the
// results.
ScopedFakeTestPartResultReporter::ScopedFakeTestPartResultReporter(
    InterceptMode intercept_mode, TestPartResultArray* result)
    : intercept_mode_(intercept_mode), result_(result) {
  Init();
}

void ScopedFakeTestPartResultReporter::Init() {
  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
  if (intercept_mode_ == INTERCEPT_ALL_THREADS) {
    old_reporter_ = impl->GetGlobalTestPartResultReporter();
    impl->SetGlobalTestPartResultReporter(this);
  } else {
    old_reporter_ = impl->GetTestPartResultReporterForCurrentThread();
    impl->SetTestPartResultReporterForCurrentThread(this);
  }
}

// The d'tor restores the test part result reporter used by Google Test
// before.
ScopedFakeTestPartResultReporter::~ScopedFakeTestPartResultReporter() {
  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
  if (intercept_mode_ == INTERCEPT_ALL_THREADS) {
    impl->SetGlobalTestPartResultReporter(old_reporter_);
  } else {
    impl->SetTestPartResultReporterForCurrentThread(old_reporter_);
  }
}

// Increments the test part result count and remembers the result.
// This method is from the TestPartResultReporterInterface interface.
void ScopedFakeTestPartResultReporter::ReportTestPartResult(
    const TestPartResult& result) {
  result_->Append(result);
}

namespace internal {

// Returns the type ID of ::testing::Test. We should always call this
// instead of GetTypeId< ::testing::Test>() to get the type ID of
// testing::Test. This is to work around a suspected linker bug when
// using Google Test as a framework on Mac OS X. The bug causes
// GetTypeId< ::testing::Test>() to return different values depending
// on whether the call is from the Google Test framework itself or
// from user test code. GetTestTypeId() is guaranteed to always
// return the same value, as it always calls GetTypeId<>() from the
// gtest.cc, which is within the Google Test framework.
TypeId GetTestTypeId() { return GetTypeId<Test>(); }

// The value of GetTestTypeId() as seen from within the Google Test
// library. This is solely for testing GetTestTypeId().
extern const TypeId kTestTypeIdInGoogleTest = GetTestTypeId();

// This predicate-formatter checks that 'results' contains a test part
// failure of the given type and that the failure message contains the
// given substring.
static AssertionResult HasOneFailure(const char* /* results_expr */,
                                     const char* /* type_expr */,
                                     const char* /* substr_expr */,
                                     const TestPartResultArray& results,
                                     TestPartResult::Type type,
                                     const std::string& substr) {
  const std::string expected(type == TestPartResult::kFatalFailure
                                 ? "1 fatal failure"
                                 : "1 non-fatal failure");
  Message msg;
  if (results.size() != 1) {
    msg << "Expected: " << expected << "\n"
        << "  Actual: " << results.size() << " failures";
    for (int i = 0; i < results.size(); i++) {
      msg << "\n" << results.GetTestPartResult(i);
    }
    return AssertionFailure() << msg;
  }

  const TestPartResult& r = results.GetTestPartResult(0);
  if (r.type() != type) {
    return AssertionFailure() << "Expected: " << expected << "\n"
                              << "  Actual:\n"
                              << r;
  }

  if (strstr(r.message(), substr.c_str()) == nullptr) {
    return AssertionFailure()
           << "Expected: " << expected << " containing \"" << substr << "\"\n"
           << "  Actual:\n"
           << r;
  }

  return AssertionSuccess();
}

// The constructor of SingleFailureChecker remembers where to look up
// test part results, what type of failure we expect, and what
// substring the failure message should contain.
SingleFailureChecker::SingleFailureChecker(const TestPartResultArray* results,
                                           TestPartResult::Type type,
                                           const std::string& substr)
    : results_(results), type_(type), substr_(substr) {}

// The destructor of SingleFailureChecker verifies that the given
// TestPartResultArray contains exactly one failure that has the given
// type and contains the given substring. If that's not the case, a
// non-fatal failure will be generated.
SingleFailureChecker::~SingleFailureChecker() {
  EXPECT_PRED_FORMAT3(HasOneFailure, *results_, type_, substr_);
}
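
// Usage sketch (illustrative; see the EXPECT_FATAL_FAILURE family of macros
// in gtest-spi.h for the real call sites):
//   TestPartResultArray failures;
//   SingleFailureChecker checker(&failures, TestPartResult::kFatalFailure,
//                                "expected substring");
//   {
//     ScopedFakeTestPartResultReporter reporter(
//         ScopedFakeTestPartResultReporter::INTERCEPT_ONLY_CURRENT_THREAD,
//         &failures);
//     // ... run code that should produce exactly one fatal failure ...
//   }
//   // 'checker' verifies the recorded failure when it goes out of scope.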

DefaultGlobalTestPartResultReporter::DefaultGlobalTestPartResultReporter(
    UnitTestImpl* unit_test)
    : unit_test_(unit_test) {}

void DefaultGlobalTestPartResultReporter::ReportTestPartResult(
    const TestPartResult& result) {
  unit_test_->current_test_result()->AddTestPartResult(result);
  unit_test_->listeners()->repeater()->OnTestPartResult(result);
}

DefaultPerThreadTestPartResultReporter::DefaultPerThreadTestPartResultReporter(
    UnitTestImpl* unit_test)
    : unit_test_(unit_test) {}

void DefaultPerThreadTestPartResultReporter::ReportTestPartResult(
    const TestPartResult& result) {
  unit_test_->GetGlobalTestPartResultReporter()->ReportTestPartResult(result);
}

// Returns the global test part result reporter.
TestPartResultReporterInterface*
UnitTestImpl::GetGlobalTestPartResultReporter() {
  internal::MutexLock lock(&global_test_part_result_reporter_mutex_);
  return global_test_part_result_reporter_;
}

// Sets the global test part result reporter.
void UnitTestImpl::SetGlobalTestPartResultReporter(
    TestPartResultReporterInterface* reporter) {
  internal::MutexLock lock(&global_test_part_result_reporter_mutex_);
  global_test_part_result_reporter_ = reporter;
}

// Returns the test part result reporter for the current thread.
TestPartResultReporterInterface*
UnitTestImpl::GetTestPartResultReporterForCurrentThread() {
  return per_thread_test_part_result_reporter_.get();
}

// Sets the test part result reporter for the current thread.
void UnitTestImpl::SetTestPartResultReporterForCurrentThread(
    TestPartResultReporterInterface* reporter) {
  per_thread_test_part_result_reporter_.set(reporter);
}

// Gets the number of successful test suites.
int UnitTestImpl::successful_test_suite_count() const {
  return CountIf(test_suites_, TestSuitePassed);
}

// Gets the number of failed test suites.
int UnitTestImpl::failed_test_suite_count() const {
  return CountIf(test_suites_, TestSuiteFailed);
}

// Gets the number of all test suites.
int UnitTestImpl::total_test_suite_count() const {
  return static_cast<int>(test_suites_.size());
}

// Gets the number of all test suites that contain at least one test
// that should run.
int UnitTestImpl::test_suite_to_run_count() const {
  return CountIf(test_suites_, ShouldRunTestSuite);
}

// Gets the number of successful tests.
int UnitTestImpl::successful_test_count() const {
  return SumOverTestSuiteList(test_suites_, &TestSuite::successful_test_count);
}

// Gets the number of skipped tests.
int UnitTestImpl::skipped_test_count() const {
  return SumOverTestSuiteList(test_suites_, &TestSuite::skipped_test_count);
}

// Gets the number of failed tests.
int UnitTestImpl::failed_test_count() const {
  return SumOverTestSuiteList(test_suites_, &TestSuite::failed_test_count);
}

// Gets the number of disabled tests that will be reported in the XML report.
int UnitTestImpl::reportable_disabled_test_count() const {
  return SumOverTestSuiteList(test_suites_,
                              &TestSuite::reportable_disabled_test_count);
}

// Gets the number of disabled tests.
int UnitTestImpl::disabled_test_count() const {
  return SumOverTestSuiteList(test_suites_, &TestSuite::disabled_test_count);
}

// Gets the number of tests to be printed in the XML report.
int UnitTestImpl::reportable_test_count() const {
  return SumOverTestSuiteList(test_suites_, &TestSuite::reportable_test_count);
}

// Gets the number of all tests.
int UnitTestImpl::total_test_count() const {
  return SumOverTestSuiteList(test_suites_, &TestSuite::total_test_count);
}

// Gets the number of tests that should run.
int UnitTestImpl::test_to_run_count() const {
  return SumOverTestSuiteList(test_suites_, &TestSuite::test_to_run_count);
}

// Returns the current OS stack trace as an std::string.
//
// The maximum number of stack frames to be included is specified by
// the gtest_stack_trace_depth flag. The skip_count parameter
// specifies the number of top frames to be skipped, which doesn't
// count against the number of frames to be included.
//
// For example, if Foo() calls Bar(), which in turn calls
// CurrentOsStackTraceExceptTop(1), Foo() will be included in the
// trace but Bar() and CurrentOsStackTraceExceptTop() won't.
std::string UnitTestImpl::CurrentOsStackTraceExceptTop(int skip_count) {
  return os_stack_trace_getter()->CurrentStackTrace(
      static_cast<int>(GTEST_FLAG_GET(stack_trace_depth)), skip_count + 1
      // Skips the user-specified number of frames plus this function
      // itself.
  );  // NOLINT
}

// A helper class for measuring elapsed times.
class Timer {
 public:
  Timer() : start_(clock::now()) {}

  // Return time elapsed in milliseconds since the timer was created.
  TimeInMillis Elapsed() {
    return std::chrono::duration_cast<std::chrono::milliseconds>(clock::now() -
                                                                 start_)
        .count();
  }

 private:
  // Fall back to the system_clock when building with newlib on a system
  // without a monotonic clock.
#if defined(_NEWLIB_VERSION) && !defined(CLOCK_MONOTONIC)
  using clock = std::chrono::system_clock;
#else
  using clock = std::chrono::steady_clock;
#endif
  clock::time_point start_;
};

// Returns a timestamp as milliseconds since the epoch. Note that this time
// may jump around, subject to adjustments by the system; to measure elapsed
// time, use Timer instead.
TimeInMillis GetTimeInMillis() {
  return std::chrono::duration_cast<std::chrono::milliseconds>(
             std::chrono::system_clock::now() -
             std::chrono::system_clock::from_time_t(0))
      .count();
}

// Utilities

// class String.

#ifdef GTEST_OS_WINDOWS_MOBILE
// Creates a UTF-16 wide string from the given ANSI string, allocating
// memory using new. The caller is responsible for deleting the return
// value using delete[]. Returns the wide string, or NULL if the
// input is NULL.
LPCWSTR String::AnsiToUtf16(const char* ansi) {
  if (!ansi) return nullptr;
  const int length = strlen(ansi);
  const int unicode_length =
      MultiByteToWideChar(CP_ACP, 0, ansi, length, nullptr, 0);
  WCHAR* unicode = new WCHAR[unicode_length + 1];
  MultiByteToWideChar(CP_ACP, 0, ansi, length, unicode, unicode_length);
  unicode[unicode_length] = 0;
  return unicode;
}

// Creates an ANSI string from the given wide string, allocating
// memory using new. The caller is responsible for deleting the return
// value using delete[]. Returns the ANSI string, or NULL if the
// input is NULL.
const char* String::Utf16ToAnsi(LPCWSTR utf16_str) {
  if (!utf16_str) return nullptr;
  const int ansi_length = WideCharToMultiByte(CP_ACP, 0, utf16_str, -1, nullptr,
                                              0, nullptr, nullptr);
  char* ansi = new char[ansi_length + 1];
  WideCharToMultiByte(CP_ACP, 0, utf16_str, -1, ansi, ansi_length, nullptr,
                      nullptr);
  ansi[ansi_length] = 0;
  return ansi;
}

#endif  // GTEST_OS_WINDOWS_MOBILE

// Compares two C strings. Returns true if and only if they have the same
// content.
//
// Unlike strcmp(), this function can handle NULL argument(s). A NULL
// C string is considered different from any non-NULL C string,
// including the empty string.
bool String::CStringEquals(const char* lhs, const char* rhs) {
  if (lhs == nullptr) return rhs == nullptr;

  if (rhs == nullptr) return false;

  return strcmp(lhs, rhs) == 0;
}

#if GTEST_HAS_STD_WSTRING

// Converts an array of wide chars to a narrow string using the UTF-8
// encoding, and streams the result to the given Message object.
static void StreamWideCharsToMessage(const wchar_t* wstr, size_t length,
                                     Message* msg) {
  for (size_t i = 0; i != length;) {  // NOLINT
    if (wstr[i] != L'\0') {
      *msg << WideStringToUtf8(wstr + i, static_cast<int>(length - i));
      while (i != length && wstr[i] != L'\0') i++;
    } else {
      *msg << '\0';
      i++;
    }
  }
}

#endif  // GTEST_HAS_STD_WSTRING

void SplitString(const ::std::string& str, char delimiter,
                 ::std::vector< ::std::string>* dest) {
  ::std::vector< ::std::string> parsed;
  ::std::string::size_type pos = 0;
  while (::testing::internal::AlwaysTrue()) {
    const ::std::string::size_type colon = str.find(delimiter, pos);
    if (colon == ::std::string::npos) {
      parsed.push_back(str.substr(pos));
      break;
    } else {
      parsed.push_back(str.substr(pos, colon - pos));
      pos = colon + 1;
    }
  }
  dest->swap(parsed);
}
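
// For example (illustrative only), SplitString("a::b", ':', &dest) produces
// {"a", "", "b"}, and SplitString("", ':', &dest) produces {""}; the result
// is never empty.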
| 1267 | |
| 1268 | } // namespace internal |
| 1269 | |
| 1270 | // Constructs an empty Message. |
| 1271 | // We allocate the stringstream separately because otherwise each use of |
| 1272 | // ASSERT/EXPECT in a procedure adds over 200 bytes to the procedure's |
| 1273 | // stack frame leading to huge stack frames in some cases; gcc does not reuse |
| 1274 | // the stack space. |
| 1275 | Message::Message() : ss_(new ::std::stringstream) { |
| 1276 | // By default, we want there to be enough precision when printing |
| 1277 | // a double to a Message. |
| 1278 | *ss_ << std::setprecision(std::numeric_limits<double>::digits10 + 2); |
| 1279 | } |
| 1280 | |
| 1281 | // These two overloads allow streaming a wide C string to a Message |
| 1282 | // using the UTF-8 encoding. |
| 1283 | Message& Message::operator<<(const wchar_t* wide_c_str) { |
| 1284 | return *this << internal::String::ShowWideCString(wide_c_str); |
| 1285 | } |
| 1286 | Message& Message::operator<<(wchar_t* wide_c_str) { |
| 1287 | return *this << internal::String::ShowWideCString(wide_c_str); |
| 1288 | } |
| 1289 | |
| 1290 | #if GTEST_HAS_STD_WSTRING |
| 1291 | // Converts the given wide string to a narrow string using the UTF-8 |
| 1292 | // encoding, and streams the result to this Message object. |
| 1293 | Message& Message::operator<<(const ::std::wstring& wstr) { |
  internal::StreamWideCharsToMessage(wstr.c_str(), wstr.length(), this);
| 1295 | return *this; |
| 1296 | } |
| 1297 | #endif // GTEST_HAS_STD_WSTRING |
| 1298 | |
| 1299 | // Gets the text streamed to this object so far as an std::string. |
| 1300 | // Each '\0' character in the buffer is replaced with "\\0". |
| 1301 | std::string Message::GetString() const { |
  return internal::StringStreamToString(ss_.get());
| 1303 | } |
| 1304 | |
| 1305 | namespace internal { |
| 1306 | |
| 1307 | namespace edit_distance { |
| 1308 | std::vector<EditType> CalculateOptimalEdits(const std::vector<size_t>& left, |
| 1309 | const std::vector<size_t>& right) { |
| 1310 | std::vector<std::vector<double> > costs( |
| 1311 | left.size() + 1, std::vector<double>(right.size() + 1)); |
| 1312 | std::vector<std::vector<EditType> > best_move( |
| 1313 | left.size() + 1, std::vector<EditType>(right.size() + 1)); |
| 1314 | |
| 1315 | // Populate for empty right. |
| 1316 | for (size_t l_i = 0; l_i < costs.size(); ++l_i) { |
| 1317 | costs[l_i][0] = static_cast<double>(l_i); |
| 1318 | best_move[l_i][0] = kRemove; |
| 1319 | } |
| 1320 | // Populate for empty left. |
| 1321 | for (size_t r_i = 1; r_i < costs[0].size(); ++r_i) { |
| 1322 | costs[0][r_i] = static_cast<double>(r_i); |
| 1323 | best_move[0][r_i] = kAdd; |
| 1324 | } |
| 1325 | |
| 1326 | for (size_t l_i = 0; l_i < left.size(); ++l_i) { |
| 1327 | for (size_t r_i = 0; r_i < right.size(); ++r_i) { |
| 1328 | if (left[l_i] == right[r_i]) { |
| 1329 | // Found a match. Consume it. |
| 1330 | costs[l_i + 1][r_i + 1] = costs[l_i][r_i]; |
| 1331 | best_move[l_i + 1][r_i + 1] = kMatch; |
| 1332 | continue; |
| 1333 | } |
| 1334 | |
| 1335 | const double add = costs[l_i + 1][r_i]; |
| 1336 | const double remove = costs[l_i][r_i + 1]; |
| 1337 | const double replace = costs[l_i][r_i]; |
| 1338 | if (add < remove && add < replace) { |
| 1339 | costs[l_i + 1][r_i + 1] = add + 1; |
| 1340 | best_move[l_i + 1][r_i + 1] = kAdd; |
| 1341 | } else if (remove < add && remove < replace) { |
| 1342 | costs[l_i + 1][r_i + 1] = remove + 1; |
| 1343 | best_move[l_i + 1][r_i + 1] = kRemove; |
| 1344 | } else { |
| 1345 | // We make replace a little more expensive than add/remove to lower |
| 1346 | // their priority. |
| 1347 | costs[l_i + 1][r_i + 1] = replace + 1.00001; |
| 1348 | best_move[l_i + 1][r_i + 1] = kReplace; |
| 1349 | } |
| 1350 | } |
| 1351 | } |
| 1352 | |
| 1353 | // Reconstruct the best path. We do it in reverse order. |
| 1354 | std::vector<EditType> best_path; |
| 1355 | for (size_t l_i = left.size(), r_i = right.size(); l_i > 0 || r_i > 0;) { |
| 1356 | EditType move = best_move[l_i][r_i]; |
    best_path.push_back(move);
| 1358 | l_i -= move != kAdd; |
| 1359 | r_i -= move != kRemove; |
| 1360 | } |
  std::reverse(best_path.begin(), best_path.end());
| 1362 | return best_path; |
| 1363 | } |
| 1364 | |
| 1365 | namespace { |
| 1366 | |
| 1367 | // Helper class to convert string into ids with deduplication. |
| 1368 | class InternalStrings { |
| 1369 | public: |
| 1370 | size_t GetId(const std::string& str) { |
    IdMap::iterator it = ids_.find(str);
| 1372 | if (it != ids_.end()) return it->second; |
| 1373 | size_t id = ids_.size(); |
| 1374 | return ids_[str] = id; |
| 1375 | } |
| 1376 | |
| 1377 | private: |
| 1378 | typedef std::map<std::string, size_t> IdMap; |
| 1379 | IdMap ids_; |
| 1380 | }; |
| 1381 | |
| 1382 | } // namespace |
| 1383 | |
| 1384 | std::vector<EditType> CalculateOptimalEdits( |
| 1385 | const std::vector<std::string>& left, |
| 1386 | const std::vector<std::string>& right) { |
| 1387 | std::vector<size_t> left_ids, right_ids; |
| 1388 | { |
| 1389 | InternalStrings intern_table; |
| 1390 | for (size_t i = 0; i < left.size(); ++i) { |
      left_ids.push_back(intern_table.GetId(left[i]));
| 1392 | } |
| 1393 | for (size_t i = 0; i < right.size(); ++i) { |
      right_ids.push_back(intern_table.GetId(right[i]));
| 1395 | } |
| 1396 | } |
  return CalculateOptimalEdits(left_ids, right_ids);
| 1398 | } |
| 1399 | |
| 1400 | namespace { |
| 1401 | |
| 1402 | // Helper class that holds the state for one hunk and prints it out to the |
| 1403 | // stream. |
| 1404 | // It reorders adds/removes when possible to group all removes before all |
// adds. It also adds the hunk header before printing to the stream.
| 1406 | class Hunk { |
| 1407 | public: |
| 1408 | Hunk(size_t left_start, size_t right_start) |
| 1409 | : left_start_(left_start), |
| 1410 | right_start_(right_start), |
| 1411 | adds_(), |
| 1412 | removes_(), |
| 1413 | common_() {} |
| 1414 | |
| 1415 | void PushLine(char edit, const char* line) { |
| 1416 | switch (edit) { |
| 1417 | case ' ': |
| 1418 | ++common_; |
| 1419 | FlushEdits(); |
        hunk_.push_back(std::make_pair(' ', line));
| 1421 | break; |
| 1422 | case '-': |
| 1423 | ++removes_; |
        hunk_removes_.push_back(std::make_pair('-', line));
| 1425 | break; |
| 1426 | case '+': |
| 1427 | ++adds_; |
        hunk_adds_.push_back(std::make_pair('+', line));
| 1429 | break; |
| 1430 | } |
| 1431 | } |
| 1432 | |
| 1433 | void PrintTo(std::ostream* os) { |
    PrintHeader(os);
| 1435 | FlushEdits(); |
| 1436 | for (std::list<std::pair<char, const char*> >::const_iterator it = |
| 1437 | hunk_.begin(); |
| 1438 | it != hunk_.end(); ++it) { |
| 1439 | *os << it->first << it->second << "\n" ; |
| 1440 | } |
| 1441 | } |
| 1442 | |
| 1443 | bool has_edits() const { return adds_ || removes_; } |
| 1444 | |
| 1445 | private: |
| 1446 | void FlushEdits() { |
    hunk_.splice(hunk_.end(), hunk_removes_);
    hunk_.splice(hunk_.end(), hunk_adds_);
| 1449 | } |
| 1450 | |
| 1451 | // Print a unified diff header for one hunk. |
| 1452 | // The format is |
| 1453 | // "@@ -<left_start>,<left_length> +<right_start>,<right_length> @@" |
| 1454 | // where the left/right parts are omitted if unnecessary. |
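  // For example, a hunk with no context lines that removes two lines starting
  // at left line 3 and adds three lines starting at right line 3 is headed
  // "@@ -3,2 +3,3 @@".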
  void PrintHeader(std::ostream* ss) const {
    *ss << "@@ ";
| 1457 | if (removes_) { |
| 1458 | *ss << "-" << left_start_ << "," << (removes_ + common_); |
| 1459 | } |
| 1460 | if (removes_ && adds_) { |
| 1461 | *ss << " " ; |
| 1462 | } |
| 1463 | if (adds_) { |
| 1464 | *ss << "+" << right_start_ << "," << (adds_ + common_); |
| 1465 | } |
| 1466 | *ss << " @@\n" ; |
| 1467 | } |
| 1468 | |
| 1469 | size_t left_start_, right_start_; |
| 1470 | size_t adds_, removes_, common_; |
| 1471 | std::list<std::pair<char, const char*> > hunk_, hunk_adds_, hunk_removes_; |
| 1472 | }; |
| 1473 | |
| 1474 | } // namespace |
| 1475 | |
| 1476 | // Create a list of diff hunks in Unified diff format. |
| 1477 | // Each hunk has a header generated by PrintHeader above plus a body with |
| 1478 | // lines prefixed with ' ' for no change, '-' for deletion and '+' for |
| 1479 | // addition. |
| 1480 | // 'context' represents the desired unchanged prefix/suffix around the diff. |
| 1481 | // If two hunks are close enough that their contexts overlap, then they are |
| 1482 | // joined into one hunk. |
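// A usage sketch with illustrative inputs:
//
//   std::vector<std::string> left = {"line 1", "line 2", "line 3"};
//   std::vector<std::string> right = {"line 1", "line 2 changed", "line 3"};
//   std::string diff = CreateUnifiedDiff(left, right, 2);
//
// should produce a single hunk:
//
//   @@ -1,3 +1,3 @@
//    line 1
//   -line 2
//   +line 2 changed
//    line 3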
| 1483 | std::string CreateUnifiedDiff(const std::vector<std::string>& left, |
| 1484 | const std::vector<std::string>& right, |
| 1485 | size_t context) { |
| 1486 | const std::vector<EditType> edits = CalculateOptimalEdits(left, right); |
| 1487 | |
| 1488 | size_t l_i = 0, r_i = 0, edit_i = 0; |
| 1489 | std::stringstream ss; |
| 1490 | while (edit_i < edits.size()) { |
| 1491 | // Find first edit. |
| 1492 | while (edit_i < edits.size() && edits[edit_i] == kMatch) { |
| 1493 | ++l_i; |
| 1494 | ++r_i; |
| 1495 | ++edit_i; |
| 1496 | } |
| 1497 | |
| 1498 | // Find the first line to include in the hunk. |
    const size_t prefix_context = std::min(l_i, context);
| 1500 | Hunk hunk(l_i - prefix_context + 1, r_i - prefix_context + 1); |
| 1501 | for (size_t i = prefix_context; i > 0; --i) { |
      hunk.PushLine(' ', left[l_i - i].c_str());
| 1503 | } |
| 1504 | |
    // Iterate over the edits until we have found enough suffix for the hunk
    // or the input is exhausted.
| 1507 | size_t n_suffix = 0; |
| 1508 | for (; edit_i < edits.size(); ++edit_i) { |
| 1509 | if (n_suffix >= context) { |
| 1510 | // Continue only if the next hunk is very close. |
| 1511 | auto it = edits.begin() + static_cast<int>(edit_i); |
| 1512 | while (it != edits.end() && *it == kMatch) ++it; |
| 1513 | if (it == edits.end() || |
| 1514 | static_cast<size_t>(it - edits.begin()) - edit_i >= context) { |
| 1515 | // There is no next edit or it is too far away. |
| 1516 | break; |
| 1517 | } |
| 1518 | } |
| 1519 | |
| 1520 | EditType edit = edits[edit_i]; |
| 1521 | // Reset count when a non match is found. |
| 1522 | n_suffix = edit == kMatch ? n_suffix + 1 : 0; |
| 1523 | |
| 1524 | if (edit == kMatch || edit == kRemove || edit == kReplace) { |
        hunk.PushLine(edit == kMatch ? ' ' : '-', left[l_i].c_str());
| 1526 | } |
| 1527 | if (edit == kAdd || edit == kReplace) { |
        hunk.PushLine('+', right[r_i].c_str());
| 1529 | } |
| 1530 | |
| 1531 | // Advance indices, depending on edit type. |
| 1532 | l_i += edit != kAdd; |
| 1533 | r_i += edit != kRemove; |
| 1534 | } |
| 1535 | |
| 1536 | if (!hunk.has_edits()) { |
| 1537 | // We are done. We don't want this hunk. |
| 1538 | break; |
| 1539 | } |
| 1540 | |
    hunk.PrintTo(&ss);
| 1542 | } |
| 1543 | return ss.str(); |
| 1544 | } |
| 1545 | |
| 1546 | } // namespace edit_distance |
| 1547 | |
| 1548 | namespace { |
| 1549 | |
| 1550 | // The string representation of the values received in EqFailure() are already |
| 1551 | // escaped. Split them on escaped '\n' boundaries. Leave all other escaped |
| 1552 | // characters the same. |
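// For instance, an escaped value whose characters are "abc\ndef" (including
// the surrounding double quotes, with a literal backslash-n rather than a
// newline) splits into {"abc", "def"}.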
| 1553 | std::vector<std::string> SplitEscapedString(const std::string& str) { |
| 1554 | std::vector<std::string> lines; |
| 1555 | size_t start = 0, end = str.size(); |
| 1556 | if (end > 2 && str[0] == '"' && str[end - 1] == '"') { |
| 1557 | ++start; |
| 1558 | --end; |
| 1559 | } |
| 1560 | bool escaped = false; |
| 1561 | for (size_t i = start; i + 1 < end; ++i) { |
| 1562 | if (escaped) { |
| 1563 | escaped = false; |
| 1564 | if (str[i] == 'n') { |
        lines.push_back(str.substr(start, i - start - 1));
| 1566 | start = i + 1; |
| 1567 | } |
| 1568 | } else { |
| 1569 | escaped = str[i] == '\\'; |
| 1570 | } |
| 1571 | } |
  lines.push_back(str.substr(start, end - start));
| 1573 | return lines; |
| 1574 | } |
| 1575 | |
| 1576 | } // namespace |
| 1577 | |
| 1578 | // Constructs and returns the message for an equality assertion |
| 1579 | // (e.g. ASSERT_EQ, EXPECT_STREQ, etc) failure. |
| 1580 | // |
| 1581 | // The first four parameters are the expressions used in the assertion |
| 1582 | // and their values, as strings. For example, for ASSERT_EQ(foo, bar) |
| 1583 | // where foo is 5 and bar is 6, we have: |
| 1584 | // |
| 1585 | // lhs_expression: "foo" |
| 1586 | // rhs_expression: "bar" |
| 1587 | // lhs_value: "5" |
| 1588 | // rhs_value: "6" |
| 1589 | // |
| 1590 | // The ignoring_case parameter is true if and only if the assertion is a |
| 1591 | // *_STRCASEEQ*. When it's true, the string "Ignoring case" will |
| 1592 | // be inserted into the message. |
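// For the example above, the resulting message looks like:
//
//   Expected equality of these values:
//     foo
//       Which is: 5
//     bar
//       Which is: 6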
| 1593 | AssertionResult EqFailure(const char* lhs_expression, |
| 1594 | const char* rhs_expression, |
| 1595 | const std::string& lhs_value, |
| 1596 | const std::string& rhs_value, bool ignoring_case) { |
| 1597 | Message msg; |
| 1598 | msg << "Expected equality of these values:" ; |
| 1599 | msg << "\n " << lhs_expression; |
| 1600 | if (lhs_value != lhs_expression) { |
| 1601 | msg << "\n Which is: " << lhs_value; |
| 1602 | } |
| 1603 | msg << "\n " << rhs_expression; |
| 1604 | if (rhs_value != rhs_expression) { |
| 1605 | msg << "\n Which is: " << rhs_value; |
| 1606 | } |
| 1607 | |
| 1608 | if (ignoring_case) { |
| 1609 | msg << "\nIgnoring case" ; |
| 1610 | } |
| 1611 | |
| 1612 | if (!lhs_value.empty() && !rhs_value.empty()) { |
    const std::vector<std::string> lhs_lines = SplitEscapedString(lhs_value);
    const std::vector<std::string> rhs_lines = SplitEscapedString(rhs_value);
| 1615 | if (lhs_lines.size() > 1 || rhs_lines.size() > 1) { |
| 1616 | msg << "\nWith diff:\n" |
          << edit_distance::CreateUnifiedDiff(lhs_lines, rhs_lines);
| 1618 | } |
| 1619 | } |
| 1620 | |
| 1621 | return AssertionFailure() << msg; |
| 1622 | } |
| 1623 | |
| 1624 | // Constructs a failure message for Boolean assertions such as EXPECT_TRUE. |
| 1625 | std::string GetBoolAssertionFailureMessage( |
| 1626 | const AssertionResult& assertion_result, const char* expression_text, |
| 1627 | const char* actual_predicate_value, const char* expected_predicate_value) { |
| 1628 | const char* actual_message = assertion_result.message(); |
| 1629 | Message msg; |
| 1630 | msg << "Value of: " << expression_text |
| 1631 | << "\n Actual: " << actual_predicate_value; |
| 1632 | if (actual_message[0] != '\0') msg << " (" << actual_message << ")" ; |
| 1633 | msg << "\nExpected: " << expected_predicate_value; |
| 1634 | return msg.GetString(); |
| 1635 | } |
| 1636 | |
| 1637 | // Helper function for implementing ASSERT_NEAR. |
| 1638 | AssertionResult DoubleNearPredFormat(const char* expr1, const char* expr2, |
| 1639 | const char* abs_error_expr, double val1, |
| 1640 | double val2, double abs_error) { |
  const double diff = fabs(val1 - val2);
| 1642 | if (diff <= abs_error) return AssertionSuccess(); |
| 1643 | |
| 1644 | // Find the value which is closest to zero. |
  const double min_abs = std::min(fabs(val1), fabs(val2));
| 1646 | // Find the distance to the next double from that value. |
| 1647 | const double epsilon = |
      nextafter(min_abs, std::numeric_limits<double>::infinity()) - min_abs;
| 1649 | // Detect the case where abs_error is so small that EXPECT_NEAR is |
| 1650 | // effectively the same as EXPECT_EQUAL, and give an informative error |
| 1651 | // message so that the situation can be more easily understood without |
| 1652 | // requiring exotic floating-point knowledge. |
| 1653 | // Don't do an epsilon check if abs_error is zero because that implies |
| 1654 | // that an equality check was actually intended. |
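  // As a rough illustration (values chosen only for illustration): near 1e16
  // the spacing between adjacent doubles is 2, so
  //
  //   EXPECT_NEAR(1e16, 2e16, 1.0);
  //
  // takes this branch, because abs_error (1.0) is positive but smaller than
  // the minimum representable difference (2) at that magnitude.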
  if (!(std::isnan)(val1) && !(std::isnan)(val2) && abs_error > 0 &&
| 1656 | abs_error < epsilon) { |
| 1657 | return AssertionFailure() |
| 1658 | << "The difference between " << expr1 << " and " << expr2 << " is " |
| 1659 | << diff << ", where\n" |
| 1660 | << expr1 << " evaluates to " << val1 << ",\n" |
| 1661 | << expr2 << " evaluates to " << val2 << ".\nThe abs_error parameter " |
| 1662 | << abs_error_expr << " evaluates to " << abs_error |
| 1663 | << " which is smaller than the minimum distance between doubles for " |
| 1664 | "numbers of this magnitude which is " |
| 1665 | << epsilon |
| 1666 | << ", thus making this EXPECT_NEAR check equivalent to " |
| 1667 | "EXPECT_EQUAL. Consider using EXPECT_DOUBLE_EQ instead." ; |
| 1668 | } |
| 1669 | return AssertionFailure() |
| 1670 | << "The difference between " << expr1 << " and " << expr2 << " is " |
| 1671 | << diff << ", which exceeds " << abs_error_expr << ", where\n" |
| 1672 | << expr1 << " evaluates to " << val1 << ",\n" |
| 1673 | << expr2 << " evaluates to " << val2 << ", and\n" |
| 1674 | << abs_error_expr << " evaluates to " << abs_error << "." ; |
| 1675 | } |
| 1676 | |
| 1677 | // Helper template for implementing FloatLE() and DoubleLE(). |
| 1678 | template <typename RawType> |
| 1679 | AssertionResult FloatingPointLE(const char* expr1, const char* expr2, |
| 1680 | RawType val1, RawType val2) { |
| 1681 | // Returns success if val1 is less than val2, |
| 1682 | if (val1 < val2) { |
| 1683 | return AssertionSuccess(); |
| 1684 | } |
| 1685 | |
| 1686 | // or if val1 is almost equal to val2. |
| 1687 | const FloatingPoint<RawType> lhs(val1), rhs(val2); |
| 1688 | if (lhs.AlmostEquals(rhs)) { |
| 1689 | return AssertionSuccess(); |
| 1690 | } |
| 1691 | |
| 1692 | // Note that the above two checks will both fail if either val1 or |
| 1693 | // val2 is NaN, as the IEEE floating-point standard requires that |
| 1694 | // any predicate involving a NaN must return false. |
| 1695 | |
| 1696 | ::std::stringstream val1_ss; |
| 1697 | val1_ss << std::setprecision(std::numeric_limits<RawType>::digits10 + 2) |
| 1698 | << val1; |
| 1699 | |
| 1700 | ::std::stringstream val2_ss; |
| 1701 | val2_ss << std::setprecision(std::numeric_limits<RawType>::digits10 + 2) |
| 1702 | << val2; |
| 1703 | |
| 1704 | return AssertionFailure() |
| 1705 | << "Expected: (" << expr1 << ") <= (" << expr2 << ")\n" |
| 1706 | << " Actual: " << StringStreamToString(stream: &val1_ss) << " vs " |
| 1707 | << StringStreamToString(stream: &val2_ss); |
| 1708 | } |
| 1709 | |
| 1710 | } // namespace internal |
| 1711 | |
| 1712 | // Asserts that val1 is less than, or almost equal to, val2. Fails |
| 1713 | // otherwise. In particular, it fails if either val1 or val2 is NaN. |
| 1714 | AssertionResult FloatLE(const char* expr1, const char* expr2, float val1, |
| 1715 | float val2) { |
| 1716 | return internal::FloatingPointLE<float>(expr1, expr2, val1, val2); |
| 1717 | } |
| 1718 | |
| 1719 | // Asserts that val1 is less than, or almost equal to, val2. Fails |
| 1720 | // otherwise. In particular, it fails if either val1 or val2 is NaN. |
| 1721 | AssertionResult DoubleLE(const char* expr1, const char* expr2, double val1, |
| 1722 | double val2) { |
| 1723 | return internal::FloatingPointLE<double>(expr1, expr2, val1, val2); |
| 1724 | } |
| 1725 | |
| 1726 | namespace internal { |
| 1727 | |
| 1728 | // The helper function for {ASSERT|EXPECT}_STREQ. |
| 1729 | AssertionResult CmpHelperSTREQ(const char* lhs_expression, |
| 1730 | const char* rhs_expression, const char* lhs, |
| 1731 | const char* rhs) { |
| 1732 | if (String::CStringEquals(lhs, rhs)) { |
| 1733 | return AssertionSuccess(); |
| 1734 | } |
| 1735 | |
  return EqFailure(lhs_expression, rhs_expression, PrintToString(lhs),
                   PrintToString(rhs), false);
| 1738 | } |
| 1739 | |
| 1740 | // The helper function for {ASSERT|EXPECT}_STRCASEEQ. |
| 1741 | AssertionResult CmpHelperSTRCASEEQ(const char* lhs_expression, |
| 1742 | const char* rhs_expression, const char* lhs, |
| 1743 | const char* rhs) { |
| 1744 | if (String::CaseInsensitiveCStringEquals(lhs, rhs)) { |
| 1745 | return AssertionSuccess(); |
| 1746 | } |
| 1747 | |
  return EqFailure(lhs_expression, rhs_expression, PrintToString(lhs),
                   PrintToString(rhs), true);
| 1750 | } |
| 1751 | |
| 1752 | // The helper function for {ASSERT|EXPECT}_STRNE. |
| 1753 | AssertionResult CmpHelperSTRNE(const char* s1_expression, |
| 1754 | const char* s2_expression, const char* s1, |
| 1755 | const char* s2) { |
  if (!String::CStringEquals(s1, s2)) {
| 1757 | return AssertionSuccess(); |
| 1758 | } else { |
| 1759 | return AssertionFailure() |
| 1760 | << "Expected: (" << s1_expression << ") != (" << s2_expression |
| 1761 | << "), actual: \"" << s1 << "\" vs \"" << s2 << "\"" ; |
| 1762 | } |
| 1763 | } |
| 1764 | |
| 1765 | // The helper function for {ASSERT|EXPECT}_STRCASENE. |
| 1766 | AssertionResult CmpHelperSTRCASENE(const char* s1_expression, |
| 1767 | const char* s2_expression, const char* s1, |
| 1768 | const char* s2) { |
  if (!String::CaseInsensitiveCStringEquals(s1, s2)) {
| 1770 | return AssertionSuccess(); |
| 1771 | } else { |
| 1772 | return AssertionFailure() |
| 1773 | << "Expected: (" << s1_expression << ") != (" << s2_expression |
| 1774 | << ") (ignoring case), actual: \"" << s1 << "\" vs \"" << s2 << "\"" ; |
| 1775 | } |
| 1776 | } |
| 1777 | |
| 1778 | } // namespace internal |
| 1779 | |
| 1780 | namespace { |
| 1781 | |
| 1782 | // Helper functions for implementing IsSubString() and IsNotSubstring(). |
| 1783 | |
| 1784 | // This group of overloaded functions return true if and only if needle |
| 1785 | // is a substring of haystack. NULL is considered a substring of |
| 1786 | // itself only. |
| 1787 | |
| 1788 | bool IsSubstringPred(const char* needle, const char* haystack) { |
| 1789 | if (needle == nullptr || haystack == nullptr) return needle == haystack; |
| 1790 | |
  return strstr(haystack, needle) != nullptr;
| 1792 | } |
| 1793 | |
| 1794 | bool IsSubstringPred(const wchar_t* needle, const wchar_t* haystack) { |
| 1795 | if (needle == nullptr || haystack == nullptr) return needle == haystack; |
| 1796 | |
  return wcsstr(haystack, needle) != nullptr;
| 1798 | } |
| 1799 | |
| 1800 | // StringType here can be either ::std::string or ::std::wstring. |
| 1801 | template <typename StringType> |
| 1802 | bool IsSubstringPred(const StringType& needle, const StringType& haystack) { |
| 1803 | return haystack.find(needle) != StringType::npos; |
| 1804 | } |
| 1805 | |
| 1806 | // This function implements either IsSubstring() or IsNotSubstring(), |
| 1807 | // depending on the value of the expected_to_be_substring parameter. |
| 1808 | // StringType here can be const char*, const wchar_t*, ::std::string, |
| 1809 | // or ::std::wstring. |
| 1810 | template <typename StringType> |
| 1811 | AssertionResult IsSubstringImpl(bool expected_to_be_substring, |
| 1812 | const char* needle_expr, |
| 1813 | const char* haystack_expr, |
| 1814 | const StringType& needle, |
| 1815 | const StringType& haystack) { |
| 1816 | if (IsSubstringPred(needle, haystack) == expected_to_be_substring) |
| 1817 | return AssertionSuccess(); |
| 1818 | |
| 1819 | const bool is_wide_string = sizeof(needle[0]) > 1; |
| 1820 | const char* const begin_string_quote = is_wide_string ? "L\"" : "\"" ; |
| 1821 | return AssertionFailure() |
| 1822 | << "Value of: " << needle_expr << "\n" |
| 1823 | << " Actual: " << begin_string_quote << needle << "\"\n" |
| 1824 | << "Expected: " << (expected_to_be_substring ? "" : "not " ) |
| 1825 | << "a substring of " << haystack_expr << "\n" |
| 1826 | << "Which is: " << begin_string_quote << haystack << "\"" ; |
| 1827 | } |
| 1828 | |
| 1829 | } // namespace |
| 1830 | |
| 1831 | // IsSubstring() and IsNotSubstring() check whether needle is a |
| 1832 | // substring of haystack (NULL is considered a substring of itself |
| 1833 | // only), and return an appropriate error message when they fail. |
| 1834 | |
| 1835 | AssertionResult IsSubstring(const char* needle_expr, const char* haystack_expr, |
| 1836 | const char* needle, const char* haystack) { |
  return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack);
| 1838 | } |
| 1839 | |
| 1840 | AssertionResult IsSubstring(const char* needle_expr, const char* haystack_expr, |
| 1841 | const wchar_t* needle, const wchar_t* haystack) { |
  return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack);
| 1843 | } |
| 1844 | |
| 1845 | AssertionResult IsNotSubstring(const char* needle_expr, |
| 1846 | const char* haystack_expr, const char* needle, |
| 1847 | const char* haystack) { |
  return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);
| 1849 | } |
| 1850 | |
| 1851 | AssertionResult IsNotSubstring(const char* needle_expr, |
| 1852 | const char* haystack_expr, const wchar_t* needle, |
| 1853 | const wchar_t* haystack) { |
  return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);
| 1855 | } |
| 1856 | |
| 1857 | AssertionResult IsSubstring(const char* needle_expr, const char* haystack_expr, |
| 1858 | const ::std::string& needle, |
| 1859 | const ::std::string& haystack) { |
  return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack);
| 1861 | } |
| 1862 | |
| 1863 | AssertionResult IsNotSubstring(const char* needle_expr, |
| 1864 | const char* haystack_expr, |
| 1865 | const ::std::string& needle, |
| 1866 | const ::std::string& haystack) { |
  return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);
| 1868 | } |
| 1869 | |
| 1870 | #if GTEST_HAS_STD_WSTRING |
| 1871 | AssertionResult IsSubstring(const char* needle_expr, const char* haystack_expr, |
| 1872 | const ::std::wstring& needle, |
| 1873 | const ::std::wstring& haystack) { |
  return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack);
| 1875 | } |
| 1876 | |
| 1877 | AssertionResult IsNotSubstring(const char* needle_expr, |
| 1878 | const char* haystack_expr, |
| 1879 | const ::std::wstring& needle, |
| 1880 | const ::std::wstring& haystack) { |
  return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);
| 1882 | } |
| 1883 | #endif // GTEST_HAS_STD_WSTRING |
| 1884 | |
| 1885 | namespace internal { |
| 1886 | |
| 1887 | #ifdef GTEST_OS_WINDOWS |
| 1888 | |
| 1889 | namespace { |
| 1890 | |
| 1891 | // Helper function for IsHRESULT{SuccessFailure} predicates |
| 1892 | AssertionResult HRESULTFailureHelper(const char* expr, const char* expected, |
| 1893 | long hr) { // NOLINT |
| 1894 | #if defined(GTEST_OS_WINDOWS_MOBILE) || defined(GTEST_OS_WINDOWS_TV_TITLE) |
| 1895 | |
| 1896 | // Windows CE doesn't support FormatMessage. |
| 1897 | const char error_text[] = "" ; |
| 1898 | |
| 1899 | #else |
| 1900 | |
| 1901 | // Looks up the human-readable system message for the HRESULT code |
| 1902 | // and since we're not passing any params to FormatMessage, we don't |
| 1903 | // want inserts expanded. |
| 1904 | const DWORD kFlags = |
| 1905 | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS; |
| 1906 | const DWORD kBufSize = 4096; |
| 1907 | // Gets the system's human readable message string for this HRESULT. |
| 1908 | char error_text[kBufSize] = {'\0'}; |
| 1909 | DWORD message_length = ::FormatMessageA(kFlags, |
| 1910 | 0, // no source, we're asking system |
| 1911 | static_cast<DWORD>(hr), // the error |
| 1912 | 0, // no line width restrictions |
| 1913 | error_text, // output buffer |
| 1914 | kBufSize, // buf size |
| 1915 | nullptr); // no arguments for inserts |
  // Trims trailing white space (FormatMessage leaves a trailing CR-LF)
| 1917 | for (; message_length && IsSpace(error_text[message_length - 1]); |
| 1918 | --message_length) { |
| 1919 | error_text[message_length - 1] = '\0'; |
| 1920 | } |
| 1921 | |
| 1922 | #endif // GTEST_OS_WINDOWS_MOBILE |
| 1923 | |
| 1924 | const std::string error_hex("0x" + String::FormatHexInt(hr)); |
| 1925 | return ::testing::AssertionFailure() |
| 1926 | << "Expected: " << expr << " " << expected << ".\n" |
| 1927 | << " Actual: " << error_hex << " " << error_text << "\n" ; |
| 1928 | } |
| 1929 | |
| 1930 | } // namespace |
| 1931 | |
| 1932 | AssertionResult IsHRESULTSuccess(const char* expr, long hr) { // NOLINT |
| 1933 | if (SUCCEEDED(hr)) { |
| 1934 | return AssertionSuccess(); |
| 1935 | } |
| 1936 | return HRESULTFailureHelper(expr, "succeeds" , hr); |
| 1937 | } |
| 1938 | |
| 1939 | AssertionResult IsHRESULTFailure(const char* expr, long hr) { // NOLINT |
| 1940 | if (FAILED(hr)) { |
| 1941 | return AssertionSuccess(); |
| 1942 | } |
| 1943 | return HRESULTFailureHelper(expr, "fails" , hr); |
| 1944 | } |
| 1945 | |
| 1946 | #endif // GTEST_OS_WINDOWS |
| 1947 | |
| 1948 | // Utility functions for encoding Unicode text (wide strings) in |
| 1949 | // UTF-8. |
| 1950 | |
| 1951 | // A Unicode code-point can have up to 21 bits, and is encoded in UTF-8 |
| 1952 | // like this: |
| 1953 | // |
| 1954 | // Code-point length Encoding |
| 1955 | // 0 - 7 bits 0xxxxxxx |
| 1956 | // 8 - 11 bits 110xxxxx 10xxxxxx |
| 1957 | // 12 - 16 bits 1110xxxx 10xxxxxx 10xxxxxx |
| 1958 | // 17 - 21 bits 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx |
| 1959 | |
| 1960 | // The maximum code-point a one-byte UTF-8 sequence can represent. |
| 1961 | constexpr uint32_t kMaxCodePoint1 = (static_cast<uint32_t>(1) << 7) - 1; |
| 1962 | |
| 1963 | // The maximum code-point a two-byte UTF-8 sequence can represent. |
| 1964 | constexpr uint32_t kMaxCodePoint2 = (static_cast<uint32_t>(1) << (5 + 6)) - 1; |
| 1965 | |
| 1966 | // The maximum code-point a three-byte UTF-8 sequence can represent. |
| 1967 | constexpr uint32_t kMaxCodePoint3 = |
| 1968 | (static_cast<uint32_t>(1) << (4 + 2 * 6)) - 1; |
| 1969 | |
| 1970 | // The maximum code-point a four-byte UTF-8 sequence can represent. |
| 1971 | constexpr uint32_t kMaxCodePoint4 = |
| 1972 | (static_cast<uint32_t>(1) << (3 + 3 * 6)) - 1; |
| 1973 | |
| 1974 | // Chops off the n lowest bits from a bit pattern. Returns the n |
| 1975 | // lowest bits. As a side effect, the original bit pattern will be |
| 1976 | // shifted to the right by n bits. |
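// For example, with *bits == 0xE9 (0b11101001), ChopLowBits(&bits, 6) returns
// 0x29 (0b101001) and leaves *bits == 0x3 (0b11).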
| 1977 | inline uint32_t ChopLowBits(uint32_t* bits, int n) { |
| 1978 | const uint32_t low_bits = *bits & ((static_cast<uint32_t>(1) << n) - 1); |
| 1979 | *bits >>= n; |
| 1980 | return low_bits; |
| 1981 | } |
| 1982 | |
| 1983 | // Converts a Unicode code point to a narrow string in UTF-8 encoding. |
| 1984 | // code_point parameter is of type uint32_t because wchar_t may not be |
| 1985 | // wide enough to contain a code point. |
| 1986 | // If the code_point is not a valid Unicode code point |
| 1987 | // (i.e. outside of Unicode range U+0 to U+10FFFF) it will be converted |
| 1988 | // to "(Invalid Unicode 0xXXXXXXXX)". |
| 1989 | std::string CodePointToUtf8(uint32_t code_point) { |
| 1990 | if (code_point > kMaxCodePoint4) { |
| 1991 | return "(Invalid Unicode 0x" + String::FormatHexUInt32(value: code_point) + ")" ; |
| 1992 | } |
| 1993 | |
| 1994 | char str[5]; // Big enough for the largest valid code point. |
| 1995 | if (code_point <= kMaxCodePoint1) { |
| 1996 | str[1] = '\0'; |
| 1997 | str[0] = static_cast<char>(code_point); // 0xxxxxxx |
| 1998 | } else if (code_point <= kMaxCodePoint2) { |
| 1999 | str[2] = '\0'; |
    str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx
| 2001 | str[0] = static_cast<char>(0xC0 | code_point); // 110xxxxx |
| 2002 | } else if (code_point <= kMaxCodePoint3) { |
| 2003 | str[3] = '\0'; |
    str[2] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx
    str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx
| 2006 | str[0] = static_cast<char>(0xE0 | code_point); // 1110xxxx |
| 2007 | } else { // code_point <= kMaxCodePoint4 |
| 2008 | str[4] = '\0'; |
    str[3] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx
    str[2] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx
    str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx
| 2012 | str[0] = static_cast<char>(0xF0 | code_point); // 11110xxx |
| 2013 | } |
| 2014 | return str; |
| 2015 | } |
| 2016 | |
| 2017 | // The following two functions only make sense if the system |
| 2018 | // uses UTF-16 for wide string encoding. All supported systems |
| 2019 | // with 16 bit wchar_t (Windows, Cygwin) do use UTF-16. |
| 2020 | |
| 2021 | // Determines if the arguments constitute UTF-16 surrogate pair |
| 2022 | // and thus should be combined into a single Unicode code point |
| 2023 | // using CreateCodePointFromUtf16SurrogatePair. |
| 2024 | inline bool IsUtf16SurrogatePair(wchar_t first, wchar_t second) { |
| 2025 | return sizeof(wchar_t) == 2 && (first & 0xFC00) == 0xD800 && |
| 2026 | (second & 0xFC00) == 0xDC00; |
| 2027 | } |
| 2028 | |
| 2029 | // Creates a Unicode code point from UTF16 surrogate pair. |
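// For example, the surrogate pair (0xD83D, 0xDE00) combines to code point
// 0x1F600: ((0x3D << 10) | 0x200) + 0x10000.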
| 2030 | inline uint32_t CreateCodePointFromUtf16SurrogatePair(wchar_t first, |
| 2031 | wchar_t second) { |
| 2032 | const auto first_u = static_cast<uint32_t>(first); |
| 2033 | const auto second_u = static_cast<uint32_t>(second); |
| 2034 | const uint32_t mask = (1 << 10) - 1; |
| 2035 | return (sizeof(wchar_t) == 2) |
| 2036 | ? (((first_u & mask) << 10) | (second_u & mask)) + 0x10000 |
| 2037 | : |
| 2038 | // This function should not be called when the condition is |
| 2039 | // false, but we provide a sensible default in case it is. |
| 2040 | first_u; |
| 2041 | } |
| 2042 | |
| 2043 | // Converts a wide string to a narrow string in UTF-8 encoding. |
| 2044 | // The wide string is assumed to have the following encoding: |
| 2045 | // UTF-16 if sizeof(wchar_t) == 2 (on Windows, Cygwin) |
| 2046 | // UTF-32 if sizeof(wchar_t) == 4 (on Linux) |
| 2047 | // Parameter str points to a null-terminated wide string. |
| 2048 | // Parameter num_chars may additionally limit the number |
| 2049 | // of wchar_t characters processed. -1 is used when the entire string |
| 2050 | // should be processed. |
| 2051 | // If the string contains code points that are not valid Unicode code points |
| 2052 | // (i.e. outside of Unicode range U+0 to U+10FFFF) they will be output |
| 2053 | // as '(Invalid Unicode 0xXXXXXXXX)'. If the string is in UTF16 encoding |
| 2054 | // and contains invalid UTF-16 surrogate pairs, values in those pairs |
// will be encoded as individual Unicode characters from the Basic
// Multilingual Plane.
| 2056 | std::string WideStringToUtf8(const wchar_t* str, int num_chars) { |
  if (num_chars == -1) num_chars = static_cast<int>(wcslen(str));
| 2058 | |
| 2059 | ::std::stringstream stream; |
| 2060 | for (int i = 0; i < num_chars; ++i) { |
| 2061 | uint32_t unicode_code_point; |
| 2062 | |
| 2063 | if (str[i] == L'\0') { |
| 2064 | break; |
    } else if (i + 1 < num_chars && IsUtf16SurrogatePair(str[i], str[i + 1])) {
      unicode_code_point =
          CreateCodePointFromUtf16SurrogatePair(str[i], str[i + 1]);
| 2068 | i++; |
| 2069 | } else { |
| 2070 | unicode_code_point = static_cast<uint32_t>(str[i]); |
| 2071 | } |
| 2072 | |
    stream << CodePointToUtf8(unicode_code_point);
| 2074 | } |
  return StringStreamToString(&stream);
| 2076 | } |
| 2077 | |
| 2078 | // Converts a wide C string to an std::string using the UTF-8 encoding. |
| 2079 | // NULL will be converted to "(null)". |
| 2080 | std::string String::ShowWideCString(const wchar_t* wide_c_str) { |
| 2081 | if (wide_c_str == nullptr) return "(null)" ; |
| 2082 | |
  return internal::WideStringToUtf8(wide_c_str, -1);
| 2084 | } |
| 2085 | |
| 2086 | // Compares two wide C strings. Returns true if and only if they have the |
| 2087 | // same content. |
| 2088 | // |
| 2089 | // Unlike wcscmp(), this function can handle NULL argument(s). A NULL |
| 2090 | // C string is considered different to any non-NULL C string, |
| 2091 | // including the empty string. |
| 2092 | bool String::WideCStringEquals(const wchar_t* lhs, const wchar_t* rhs) { |
| 2093 | if (lhs == nullptr) return rhs == nullptr; |
| 2094 | |
| 2095 | if (rhs == nullptr) return false; |
| 2096 | |
  return wcscmp(lhs, rhs) == 0;
| 2098 | } |
| 2099 | |
| 2100 | // Helper function for *_STREQ on wide strings. |
| 2101 | AssertionResult CmpHelperSTREQ(const char* lhs_expression, |
| 2102 | const char* rhs_expression, const wchar_t* lhs, |
| 2103 | const wchar_t* rhs) { |
| 2104 | if (String::WideCStringEquals(lhs, rhs)) { |
| 2105 | return AssertionSuccess(); |
| 2106 | } |
| 2107 | |
  return EqFailure(lhs_expression, rhs_expression, PrintToString(lhs),
                   PrintToString(rhs), false);
| 2110 | } |
| 2111 | |
| 2112 | // Helper function for *_STRNE on wide strings. |
| 2113 | AssertionResult CmpHelperSTRNE(const char* s1_expression, |
| 2114 | const char* s2_expression, const wchar_t* s1, |
| 2115 | const wchar_t* s2) { |
  if (!String::WideCStringEquals(s1, s2)) {
| 2117 | return AssertionSuccess(); |
| 2118 | } |
| 2119 | |
| 2120 | return AssertionFailure() |
| 2121 | << "Expected: (" << s1_expression << ") != (" << s2_expression |
| 2122 | << "), actual: " << PrintToString(value: s1) << " vs " << PrintToString(value: s2); |
| 2123 | } |
| 2124 | |
| 2125 | // Compares two C strings, ignoring case. Returns true if and only if they have |
| 2126 | // the same content. |
| 2127 | // |
| 2128 | // Unlike strcasecmp(), this function can handle NULL argument(s). A |
| 2129 | // NULL C string is considered different to any non-NULL C string, |
| 2130 | // including the empty string. |
| 2131 | bool String::CaseInsensitiveCStringEquals(const char* lhs, const char* rhs) { |
| 2132 | if (lhs == nullptr) return rhs == nullptr; |
| 2133 | if (rhs == nullptr) return false; |
  return posix::StrCaseCmp(lhs, rhs) == 0;
| 2135 | } |
| 2136 | |
| 2137 | // Compares two wide C strings, ignoring case. Returns true if and only if they |
| 2138 | // have the same content. |
| 2139 | // |
| 2140 | // Unlike wcscasecmp(), this function can handle NULL argument(s). |
| 2141 | // A NULL C string is considered different to any non-NULL wide C string, |
| 2142 | // including the empty string. |
| 2143 | // NB: The implementations on different platforms slightly differ. |
| 2144 | // On windows, this method uses _wcsicmp which compares according to LC_CTYPE |
| 2145 | // environment variable. On GNU platform this method uses wcscasecmp |
| 2146 | // which compares according to LC_CTYPE category of the current locale. |
| 2147 | // On MacOS X, it uses towlower, which also uses LC_CTYPE category of the |
| 2148 | // current locale. |
| 2149 | bool String::CaseInsensitiveWideCStringEquals(const wchar_t* lhs, |
| 2150 | const wchar_t* rhs) { |
| 2151 | if (lhs == nullptr) return rhs == nullptr; |
| 2152 | |
| 2153 | if (rhs == nullptr) return false; |
| 2154 | |
| 2155 | #ifdef GTEST_OS_WINDOWS |
| 2156 | return _wcsicmp(lhs, rhs) == 0; |
| 2157 | #elif defined(GTEST_OS_LINUX) && !defined(GTEST_OS_LINUX_ANDROID) |
  return wcscasecmp(lhs, rhs) == 0;
| 2159 | #else |
| 2160 | // Android, Mac OS X and Cygwin don't define wcscasecmp. |
| 2161 | // Other unknown OSes may not define it either. |
| 2162 | wint_t left, right; |
| 2163 | do { |
| 2164 | left = towlower(static_cast<wint_t>(*lhs++)); |
| 2165 | right = towlower(static_cast<wint_t>(*rhs++)); |
| 2166 | } while (left && left == right); |
| 2167 | return left == right; |
| 2168 | #endif // OS selector |
| 2169 | } |
| 2170 | |
| 2171 | // Returns true if and only if str ends with the given suffix, ignoring case. |
| 2172 | // Any string is considered to end with an empty suffix. |
| 2173 | bool String::EndsWithCaseInsensitive(const std::string& str, |
| 2174 | const std::string& suffix) { |
| 2175 | const size_t str_len = str.length(); |
| 2176 | const size_t suffix_len = suffix.length(); |
| 2177 | return (str_len >= suffix_len) && |
         CaseInsensitiveCStringEquals(str.c_str() + str_len - suffix_len,
                                      suffix.c_str());
| 2180 | } |
| 2181 | |
| 2182 | // Formats an int value as "%02d". |
| 2183 | std::string String::FormatIntWidth2(int value) { |
  return FormatIntWidthN(value, 2);
| 2185 | } |
| 2186 | |
| 2187 | // Formats an int value to given width with leading zeros. |
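// For example, FormatIntWidthN(7, 3) returns "007".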
| 2188 | std::string String::FormatIntWidthN(int value, int width) { |
| 2189 | std::stringstream ss; |
| 2190 | ss << std::setfill('0') << std::setw(width) << value; |
| 2191 | return ss.str(); |
| 2192 | } |
| 2193 | |
| 2194 | // Formats an int value as "%X". |
| 2195 | std::string String::FormatHexUInt32(uint32_t value) { |
| 2196 | std::stringstream ss; |
| 2197 | ss << std::hex << std::uppercase << value; |
| 2198 | return ss.str(); |
| 2199 | } |
| 2200 | |
| 2201 | // Formats an int value as "%X". |
| 2202 | std::string String::FormatHexInt(int value) { |
  return FormatHexUInt32(static_cast<uint32_t>(value));
| 2204 | } |
| 2205 | |
| 2206 | // Formats a byte as "%02X". |
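// For example, FormatByte(0x0a) returns "0A" and FormatByte(0xff) returns "FF".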
| 2207 | std::string String::FormatByte(unsigned char value) { |
| 2208 | std::stringstream ss; |
| 2209 | ss << std::setfill('0') << std::setw(2) << std::hex << std::uppercase |
| 2210 | << static_cast<unsigned int>(value); |
| 2211 | return ss.str(); |
| 2212 | } |
| 2213 | |
| 2214 | // Converts the buffer in a stringstream to an std::string, converting NUL |
| 2215 | // bytes to "\\0" along the way. |
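// For example, a stream holding the three characters 'a', '\0', 'b' yields the
// four-character string "a\\0b".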
| 2216 | std::string StringStreamToString(::std::stringstream* ss) { |
| 2217 | const ::std::string& str = ss->str(); |
| 2218 | const char* const start = str.c_str(); |
| 2219 | const char* const end = start + str.length(); |
| 2220 | |
| 2221 | std::string result; |
  result.reserve(static_cast<size_t>(2 * (end - start)));
| 2223 | for (const char* ch = start; ch != end; ++ch) { |
| 2224 | if (*ch == '\0') { |
| 2225 | result += "\\0" ; // Replaces NUL with "\\0"; |
| 2226 | } else { |
| 2227 | result += *ch; |
| 2228 | } |
| 2229 | } |
| 2230 | |
| 2231 | return result; |
| 2232 | } |
| 2233 | |
| 2234 | // Appends the user-supplied message to the Google-Test-generated message. |
| 2235 | std::string AppendUserMessage(const std::string& gtest_msg, |
| 2236 | const Message& user_msg) { |
| 2237 | // Appends the user message if it's non-empty. |
| 2238 | const std::string user_msg_string = user_msg.GetString(); |
| 2239 | if (user_msg_string.empty()) { |
| 2240 | return gtest_msg; |
| 2241 | } |
| 2242 | if (gtest_msg.empty()) { |
| 2243 | return user_msg_string; |
| 2244 | } |
| 2245 | return gtest_msg + "\n" + user_msg_string; |
| 2246 | } |
| 2247 | |
| 2248 | } // namespace internal |
| 2249 | |
| 2250 | // class TestResult |
| 2251 | |
| 2252 | // Creates an empty TestResult. |
| 2253 | TestResult::TestResult() |
| 2254 | : death_test_count_(0), start_timestamp_(0), elapsed_time_(0) {} |
| 2255 | |
| 2256 | // D'tor. |
| 2257 | TestResult::~TestResult() = default; |
| 2258 | |
| 2259 | // Returns the i-th test part result among all the results. i can |
| 2260 | // range from 0 to total_part_count() - 1. If i is not in that range, |
| 2261 | // aborts the program. |
| 2262 | const TestPartResult& TestResult::GetTestPartResult(int i) const { |
| 2263 | if (i < 0 || i >= total_part_count()) internal::posix::Abort(); |
  return test_part_results_.at(static_cast<size_t>(i));
| 2265 | } |
| 2266 | |
| 2267 | // Returns the i-th test property. i can range from 0 to |
| 2268 | // test_property_count() - 1. If i is not in that range, aborts the |
| 2269 | // program. |
| 2270 | const TestProperty& TestResult::GetTestProperty(int i) const { |
| 2271 | if (i < 0 || i >= test_property_count()) internal::posix::Abort(); |
  return test_properties_.at(static_cast<size_t>(i));
| 2273 | } |
| 2274 | |
| 2275 | // Clears the test part results. |
| 2276 | void TestResult::ClearTestPartResults() { test_part_results_.clear(); } |
| 2277 | |
| 2278 | // Adds a test part result to the list. |
| 2279 | void TestResult::AddTestPartResult(const TestPartResult& test_part_result) { |
  test_part_results_.push_back(test_part_result);
| 2281 | } |
| 2282 | |
| 2283 | // Adds a test property to the list. If a property with the same key as the |
| 2284 | // supplied property is already represented, the value of this test_property |
| 2285 | // replaces the old value for that key. |
| 2286 | void TestResult::RecordProperty(const std::string& xml_element, |
| 2287 | const TestProperty& test_property) { |
| 2288 | if (!ValidateTestProperty(xml_element, test_property)) { |
| 2289 | return; |
| 2290 | } |
| 2291 | internal::MutexLock lock(&test_properties_mutex_); |
| 2292 | const std::vector<TestProperty>::iterator property_with_matching_key = |
      std::find_if(test_properties_.begin(), test_properties_.end(),
                   internal::TestPropertyKeyIs(test_property.key()));
| 2295 | if (property_with_matching_key == test_properties_.end()) { |
    test_properties_.push_back(test_property);
| 2297 | return; |
| 2298 | } |
| 2299 | property_with_matching_key->SetValue(test_property.value()); |
| 2300 | } |
| 2301 | |
| 2302 | // The list of reserved attributes used in the <testsuites> element of XML |
| 2303 | // output. |
| 2304 | static const char* const kReservedTestSuitesAttributes[] = { |
| 2305 | "disabled" , "errors" , "failures" , "name" , |
| 2306 | "random_seed" , "tests" , "time" , "timestamp" }; |
| 2307 | |
| 2308 | // The list of reserved attributes used in the <testsuite> element of XML |
| 2309 | // output. |
| 2310 | static const char* const kReservedTestSuiteAttributes[] = { |
| 2311 | "disabled" , "errors" , "failures" , "name" , |
| 2312 | "tests" , "time" , "timestamp" , "skipped" }; |
| 2313 | |
| 2314 | // The list of reserved attributes used in the <testcase> element of XML output. |
| 2315 | static const char* const kReservedTestCaseAttributes[] = { |
| 2316 | "classname" , "name" , "status" , "time" , |
| 2317 | "type_param" , "value_param" , "file" , "line" }; |
| 2318 | |
| 2319 | // Use a slightly different set for allowed output to ensure existing tests can |
// still RecordProperty("result") or RecordProperty("timestamp").
| 2321 | static const char* const kReservedOutputTestCaseAttributes[] = { |
| 2322 | "classname" , "name" , "status" , "time" , "type_param" , |
| 2323 | "value_param" , "file" , "line" , "result" , "timestamp" }; |
| 2324 | |
| 2325 | template <size_t kSize> |
| 2326 | std::vector<std::string> ArrayAsVector(const char* const (&array)[kSize]) { |
| 2327 | return std::vector<std::string>(array, array + kSize); |
| 2328 | } |
| 2329 | |
| 2330 | static std::vector<std::string> GetReservedAttributesForElement( |
| 2331 | const std::string& xml_element) { |
| 2332 | if (xml_element == "testsuites" ) { |
| 2333 | return ArrayAsVector(array: kReservedTestSuitesAttributes); |
| 2334 | } else if (xml_element == "testsuite" ) { |
| 2335 | return ArrayAsVector(array: kReservedTestSuiteAttributes); |
| 2336 | } else if (xml_element == "testcase" ) { |
| 2337 | return ArrayAsVector(array: kReservedTestCaseAttributes); |
| 2338 | } else { |
| 2339 | GTEST_CHECK_(false) << "Unrecognized xml_element provided: " << xml_element; |
| 2340 | } |
  // This code is unreachable but some compilers may not realize that.
| 2342 | return std::vector<std::string>(); |
| 2343 | } |
| 2344 | |
| 2345 | #if GTEST_HAS_FILE_SYSTEM |
| 2346 | // TODO(jdesprez): Merge the two getReserved attributes once skip is improved |
| 2347 | // This function is only used when file systems are enabled. |
| 2348 | static std::vector<std::string> GetReservedOutputAttributesForElement( |
| 2349 | const std::string& xml_element) { |
| 2350 | if (xml_element == "testsuites" ) { |
| 2351 | return ArrayAsVector(array: kReservedTestSuitesAttributes); |
| 2352 | } else if (xml_element == "testsuite" ) { |
| 2353 | return ArrayAsVector(array: kReservedTestSuiteAttributes); |
| 2354 | } else if (xml_element == "testcase" ) { |
| 2355 | return ArrayAsVector(array: kReservedOutputTestCaseAttributes); |
| 2356 | } else { |
| 2357 | GTEST_CHECK_(false) << "Unrecognized xml_element provided: " << xml_element; |
| 2358 | } |
  // This code is unreachable but some compilers may not realize that.
| 2360 | return std::vector<std::string>(); |
| 2361 | } |
| 2362 | #endif |
| 2363 | |
| 2364 | static std::string FormatWordList(const std::vector<std::string>& words) { |
| 2365 | Message word_list; |
| 2366 | for (size_t i = 0; i < words.size(); ++i) { |
| 2367 | if (i > 0 && words.size() > 2) { |
| 2368 | word_list << ", " ; |
| 2369 | } |
| 2370 | if (i == words.size() - 1) { |
| 2371 | word_list << "and " ; |
| 2372 | } |
| 2373 | word_list << "'" << words[i] << "'" ; |
| 2374 | } |
| 2375 | return word_list.GetString(); |
| 2376 | } |
| 2377 | |
| 2378 | static bool ValidateTestPropertyName( |
| 2379 | const std::string& property_name, |
| 2380 | const std::vector<std::string>& reserved_names) { |
  if (std::find(reserved_names.begin(), reserved_names.end(), property_name) !=
| 2382 | reserved_names.end()) { |
| 2383 | ADD_FAILURE() << "Reserved key used in RecordProperty(): " << property_name |
| 2384 | << " (" << FormatWordList(words: reserved_names) |
| 2385 | << " are reserved by " << GTEST_NAME_ << ")" ; |
| 2386 | return false; |
| 2387 | } |
| 2388 | return true; |
| 2389 | } |
| 2390 | |
| 2391 | // Adds a failure if the key is a reserved attribute of the element named |
| 2392 | // xml_element. Returns true if the property is valid. |
| 2393 | bool TestResult::ValidateTestProperty(const std::string& xml_element, |
| 2394 | const TestProperty& test_property) { |
  return ValidateTestPropertyName(test_property.key(),
                                  GetReservedAttributesForElement(xml_element));
| 2397 | } |
| 2398 | |
| 2399 | // Clears the object. |
| 2400 | void TestResult::Clear() { |
| 2401 | test_part_results_.clear(); |
| 2402 | test_properties_.clear(); |
| 2403 | death_test_count_ = 0; |
| 2404 | elapsed_time_ = 0; |
| 2405 | } |
| 2406 | |
// Returns true if and only if the test part was skipped.
| 2408 | static bool TestPartSkipped(const TestPartResult& result) { |
| 2409 | return result.skipped(); |
| 2410 | } |
| 2411 | |
| 2412 | // Returns true if and only if the test was skipped. |
| 2413 | bool TestResult::Skipped() const { |
  return !Failed() && CountIf(test_part_results_, TestPartSkipped) > 0;
| 2415 | } |
| 2416 | |
| 2417 | // Returns true if and only if the test failed. |
| 2418 | bool TestResult::Failed() const { |
| 2419 | for (int i = 0; i < total_part_count(); ++i) { |
| 2420 | if (GetTestPartResult(i).failed()) return true; |
| 2421 | } |
| 2422 | return false; |
| 2423 | } |
| 2424 | |
| 2425 | // Returns true if and only if the test part fatally failed. |
| 2426 | static bool TestPartFatallyFailed(const TestPartResult& result) { |
| 2427 | return result.fatally_failed(); |
| 2428 | } |
| 2429 | |
| 2430 | // Returns true if and only if the test fatally failed. |
| 2431 | bool TestResult::HasFatalFailure() const { |
  return CountIf(test_part_results_, TestPartFatallyFailed) > 0;
| 2433 | } |
| 2434 | |
| 2435 | // Returns true if and only if the test part non-fatally failed. |
| 2436 | static bool TestPartNonfatallyFailed(const TestPartResult& result) { |
| 2437 | return result.nonfatally_failed(); |
| 2438 | } |
| 2439 | |
| 2440 | // Returns true if and only if the test has a non-fatal failure. |
| 2441 | bool TestResult::HasNonfatalFailure() const { |
  return CountIf(test_part_results_, TestPartNonfatallyFailed) > 0;
| 2443 | } |
| 2444 | |
| 2445 | // Gets the number of all test parts. This is the sum of the number |
| 2446 | // of successful test parts and the number of failed test parts. |
| 2447 | int TestResult::total_part_count() const { |
| 2448 | return static_cast<int>(test_part_results_.size()); |
| 2449 | } |
| 2450 | |
| 2451 | // Returns the number of the test properties. |
| 2452 | int TestResult::test_property_count() const { |
| 2453 | return static_cast<int>(test_properties_.size()); |
| 2454 | } |
| 2455 | |
| 2456 | // class Test |
| 2457 | |
| 2458 | // Creates a Test object. |
| 2459 | |
| 2460 | // The c'tor saves the states of all flags. |
| 2461 | Test::Test() : gtest_flag_saver_(new GTEST_FLAG_SAVER_) {} |
| 2462 | |
| 2463 | // The d'tor restores the states of all flags. The actual work is |
| 2464 | // done by the d'tor of the gtest_flag_saver_ field, and thus not |
| 2465 | // visible here. |
| 2466 | Test::~Test() = default; |
| 2467 | |
| 2468 | // Sets up the test fixture. |
| 2469 | // |
| 2470 | // A sub-class may override this. |
| 2471 | void Test::SetUp() {} |
| 2472 | |
| 2473 | // Tears down the test fixture. |
| 2474 | // |
| 2475 | // A sub-class may override this. |
| 2476 | void Test::TearDown() {} |
| 2477 | |
| 2478 | // Allows user supplied key value pairs to be recorded for later output. |
| 2479 | void Test::RecordProperty(const std::string& key, const std::string& value) { |
| 2480 | UnitTest::GetInstance()->RecordProperty(key, value); |
| 2481 | } |
| 2482 | |
| 2483 | namespace internal { |
| 2484 | |
| 2485 | void ReportFailureInUnknownLocation(TestPartResult::Type result_type, |
| 2486 | const std::string& message) { |
| 2487 | // This function is a friend of UnitTest and as such has access to |
| 2488 | // AddTestPartResult. |
| 2489 | UnitTest::GetInstance()->AddTestPartResult( |
| 2490 | result_type, |
      nullptr,  // No info about the source file where the exception occurred.
      -1,       // We have no info on which line caused the exception.
      message,
      "");  // No stack trace, either.
| 2495 | } |
| 2496 | |
| 2497 | } // namespace internal |
| 2498 | |
| 2499 | // Google Test requires all tests in the same test suite to use the same test |
| 2500 | // fixture class. This function checks if the current test has the |
| 2501 | // same fixture class as the first test in the current test suite. If |
| 2502 | // yes, it returns true; otherwise it generates a Google Test failure and |
| 2503 | // returns false. |
| 2504 | bool Test::HasSameFixtureClass() { |
| 2505 | internal::UnitTestImpl* const impl = internal::GetUnitTestImpl(); |
| 2506 | const TestSuite* const test_suite = impl->current_test_suite(); |
| 2507 | |
| 2508 | // Info about the first test in the current test suite. |
| 2509 | const TestInfo* const first_test_info = test_suite->test_info_list()[0]; |
| 2510 | const internal::TypeId first_fixture_id = first_test_info->fixture_class_id_; |
| 2511 | const char* const first_test_name = first_test_info->name(); |
| 2512 | |
| 2513 | // Info about the current test. |
| 2514 | const TestInfo* const this_test_info = impl->current_test_info(); |
| 2515 | const internal::TypeId this_fixture_id = this_test_info->fixture_class_id_; |
| 2516 | const char* const this_test_name = this_test_info->name(); |
| 2517 | |
| 2518 | if (this_fixture_id != first_fixture_id) { |
| 2519 | // Is the first test defined using TEST? |
| 2520 | const bool first_is_TEST = first_fixture_id == internal::GetTestTypeId(); |
| 2521 | // Is this test defined using TEST? |
| 2522 | const bool this_is_TEST = this_fixture_id == internal::GetTestTypeId(); |
| 2523 | |
| 2524 | if (first_is_TEST || this_is_TEST) { |
| 2525 | // Both TEST and TEST_F appear in same test suite, which is incorrect. |
| 2526 | // Tell the user how to fix this. |
| 2527 | |
| 2528 | // Gets the name of the TEST and the name of the TEST_F. Note |
| 2529 | // that first_is_TEST and this_is_TEST cannot both be true, as |
| 2530 | // the fixture IDs are different for the two tests. |
| 2531 | const char* const TEST_name = |
| 2532 | first_is_TEST ? first_test_name : this_test_name; |
| 2533 | const char* const TEST_F_name = |
| 2534 | first_is_TEST ? this_test_name : first_test_name; |
| 2535 | |
| 2536 | ADD_FAILURE() |
| 2537 | << "All tests in the same test suite must use the same test fixture\n" |
| 2538 | << "class, so mixing TEST_F and TEST in the same test suite is\n" |
| 2539 | << "illegal. In test suite " << this_test_info->test_suite_name() |
| 2540 | << ",\n" |
| 2541 | << "test " << TEST_F_name << " is defined using TEST_F but\n" |
| 2542 | << "test " << TEST_name << " is defined using TEST. You probably\n" |
| 2543 | << "want to change the TEST to TEST_F or move it to another test\n" |
| 2544 | << "case." ; |
| 2545 | } else { |
| 2546 | // Two fixture classes with the same name appear in two different |
| 2547 | // namespaces, which is not allowed. Tell the user how to fix this. |
| 2548 | ADD_FAILURE() |
| 2549 | << "All tests in the same test suite must use the same test fixture\n" |
| 2550 | << "class. However, in test suite " |
| 2551 | << this_test_info->test_suite_name() << ",\n" |
| 2552 | << "you defined test " << first_test_name << " and test " |
| 2553 | << this_test_name << "\n" |
| 2554 | << "using two different test fixture classes. This can happen if\n" |
| 2555 | << "the two classes are from different namespaces or translation\n" |
| 2556 | << "units and have the same name. You should probably rename one\n" |
| 2557 | << "of the classes to put the tests into different test suites." ; |
| 2558 | } |
| 2559 | return false; |
| 2560 | } |
| 2561 | |
| 2562 | return true; |
| 2563 | } |
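
// For illustration only: the mix rejected above looks like this (the names
// are hypothetical):
//
//   class FooTest : public ::testing::Test {};
//   TEST_F(FooTest, UsesFixture) {}  // fixture id is FooTest's TypeId
//   TEST(FooTest, NoFixture) {}      // fixture id is the internal TEST id
//
// Both land in test suite "FooTest" with different fixture ids, so whichever
// of the two runs second triggers the ADD_FAILURE() above.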
| 2564 | |
| 2565 | namespace internal { |
| 2566 | |
| 2567 | #if GTEST_HAS_EXCEPTIONS |
| 2568 | |
| 2569 | // Adds an "exception thrown" fatal failure to the current test. |
| 2570 | static std::string FormatCxxExceptionMessage(const char* description, |
| 2571 | const char* location) { |
| 2572 | Message message; |
| 2573 | if (description != nullptr) { |
| 2574 | message << "C++ exception with description \"" << description << "\"" ; |
| 2575 | } else { |
| 2576 | message << "Unknown C++ exception" ; |
| 2577 | } |
| 2578 | message << " thrown in " << location << "." ; |
| 2579 | |
| 2580 | return message.GetString(); |
| 2581 | } |
| 2582 | |
| 2583 | static std::string PrintTestPartResultToString( |
| 2584 | const TestPartResult& test_part_result); |
| 2585 | |
| 2586 | GoogleTestFailureException::GoogleTestFailureException( |
| 2587 | const TestPartResult& failure) |
| 2588 | : ::std::runtime_error(PrintTestPartResultToString(failure).c_str()) {} |
| 2589 | |
| 2590 | #endif // GTEST_HAS_EXCEPTIONS |
| 2591 | |
// We put these helper functions in the internal namespace as IBM's xlC
// compiler rejects the code if they are declared static.
| 2594 | |
| 2595 | // Runs the given method and handles SEH exceptions it throws, when |
| 2596 | // SEH is supported; returns the 0-value for type Result in case of an |
| 2597 | // SEH exception. (Microsoft compilers cannot handle SEH and C++ |
| 2598 | // exceptions in the same function. Therefore, we provide a separate |
| 2599 | // wrapper function for handling SEH exceptions.) |
| 2600 | template <class T, typename Result> |
| 2601 | Result HandleSehExceptionsInMethodIfSupported(T* object, Result (T::*method)(), |
| 2602 | const char* location) { |
| 2603 | #if GTEST_HAS_SEH |
| 2604 | __try { |
| 2605 | return (object->*method)(); |
| 2606 | } __except (internal::UnitTestOptions::GTestProcessSEH( // NOLINT |
| 2607 | GetExceptionCode(), location)) { |
| 2608 | return static_cast<Result>(0); |
| 2609 | } |
| 2610 | #else |
| 2611 | (void)location; |
| 2612 | return (object->*method)(); |
| 2613 | #endif // GTEST_HAS_SEH |
| 2614 | } |
| 2615 | |
| 2616 | // Runs the given method and catches and reports C++ and/or SEH-style |
| 2617 | // exceptions, if they are supported; returns the 0-value for type |
| 2618 | // Result in case of an SEH exception. |
| 2619 | template <class T, typename Result> |
| 2620 | Result HandleExceptionsInMethodIfSupported(T* object, Result (T::*method)(), |
| 2621 | const char* location) { |
| 2622 | // NOTE: The user code can affect the way in which Google Test handles |
| 2623 | // exceptions by setting GTEST_FLAG(catch_exceptions), but only before |
| 2624 | // RUN_ALL_TESTS() starts. It is technically possible to check the flag |
| 2625 | // after the exception is caught and either report or re-throw the |
| 2626 | // exception based on the flag's value: |
| 2627 | // |
| 2628 | // try { |
| 2629 | // // Perform the test method. |
| 2630 | // } catch (...) { |
| 2631 | // if (GTEST_FLAG_GET(catch_exceptions)) |
| 2632 | // // Report the exception as failure. |
| 2633 | // else |
| 2634 | // throw; // Re-throws the original exception. |
| 2635 | // } |
| 2636 | // |
| 2637 | // However, the purpose of this flag is to allow the program to drop into |
| 2638 | // the debugger when the exception is thrown. On most platforms, once the |
| 2639 | // control enters the catch block, the exception origin information is |
| 2640 | // lost and the debugger will stop the program at the point of the |
| 2641 | // re-throw in this function -- instead of at the point of the original |
| 2642 | // throw statement in the code under test. For this reason, we perform |
| 2643 | // the check early, sacrificing the ability to affect Google Test's |
| 2644 | // exception handling in the method where the exception is thrown. |
| 2645 | if (internal::GetUnitTestImpl()->catch_exceptions()) { |
| 2646 | #if GTEST_HAS_EXCEPTIONS |
| 2647 | try { |
| 2648 | return HandleSehExceptionsInMethodIfSupported(object, method, location); |
| 2649 | } catch (const AssertionException&) { // NOLINT |
| 2650 | // This failure was reported already. |
| 2651 | } catch (const internal::GoogleTestFailureException&) { // NOLINT |
| 2652 | // This exception type can only be thrown by a failed Google |
| 2653 | // Test assertion with the intention of letting another testing |
| 2654 | // framework catch it. Therefore we just re-throw it. |
| 2655 | throw; |
| 2656 | } catch (const std::exception& e) { // NOLINT |
| 2657 | internal::ReportFailureInUnknownLocation( |
| 2658 | TestPartResult::kFatalFailure, |
| 2659 | FormatCxxExceptionMessage(e.what(), location)); |
| 2660 | } catch (...) { // NOLINT |
| 2661 | internal::ReportFailureInUnknownLocation( |
| 2662 | TestPartResult::kFatalFailure, |
| 2663 | FormatCxxExceptionMessage(nullptr, location)); |
| 2664 | } |
| 2665 | return static_cast<Result>(0); |
| 2666 | #else |
| 2667 | return HandleSehExceptionsInMethodIfSupported(object, method, location); |
| 2668 | #endif // GTEST_HAS_EXCEPTIONS |
| 2669 | } else { |
| 2670 | return (object->*method)(); |
| 2671 | } |
| 2672 | } |
| 2673 | |
| 2674 | } // namespace internal |
| 2675 | |
| 2676 | // Runs the test and updates the test result. |
| 2677 | void Test::Run() { |
| 2678 | if (!HasSameFixtureClass()) return; |
| 2679 | |
| 2680 | internal::UnitTestImpl* const impl = internal::GetUnitTestImpl(); |
| 2681 | impl->os_stack_trace_getter()->UponLeavingGTest(); |
  internal::HandleExceptionsInMethodIfSupported(this, &Test::SetUp, "SetUp()");
| 2683 | // We will run the test only if SetUp() was successful and didn't call |
| 2684 | // GTEST_SKIP(). |
| 2685 | if (!HasFatalFailure() && !IsSkipped()) { |
| 2686 | impl->os_stack_trace_getter()->UponLeavingGTest(); |
    internal::HandleExceptionsInMethodIfSupported(this, &Test::TestBody,
                                                  "the test body");
| 2689 | } |
| 2690 | |
| 2691 | // However, we want to clean up as much as possible. Hence we will |
| 2692 | // always call TearDown(), even if SetUp() or the test body has |
| 2693 | // failed. |
| 2694 | impl->os_stack_trace_getter()->UponLeavingGTest(); |
  internal::HandleExceptionsInMethodIfSupported(this, &Test::TearDown,
                                                "TearDown()");
| 2697 | } |
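
// For illustration only: the SetUp/TestBody/TearDown sequence above is what a
// user-defined fixture observes (QueueTest and Queue are hypothetical):
//
//   class QueueTest : public ::testing::Test {
//    protected:
//     void SetUp() override { q_.Enqueue(1); }  // runs before the test body
//     void TearDown() override { q_.Clear(); }  // always runs afterwards
//     Queue q_;
//   };
//   TEST_F(QueueTest, IsNotEmpty) { EXPECT_FALSE(q_.IsEmpty()); }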
| 2698 | |
| 2699 | // Returns true if and only if the current test has a fatal failure. |
| 2700 | bool Test::HasFatalFailure() { |
| 2701 | return internal::GetUnitTestImpl()->current_test_result()->HasFatalFailure(); |
| 2702 | } |
| 2703 | |
| 2704 | // Returns true if and only if the current test has a non-fatal failure. |
| 2705 | bool Test::HasNonfatalFailure() { |
| 2706 | return internal::GetUnitTestImpl() |
| 2707 | ->current_test_result() |
| 2708 | ->HasNonfatalFailure(); |
| 2709 | } |
| 2710 | |
| 2711 | // Returns true if and only if the current test was skipped. |
| 2712 | bool Test::IsSkipped() { |
| 2713 | return internal::GetUnitTestImpl()->current_test_result()->Skipped(); |
| 2714 | } |
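
// For illustration only: user code typically consults these accessors from a
// fixture, e.g. to skip expensive TearDown() work (MyFixture is hypothetical):
//
//   class MyFixture : public ::testing::Test {
//    protected:
//     void TearDown() override {
//       if (HasFatalFailure() || IsSkipped()) return;
//       // ... expensive verification ...
//     }
//   };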
| 2715 | |
| 2716 | // class TestInfo |
| 2717 | |
| 2718 | // Constructs a TestInfo object. It assumes ownership of the test factory |
| 2719 | // object. |
| 2720 | TestInfo::TestInfo(const std::string& a_test_suite_name, |
| 2721 | const std::string& a_name, const char* a_type_param, |
| 2722 | const char* a_value_param, |
| 2723 | internal::CodeLocation a_code_location, |
| 2724 | internal::TypeId fixture_class_id, |
| 2725 | internal::TestFactoryBase* factory) |
| 2726 | : test_suite_name_(a_test_suite_name), |
| 2727 | // begin()/end() is MSVC 17.3.3 ASAN crash workaround (GitHub issue #3997) |
| 2728 | name_(a_name.begin(), a_name.end()), |
| 2729 | type_param_(a_type_param ? new std::string(a_type_param) : nullptr), |
| 2730 | value_param_(a_value_param ? new std::string(a_value_param) : nullptr), |
| 2731 | location_(a_code_location), |
| 2732 | fixture_class_id_(fixture_class_id), |
| 2733 | should_run_(false), |
| 2734 | is_disabled_(false), |
| 2735 | matches_filter_(false), |
| 2736 | is_in_another_shard_(false), |
| 2737 | factory_(factory), |
| 2738 | result_() {} |
| 2739 | |
| 2740 | // Destructs a TestInfo object. |
| 2741 | TestInfo::~TestInfo() { delete factory_; } |
| 2742 | |
| 2743 | namespace internal { |
| 2744 | |
| 2745 | // Creates a new TestInfo object and registers it with Google Test; |
| 2746 | // returns the created object. |
| 2747 | // |
| 2748 | // Arguments: |
| 2749 | // |
| 2750 | // test_suite_name: name of the test suite |
| 2751 | // name: name of the test |
| 2752 | // type_param: the name of the test's type parameter, or NULL if |
| 2753 | // this is not a typed or a type-parameterized test. |
| 2754 | // value_param: text representation of the test's value parameter, |
| 2755 | // or NULL if this is not a value-parameterized test. |
| 2756 | // code_location: code location where the test is defined |
| 2757 | // fixture_class_id: ID of the test fixture class |
| 2758 | // set_up_tc: pointer to the function that sets up the test suite |
| 2759 | // tear_down_tc: pointer to the function that tears down the test suite |
| 2760 | // factory: pointer to the factory that creates a test object. |
| 2761 | // The newly created TestInfo instance will assume |
| 2762 | // ownership of the factory object. |
| 2763 | TestInfo* MakeAndRegisterTestInfo( |
| 2764 | const char* test_suite_name, const char* name, const char* type_param, |
| 2765 | const char* value_param, CodeLocation code_location, |
| 2766 | TypeId fixture_class_id, SetUpTestSuiteFunc set_up_tc, |
| 2767 | TearDownTestSuiteFunc tear_down_tc, TestFactoryBase* factory) { |
| 2768 | TestInfo* const test_info = |
| 2769 | new TestInfo(test_suite_name, name, type_param, value_param, |
| 2770 | code_location, fixture_class_id, factory); |
| 2771 | GetUnitTestImpl()->AddTestInfo(set_up_tc, tear_down_tc, test_info); |
| 2772 | return test_info; |
| 2773 | } |
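
// For illustration only: TEST and TEST_F ultimately funnel into the function
// above.  A registration call looks roughly like this (several arguments are
// elided or simplified):
//
//   MakeAndRegisterTestInfo(
//       "FooTest", "DoesBar", nullptr, nullptr,
//       CodeLocation(__FILE__, __LINE__), GetTestTypeId(),
//       /*set_up_tc=*/..., /*tear_down_tc=*/...,
//       new TestFactoryImpl<FooTest_DoesBar_Test>);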
| 2774 | |
| 2775 | void ReportInvalidTestSuiteType(const char* test_suite_name, |
| 2776 | CodeLocation code_location) { |
| 2777 | Message errors; |
| 2778 | errors |
| 2779 | << "Attempted redefinition of test suite " << test_suite_name << ".\n" |
| 2780 | << "All tests in the same test suite must use the same test fixture\n" |
| 2781 | << "class. However, in test suite " << test_suite_name << ", you tried\n" |
| 2782 | << "to define a test using a fixture class different from the one\n" |
| 2783 | << "used earlier. This can happen if the two fixture classes are\n" |
| 2784 | << "from different namespaces and have the same name. You should\n" |
| 2785 | << "probably rename one of the classes to put the tests into different\n" |
| 2786 | << "test suites." ; |
| 2787 | |
  GTEST_LOG_(ERROR) << FormatFileLocation(code_location.file.c_str(),
                                          code_location.line)
| 2790 | << " " << errors.GetString(); |
| 2791 | } |
| 2792 | |
| 2793 | // This method expands all parameterized tests registered with macros TEST_P |
| 2794 | // and INSTANTIATE_TEST_SUITE_P into regular tests and registers those. |
| 2795 | // This will be done just once during the program runtime. |
| 2796 | void UnitTestImpl::RegisterParameterizedTests() { |
| 2797 | if (!parameterized_tests_registered_) { |
| 2798 | parameterized_test_registry_.RegisterTests(); |
| 2799 | type_parameterized_test_registry_.CheckForInstantiations(); |
| 2800 | parameterized_tests_registered_ = true; |
| 2801 | } |
| 2802 | } |
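
// For illustration only: the registrations expanded here come from user code
// such as
//
//   class ParamTest : public ::testing::TestWithParam<int> {};
//   TEST_P(ParamTest, IsPositive) { EXPECT_GT(GetParam(), 0); }
//   INSTANTIATE_TEST_SUITE_P(Small, ParamTest, ::testing::Values(1, 2, 3));
//
// Each supplied value becomes one concrete test, e.g. Small/ParamTest.IsPositive/0.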
| 2803 | |
| 2804 | } // namespace internal |
| 2805 | |
| 2806 | // Creates the test object, runs it, records its result, and then |
| 2807 | // deletes it. |
| 2808 | void TestInfo::Run() { |
| 2809 | TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater(); |
| 2810 | if (!should_run_) { |
| 2811 | if (is_disabled_ && matches_filter_) repeater->OnTestDisabled(*this); |
| 2812 | return; |
| 2813 | } |
| 2814 | |
| 2815 | // Tells UnitTest where to store test result. |
| 2816 | internal::UnitTestImpl* const impl = internal::GetUnitTestImpl(); |
| 2817 | impl->set_current_test_info(this); |
| 2818 | |
| 2819 | // Notifies the unit test event listeners that a test is about to start. |
  repeater->OnTestStart(*this);
| 2821 | result_.set_start_timestamp(internal::GetTimeInMillis()); |
| 2822 | internal::Timer timer; |
| 2823 | impl->os_stack_trace_getter()->UponLeavingGTest(); |
| 2824 | |
| 2825 | // Creates the test object. |
  Test* const test = internal::HandleExceptionsInMethodIfSupported(
      factory_, &internal::TestFactoryBase::CreateTest,
      "the test fixture's constructor");
| 2829 | |
| 2830 | // Runs the test if the constructor didn't generate a fatal failure or invoke |
| 2831 | // GTEST_SKIP(). |
  // Note that the test object will not be null if this branch is taken.
| 2833 | if (!Test::HasFatalFailure() && !Test::IsSkipped()) { |
    // This doesn't throw, as all user code that can throw is wrapped in
    // exception-handling code.
| 2836 | test->Run(); |
| 2837 | } |
| 2838 | |
| 2839 | if (test != nullptr) { |
| 2840 | // Deletes the test object. |
| 2841 | impl->os_stack_trace_getter()->UponLeavingGTest(); |
    internal::HandleExceptionsInMethodIfSupported(
        test, &Test::DeleteSelf_, "the test fixture's destructor");
| 2844 | } |
| 2845 | |
| 2846 | result_.set_elapsed_time(timer.Elapsed()); |
| 2847 | |
| 2848 | // Notifies the unit test event listener that a test has just finished. |
  repeater->OnTestEnd(*this);
| 2850 | |
| 2851 | // Tells UnitTest to stop associating assertion results to this |
| 2852 | // test. |
| 2853 | impl->set_current_test_info(nullptr); |
| 2854 | } |
| 2855 | |
// Skips the test and records a skipped test result for this object.
| 2857 | void TestInfo::Skip() { |
| 2858 | if (!should_run_) return; |
| 2859 | |
| 2860 | internal::UnitTestImpl* const impl = internal::GetUnitTestImpl(); |
| 2861 | impl->set_current_test_info(this); |
| 2862 | |
| 2863 | TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater(); |
| 2864 | |
| 2865 | // Notifies the unit test event listeners that a test is about to start. |
  repeater->OnTestStart(*this);

  const TestPartResult test_part_result =
      TestPartResult(TestPartResult::kSkip, this->file(), this->line(), "");
  impl->GetTestPartResultReporterForCurrentThread()->ReportTestPartResult(
      test_part_result);

  // Notifies the unit test event listener that a test has just finished.
  repeater->OnTestEnd(*this);
| 2875 | impl->set_current_test_info(nullptr); |
| 2876 | } |
| 2877 | |
| 2878 | // class TestSuite |
| 2879 | |
| 2880 | // Gets the number of successful tests in this test suite. |
| 2881 | int TestSuite::successful_test_count() const { |
  return CountIf(test_info_list_, TestPassed);
| 2883 | } |
| 2884 | |
// Gets the number of skipped tests in this test suite.
int TestSuite::skipped_test_count() const {
  return CountIf(test_info_list_, TestSkipped);
| 2888 | } |
| 2889 | |
| 2890 | // Gets the number of failed tests in this test suite. |
| 2891 | int TestSuite::failed_test_count() const { |
  return CountIf(test_info_list_, TestFailed);
| 2893 | } |
| 2894 | |
| 2895 | // Gets the number of disabled tests that will be reported in the XML report. |
| 2896 | int TestSuite::reportable_disabled_test_count() const { |
  return CountIf(test_info_list_, TestReportableDisabled);
| 2898 | } |
| 2899 | |
| 2900 | // Gets the number of disabled tests in this test suite. |
| 2901 | int TestSuite::disabled_test_count() const { |
  return CountIf(test_info_list_, TestDisabled);
| 2903 | } |
| 2904 | |
| 2905 | // Gets the number of tests to be printed in the XML report. |
| 2906 | int TestSuite::reportable_test_count() const { |
  return CountIf(test_info_list_, TestReportable);
| 2908 | } |
| 2909 | |
| 2910 | // Get the number of tests in this test suite that should run. |
| 2911 | int TestSuite::test_to_run_count() const { |
  return CountIf(test_info_list_, ShouldRunTest);
| 2913 | } |
| 2914 | |
| 2915 | // Gets the number of all tests. |
| 2916 | int TestSuite::total_test_count() const { |
| 2917 | return static_cast<int>(test_info_list_.size()); |
| 2918 | } |
| 2919 | |
| 2920 | // Creates a TestSuite with the given name. |
| 2921 | // |
| 2922 | // Arguments: |
| 2923 | // |
| 2924 | // a_name: name of the test suite |
| 2925 | // a_type_param: the name of the test suite's type parameter, or NULL if |
| 2926 | // this is not a typed or a type-parameterized test suite. |
| 2927 | // set_up_tc: pointer to the function that sets up the test suite |
| 2928 | // tear_down_tc: pointer to the function that tears down the test suite |
| 2929 | TestSuite::TestSuite(const char* a_name, const char* a_type_param, |
| 2930 | internal::SetUpTestSuiteFunc set_up_tc, |
| 2931 | internal::TearDownTestSuiteFunc tear_down_tc) |
| 2932 | : name_(a_name), |
| 2933 | type_param_(a_type_param ? new std::string(a_type_param) : nullptr), |
| 2934 | set_up_tc_(set_up_tc), |
| 2935 | tear_down_tc_(tear_down_tc), |
| 2936 | should_run_(false), |
| 2937 | start_timestamp_(0), |
| 2938 | elapsed_time_(0) {} |
| 2939 | |
| 2940 | // Destructor of TestSuite. |
| 2941 | TestSuite::~TestSuite() { |
| 2942 | // Deletes every Test in the collection. |
  ForEach(test_info_list_, internal::Delete<TestInfo>);
| 2944 | } |
| 2945 | |
| 2946 | // Returns the i-th test among all the tests. i can range from 0 to |
| 2947 | // total_test_count() - 1. If i is not in that range, returns NULL. |
| 2948 | const TestInfo* TestSuite::GetTestInfo(int i) const { |
  const int index = GetElementOr(test_indices_, i, -1);
| 2950 | return index < 0 ? nullptr : test_info_list_[static_cast<size_t>(index)]; |
| 2951 | } |
| 2952 | |
| 2953 | // Returns the i-th test among all the tests. i can range from 0 to |
| 2954 | // total_test_count() - 1. If i is not in that range, returns NULL. |
| 2955 | TestInfo* TestSuite::GetMutableTestInfo(int i) { |
  const int index = GetElementOr(test_indices_, i, -1);
| 2957 | return index < 0 ? nullptr : test_info_list_[static_cast<size_t>(index)]; |
| 2958 | } |
| 2959 | |
| 2960 | // Adds a test to this test suite. Will delete the test upon |
| 2961 | // destruction of the TestSuite object. |
| 2962 | void TestSuite::AddTestInfo(TestInfo* test_info) { |
  test_info_list_.push_back(test_info);
  test_indices_.push_back(static_cast<int>(test_indices_.size()));
| 2965 | } |
| 2966 | |
| 2967 | // Runs every test in this TestSuite. |
| 2968 | void TestSuite::Run() { |
| 2969 | if (!should_run_) return; |
| 2970 | |
| 2971 | internal::UnitTestImpl* const impl = internal::GetUnitTestImpl(); |
| 2972 | impl->set_current_test_suite(this); |
| 2973 | |
| 2974 | TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater(); |
| 2975 | |
| 2976 | // Ensure our tests are in a deterministic order. |
| 2977 | // |
| 2978 | // We do this by sorting lexicographically on (file, line number), providing |
| 2979 | // an order matching what the user can see in the source code. |
| 2980 | // |
| 2981 | // In the common case the line number comparison shouldn't be necessary, |
| 2982 | // because the registrations made by the TEST macro are executed in order |
| 2983 | // within a translation unit. But this is not true of the manual registration |
| 2984 | // API, and in more exotic scenarios a single file may be part of multiple |
| 2985 | // translation units. |
  std::stable_sort(test_info_list_.begin(), test_info_list_.end(),
                   [](const TestInfo* const a, const TestInfo* const b) {
                     if (const int result = std::strcmp(a->file(), b->file())) {
                       return result < 0;
                     }

                     return a->line() < b->line();
                   });
| 2994 | |
| 2995 | // Call both legacy and the new API |
| 2996 | repeater->OnTestSuiteStart(*this); |
| 2997 | // Legacy API is deprecated but still available |
| 2998 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 2999 | repeater->OnTestCaseStart(*this); |
| 3000 | #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3001 | |
| 3002 | impl->os_stack_trace_getter()->UponLeavingGTest(); |
  internal::HandleExceptionsInMethodIfSupported(
      this, &TestSuite::RunSetUpTestSuite, "SetUpTestSuite()");
| 3005 | |
| 3006 | const bool skip_all = |
| 3007 | ad_hoc_test_result().Failed() || ad_hoc_test_result().Skipped(); |
| 3008 | |
| 3009 | start_timestamp_ = internal::GetTimeInMillis(); |
| 3010 | internal::Timer timer; |
| 3011 | for (int i = 0; i < total_test_count(); i++) { |
| 3012 | if (skip_all) { |
| 3013 | GetMutableTestInfo(i)->Skip(); |
| 3014 | } else { |
| 3015 | GetMutableTestInfo(i)->Run(); |
| 3016 | } |
| 3017 | if (GTEST_FLAG_GET(fail_fast) && |
| 3018 | GetMutableTestInfo(i)->result()->Failed()) { |
| 3019 | for (int j = i + 1; j < total_test_count(); j++) { |
        GetMutableTestInfo(j)->Skip();
| 3021 | } |
| 3022 | break; |
| 3023 | } |
| 3024 | } |
| 3025 | elapsed_time_ = timer.Elapsed(); |
| 3026 | |
| 3027 | impl->os_stack_trace_getter()->UponLeavingGTest(); |
  internal::HandleExceptionsInMethodIfSupported(
      this, &TestSuite::RunTearDownTestSuite, "TearDownTestSuite()");
| 3030 | |
| 3031 | // Call both legacy and the new API |
| 3032 | repeater->OnTestSuiteEnd(*this); |
| 3033 | // Legacy API is deprecated but still available |
| 3034 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3035 | repeater->OnTestCaseEnd(*this); |
| 3036 | #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3037 | |
| 3038 | impl->set_current_test_suite(nullptr); |
| 3039 | } |
| 3040 | |
| 3041 | // Skips all tests under this TestSuite. |
| 3042 | void TestSuite::Skip() { |
| 3043 | if (!should_run_) return; |
| 3044 | |
| 3045 | internal::UnitTestImpl* const impl = internal::GetUnitTestImpl(); |
| 3046 | impl->set_current_test_suite(this); |
| 3047 | |
| 3048 | TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater(); |
| 3049 | |
| 3050 | // Call both legacy and the new API |
| 3051 | repeater->OnTestSuiteStart(*this); |
| 3052 | // Legacy API is deprecated but still available |
| 3053 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3054 | repeater->OnTestCaseStart(*this); |
| 3055 | #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3056 | |
| 3057 | for (int i = 0; i < total_test_count(); i++) { |
| 3058 | GetMutableTestInfo(i)->Skip(); |
| 3059 | } |
| 3060 | |
| 3061 | // Call both legacy and the new API |
| 3062 | repeater->OnTestSuiteEnd(*this); |
| 3063 | // Legacy API is deprecated but still available |
| 3064 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3065 | repeater->OnTestCaseEnd(*this); |
| 3066 | #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3067 | |
| 3068 | impl->set_current_test_suite(nullptr); |
| 3069 | } |
| 3070 | |
| 3071 | // Clears the results of all tests in this test suite. |
| 3072 | void TestSuite::ClearResult() { |
| 3073 | ad_hoc_test_result_.Clear(); |
  ForEach(test_info_list_, TestInfo::ClearTestResult);
| 3075 | } |
| 3076 | |
| 3077 | // Shuffles the tests in this test suite. |
| 3078 | void TestSuite::ShuffleTests(internal::Random* random) { |
  Shuffle(random, &test_indices_);
| 3080 | } |
| 3081 | |
| 3082 | // Restores the test order to before the first shuffle. |
| 3083 | void TestSuite::UnshuffleTests() { |
| 3084 | for (size_t i = 0; i < test_indices_.size(); i++) { |
| 3085 | test_indices_[i] = static_cast<int>(i); |
| 3086 | } |
| 3087 | } |
| 3088 | |
| 3089 | // Formats a countable noun. Depending on its quantity, either the |
| 3090 | // singular form or the plural form is used. e.g. |
| 3091 | // |
| 3092 | // FormatCountableNoun(1, "formula", "formuli") returns "1 formula". |
| 3093 | // FormatCountableNoun(5, "book", "books") returns "5 books". |
| 3094 | static std::string FormatCountableNoun(int count, const char* singular_form, |
| 3095 | const char* plural_form) { |
  return internal::StreamableToString(count) + " " +
| 3097 | (count == 1 ? singular_form : plural_form); |
| 3098 | } |
| 3099 | |
| 3100 | // Formats the count of tests. |
| 3101 | static std::string FormatTestCount(int test_count) { |
  return FormatCountableNoun(test_count, "test", "tests");
| 3103 | } |
| 3104 | |
| 3105 | // Formats the count of test suites. |
| 3106 | static std::string FormatTestSuiteCount(int test_suite_count) { |
  return FormatCountableNoun(test_suite_count, "test suite", "test suites");
| 3108 | } |
| 3109 | |
| 3110 | // Converts a TestPartResult::Type enum to human-friendly string |
| 3111 | // representation. Both kNonFatalFailure and kFatalFailure are translated |
| 3112 | // to "Failure", as the user usually doesn't care about the difference |
| 3113 | // between the two when viewing the test result. |
| 3114 | static const char* TestPartResultTypeToString(TestPartResult::Type type) { |
| 3115 | switch (type) { |
    case TestPartResult::kSkip:
      return "Skipped\n";
    case TestPartResult::kSuccess:
      return "Success";

    case TestPartResult::kNonFatalFailure:
    case TestPartResult::kFatalFailure:
#ifdef _MSC_VER
      return "error: ";
#else
      return "Failure\n";
#endif
    default:
      return "Unknown result type";
| 3130 | } |
| 3131 | } |
| 3132 | |
| 3133 | namespace internal { |
| 3134 | namespace { |
| 3135 | enum class GTestColor { kDefault, kRed, kGreen, kYellow }; |
| 3136 | } // namespace |
| 3137 | |
| 3138 | // Prints a TestPartResult to an std::string. |
| 3139 | static std::string PrintTestPartResultToString( |
| 3140 | const TestPartResult& test_part_result) { |
  return (Message() << internal::FormatFileLocation(
                           test_part_result.file_name(),
                           test_part_result.line_number())
                    << " "
                    << TestPartResultTypeToString(test_part_result.type())
                    << test_part_result.message())
      .GetString();
| 3148 | } |
| 3149 | |
| 3150 | // Prints a TestPartResult. |
| 3151 | static void PrintTestPartResult(const TestPartResult& test_part_result) { |
| 3152 | const std::string& result = PrintTestPartResultToString(test_part_result); |
  printf("%s\n", result.c_str());
| 3154 | fflush(stdout); |
| 3155 | // If the test program runs in Visual Studio or a debugger, the |
| 3156 | // following statements add the test part result message to the Output |
| 3157 | // window such that the user can double-click on it to jump to the |
| 3158 | // corresponding source code location; otherwise they do nothing. |
| 3159 | #if defined(GTEST_OS_WINDOWS) && !defined(GTEST_OS_WINDOWS_MOBILE) |
| 3160 | // We don't call OutputDebugString*() on Windows Mobile, as printing |
| 3161 | // to stdout is done by OutputDebugString() there already - we don't |
| 3162 | // want the same message printed twice. |
| 3163 | ::OutputDebugStringA(result.c_str()); |
  ::OutputDebugStringA("\n");
| 3165 | #endif |
| 3166 | } |
| 3167 | |
| 3168 | // class PrettyUnitTestResultPrinter |
| 3169 | #if defined(GTEST_OS_WINDOWS) && !defined(GTEST_OS_WINDOWS_MOBILE) && \ |
| 3170 | !defined(GTEST_OS_WINDOWS_PHONE) && !defined(GTEST_OS_WINDOWS_RT) && \ |
| 3171 | !defined(GTEST_OS_WINDOWS_MINGW) |
| 3172 | |
| 3173 | // Returns the character attribute for the given color. |
| 3174 | static WORD GetColorAttribute(GTestColor color) { |
| 3175 | switch (color) { |
| 3176 | case GTestColor::kRed: |
| 3177 | return FOREGROUND_RED; |
| 3178 | case GTestColor::kGreen: |
| 3179 | return FOREGROUND_GREEN; |
| 3180 | case GTestColor::kYellow: |
| 3181 | return FOREGROUND_RED | FOREGROUND_GREEN; |
| 3182 | default: |
| 3183 | return 0; |
| 3184 | } |
| 3185 | } |
| 3186 | |
| 3187 | static int GetBitOffset(WORD color_mask) { |
| 3188 | if (color_mask == 0) return 0; |
| 3189 | |
| 3190 | int bitOffset = 0; |
| 3191 | while ((color_mask & 1) == 0) { |
| 3192 | color_mask >>= 1; |
| 3193 | ++bitOffset; |
| 3194 | } |
| 3195 | return bitOffset; |
| 3196 | } |
| 3197 | |
| 3198 | static WORD GetNewColor(GTestColor color, WORD old_color_attrs) { |
| 3199 | // Let's reuse the BG |
| 3200 | static const WORD background_mask = BACKGROUND_BLUE | BACKGROUND_GREEN | |
| 3201 | BACKGROUND_RED | BACKGROUND_INTENSITY; |
| 3202 | static const WORD foreground_mask = FOREGROUND_BLUE | FOREGROUND_GREEN | |
| 3203 | FOREGROUND_RED | FOREGROUND_INTENSITY; |
| 3204 | const WORD existing_bg = old_color_attrs & background_mask; |
| 3205 | |
| 3206 | WORD new_color = |
| 3207 | GetColorAttribute(color) | existing_bg | FOREGROUND_INTENSITY; |
| 3208 | static const int bg_bitOffset = GetBitOffset(background_mask); |
| 3209 | static const int fg_bitOffset = GetBitOffset(foreground_mask); |
| 3210 | |
| 3211 | if (((new_color & background_mask) >> bg_bitOffset) == |
| 3212 | ((new_color & foreground_mask) >> fg_bitOffset)) { |
| 3213 | new_color ^= FOREGROUND_INTENSITY; // invert intensity |
| 3214 | } |
| 3215 | return new_color; |
| 3216 | } |
| 3217 | |
| 3218 | #else |
| 3219 | |
| 3220 | // Returns the ANSI color code for the given color. GTestColor::kDefault is |
| 3221 | // an invalid input. |
| 3222 | static const char* GetAnsiColorCode(GTestColor color) { |
| 3223 | switch (color) { |
    case GTestColor::kRed:
      return "1";
    case GTestColor::kGreen:
      return "2";
    case GTestColor::kYellow:
      return "3";
| 3230 | default: |
| 3231 | return nullptr; |
| 3232 | } |
| 3233 | } |
| 3234 | |
| 3235 | #endif // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE |
| 3236 | |
| 3237 | // Returns true if and only if Google Test should use colors in the output. |
| 3238 | bool ShouldUseColor(bool stdout_is_tty) { |
| 3239 | std::string c = GTEST_FLAG_GET(color); |
| 3240 | const char* const gtest_color = c.c_str(); |
| 3241 | |
  if (String::CaseInsensitiveCStringEquals(gtest_color, "auto")) {
| 3243 | #if defined(GTEST_OS_WINDOWS) && !defined(GTEST_OS_WINDOWS_MINGW) |
| 3244 | // On Windows the TERM variable is usually not set, but the |
| 3245 | // console there does support colors. |
| 3246 | return stdout_is_tty; |
| 3247 | #else |
| 3248 | // On non-Windows platforms, we rely on the TERM variable. |
    const char* const term = posix::GetEnv("TERM");
    const bool term_supports_color =
        term != nullptr && (String::CStringEquals(term, "xterm") ||
                            String::CStringEquals(term, "xterm-color") ||
                            String::CStringEquals(term, "xterm-kitty") ||
                            String::CStringEquals(term, "screen") ||
                            String::CStringEquals(term, "tmux") ||
                            String::CStringEquals(term, "rxvt-unicode") ||
                            String::CStringEquals(term, "linux") ||
                            String::CStringEquals(term, "cygwin") ||
                            String::EndsWithCaseInsensitive(term, "-256color"));
| 3260 | return stdout_is_tty && term_supports_color; |
| 3261 | #endif // GTEST_OS_WINDOWS |
| 3262 | } |
| 3263 | |
  return String::CaseInsensitiveCStringEquals(gtest_color, "yes") ||
         String::CaseInsensitiveCStringEquals(gtest_color, "true") ||
         String::CaseInsensitiveCStringEquals(gtest_color, "t") ||
         String::CStringEquals(gtest_color, "1");
| 3268 | // We take "yes", "true", "t", and "1" as meaning "yes". If the |
| 3269 | // value is neither one of these nor "auto", we treat it as "no" to |
| 3270 | // be conservative. |
| 3271 | } |
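
// For illustration only: users normally control this via the flag or the
// corresponding environment variable, e.g.
//
//   ./my_test --gtest_color=yes   # force ANSI colors even when piped
//   GTEST_COLOR=no ./my_test      # disable colors entirely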
| 3272 | |
| 3273 | // Helpers for printing colored strings to stdout. Note that on Windows, we |
| 3274 | // cannot simply emit special characters and have the terminal change colors. |
| 3275 | // This routine must actually emit the characters rather than return a string |
| 3276 | // that would be colored when printed, as can be done on Linux. |
| 3277 | |
| 3278 | GTEST_ATTRIBUTE_PRINTF_(2, 3) |
| 3279 | static void ColoredPrintf(GTestColor color, const char* fmt, ...) { |
| 3280 | va_list args; |
| 3281 | va_start(args, fmt); |
| 3282 | |
| 3283 | static const bool in_color_mode = |
| 3284 | #if GTEST_HAS_FILE_SYSTEM |
      ShouldUseColor(posix::IsATTY(posix::FileNo(stdout)) != 0);
| 3286 | #else |
| 3287 | false; |
| 3288 | #endif // GTEST_HAS_FILE_SYSTEM |
| 3289 | |
| 3290 | const bool use_color = in_color_mode && (color != GTestColor::kDefault); |
| 3291 | |
| 3292 | if (!use_color) { |
    vprintf(fmt, args);
| 3294 | va_end(args); |
| 3295 | return; |
| 3296 | } |
| 3297 | |
| 3298 | #if defined(GTEST_OS_WINDOWS) && !defined(GTEST_OS_WINDOWS_MOBILE) && \ |
| 3299 | !defined(GTEST_OS_WINDOWS_PHONE) && !defined(GTEST_OS_WINDOWS_RT) && \ |
| 3300 | !defined(GTEST_OS_WINDOWS_MINGW) |
| 3301 | const HANDLE stdout_handle = GetStdHandle(STD_OUTPUT_HANDLE); |
| 3302 | |
| 3303 | // Gets the current text color. |
| 3304 | CONSOLE_SCREEN_BUFFER_INFO buffer_info; |
| 3305 | GetConsoleScreenBufferInfo(stdout_handle, &buffer_info); |
| 3306 | const WORD old_color_attrs = buffer_info.wAttributes; |
| 3307 | const WORD new_color = GetNewColor(color, old_color_attrs); |
| 3308 | |
| 3309 | // We need to flush the stream buffers into the console before each |
| 3310 | // SetConsoleTextAttribute call lest it affect the text that is already |
| 3311 | // printed but has not yet reached the console. |
| 3312 | fflush(stdout); |
| 3313 | SetConsoleTextAttribute(stdout_handle, new_color); |
| 3314 | |
| 3315 | vprintf(fmt, args); |
| 3316 | |
| 3317 | fflush(stdout); |
| 3318 | // Restores the text color. |
| 3319 | SetConsoleTextAttribute(stdout_handle, old_color_attrs); |
| 3320 | #else |
| 3321 | printf(format: "\033[0;3%sm" , GetAnsiColorCode(color)); |
| 3322 | vprintf(format: fmt, arg: args); |
| 3323 | printf(format: "\033[m" ); // Resets the terminal to default. |
| 3324 | #endif // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE |
| 3325 | va_end(args); |
| 3326 | } |
| 3327 | |
| 3328 | // Text printed in Google Test's text output and --gtest_list_tests |
| 3329 | // output to label the type parameter and value parameter for a test. |
static const char kTypeParamLabel[] = "TypeParam";
static const char kValueParamLabel[] = "GetParam()";
| 3332 | |
static void PrintFullTestCommentIfPresent(const TestInfo& test_info) {
  const char* const type_param = test_info.type_param();
  const char* const value_param = test_info.value_param();

  if (type_param != nullptr || value_param != nullptr) {
    printf(", where ");
    if (type_param != nullptr) {
      printf("%s = %s", kTypeParamLabel, type_param);
      if (value_param != nullptr) printf(" and ");
    }
    if (value_param != nullptr) {
      printf("%s = %s", kValueParamLabel, value_param);
    }
| 3345 | } |
| 3346 | } |
| 3347 | } |
| 3348 | |
| 3349 | // This class implements the TestEventListener interface. |
| 3350 | // |
| 3351 | // Class PrettyUnitTestResultPrinter is copyable. |
| 3352 | class PrettyUnitTestResultPrinter : public TestEventListener { |
| 3353 | public: |
| 3354 | PrettyUnitTestResultPrinter() = default; |
| 3355 | static void PrintTestName(const char* test_suite, const char* test) { |
| 3356 | printf(format: "%s.%s" , test_suite, test); |
| 3357 | } |
| 3358 | |
| 3359 | // The following methods override what's in the TestEventListener class. |
| 3360 | void OnTestProgramStart(const UnitTest& /*unit_test*/) override {} |
| 3361 | void OnTestIterationStart(const UnitTest& unit_test, int iteration) override; |
| 3362 | void OnEnvironmentsSetUpStart(const UnitTest& unit_test) override; |
| 3363 | void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) override {} |
| 3364 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3365 | void OnTestCaseStart(const TestCase& test_case) override; |
| 3366 | #else |
| 3367 | void OnTestSuiteStart(const TestSuite& test_suite) override; |
| 3368 | #endif // OnTestCaseStart |
| 3369 | |
| 3370 | void OnTestStart(const TestInfo& test_info) override; |
| 3371 | void OnTestDisabled(const TestInfo& test_info) override; |
| 3372 | |
| 3373 | void OnTestPartResult(const TestPartResult& result) override; |
| 3374 | void OnTestEnd(const TestInfo& test_info) override; |
| 3375 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3376 | void OnTestCaseEnd(const TestCase& test_case) override; |
| 3377 | #else |
| 3378 | void OnTestSuiteEnd(const TestSuite& test_suite) override; |
| 3379 | #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3380 | |
| 3381 | void OnEnvironmentsTearDownStart(const UnitTest& unit_test) override; |
| 3382 | void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) override {} |
| 3383 | void OnTestIterationEnd(const UnitTest& unit_test, int iteration) override; |
| 3384 | void OnTestProgramEnd(const UnitTest& /*unit_test*/) override {} |
| 3385 | |
| 3386 | private: |
| 3387 | static void PrintFailedTests(const UnitTest& unit_test); |
| 3388 | static void PrintFailedTestSuites(const UnitTest& unit_test); |
| 3389 | static void PrintSkippedTests(const UnitTest& unit_test); |
| 3390 | }; |
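
// For illustration only: user code can replace this default printer with its
// own listener (MyMinimalPrinter is hypothetical):
//
//   ::testing::TestEventListeners& listeners =
//       ::testing::UnitTest::GetInstance()->listeners();
//   delete listeners.Release(listeners.default_result_printer());
//   listeners.Append(new MyMinimalPrinter);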
| 3391 | |
| 3392 | // Fired before each iteration of tests starts. |
| 3393 | void PrettyUnitTestResultPrinter::OnTestIterationStart( |
| 3394 | const UnitTest& unit_test, int iteration) { |
| 3395 | if (GTEST_FLAG_GET(repeat) != 1) |
| 3396 | printf(format: "\nRepeating all tests (iteration %d) . . .\n\n" , iteration + 1); |
| 3397 | |
| 3398 | std::string f = GTEST_FLAG_GET(filter); |
| 3399 | const char* const filter = f.c_str(); |
| 3400 | |
| 3401 | // Prints the filter if it's not *. This reminds the user that some |
| 3402 | // tests may be skipped. |
  if (!String::CStringEquals(filter, kUniversalFilter)) {
    ColoredPrintf(GTestColor::kYellow, "Note: %s filter = %s\n", GTEST_NAME_,
                  filter);
| 3406 | } |
| 3407 | |
  if (internal::ShouldShard(kTestTotalShards, kTestShardIndex, false)) {
    const int32_t shard_index = Int32FromEnvOrDie(kTestShardIndex, -1);
    ColoredPrintf(GTestColor::kYellow, "Note: This is test shard %d of %s.\n",
                  static_cast<int>(shard_index) + 1,
                  internal::posix::GetEnv(kTestTotalShards));
| 3413 | } |
| 3414 | |
| 3415 | if (GTEST_FLAG_GET(shuffle)) { |
    ColoredPrintf(GTestColor::kYellow,
                  "Note: Randomizing tests' orders with a seed of %d .\n",
                  unit_test.random_seed());
| 3419 | } |
| 3420 | |
  ColoredPrintf(GTestColor::kGreen, "[==========] ");
  printf("Running %s from %s.\n",
         FormatTestCount(unit_test.test_to_run_count()).c_str(),
         FormatTestSuiteCount(unit_test.test_suite_to_run_count()).c_str());
| 3425 | fflush(stdout); |
| 3426 | } |
| 3427 | |
| 3428 | void PrettyUnitTestResultPrinter::OnEnvironmentsSetUpStart( |
| 3429 | const UnitTest& /*unit_test*/) { |
  ColoredPrintf(GTestColor::kGreen, "[----------] ");
  printf("Global test environment set-up.\n");
| 3432 | fflush(stdout); |
| 3433 | } |
| 3434 | |
| 3435 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3436 | void PrettyUnitTestResultPrinter::OnTestCaseStart(const TestCase& test_case) { |
  const std::string counts =
      FormatCountableNoun(test_case.test_to_run_count(), "test", "tests");
  ColoredPrintf(GTestColor::kGreen, "[----------] ");
  printf("%s from %s", counts.c_str(), test_case.name());
  if (test_case.type_param() == nullptr) {
    printf("\n");
  } else {
    printf(", where %s = %s\n", kTypeParamLabel, test_case.type_param());
| 3445 | } |
| 3446 | fflush(stdout); |
| 3447 | } |
| 3448 | #else |
| 3449 | void PrettyUnitTestResultPrinter::OnTestSuiteStart( |
| 3450 | const TestSuite& test_suite) { |
  const std::string counts =
      FormatCountableNoun(test_suite.test_to_run_count(), "test", "tests");
  ColoredPrintf(GTestColor::kGreen, "[----------] ");
  printf("%s from %s", counts.c_str(), test_suite.name());
  if (test_suite.type_param() == nullptr) {
    printf("\n");
  } else {
    printf(", where %s = %s\n", kTypeParamLabel, test_suite.type_param());
| 3459 | } |
| 3460 | fflush(stdout); |
| 3461 | } |
| 3462 | #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3463 | |
| 3464 | void PrettyUnitTestResultPrinter::OnTestStart(const TestInfo& test_info) { |
  ColoredPrintf(GTestColor::kGreen, "[ RUN      ] ");
  PrintTestName(test_info.test_suite_name(), test_info.name());
  printf("\n");
| 3468 | fflush(stdout); |
| 3469 | } |
| 3470 | |
| 3471 | void PrettyUnitTestResultPrinter::OnTestDisabled(const TestInfo& test_info) { |
  ColoredPrintf(GTestColor::kYellow, "[ DISABLED ] ");
  PrintTestName(test_info.test_suite_name(), test_info.name());
  printf("\n");
| 3475 | fflush(stdout); |
| 3476 | } |
| 3477 | |
| 3478 | // Called after an assertion failure. |
| 3479 | void PrettyUnitTestResultPrinter::OnTestPartResult( |
| 3480 | const TestPartResult& result) { |
| 3481 | switch (result.type()) { |
| 3482 | // If the test part succeeded, we don't need to do anything. |
| 3483 | case TestPartResult::kSuccess: |
| 3484 | return; |
| 3485 | default: |
| 3486 | // Print failure message from the assertion |
| 3487 | // (e.g. expected this and got that). |
      PrintTestPartResult(result);
| 3489 | fflush(stdout); |
| 3490 | } |
| 3491 | } |
| 3492 | |
| 3493 | void PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) { |
  if (test_info.result()->Passed()) {
    ColoredPrintf(GTestColor::kGreen, "[       OK ] ");
  } else if (test_info.result()->Skipped()) {
    ColoredPrintf(GTestColor::kGreen, "[  SKIPPED ] ");
  } else {
    ColoredPrintf(GTestColor::kRed, "[  FAILED  ] ");
  }
  PrintTestName(test_info.test_suite_name(), test_info.name());
  if (test_info.result()->Failed()) PrintFullTestCommentIfPresent(test_info);

  if (GTEST_FLAG_GET(print_time)) {
    printf(" (%s ms)\n",
           internal::StreamableToString(test_info.result()->elapsed_time())
               .c_str());
  } else {
    printf("\n");
| 3510 | } |
| 3511 | fflush(stdout); |
| 3512 | } |
| 3513 | |
| 3514 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3515 | void PrettyUnitTestResultPrinter::OnTestCaseEnd(const TestCase& test_case) { |
| 3516 | if (!GTEST_FLAG_GET(print_time)) return; |
| 3517 | |
  const std::string counts =
      FormatCountableNoun(test_case.test_to_run_count(), "test", "tests");
  ColoredPrintf(GTestColor::kGreen, "[----------] ");
  printf("%s from %s (%s ms total)\n\n", counts.c_str(), test_case.name(),
         internal::StreamableToString(test_case.elapsed_time()).c_str());
| 3523 | fflush(stdout); |
| 3524 | } |
| 3525 | #else |
| 3526 | void PrettyUnitTestResultPrinter::OnTestSuiteEnd(const TestSuite& test_suite) { |
| 3527 | if (!GTEST_FLAG_GET(print_time)) return; |
| 3528 | |
  const std::string counts =
      FormatCountableNoun(test_suite.test_to_run_count(), "test", "tests");
  ColoredPrintf(GTestColor::kGreen, "[----------] ");
  printf("%s from %s (%s ms total)\n\n", counts.c_str(), test_suite.name(),
         internal::StreamableToString(test_suite.elapsed_time()).c_str());
| 3534 | fflush(stdout); |
| 3535 | } |
| 3536 | #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3537 | |
| 3538 | void PrettyUnitTestResultPrinter::OnEnvironmentsTearDownStart( |
| 3539 | const UnitTest& /*unit_test*/) { |
  ColoredPrintf(GTestColor::kGreen, "[----------] ");
  printf("Global test environment tear-down\n");
| 3542 | fflush(stdout); |
| 3543 | } |
| 3544 | |
| 3545 | // Internal helper for printing the list of failed tests. |
| 3546 | void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) { |
| 3547 | const int failed_test_count = unit_test.failed_test_count(); |
  ColoredPrintf(GTestColor::kRed, "[  FAILED  ] ");
  printf("%s, listed below:\n", FormatTestCount(failed_test_count).c_str());
| 3550 | |
| 3551 | for (int i = 0; i < unit_test.total_test_suite_count(); ++i) { |
| 3552 | const TestSuite& test_suite = *unit_test.GetTestSuite(i); |
| 3553 | if (!test_suite.should_run() || (test_suite.failed_test_count() == 0)) { |
| 3554 | continue; |
| 3555 | } |
| 3556 | for (int j = 0; j < test_suite.total_test_count(); ++j) { |
      const TestInfo& test_info = *test_suite.GetTestInfo(j);
| 3558 | if (!test_info.should_run() || !test_info.result()->Failed()) { |
| 3559 | continue; |
| 3560 | } |
      ColoredPrintf(GTestColor::kRed, "[  FAILED  ] ");
      printf("%s.%s", test_suite.name(), test_info.name());
      PrintFullTestCommentIfPresent(test_info);
      printf("\n");
| 3565 | } |
| 3566 | } |
| 3567 | printf(format: "\n%2d FAILED %s\n" , failed_test_count, |
| 3568 | failed_test_count == 1 ? "TEST" : "TESTS" ); |
| 3569 | } |
| 3570 | |
| 3571 | // Internal helper for printing the list of test suite failures not covered by |
| 3572 | // PrintFailedTests. |
| 3573 | void PrettyUnitTestResultPrinter::PrintFailedTestSuites( |
| 3574 | const UnitTest& unit_test) { |
| 3575 | int suite_failure_count = 0; |
| 3576 | for (int i = 0; i < unit_test.total_test_suite_count(); ++i) { |
| 3577 | const TestSuite& test_suite = *unit_test.GetTestSuite(i); |
| 3578 | if (!test_suite.should_run()) { |
| 3579 | continue; |
| 3580 | } |
| 3581 | if (test_suite.ad_hoc_test_result().Failed()) { |
      ColoredPrintf(GTestColor::kRed, "[  FAILED  ] ");
      printf("%s: SetUpTestSuite or TearDownTestSuite\n", test_suite.name());
| 3584 | ++suite_failure_count; |
| 3585 | } |
| 3586 | } |
| 3587 | if (suite_failure_count > 0) { |
| 3588 | printf(format: "\n%2d FAILED TEST %s\n" , suite_failure_count, |
| 3589 | suite_failure_count == 1 ? "SUITE" : "SUITES" ); |
| 3590 | } |
| 3591 | } |
| 3592 | |
| 3593 | // Internal helper for printing the list of skipped tests. |
| 3594 | void PrettyUnitTestResultPrinter::PrintSkippedTests(const UnitTest& unit_test) { |
| 3595 | const int skipped_test_count = unit_test.skipped_test_count(); |
| 3596 | if (skipped_test_count == 0) { |
| 3597 | return; |
| 3598 | } |
| 3599 | |
| 3600 | for (int i = 0; i < unit_test.total_test_suite_count(); ++i) { |
| 3601 | const TestSuite& test_suite = *unit_test.GetTestSuite(i); |
| 3602 | if (!test_suite.should_run() || (test_suite.skipped_test_count() == 0)) { |
| 3603 | continue; |
| 3604 | } |
| 3605 | for (int j = 0; j < test_suite.total_test_count(); ++j) { |
      const TestInfo& test_info = *test_suite.GetTestInfo(j);
| 3607 | if (!test_info.should_run() || !test_info.result()->Skipped()) { |
| 3608 | continue; |
| 3609 | } |
      ColoredPrintf(GTestColor::kGreen, "[  SKIPPED ] ");
      printf("%s.%s", test_suite.name(), test_info.name());
      printf("\n");
| 3613 | } |
| 3614 | } |
| 3615 | } |
| 3616 | |
| 3617 | void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test, |
| 3618 | int /*iteration*/) { |
  ColoredPrintf(GTestColor::kGreen, "[==========] ");
  printf("%s from %s ran.",
         FormatTestCount(unit_test.test_to_run_count()).c_str(),
         FormatTestSuiteCount(unit_test.test_suite_to_run_count()).c_str());
  if (GTEST_FLAG_GET(print_time)) {
    printf(" (%s ms total)",
           internal::StreamableToString(unit_test.elapsed_time()).c_str());
  }
  printf("\n");
  ColoredPrintf(GTestColor::kGreen, "[  PASSED  ] ");
  printf("%s.\n", FormatTestCount(unit_test.successful_test_count()).c_str());
| 3630 | |
| 3631 | const int skipped_test_count = unit_test.skipped_test_count(); |
| 3632 | if (skipped_test_count > 0) { |
    ColoredPrintf(GTestColor::kGreen, "[  SKIPPED ] ");
    printf("%s, listed below:\n", FormatTestCount(skipped_test_count).c_str());
| 3635 | PrintSkippedTests(unit_test); |
| 3636 | } |
| 3637 | |
| 3638 | if (!unit_test.Passed()) { |
| 3639 | PrintFailedTests(unit_test); |
| 3640 | PrintFailedTestSuites(unit_test); |
| 3641 | } |
| 3642 | |
| 3643 | int num_disabled = unit_test.reportable_disabled_test_count(); |
| 3644 | if (num_disabled && !GTEST_FLAG_GET(also_run_disabled_tests)) { |
| 3645 | if (unit_test.Passed()) { |
| 3646 | printf(format: "\n" ); // Add a spacer if no FAILURE banner is displayed. |
| 3647 | } |
    ColoredPrintf(GTestColor::kYellow, "  YOU HAVE %d DISABLED %s\n\n",
                  num_disabled, num_disabled == 1 ? "TEST" : "TESTS");
| 3650 | } |
| 3651 | // Ensure that Google Test output is printed before, e.g., heapchecker output. |
| 3652 | fflush(stdout); |
| 3653 | } |
| 3654 | |
| 3655 | // End PrettyUnitTestResultPrinter |
| 3656 | |
| 3657 | // This class implements the TestEventListener interface. |
| 3658 | // |
| 3659 | // Class BriefUnitTestResultPrinter is copyable. |
| 3660 | class BriefUnitTestResultPrinter : public TestEventListener { |
| 3661 | public: |
| 3662 | BriefUnitTestResultPrinter() = default; |
| 3663 | static void PrintTestName(const char* test_suite, const char* test) { |
| 3664 | printf(format: "%s.%s" , test_suite, test); |
| 3665 | } |
| 3666 | |
| 3667 | // The following methods override what's in the TestEventListener class. |
| 3668 | void OnTestProgramStart(const UnitTest& /*unit_test*/) override {} |
| 3669 | void OnTestIterationStart(const UnitTest& /*unit_test*/, |
| 3670 | int /*iteration*/) override {} |
| 3671 | void OnEnvironmentsSetUpStart(const UnitTest& /*unit_test*/) override {} |
| 3672 | void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) override {} |
| 3673 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3674 | void OnTestCaseStart(const TestCase& /*test_case*/) override {} |
| 3675 | #else |
| 3676 | void OnTestSuiteStart(const TestSuite& /*test_suite*/) override {} |
| 3677 | #endif // OnTestCaseStart |
| 3678 | |
| 3679 | void OnTestStart(const TestInfo& /*test_info*/) override {} |
| 3680 | void OnTestDisabled(const TestInfo& /*test_info*/) override {} |
| 3681 | |
| 3682 | void OnTestPartResult(const TestPartResult& result) override; |
| 3683 | void OnTestEnd(const TestInfo& test_info) override; |
| 3684 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3685 | void OnTestCaseEnd(const TestCase& /*test_case*/) override {} |
| 3686 | #else |
| 3687 | void OnTestSuiteEnd(const TestSuite& /*test_suite*/) override {} |
| 3688 | #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3689 | |
| 3690 | void OnEnvironmentsTearDownStart(const UnitTest& /*unit_test*/) override {} |
| 3691 | void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) override {} |
| 3692 | void OnTestIterationEnd(const UnitTest& unit_test, int iteration) override; |
| 3693 | void OnTestProgramEnd(const UnitTest& /*unit_test*/) override {} |
| 3694 | }; |
| 3695 | |
| 3696 | // Called after an assertion failure. |
| 3697 | void BriefUnitTestResultPrinter::OnTestPartResult( |
| 3698 | const TestPartResult& result) { |
| 3699 | switch (result.type()) { |
| 3700 | // If the test part succeeded, we don't need to do anything. |
| 3701 | case TestPartResult::kSuccess: |
| 3702 | return; |
| 3703 | default: |
| 3704 | // Print failure message from the assertion |
| 3705 | // (e.g. expected this and got that). |
      PrintTestPartResult(result);
| 3707 | fflush(stdout); |
| 3708 | } |
| 3709 | } |
| 3710 | |
| 3711 | void BriefUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) { |
  if (test_info.result()->Failed()) {
    ColoredPrintf(GTestColor::kRed, "[  FAILED  ] ");
    PrintTestName(test_info.test_suite_name(), test_info.name());
    PrintFullTestCommentIfPresent(test_info);

    if (GTEST_FLAG_GET(print_time)) {
      printf(" (%s ms)\n",
             internal::StreamableToString(test_info.result()->elapsed_time())
                 .c_str());
    } else {
      printf("\n");
| 3723 | } |
| 3724 | fflush(stdout); |
| 3725 | } |
| 3726 | } |
| 3727 | |
| 3728 | void BriefUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test, |
| 3729 | int /*iteration*/) { |
  ColoredPrintf(GTestColor::kGreen, "[==========] ");
  printf("%s from %s ran.",
         FormatTestCount(unit_test.test_to_run_count()).c_str(),
         FormatTestSuiteCount(unit_test.test_suite_to_run_count()).c_str());
  if (GTEST_FLAG_GET(print_time)) {
    printf(" (%s ms total)",
           internal::StreamableToString(unit_test.elapsed_time()).c_str());
  }
  printf("\n");
  ColoredPrintf(GTestColor::kGreen, "[  PASSED  ] ");
  printf("%s.\n", FormatTestCount(unit_test.successful_test_count()).c_str());
| 3741 | |
| 3742 | const int skipped_test_count = unit_test.skipped_test_count(); |
| 3743 | if (skipped_test_count > 0) { |
    ColoredPrintf(GTestColor::kGreen, "[  SKIPPED ] ");
    printf("%s.\n", FormatTestCount(skipped_test_count).c_str());
| 3746 | } |
| 3747 | |
| 3748 | int num_disabled = unit_test.reportable_disabled_test_count(); |
| 3749 | if (num_disabled && !GTEST_FLAG_GET(also_run_disabled_tests)) { |
| 3750 | if (unit_test.Passed()) { |
| 3751 | printf(format: "\n" ); // Add a spacer if no FAILURE banner is displayed. |
| 3752 | } |
    ColoredPrintf(GTestColor::kYellow, "  YOU HAVE %d DISABLED %s\n\n",
                  num_disabled, num_disabled == 1 ? "TEST" : "TESTS");
| 3755 | } |
| 3756 | // Ensure that Google Test output is printed before, e.g., heapchecker output. |
| 3757 | fflush(stdout); |
| 3758 | } |
| 3759 | |
| 3760 | // End BriefUnitTestResultPrinter |
| 3761 | |
| 3762 | // class TestEventRepeater |
| 3763 | // |
| 3764 | // This class forwards events to other event listeners. |
| 3765 | class TestEventRepeater : public TestEventListener { |
| 3766 | public: |
| 3767 | TestEventRepeater() : forwarding_enabled_(true) {} |
| 3768 | ~TestEventRepeater() override; |
| 3769 | void Append(TestEventListener* listener); |
| 3770 | TestEventListener* Release(TestEventListener* listener); |
| 3771 | |
| 3772 | // Controls whether events will be forwarded to listeners_. Set to false |
| 3773 | // in death test child processes. |
| 3774 | bool forwarding_enabled() const { return forwarding_enabled_; } |
| 3775 | void set_forwarding_enabled(bool enable) { forwarding_enabled_ = enable; } |
| 3776 | |
| 3777 | void OnTestProgramStart(const UnitTest& parameter) override; |
| 3778 | void OnTestIterationStart(const UnitTest& unit_test, int iteration) override; |
| 3779 | void OnEnvironmentsSetUpStart(const UnitTest& parameter) override; |
| 3780 | void OnEnvironmentsSetUpEnd(const UnitTest& parameter) override; |
| 3781 | // Legacy API is deprecated but still available |
| 3782 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3783 | void OnTestCaseStart(const TestSuite& parameter) override; |
| 3784 | #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3785 | void OnTestSuiteStart(const TestSuite& parameter) override; |
| 3786 | void OnTestStart(const TestInfo& parameter) override; |
| 3787 | void OnTestDisabled(const TestInfo& parameter) override; |
| 3788 | void OnTestPartResult(const TestPartResult& parameter) override; |
| 3789 | void OnTestEnd(const TestInfo& parameter) override; |
| 3790 | // Legacy API is deprecated but still available |
| 3791 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3792 | void OnTestCaseEnd(const TestCase& parameter) override; |
| 3793 | #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3794 | void OnTestSuiteEnd(const TestSuite& parameter) override; |
| 3795 | void OnEnvironmentsTearDownStart(const UnitTest& parameter) override; |
| 3796 | void OnEnvironmentsTearDownEnd(const UnitTest& parameter) override; |
| 3797 | void OnTestIterationEnd(const UnitTest& unit_test, int iteration) override; |
| 3798 | void OnTestProgramEnd(const UnitTest& parameter) override; |
| 3799 | |
| 3800 | private: |
| 3801 | // Controls whether events will be forwarded to listeners_. Set to false |
| 3802 | // in death test child processes. |
| 3803 | bool forwarding_enabled_; |
| 3804 | // The list of listeners that receive events. |
| 3805 | std::vector<TestEventListener*> listeners_; |
| 3806 | |
| 3807 | TestEventRepeater(const TestEventRepeater&) = delete; |
| 3808 | TestEventRepeater& operator=(const TestEventRepeater&) = delete; |
| 3809 | }; |
| 3810 | |
| 3811 | TestEventRepeater::~TestEventRepeater() { |
| 3812 | ForEach(listeners_, Delete<TestEventListener>);
| 3813 | } |
| 3814 | |
| 3815 | void TestEventRepeater::Append(TestEventListener* listener) { |
| 3816 | listeners_.push_back(listener);
| 3817 | } |
| 3818 | |
| 3819 | TestEventListener* TestEventRepeater::Release(TestEventListener* listener) { |
| 3820 | for (size_t i = 0; i < listeners_.size(); ++i) { |
| 3821 | if (listeners_[i] == listener) { |
| 3822 | listeners_.erase(listeners_.begin() + static_cast<int>(i));
| 3823 | return listener; |
| 3824 | } |
| 3825 | } |
| 3826 | |
| 3827 | return nullptr; |
| 3828 | } |
| 3829 | |
| 3830 | // Since most methods are very similar, use macros to reduce boilerplate. |
| 3831 | // This defines a member that forwards the call to all listeners. |
| 3832 | #define GTEST_REPEATER_METHOD_(Name, Type) \ |
| 3833 | void TestEventRepeater::Name(const Type& parameter) { \ |
| 3834 | if (forwarding_enabled_) { \ |
| 3835 | for (size_t i = 0; i < listeners_.size(); i++) { \ |
| 3836 | listeners_[i]->Name(parameter); \ |
| 3837 | } \ |
| 3838 | } \ |
| 3839 | } |
| 3840 | // This defines a member that forwards the call to all listeners in reverse |
| 3841 | // order. |
| 3842 | #define GTEST_REVERSE_REPEATER_METHOD_(Name, Type) \ |
| 3843 | void TestEventRepeater::Name(const Type& parameter) { \ |
| 3844 | if (forwarding_enabled_) { \ |
| 3845 | for (size_t i = listeners_.size(); i != 0; i--) { \ |
| 3846 | listeners_[i - 1]->Name(parameter); \ |
| 3847 | } \ |
| 3848 | } \ |
| 3849 | } |
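// For instance, GTEST_REPEATER_METHOD_(OnTestStart, TestInfo) expands to a
// member that forwards OnTestStart to every registered listener in order:
//
//   void TestEventRepeater::OnTestStart(const TestInfo& parameter) {
//     if (forwarding_enabled_) {
//       for (size_t i = 0; i < listeners_.size(); i++) {
//         listeners_[i]->OnTestStart(parameter);
//       }
//     }
//   }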
| 3850 | |
| 3851 | GTEST_REPEATER_METHOD_(OnTestProgramStart, UnitTest) |
| 3852 | GTEST_REPEATER_METHOD_(OnEnvironmentsSetUpStart, UnitTest) |
| 3853 | // Legacy API is deprecated but still available |
| 3854 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3855 | GTEST_REPEATER_METHOD_(OnTestCaseStart, TestSuite) |
| 3856 | #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3857 | GTEST_REPEATER_METHOD_(OnTestSuiteStart, TestSuite) |
| 3858 | GTEST_REPEATER_METHOD_(OnTestStart, TestInfo) |
| 3859 | GTEST_REPEATER_METHOD_(OnTestDisabled, TestInfo) |
| 3860 | GTEST_REPEATER_METHOD_(OnTestPartResult, TestPartResult) |
| 3861 | GTEST_REPEATER_METHOD_(OnEnvironmentsTearDownStart, UnitTest) |
| 3862 | GTEST_REVERSE_REPEATER_METHOD_(OnEnvironmentsSetUpEnd, UnitTest) |
| 3863 | GTEST_REVERSE_REPEATER_METHOD_(OnEnvironmentsTearDownEnd, UnitTest) |
| 3864 | GTEST_REVERSE_REPEATER_METHOD_(OnTestEnd, TestInfo) |
| 3865 | // Legacy API is deprecated but still available |
| 3866 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3867 | GTEST_REVERSE_REPEATER_METHOD_(OnTestCaseEnd, TestSuite) |
| 3868 | #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 3869 | GTEST_REVERSE_REPEATER_METHOD_(OnTestSuiteEnd, TestSuite) |
| 3870 | GTEST_REVERSE_REPEATER_METHOD_(OnTestProgramEnd, UnitTest) |
| 3871 | |
| 3872 | #undef GTEST_REPEATER_METHOD_ |
| 3873 | #undef GTEST_REVERSE_REPEATER_METHOD_ |
| 3874 | |
| 3875 | void TestEventRepeater::OnTestIterationStart(const UnitTest& unit_test, |
| 3876 | int iteration) { |
| 3877 | if (forwarding_enabled_) { |
| 3878 | for (size_t i = 0; i < listeners_.size(); i++) { |
| 3879 | listeners_[i]->OnTestIterationStart(unit_test, iteration); |
| 3880 | } |
| 3881 | } |
| 3882 | } |
| 3883 | |
| 3884 | void TestEventRepeater::OnTestIterationEnd(const UnitTest& unit_test, |
| 3885 | int iteration) { |
| 3886 | if (forwarding_enabled_) { |
| 3887 | for (size_t i = listeners_.size(); i > 0; i--) { |
| 3888 | listeners_[i - 1]->OnTestIterationEnd(unit_test, iteration); |
| 3889 | } |
| 3890 | } |
| 3891 | } |
| 3892 | |
| 3893 | // End TestEventRepeater |
| 3894 | |
| 3895 | #if GTEST_HAS_FILE_SYSTEM |
| 3896 | // This class generates an XML output file. |
| 3897 | class XmlUnitTestResultPrinter : public EmptyTestEventListener { |
| 3898 | public: |
| 3899 | explicit XmlUnitTestResultPrinter(const char* output_file); |
| 3900 | |
| 3901 | void OnTestIterationEnd(const UnitTest& unit_test, int iteration) override; |
| 3902 | void ListTestsMatchingFilter(const std::vector<TestSuite*>& test_suites); |
| 3903 | |
| 3904 | // Prints an XML summary of all unit tests. |
| 3905 | static void PrintXmlTestsList(std::ostream* stream, |
| 3906 | const std::vector<TestSuite*>& test_suites); |
| 3907 | |
| 3908 | private: |
| 3909 | // Is c a whitespace character that is normalized to a space character |
| 3910 | // when it appears in an XML attribute value? |
| 3911 | static bool IsNormalizableWhitespace(unsigned char c) { |
| 3912 | return c == '\t' || c == '\n' || c == '\r'; |
| 3913 | } |
| 3914 | |
| 3915 | // May c appear in a well-formed XML document? |
| 3916 | // https://www.w3.org/TR/REC-xml/#charsets |
| 3917 | static bool IsValidXmlCharacter(unsigned char c) { |
| 3918 | return IsNormalizableWhitespace(c) || c >= 0x20; |
| 3919 | } |
| 3920 | |
| 3921 | // Returns an XML-escaped copy of the input string str. If |
| 3922 | // is_attribute is true, the text is meant to appear as an attribute |
| 3923 | // value, and normalizable whitespace is preserved by replacing it |
| 3924 | // with character references. |
| 3925 | static std::string EscapeXml(const std::string& str, bool is_attribute); |
| 3926 | |
| 3927 | // Returns the given string with all characters invalid in XML removed. |
| 3928 | static std::string RemoveInvalidXmlCharacters(const std::string& str); |
| 3929 | |
| 3930 | // Convenience wrapper around EscapeXml when str is an attribute value. |
| 3931 | static std::string EscapeXmlAttribute(const std::string& str) { |
| 3932 | return EscapeXml(str, true);
| 3933 | } |
| 3934 | |
| 3935 | // Convenience wrapper around EscapeXml when str is not an attribute value. |
| 3936 | static std::string EscapeXmlText(const char* str) { |
| 3937 | return EscapeXml(str, false);
| 3938 | } |
| 3939 | |
| 3940 | // Verifies that the given attribute belongs to the given element and |
| 3941 | // streams the attribute as XML. |
| 3942 | static void OutputXmlAttribute(std::ostream* stream, |
| 3943 | const std::string& element_name, |
| 3944 | const std::string& name, |
| 3945 | const std::string& value); |
| 3946 | |
| 3947 | // Streams an XML CDATA section, escaping invalid CDATA sequences as needed. |
| 3948 | static void OutputXmlCDataSection(::std::ostream* stream, const char* data); |
| 3949 | |
| 3950 | // Streams a test suite XML stanza containing the given test result. |
| 3951 | // |
| 3952 | // Requires: result.Failed() |
| 3953 | static void OutputXmlTestSuiteForTestResult(::std::ostream* stream, |
| 3954 | const TestResult& result); |
| 3955 | |
| 3956 | // Streams an XML representation of a TestResult object. |
| 3957 | static void OutputXmlTestResult(::std::ostream* stream, |
| 3958 | const TestResult& result); |
| 3959 | |
| 3960 | // Streams an XML representation of a TestInfo object. |
| 3961 | static void OutputXmlTestInfo(::std::ostream* stream, |
| 3962 | const char* test_suite_name, |
| 3963 | const TestInfo& test_info); |
| 3964 | |
| 3965 | // Prints an XML representation of a TestSuite object |
| 3966 | static void PrintXmlTestSuite(::std::ostream* stream, |
| 3967 | const TestSuite& test_suite); |
| 3968 | |
| 3969 | // Prints an XML summary of unit_test to output stream out. |
| 3970 | static void PrintXmlUnitTest(::std::ostream* stream, |
| 3971 | const UnitTest& unit_test); |
| 3972 | |
| 3973 | // Produces a string representing the test properties in a result as space |
| 3974 | // delimited XML attributes based on the property key="value" pairs. |
| 3975 | // When the std::string is not empty, it includes a space at the beginning, |
| 3976 | // to delimit this attribute from prior attributes. |
| 3977 | static std::string TestPropertiesAsXmlAttributes(const TestResult& result); |
| 3978 | |
| 3979 | // Streams an XML representation of the test properties of a TestResult |
| 3980 | // object. |
| 3981 | static void OutputXmlTestProperties(std::ostream* stream, |
| 3982 | const TestResult& result); |
| 3983 | |
| 3984 | // The output file. |
| 3985 | const std::string output_file_; |
| 3986 | |
| 3987 | XmlUnitTestResultPrinter(const XmlUnitTestResultPrinter&) = delete; |
| 3988 | XmlUnitTestResultPrinter& operator=(const XmlUnitTestResultPrinter&) = delete; |
| 3989 | }; |
| 3990 | |
| 3991 | // Creates a new XmlUnitTestResultPrinter. |
| 3992 | XmlUnitTestResultPrinter::XmlUnitTestResultPrinter(const char* output_file) |
| 3993 | : output_file_(output_file) { |
| 3994 | if (output_file_.empty()) { |
| 3995 | GTEST_LOG_(FATAL) << "XML output file may not be null";
| 3996 | } |
| 3997 | } |
| 3998 | |
| 3999 | // Called after the unit test ends. |
| 4000 | void XmlUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test, |
| 4001 | int /*iteration*/) { |
| 4002 | FILE* xmlout = OpenFileForWriting(output_file_);
| 4003 | std::stringstream stream;
| 4004 | PrintXmlUnitTest(&stream, unit_test);
| 4005 | fprintf(xmlout, "%s", StringStreamToString(&stream).c_str());
| 4006 | fclose(xmlout);
| 4007 | } |
| 4008 | |
| 4009 | void XmlUnitTestResultPrinter::ListTestsMatchingFilter( |
| 4010 | const std::vector<TestSuite*>& test_suites) { |
| 4011 | FILE* xmlout = OpenFileForWriting(output_file_);
| 4012 | std::stringstream stream;
| 4013 | PrintXmlTestsList(&stream, test_suites);
| 4014 | fprintf(xmlout, "%s", StringStreamToString(&stream).c_str());
| 4015 | fclose(xmlout);
| 4016 | } |
| 4017 | |
| 4018 | // Returns an XML-escaped copy of the input string str. If is_attribute |
| 4019 | // is true, the text is meant to appear as an attribute value, and |
| 4020 | // normalizable whitespace is preserved by replacing it with character |
| 4021 | // references. |
| 4022 | // |
| 4023 | // Invalid XML characters in str, if any, are stripped from the output. |
| 4024 | // It is expected that most, if not all, of the text processed by this |
| 4025 | // module will consist of ordinary English text. |
| 4026 | // If this module is ever modified to produce version 1.1 XML output, |
| 4027 | // most invalid characters can be retained using character references. |
| 4028 | std::string XmlUnitTestResultPrinter::EscapeXml(const std::string& str, |
| 4029 | bool is_attribute) { |
| 4030 | Message m; |
| 4031 | |
| 4032 | for (size_t i = 0; i < str.size(); ++i) { |
| 4033 | const char ch = str[i]; |
| 4034 | switch (ch) { |
| 4035 | case '<': |
| 4036 | m << "&lt;";
| 4037 | break;
| 4038 | case '>':
| 4039 | m << "&gt;";
| 4040 | break;
| 4041 | case '&':
| 4042 | m << "&amp;";
| 4043 | break;
| 4044 | case '\'':
| 4045 | if (is_attribute)
| 4046 | m << "&apos;";
| 4047 | else
| 4048 | m << '\'';
| 4049 | break;
| 4050 | case '"':
| 4051 | if (is_attribute)
| 4052 | m << "&quot;";
| 4053 | else
| 4054 | m << '"';
| 4055 | break; |
| 4056 | default: |
| 4057 | if (IsValidXmlCharacter(static_cast<unsigned char>(ch))) {
| 4058 | if (is_attribute &&
| 4059 | IsNormalizableWhitespace(static_cast<unsigned char>(ch)))
| 4060 | m << "&#x" << String::FormatByte(static_cast<unsigned char>(ch))
| 4061 | << ";";
| 4062 | else |
| 4063 | m << ch; |
| 4064 | } |
| 4065 | break; |
| 4066 | } |
| 4067 | } |
| 4068 | |
| 4069 | return m.GetString(); |
| 4070 | } |
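// For example, EscapeXml("3 < 5 & \"x\"", true) yields
// "3 &lt; 5 &amp; &quot;x&quot;"; with is_attribute == false the double
// quote and single quote are passed through unchanged.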
| 4071 | |
| 4072 | // Returns the given string with all characters invalid in XML removed. |
| 4073 | // Currently invalid characters are dropped from the string. An |
| 4074 | // alternative is to replace them with certain characters such as . or ?. |
| 4075 | std::string XmlUnitTestResultPrinter::RemoveInvalidXmlCharacters( |
| 4076 | const std::string& str) { |
| 4077 | std::string output; |
| 4078 | output.reserve(str.size());
| 4079 | for (std::string::const_iterator it = str.begin(); it != str.end(); ++it)
| 4080 | if (IsValidXmlCharacter(static_cast<unsigned char>(*it)))
| 4081 | output.push_back(*it);
| 4082 | |
| 4083 | return output; |
| 4084 | } |
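// For example, an embedded control character such as '\x07' is dropped,
// while tab, CR, and LF are kept, since they are valid XML characters.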
| 4085 | |
| 4086 | // The following routines generate an XML representation of a UnitTest |
| 4087 | // object. |
| 4088 | // |
| 4089 | // This is how Google Test concepts map to the DTD: |
| 4090 | // |
| 4091 | // <testsuites name="AllTests"> <-- corresponds to a UnitTest object |
| 4092 | // <testsuite name="testcase-name"> <-- corresponds to a TestSuite object |
| 4093 | // <testcase name="test-name"> <-- corresponds to a TestInfo object |
| 4094 | // <failure message="...">...</failure> |
| 4095 | // <failure message="...">...</failure> |
| 4096 | // <failure message="...">...</failure> |
| 4097 | // <-- individual assertion failures |
| 4098 | // </testcase> |
| 4099 | // </testsuite> |
| 4100 | // </testsuites> |
| 4101 | |
| 4102 | // Formats the given time in milliseconds as seconds. |
| 4103 | std::string FormatTimeInMillisAsSeconds(TimeInMillis ms) { |
| 4104 | ::std::stringstream ss; |
| 4105 | // For the exact N seconds, makes sure output has a trailing decimal point. |
| 4106 | // Sets precision so that we won't have many trailing zeros (e.g., 300 ms |
| 4107 | // will be just 0.3, 410 ms 0.41, and so on) |
| 4108 | ss << std::fixed |
| 4109 | << std::setprecision( |
| 4110 | ms % 1000 == 0 ? 0 : (ms % 100 == 0 ? 1 : (ms % 10 == 0 ? 2 : 3))) |
| 4111 | << std::showpoint; |
| 4112 | ss << (static_cast<double>(ms) * 1e-3); |
| 4113 | return ss.str(); |
| 4114 | } |
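// For example, 3000 ms is formatted as "3." (note the trailing decimal
// point) and 1234 ms as "1.234".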
| 4115 | |
| 4116 | static bool PortableLocaltime(time_t seconds, struct tm* out) { |
| 4117 | #if defined(_MSC_VER) |
| 4118 | return localtime_s(out, &seconds) == 0; |
| 4119 | #elif defined(__MINGW32__) || defined(__MINGW64__) |
| 4120 | // MINGW <time.h> provides neither localtime_r nor localtime_s, but uses |
| 4121 | // Windows' localtime(), which has a thread-local tm buffer. |
| 4122 | struct tm* tm_ptr = localtime(&seconds); // NOLINT |
| 4123 | if (tm_ptr == nullptr) return false; |
| 4124 | *out = *tm_ptr; |
| 4125 | return true; |
| 4126 | #elif defined(__STDC_LIB_EXT1__) |
| 4127 | // Uses localtime_s when available as localtime_r is only available from |
| 4128 | // C23 standard. |
| 4129 | return localtime_s(&seconds, out) != nullptr; |
| 4130 | #else |
| 4131 | return localtime_r(&seconds, out) != nullptr;
| 4132 | #endif |
| 4133 | } |
| 4134 | |
| 4135 | // Converts the given epoch time in milliseconds to a date string in the ISO |
| 4136 | // 8601 format, without the timezone information. |
| 4137 | std::string FormatEpochTimeInMillisAsIso8601(TimeInMillis ms) { |
| 4138 | struct tm time_struct; |
| 4139 | if (!PortableLocaltime(static_cast<time_t>(ms / 1000), &time_struct))
| 4140 | return "";
| 4141 | // YYYY-MM-DDThh:mm:ss.sss |
| 4142 | return StreamableToString(time_struct.tm_year + 1900) + "-" +
| 4143 | String::FormatIntWidth2(time_struct.tm_mon + 1) + "-" +
| 4144 | String::FormatIntWidth2(time_struct.tm_mday) + "T" +
| 4145 | String::FormatIntWidth2(time_struct.tm_hour) + ":" +
| 4146 | String::FormatIntWidth2(time_struct.tm_min) + ":" +
| 4147 | String::FormatIntWidth2(time_struct.tm_sec) + "." +
| 4148 | String::FormatIntWidthN(static_cast<int>(ms % 1000), 3);
| 4149 | } |
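// The result looks like "2011-10-31T18:52:42.000"; the struct tm comes from
// PortableLocaltime(), so the date is rendered in the local time zone.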
| 4150 | |
| 4151 | // Streams an XML CDATA section, escaping invalid CDATA sequences as needed. |
| 4152 | void XmlUnitTestResultPrinter::OutputXmlCDataSection(::std::ostream* stream, |
| 4153 | const char* data) { |
| 4154 | const char* segment = data; |
| 4155 | *stream << "<![CDATA[";
| 4156 | for (;;) {
| 4157 | const char* const next_segment = strstr(segment, "]]>");
| 4158 | if (next_segment != nullptr) {
| 4159 | stream->write(segment,
| 4160 | static_cast<std::streamsize>(next_segment - segment));
| 4161 | *stream << "]]>]]&gt;<![CDATA[";
| 4162 | segment = next_segment + strlen("]]>");
| 4163 | } else {
| 4164 | *stream << segment;
| 4165 | break;
| 4166 | }
| 4167 | }
| 4168 | *stream << "]]>";
| 4169 | } |
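// For example, the data "x]]>y" is emitted as
// "<![CDATA[x]]>]]&gt;<![CDATA[y]]>": the CDATA section is closed just before
// the "]]>" in the data, that "]]>" is written as escaped character data, and
// a new CDATA section is opened for the remainder.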
| 4170 | |
| 4171 | void XmlUnitTestResultPrinter::OutputXmlAttribute( |
| 4172 | std::ostream* stream, const std::string& element_name, |
| 4173 | const std::string& name, const std::string& value) { |
| 4174 | const std::vector<std::string>& allowed_names = |
| 4175 | GetReservedOutputAttributesForElement(element_name);
| 4176 | |
| 4177 | GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) != |
| 4178 | allowed_names.end()) |
| 4179 | << "Attribute " << name << " is not allowed for element <" << element_name |
| 4180 | << ">.";
| 4181 | |
| 4182 | *stream << " " << name << "=\"" << EscapeXmlAttribute(value) << "\"";
| 4183 | } |
| 4184 | |
| 4185 | // Streams a test suite XML stanza containing the given test result. |
| 4186 | void XmlUnitTestResultPrinter::OutputXmlTestSuiteForTestResult( |
| 4187 | ::std::ostream* stream, const TestResult& result) { |
| 4188 | // Output the boilerplate for a minimal test suite with one test. |
| 4189 | *stream << " <testsuite";
| 4190 | OutputXmlAttribute(stream, "testsuite", "name", "NonTestSuiteFailure");
| 4191 | OutputXmlAttribute(stream, "testsuite", "tests", "1");
| 4192 | OutputXmlAttribute(stream, "testsuite", "failures", "1");
| 4193 | OutputXmlAttribute(stream, "testsuite", "disabled", "0");
| 4194 | OutputXmlAttribute(stream, "testsuite", "skipped", "0");
| 4195 | OutputXmlAttribute(stream, "testsuite", "errors", "0");
| 4196 | OutputXmlAttribute(stream, "testsuite", "time",
| 4197 | FormatTimeInMillisAsSeconds(result.elapsed_time()));
| 4198 | OutputXmlAttribute(
| 4199 | stream, "testsuite", "timestamp",
| 4200 | FormatEpochTimeInMillisAsIso8601(result.start_timestamp()));
| 4201 | *stream << ">";
| 4202 |
| 4203 | // Output the boilerplate for a minimal test case with a single test.
| 4204 | *stream << " <testcase";
| 4205 | OutputXmlAttribute(stream, "testcase", "name", "");
| 4206 | OutputXmlAttribute(stream, "testcase", "status", "run");
| 4207 | OutputXmlAttribute(stream, "testcase", "result", "completed");
| 4208 | OutputXmlAttribute(stream, "testcase", "classname", "");
| 4209 | OutputXmlAttribute(stream, "testcase", "time",
| 4210 | FormatTimeInMillisAsSeconds(result.elapsed_time()));
| 4211 | OutputXmlAttribute(
| 4212 | stream, "testcase", "timestamp",
| 4213 | FormatEpochTimeInMillisAsIso8601(result.start_timestamp()));
| 4214 | |
| 4215 | // Output the actual test result. |
| 4216 | OutputXmlTestResult(stream, result); |
| 4217 | |
| 4218 | // Complete the test suite. |
| 4219 | *stream << " </testsuite>\n";
| 4220 | } |
| 4221 | |
| 4222 | // Prints an XML representation of a TestInfo object. |
| 4223 | void XmlUnitTestResultPrinter::OutputXmlTestInfo(::std::ostream* stream, |
| 4224 | const char* test_suite_name, |
| 4225 | const TestInfo& test_info) { |
| 4226 | const TestResult& result = *test_info.result(); |
| 4227 | const std::string kTestsuite = "testcase";
| 4228 | |
| 4229 | if (test_info.is_in_another_shard()) { |
| 4230 | return; |
| 4231 | } |
| 4232 | |
| 4233 | *stream << " <testcase";
| 4234 | OutputXmlAttribute(stream, kTestsuite, "name", test_info.name());
| 4235 |
| 4236 | if (test_info.value_param() != nullptr) {
| 4237 | OutputXmlAttribute(stream, kTestsuite, "value_param",
| 4238 | test_info.value_param());
| 4239 | }
| 4240 | if (test_info.type_param() != nullptr) {
| 4241 | OutputXmlAttribute(stream, kTestsuite, "type_param",
| 4242 | test_info.type_param());
| 4243 | }
| 4244 |
| 4245 | OutputXmlAttribute(stream, kTestsuite, "file", test_info.file());
| 4246 | OutputXmlAttribute(stream, kTestsuite, "line",
| 4247 | StreamableToString(test_info.line()));
| 4248 | if (GTEST_FLAG_GET(list_tests)) {
| 4249 | *stream << " />\n";
| 4250 | return;
| 4251 | }
| 4252 |
| 4253 | OutputXmlAttribute(stream, kTestsuite, "status",
| 4254 | test_info.should_run() ? "run" : "notrun");
| 4255 | OutputXmlAttribute(stream, kTestsuite, "result",
| 4256 | test_info.should_run()
| 4257 | ? (result.Skipped() ? "skipped" : "completed")
| 4258 | : "suppressed");
| 4259 | OutputXmlAttribute(stream, kTestsuite, "time",
| 4260 | FormatTimeInMillisAsSeconds(result.elapsed_time()));
| 4261 | OutputXmlAttribute(
| 4262 | stream, kTestsuite, "timestamp",
| 4263 | FormatEpochTimeInMillisAsIso8601(result.start_timestamp()));
| 4264 | OutputXmlAttribute(stream, kTestsuite, "classname", test_suite_name);
| 4265 | |
| 4266 | OutputXmlTestResult(stream, result); |
| 4267 | } |
| 4268 | |
| 4269 | void XmlUnitTestResultPrinter::OutputXmlTestResult(::std::ostream* stream, |
| 4270 | const TestResult& result) { |
| 4271 | int failures = 0; |
| 4272 | int skips = 0; |
| 4273 | for (int i = 0; i < result.total_part_count(); ++i) { |
| 4274 | const TestPartResult& part = result.GetTestPartResult(i); |
| 4275 | if (part.failed()) { |
| 4276 | if (++failures == 1 && skips == 0) { |
| 4277 | *stream << ">\n";
| 4278 | }
| 4279 | const std::string location =
| 4280 | internal::FormatCompilerIndependentFileLocation(part.file_name(),
| 4281 | part.line_number());
| 4282 | const std::string summary = location + "\n" + part.summary();
| 4283 | *stream << " <failure message=\"" << EscapeXmlAttribute(summary)
| 4284 | << "\" type=\"\">";
| 4285 | const std::string detail = location + "\n" + part.message();
| 4286 | OutputXmlCDataSection(stream, RemoveInvalidXmlCharacters(detail).c_str());
| 4287 | *stream << "</failure>\n";
| 4288 | } else if (part.skipped()) {
| 4289 | if (++skips == 1 && failures == 0) {
| 4290 | *stream << ">\n";
| 4291 | }
| 4292 | const std::string location =
| 4293 | internal::FormatCompilerIndependentFileLocation(part.file_name(),
| 4294 | part.line_number());
| 4295 | const std::string summary = location + "\n" + part.summary();
| 4296 | *stream << " <skipped message=\""
| 4297 | << EscapeXmlAttribute(summary.c_str()) << "\">";
| 4298 | const std::string detail = location + "\n" + part.message();
| 4299 | OutputXmlCDataSection(stream, RemoveInvalidXmlCharacters(detail).c_str());
| 4300 | *stream << "</skipped>\n";
| 4301 | }
| 4302 | }
| 4303 |
| 4304 | if (failures == 0 && skips == 0 && result.test_property_count() == 0) {
| 4305 | *stream << " />\n";
| 4306 | } else {
| 4307 | if (failures == 0 && skips == 0) {
| 4308 | *stream << ">\n";
| 4309 | }
| 4310 | OutputXmlTestProperties(stream, result);
| 4311 | *stream << " </testcase>\n";
| 4312 | } |
| 4313 | } |
| 4314 | |
| 4315 | // Prints an XML representation of a TestSuite object |
| 4316 | void XmlUnitTestResultPrinter::PrintXmlTestSuite(std::ostream* stream, |
| 4317 | const TestSuite& test_suite) { |
| 4318 | const std::string kTestsuite = "testsuite";
| 4319 | *stream << " <" << kTestsuite;
| 4320 | OutputXmlAttribute(stream, kTestsuite, "name", test_suite.name());
| 4321 | OutputXmlAttribute(stream, kTestsuite, "tests",
| 4322 | StreamableToString(test_suite.reportable_test_count()));
| 4323 | if (!GTEST_FLAG_GET(list_tests)) {
| 4324 | OutputXmlAttribute(stream, kTestsuite, "failures",
| 4325 | StreamableToString(test_suite.failed_test_count()));
| 4326 | OutputXmlAttribute(
| 4327 | stream, kTestsuite, "disabled",
| 4328 | StreamableToString(test_suite.reportable_disabled_test_count()));
| 4329 | OutputXmlAttribute(stream, kTestsuite, "skipped",
| 4330 | StreamableToString(test_suite.skipped_test_count()));
| 4331 |
| 4332 | OutputXmlAttribute(stream, kTestsuite, "errors", "0");
| 4333 |
| 4334 | OutputXmlAttribute(stream, kTestsuite, "time",
| 4335 | FormatTimeInMillisAsSeconds(test_suite.elapsed_time()));
| 4336 | OutputXmlAttribute(
| 4337 | stream, kTestsuite, "timestamp",
| 4338 | FormatEpochTimeInMillisAsIso8601(test_suite.start_timestamp()));
| 4339 | *stream << TestPropertiesAsXmlAttributes(test_suite.ad_hoc_test_result());
| 4340 | }
| 4341 | *stream << ">\n";
| 4342 | for (int i = 0; i < test_suite.total_test_count(); ++i) {
| 4343 | if (test_suite.GetTestInfo(i)->is_reportable())
| 4344 | OutputXmlTestInfo(stream, test_suite.name(), *test_suite.GetTestInfo(i));
| 4345 | }
| 4346 | *stream << " </" << kTestsuite << ">\n";
| 4347 | } |
| 4348 | |
| 4349 | // Prints an XML summary of unit_test to output stream out. |
| 4350 | void XmlUnitTestResultPrinter::PrintXmlUnitTest(std::ostream* stream, |
| 4351 | const UnitTest& unit_test) { |
| 4352 | const std::string kTestsuites = "testsuites";
| 4353 |
| 4354 | *stream << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
| 4355 | *stream << "<" << kTestsuites;
| 4356 |
| 4357 | OutputXmlAttribute(stream, kTestsuites, "tests",
| 4358 | StreamableToString(unit_test.reportable_test_count()));
| 4359 | OutputXmlAttribute(stream, kTestsuites, "failures",
| 4360 | StreamableToString(unit_test.failed_test_count()));
| 4361 | OutputXmlAttribute(
| 4362 | stream, kTestsuites, "disabled",
| 4363 | StreamableToString(unit_test.reportable_disabled_test_count()));
| 4364 | OutputXmlAttribute(stream, kTestsuites, "errors", "0");
| 4365 | OutputXmlAttribute(stream, kTestsuites, "time",
| 4366 | FormatTimeInMillisAsSeconds(unit_test.elapsed_time()));
| 4367 | OutputXmlAttribute(
| 4368 | stream, kTestsuites, "timestamp",
| 4369 | FormatEpochTimeInMillisAsIso8601(unit_test.start_timestamp()));
| 4370 |
| 4371 | if (GTEST_FLAG_GET(shuffle)) {
| 4372 | OutputXmlAttribute(stream, kTestsuites, "random_seed",
| 4373 | StreamableToString(unit_test.random_seed()));
| 4374 | }
| 4375 | *stream << TestPropertiesAsXmlAttributes(unit_test.ad_hoc_test_result());
| 4376 |
| 4377 | OutputXmlAttribute(stream, kTestsuites, "name", "AllTests");
| 4378 | *stream << ">\n";
| 4379 |
| 4380 | for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
| 4381 | if (unit_test.GetTestSuite(i)->reportable_test_count() > 0)
| 4382 | PrintXmlTestSuite(stream, *unit_test.GetTestSuite(i));
| 4383 | }
| 4384 |
| 4385 | // If there was a test failure outside of one of the test suites (like in a
| 4386 | // test environment) include that in the output.
| 4387 | if (unit_test.ad_hoc_test_result().Failed()) {
| 4388 | OutputXmlTestSuiteForTestResult(stream, unit_test.ad_hoc_test_result());
| 4389 | }
| 4390 |
| 4391 | *stream << "</" << kTestsuites << ">\n";
| 4392 | } |
| 4393 | |
| 4394 | void XmlUnitTestResultPrinter::PrintXmlTestsList( |
| 4395 | std::ostream* stream, const std::vector<TestSuite*>& test_suites) { |
| 4396 | const std::string kTestsuites = "testsuites";
| 4397 |
| 4398 | *stream << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
| 4399 | *stream << "<" << kTestsuites;
| 4400 |
| 4401 | int total_tests = 0;
| 4402 | for (auto test_suite : test_suites) {
| 4403 | total_tests += test_suite->total_test_count();
| 4404 | }
| 4405 | OutputXmlAttribute(stream, kTestsuites, "tests",
| 4406 | StreamableToString(total_tests));
| 4407 | OutputXmlAttribute(stream, kTestsuites, "name", "AllTests");
| 4408 | *stream << ">\n";
| 4409 |
| 4410 | for (auto test_suite : test_suites) {
| 4411 | PrintXmlTestSuite(stream, *test_suite);
| 4412 | }
| 4413 | *stream << "</" << kTestsuites << ">\n";
| 4414 | } |
| 4415 | |
| 4416 | // Produces a string representing the test properties in a result as space |
| 4417 | // delimited XML attributes based on the property key="value" pairs. |
| 4418 | std::string XmlUnitTestResultPrinter::TestPropertiesAsXmlAttributes( |
| 4419 | const TestResult& result) { |
| 4420 | Message attributes; |
| 4421 | for (int i = 0; i < result.test_property_count(); ++i) { |
| 4422 | const TestProperty& property = result.GetTestProperty(i); |
| 4423 | attributes << " " << property.key() << "="
| 4424 | << "\"" << EscapeXmlAttribute(property.value()) << "\"";
| 4425 | } |
| 4426 | return attributes.GetString(); |
| 4427 | } |
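// For example, a single property recorded as ("bug", "1234") produces the
// string " bug=\"1234\"", ready to be appended after the other attributes.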
| 4428 | |
| 4429 | void XmlUnitTestResultPrinter::OutputXmlTestProperties( |
| 4430 | std::ostream* stream, const TestResult& result) { |
| 4431 | const std::string kProperties = "properties";
| 4432 | const std::string kProperty = "property";
| 4433 | |
| 4434 | if (result.test_property_count() <= 0) { |
| 4435 | return; |
| 4436 | } |
| 4437 | |
| 4438 | *stream << " <" << kProperties << ">\n";
| 4439 | for (int i = 0; i < result.test_property_count(); ++i) {
| 4440 | const TestProperty& property = result.GetTestProperty(i);
| 4441 | *stream << " <" << kProperty;
| 4442 | *stream << " name=\"" << EscapeXmlAttribute(property.key()) << "\"";
| 4443 | *stream << " value=\"" << EscapeXmlAttribute(property.value()) << "\"";
| 4444 | *stream << "/>\n";
| 4445 | }
| 4446 | *stream << " </" << kProperties << ">\n";
| 4447 | } |
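// For example, a result carrying the property ("bug", "1234") yields a block
// of the form (indentation elided):
//
//   <properties>
//     <property name="bug" value="1234"/>
//   </properties>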
| 4448 | |
| 4449 | // End XmlUnitTestResultPrinter |
| 4450 | #endif // GTEST_HAS_FILE_SYSTEM |
| 4451 | |
| 4452 | #if GTEST_HAS_FILE_SYSTEM |
| 4453 | // This class generates a JSON output file.
| 4454 | class JsonUnitTestResultPrinter : public EmptyTestEventListener { |
| 4455 | public: |
| 4456 | explicit JsonUnitTestResultPrinter(const char* output_file); |
| 4457 | |
| 4458 | void OnTestIterationEnd(const UnitTest& unit_test, int iteration) override; |
| 4459 | |
| 4460 | // Prints a JSON summary of all unit tests.
| 4461 | static void PrintJsonTestList(::std::ostream* stream, |
| 4462 | const std::vector<TestSuite*>& test_suites); |
| 4463 | |
| 4464 | private: |
| 4465 | // Returns a JSON-escaped copy of the input string str.
| 4466 | static std::string EscapeJson(const std::string& str); |
| 4467 | |
| 4468 | // Verifies that the given attribute belongs to the given element and
| 4469 | // streams the attribute as JSON.
| 4470 | static void OutputJsonKey(std::ostream* stream, |
| 4471 | const std::string& element_name, |
| 4472 | const std::string& name, const std::string& value, |
| 4473 | const std::string& indent, bool comma = true); |
| 4474 | static void OutputJsonKey(std::ostream* stream, |
| 4475 | const std::string& element_name, |
| 4476 | const std::string& name, int value, |
| 4477 | const std::string& indent, bool comma = true); |
| 4478 | |
| 4479 | // Streams a test suite JSON stanza containing the given test result. |
| 4480 | // |
| 4481 | // Requires: result.Failed() |
| 4482 | static void OutputJsonTestSuiteForTestResult(::std::ostream* stream, |
| 4483 | const TestResult& result); |
| 4484 | |
| 4485 | // Streams a JSON representation of a TestResult object. |
| 4486 | static void OutputJsonTestResult(::std::ostream* stream, |
| 4487 | const TestResult& result); |
| 4488 | |
| 4489 | // Streams a JSON representation of a TestInfo object. |
| 4490 | static void OutputJsonTestInfo(::std::ostream* stream, |
| 4491 | const char* test_suite_name, |
| 4492 | const TestInfo& test_info); |
| 4493 | |
| 4494 | // Prints a JSON representation of a TestSuite object |
| 4495 | static void PrintJsonTestSuite(::std::ostream* stream, |
| 4496 | const TestSuite& test_suite); |
| 4497 | |
| 4498 | // Prints a JSON summary of unit_test to output stream out. |
| 4499 | static void PrintJsonUnitTest(::std::ostream* stream, |
| 4500 | const UnitTest& unit_test); |
| 4501 | |
| 4502 | // Produces a string representing the test properties in a result as |
| 4503 | // a JSON dictionary. |
| 4504 | static std::string TestPropertiesAsJson(const TestResult& result, |
| 4505 | const std::string& indent); |
| 4506 | |
| 4507 | // The output file. |
| 4508 | const std::string output_file_; |
| 4509 | |
| 4510 | JsonUnitTestResultPrinter(const JsonUnitTestResultPrinter&) = delete; |
| 4511 | JsonUnitTestResultPrinter& operator=(const JsonUnitTestResultPrinter&) = |
| 4512 | delete; |
| 4513 | }; |
| 4514 | |
| 4515 | // Creates a new JsonUnitTestResultPrinter. |
| 4516 | JsonUnitTestResultPrinter::JsonUnitTestResultPrinter(const char* output_file) |
| 4517 | : output_file_(output_file) { |
| 4518 | if (output_file_.empty()) { |
| 4519 | GTEST_LOG_(FATAL) << "JSON output file may not be null";
| 4520 | } |
| 4521 | } |
| 4522 | |
| 4523 | void JsonUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test, |
| 4524 | int /*iteration*/) { |
| 4525 | FILE* jsonout = OpenFileForWriting(output_file_);
| 4526 | std::stringstream stream;
| 4527 | PrintJsonUnitTest(&stream, unit_test);
| 4528 | fprintf(jsonout, "%s", StringStreamToString(&stream).c_str());
| 4529 | fclose(jsonout);
| 4530 | } |
| 4531 | |
| 4532 | // Returns a JSON-escaped copy of the input string str.
| 4533 | std::string JsonUnitTestResultPrinter::EscapeJson(const std::string& str) { |
| 4534 | Message m; |
| 4535 | |
| 4536 | for (size_t i = 0; i < str.size(); ++i) { |
| 4537 | const char ch = str[i]; |
| 4538 | switch (ch) { |
| 4539 | case '\\': |
| 4540 | case '"': |
| 4541 | case '/': |
| 4542 | m << '\\' << ch; |
| 4543 | break; |
| 4544 | case '\b': |
| 4545 | m << "\\b";
| 4546 | break;
| 4547 | case '\t':
| 4548 | m << "\\t";
| 4549 | break;
| 4550 | case '\n':
| 4551 | m << "\\n";
| 4552 | break;
| 4553 | case '\f':
| 4554 | m << "\\f";
| 4555 | break;
| 4556 | case '\r':
| 4557 | m << "\\r";
| 4558 | break;
| 4559 | default:
| 4560 | if (ch < ' ') {
| 4561 | m << "\\u00" << String::FormatByte(static_cast<unsigned char>(ch));
| 4562 | } else { |
| 4563 | m << ch; |
| 4564 | } |
| 4565 | break; |
| 4566 | } |
| 4567 | } |
| 4568 | |
| 4569 | return m.GetString(); |
| 4570 | } |
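// For example, a double quote is emitted as \", a newline as \n, and a
// control character below 0x20 as a \u00XX escape built from its hex value.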
| 4571 | |
| 4572 | // The following routines generate a JSON representation of a UnitTest
| 4573 | // object.
| 4574 | |
| 4575 | // Formats the given time in milliseconds as a duration in seconds.
| 4576 | static std::string FormatTimeInMillisAsDuration(TimeInMillis ms) { |
| 4577 | ::std::stringstream ss; |
| 4578 | ss << (static_cast<double>(ms) * 1e-3) << "s";
| 4579 | return ss.str(); |
| 4580 | } |
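// For example, 2500 ms is formatted as "2.5s" and 3000 ms as "3s".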
| 4581 | |
| 4582 | // Converts the given epoch time in milliseconds to a date string in the |
| 4583 | // RFC3339 format, without the timezone information. |
| 4584 | static std::string FormatEpochTimeInMillisAsRFC3339(TimeInMillis ms) { |
| 4585 | struct tm time_struct; |
| 4586 | if (!PortableLocaltime(static_cast<time_t>(ms / 1000), &time_struct))
| 4587 | return "";
| 4588 | // YYYY-MM-DDThh:mm:ss |
| 4589 | return StreamableToString(time_struct.tm_year + 1900) + "-" +
| 4590 | String::FormatIntWidth2(time_struct.tm_mon + 1) + "-" +
| 4591 | String::FormatIntWidth2(time_struct.tm_mday) + "T" +
| 4592 | String::FormatIntWidth2(time_struct.tm_hour) + ":" +
| 4593 | String::FormatIntWidth2(time_struct.tm_min) + ":" +
| 4594 | String::FormatIntWidth2(time_struct.tm_sec) + "Z";
| 4595 | } |
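// The result looks like "2011-10-31T18:52:42Z", again based on the local
// time returned by PortableLocaltime().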
| 4596 | |
| 4597 | static inline std::string Indent(size_t width) { |
| 4598 | return std::string(width, ' '); |
| 4599 | } |
| 4600 | |
| 4601 | void JsonUnitTestResultPrinter::OutputJsonKey(std::ostream* stream, |
| 4602 | const std::string& element_name, |
| 4603 | const std::string& name, |
| 4604 | const std::string& value, |
| 4605 | const std::string& indent, |
| 4606 | bool comma) { |
| 4607 | const std::vector<std::string>& allowed_names = |
| 4608 | GetReservedOutputAttributesForElement(element_name);
| 4609 | |
| 4610 | GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) != |
| 4611 | allowed_names.end()) |
| 4612 | << "Key \"" << name << "\" is not allowed for value \"" << element_name |
| 4613 | << "\".";
| 4614 | |
| 4615 | *stream << indent << "\"" << name << "\": \"" << EscapeJson(value) << "\"";
| 4616 | if (comma) *stream << ",\n";
| 4617 | } |
| 4618 | |
| 4619 | void JsonUnitTestResultPrinter::OutputJsonKey( |
| 4620 | std::ostream* stream, const std::string& element_name, |
| 4621 | const std::string& name, int value, const std::string& indent, bool comma) { |
| 4622 | const std::vector<std::string>& allowed_names = |
| 4623 | GetReservedOutputAttributesForElement(element_name);
| 4624 | |
| 4625 | GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) != |
| 4626 | allowed_names.end()) |
| 4627 | << "Key \"" << name << "\" is not allowed for value \"" << element_name |
| 4628 | << "\".";
| 4629 | |
| 4630 | *stream << indent << "\"" << name << "\": " << StreamableToString(value);
| 4631 | if (comma) *stream << ",\n";
| 4632 | } |
| 4633 | |
| 4634 | // Streams a test suite JSON stanza containing the given test result. |
| 4635 | void JsonUnitTestResultPrinter::OutputJsonTestSuiteForTestResult( |
| 4636 | ::std::ostream* stream, const TestResult& result) { |
| 4637 | // Output the boilerplate for a new test suite. |
| 4638 | *stream << Indent(4) << "{\n";
| 4639 | OutputJsonKey(stream, "testsuite", "name", "NonTestSuiteFailure", Indent(6));
| 4640 | OutputJsonKey(stream, "testsuite", "tests", 1, Indent(6));
| 4641 | if (!GTEST_FLAG_GET(list_tests)) {
| 4642 | OutputJsonKey(stream, "testsuite", "failures", 1, Indent(6));
| 4643 | OutputJsonKey(stream, "testsuite", "disabled", 0, Indent(6));
| 4644 | OutputJsonKey(stream, "testsuite", "skipped", 0, Indent(6));
| 4645 | OutputJsonKey(stream, "testsuite", "errors", 0, Indent(6));
| 4646 | OutputJsonKey(stream, "testsuite", "time",
| 4647 | FormatTimeInMillisAsDuration(result.elapsed_time()),
| 4648 | Indent(6));
| 4649 | OutputJsonKey(stream, "testsuite", "timestamp",
| 4650 | FormatEpochTimeInMillisAsRFC3339(result.start_timestamp()),
| 4651 | Indent(6));
| 4652 | }
| 4653 | *stream << Indent(6) << "\"testsuite\": [\n";
| 4654 |
| 4655 | // Output the boilerplate for a new test case.
| 4656 | *stream << Indent(8) << "{\n";
| 4657 | OutputJsonKey(stream, "testcase", "name", "", Indent(10));
| 4658 | OutputJsonKey(stream, "testcase", "status", "RUN", Indent(10));
| 4659 | OutputJsonKey(stream, "testcase", "result", "COMPLETED", Indent(10));
| 4660 | OutputJsonKey(stream, "testcase", "timestamp",
| 4661 | FormatEpochTimeInMillisAsRFC3339(result.start_timestamp()),
| 4662 | Indent(10));
| 4663 | OutputJsonKey(stream, "testcase", "time",
| 4664 | FormatTimeInMillisAsDuration(result.elapsed_time()),
| 4665 | Indent(10));
| 4666 | OutputJsonKey(stream, "testcase", "classname", "", Indent(10), false);
| 4667 | *stream << TestPropertiesAsJson(result, Indent(10));
| 4668 |
| 4669 | // Output the actual test result.
| 4670 | OutputJsonTestResult(stream, result);
| 4671 |
| 4672 | // Finish the test suite.
| 4673 | *stream << "\n" << Indent(6) << "]\n" << Indent(4) << "}";
| 4674 | } |
| 4675 | |
| 4676 | // Prints a JSON representation of a TestInfo object. |
| 4677 | void JsonUnitTestResultPrinter::OutputJsonTestInfo(::std::ostream* stream, |
| 4678 | const char* test_suite_name, |
| 4679 | const TestInfo& test_info) { |
| 4680 | const TestResult& result = *test_info.result(); |
| 4681 | const std::string kTestsuite = "testcase";
| 4682 | const std::string kIndent = Indent(10);
| 4683 |
| 4684 | *stream << Indent(8) << "{\n";
| 4685 | OutputJsonKey(stream, kTestsuite, "name", test_info.name(), kIndent);
| 4686 |
| 4687 | if (test_info.value_param() != nullptr) {
| 4688 | OutputJsonKey(stream, kTestsuite, "value_param", test_info.value_param(),
| 4689 | kIndent);
| 4690 | }
| 4691 | if (test_info.type_param() != nullptr) {
| 4692 | OutputJsonKey(stream, kTestsuite, "type_param", test_info.type_param(),
| 4693 | kIndent);
| 4694 | }
| 4695 |
| 4696 | OutputJsonKey(stream, kTestsuite, "file", test_info.file(), kIndent);
| 4697 | OutputJsonKey(stream, kTestsuite, "line", test_info.line(), kIndent, false);
| 4698 | if (GTEST_FLAG_GET(list_tests)) {
| 4699 | *stream << "\n" << Indent(8) << "}";
| 4700 | return;
| 4701 | } else {
| 4702 | *stream << ",\n";
| 4703 | }
| 4704 |
| 4705 | OutputJsonKey(stream, kTestsuite, "status",
| 4706 | test_info.should_run() ? "RUN" : "NOTRUN", kIndent);
| 4707 | OutputJsonKey(stream, kTestsuite, "result",
| 4708 | test_info.should_run()
| 4709 | ? (result.Skipped() ? "SKIPPED" : "COMPLETED")
| 4710 | : "SUPPRESSED",
| 4711 | kIndent);
| 4712 | OutputJsonKey(stream, kTestsuite, "timestamp",
| 4713 | FormatEpochTimeInMillisAsRFC3339(result.start_timestamp()),
| 4714 | kIndent);
| 4715 | OutputJsonKey(stream, kTestsuite, "time",
| 4716 | FormatTimeInMillisAsDuration(result.elapsed_time()), kIndent);
| 4717 | OutputJsonKey(stream, kTestsuite, "classname", test_suite_name, kIndent,
| 4718 | false);
| 4719 | *stream << TestPropertiesAsJson(result, kIndent);
| 4720 |
| 4721 | OutputJsonTestResult(stream, result);
| 4722 | } |
| 4723 | |
| 4724 | void JsonUnitTestResultPrinter::OutputJsonTestResult(::std::ostream* stream, |
| 4725 | const TestResult& result) { |
| 4726 | const std::string kIndent = Indent(10);
| 4727 | |
| 4728 | int failures = 0; |
| 4729 | for (int i = 0; i < result.total_part_count(); ++i) { |
| 4730 | const TestPartResult& part = result.GetTestPartResult(i); |
| 4731 | if (part.failed()) { |
| 4732 | *stream << ",\n";
| 4733 | if (++failures == 1) {
| 4734 | *stream << kIndent << "\""
| 4735 | << "failures"
| 4736 | << "\": [\n";
| 4737 | }
| 4738 | const std::string location =
| 4739 | internal::FormatCompilerIndependentFileLocation(part.file_name(),
| 4740 | part.line_number());
| 4741 | const std::string message = EscapeJson(location + "\n" + part.message());
| 4742 | *stream << kIndent << " {\n"
| 4743 | << kIndent << " \"failure\": \"" << message << "\",\n"
| 4744 | << kIndent << " \"type\": \"\"\n"
| 4745 | << kIndent << " }";
| 4746 | } |
| 4747 | } |
| 4748 | |
| 4749 | if (failures > 0) *stream << "\n" << kIndent << "]";
| 4750 | *stream << "\n" << Indent(8) << "}";
| 4751 | } |
| 4752 | |
| 4753 | // Prints a JSON representation of a TestSuite object
| 4754 | void JsonUnitTestResultPrinter::PrintJsonTestSuite( |
| 4755 | std::ostream* stream, const TestSuite& test_suite) { |
| 4756 | const std::string kTestsuite = "testsuite";
| 4757 | const std::string kIndent = Indent(6);
| 4758 | |
| 4759 | *stream << Indent(4) << "{\n";
| 4760 | OutputJsonKey(stream, kTestsuite, "name", test_suite.name(), kIndent);
| 4761 | OutputJsonKey(stream, kTestsuite, "tests", test_suite.reportable_test_count(),
| 4762 | kIndent);
| 4763 | if (!GTEST_FLAG_GET(list_tests)) {
| 4764 | OutputJsonKey(stream, kTestsuite, "failures",
| 4765 | test_suite.failed_test_count(), kIndent);
| 4766 | OutputJsonKey(stream, kTestsuite, "disabled",
| 4767 | test_suite.reportable_disabled_test_count(), kIndent);
| 4768 | OutputJsonKey(stream, kTestsuite, "errors", 0, kIndent);
| 4769 | OutputJsonKey(
| 4770 | stream, kTestsuite, "timestamp",
| 4771 | FormatEpochTimeInMillisAsRFC3339(test_suite.start_timestamp()),
| 4772 | kIndent);
| 4773 | OutputJsonKey(stream, kTestsuite, "time",
| 4774 | FormatTimeInMillisAsDuration(test_suite.elapsed_time()),
| 4775 | kIndent, false);
| 4776 | *stream << TestPropertiesAsJson(test_suite.ad_hoc_test_result(), kIndent)
| 4777 | << ",\n";
| 4778 | }
| 4779 |
| 4780 | *stream << kIndent << "\"" << kTestsuite << "\": [\n";
| 4781 | |
| 4782 | bool comma = false; |
| 4783 | for (int i = 0; i < test_suite.total_test_count(); ++i) { |
| 4784 | if (test_suite.GetTestInfo(i)->is_reportable()) { |
| 4785 | if (comma) { |
| 4786 | *stream << ",\n";
| 4787 | } else {
| 4788 | comma = true;
| 4789 | }
| 4790 | OutputJsonTestInfo(stream, test_suite.name(), *test_suite.GetTestInfo(i));
| 4791 | }
| 4792 | }
| 4793 | *stream << "\n" << kIndent << "]\n" << Indent(4) << "}";
| 4794 | } |
| 4795 | |
| 4796 | // Prints a JSON summary of unit_test to output stream out. |
| 4797 | void JsonUnitTestResultPrinter::PrintJsonUnitTest(std::ostream* stream, |
| 4798 | const UnitTest& unit_test) { |
| 4799 | const std::string kTestsuites = "testsuites";
| 4800 | const std::string kIndent = Indent(2);
| 4801 | *stream << "{\n";
| 4802 |
| 4803 | OutputJsonKey(stream, kTestsuites, "tests", unit_test.reportable_test_count(),
| 4804 | kIndent);
| 4805 | OutputJsonKey(stream, kTestsuites, "failures", unit_test.failed_test_count(),
| 4806 | kIndent);
| 4807 | OutputJsonKey(stream, kTestsuites, "disabled",
| 4808 | unit_test.reportable_disabled_test_count(), kIndent);
| 4809 | OutputJsonKey(stream, kTestsuites, "errors", 0, kIndent);
| 4810 | if (GTEST_FLAG_GET(shuffle)) {
| 4811 | OutputJsonKey(stream, kTestsuites, "random_seed", unit_test.random_seed(),
| 4812 | kIndent);
| 4813 | }
| 4814 | OutputJsonKey(stream, kTestsuites, "timestamp",
| 4815 | FormatEpochTimeInMillisAsRFC3339(unit_test.start_timestamp()),
| 4816 | kIndent);
| 4817 | OutputJsonKey(stream, kTestsuites, "time",
| 4818 | FormatTimeInMillisAsDuration(unit_test.elapsed_time()), kIndent,
| 4819 | false);
| 4820 |
| 4821 | *stream << TestPropertiesAsJson(unit_test.ad_hoc_test_result(), kIndent)
| 4822 | << ",\n";
| 4823 |
| 4824 | OutputJsonKey(stream, kTestsuites, "name", "AllTests", kIndent);
| 4825 | *stream << kIndent << "\"" << kTestsuites << "\": [\n";
| 4826 | |
| 4827 | bool comma = false; |
| 4828 | for (int i = 0; i < unit_test.total_test_suite_count(); ++i) { |
| 4829 | if (unit_test.GetTestSuite(i)->reportable_test_count() > 0) { |
| 4830 | if (comma) { |
| 4831 | *stream << ",\n";
| 4832 | } else {
| 4833 | comma = true;
| 4834 | }
| 4835 | PrintJsonTestSuite(stream, *unit_test.GetTestSuite(i));
| 4836 | } |
| 4837 | } |
| 4838 | |
| 4839 | // If there was a test failure outside of one of the test suites (like in a |
| 4840 | // test environment) include that in the output. |
| 4841 | if (unit_test.ad_hoc_test_result().Failed()) { |
| 4842 | if (comma) { |
| 4843 | *stream << ",\n";
| 4844 | }
| 4845 | OutputJsonTestSuiteForTestResult(stream, unit_test.ad_hoc_test_result());
| 4846 | } |
| 4847 | |
| 4848 | *stream << "\n" |
| 4849 | << kIndent << "]\n" |
| 4850 | << "}\n";
| 4851 | } |
| 4852 | |
| 4853 | void JsonUnitTestResultPrinter::PrintJsonTestList( |
| 4854 | std::ostream* stream, const std::vector<TestSuite*>& test_suites) { |
| 4855 | const std::string kTestsuites = "testsuites";
| 4856 | const std::string kIndent = Indent(2);
| 4857 | *stream << "{\n";
| 4858 | int total_tests = 0; |
| 4859 | for (auto test_suite : test_suites) { |
| 4860 | total_tests += test_suite->total_test_count(); |
| 4861 | } |
| 4862 | OutputJsonKey(stream, kTestsuites, "tests", total_tests, kIndent);
| 4863 | |
| 4864 | OutputJsonKey(stream, kTestsuites, "name", "AllTests", kIndent);
| 4865 | *stream << kIndent << "\"" << kTestsuites << "\": [\n";
| 4866 | |
| 4867 | for (size_t i = 0; i < test_suites.size(); ++i) { |
| 4868 | if (i != 0) { |
| 4869 | *stream << ",\n";
| 4870 | } |
| 4871 | PrintJsonTestSuite(stream, *test_suites[i]);
| 4872 | } |
| 4873 | |
| 4874 | *stream << "\n" |
| 4875 | << kIndent << "]\n" |
| 4876 | << "}\n";
| 4877 | } |
| 4878 | // Produces a string representing the test properties in a result as |
| 4879 | // a JSON dictionary. |
| 4880 | std::string JsonUnitTestResultPrinter::TestPropertiesAsJson( |
| 4881 | const TestResult& result, const std::string& indent) { |
| 4882 | Message attributes; |
| 4883 | for (int i = 0; i < result.test_property_count(); ++i) { |
| 4884 | const TestProperty& property = result.GetTestProperty(i); |
| 4885 | attributes << ",\n" |
| 4886 | << indent << "\"" << property.key() << "\": " |
| 4887 | << "\"" << EscapeJson(property.value()) << "\"";
| 4888 | } |
| 4889 | return attributes.GetString(); |
| 4890 | } |
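// For example, a property ("bug", "1234") produces ",\n" followed by the
// given indent and "\"bug\": \"1234\"", which callers append after the last
// fixed key of the enclosing JSON object.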
| 4891 | |
| 4892 | // End JsonUnitTestResultPrinter |
| 4893 | #endif // GTEST_HAS_FILE_SYSTEM |
| 4894 | |
| 4895 | #if GTEST_CAN_STREAM_RESULTS_ |
| 4896 | |
| 4897 | // Checks if str contains '=', '&', '%' or '\n' characters. If yes, |
| 4898 | // replaces them by "%xx" where xx is their hexadecimal value. For |
| 4899 | // example, replaces "=" with "%3D". This algorithm is O(strlen(str)) |
| 4900 | // in both time and space -- important as the input str may contain an |
| 4901 | // arbitrarily long test failure message and stack trace. |
| 4902 | std::string StreamingListener::UrlEncode(const char* str) { |
| 4903 | std::string result; |
| 4904 | result.reserve(strlen(str) + 1);
| 4905 | for (char ch = *str; ch != '\0'; ch = *++str) { |
| 4906 | switch (ch) { |
| 4907 | case '%': |
| 4908 | case '=': |
| 4909 | case '&': |
| 4910 | case '\n': |
| 4911 | result.push_back('%');
| 4912 | result.append(String::FormatByte(static_cast<unsigned char>(ch)));
| 4913 | break; |
| 4914 | default: |
| 4915 | result.push_back(ch);
| 4916 | break; |
| 4917 | } |
| 4918 | } |
| 4919 | return result; |
| 4920 | } |
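// For example, UrlEncode("a=b&c") returns "a%3Db%26c", matching the "%xx"
// scheme described above.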
| 4921 | |
| 4922 | void StreamingListener::SocketWriter::MakeConnection() { |
| 4923 | GTEST_CHECK_(sockfd_ == -1) |
| 4924 | << "MakeConnection() can't be called when there is already a connection.";
| 4925 | |
| 4926 | addrinfo hints; |
| 4927 | memset(&hints, 0, sizeof(hints));
| 4928 | hints.ai_family = AF_UNSPEC; // To allow both IPv4 and IPv6 addresses. |
| 4929 | hints.ai_socktype = SOCK_STREAM; |
| 4930 | addrinfo* servinfo = nullptr; |
| 4931 | |
| 4932 | // Use the getaddrinfo() to get a linked list of IP addresses for |
| 4933 | // the given host name. |
| 4934 | const int error_num = |
| 4935 | getaddrinfo(host_name_.c_str(), port_num_.c_str(), &hints, &servinfo);
| 4936 | if (error_num != 0) { |
| 4937 | GTEST_LOG_(WARNING) << "stream_result_to: getaddrinfo() failed: " |
| 4938 | << gai_strerror(error_num);
| 4939 | } |
| 4940 | |
| 4941 | // Loop through all the results and connect to the first we can. |
| 4942 | for (addrinfo* cur_addr = servinfo; sockfd_ == -1 && cur_addr != nullptr; |
| 4943 | cur_addr = cur_addr->ai_next) { |
| 4944 | sockfd_ = socket(cur_addr->ai_family, cur_addr->ai_socktype,
| 4945 | cur_addr->ai_protocol);
| 4946 | if (sockfd_ != -1) { |
| 4947 | // Connect the client socket to the server socket. |
| 4948 | if (connect(fd: sockfd_, addr: cur_addr->ai_addr, len: cur_addr->ai_addrlen) == -1) { |
| 4949 | close(fd: sockfd_); |
| 4950 | sockfd_ = -1; |
| 4951 | } |
| 4952 | } |
| 4953 | } |
| 4954 | |
| 4955 | freeaddrinfo(ai: servinfo); // all done with this structure |
| 4956 | |
| 4957 | if (sockfd_ == -1) { |
| 4958 | GTEST_LOG_(WARNING) << "stream_result_to: failed to connect to " |
| 4959 | << host_name_ << ":" << port_num_; |
| 4960 | } |
| 4961 | } |
| 4962 | |
| 4963 | // End of class Streaming Listener |
#endif // GTEST_CAN_STREAM_RESULTS_
| 4965 | |
| 4966 | // class OsStackTraceGetter |
| 4967 | |
| 4968 | const char* const OsStackTraceGetterInterface::kElidedFramesMarker = |
| 4969 | "... " GTEST_NAME_ " internal frames ..." ; |
| 4970 | |
| 4971 | std::string OsStackTraceGetter::CurrentStackTrace(int max_depth, int skip_count) |
| 4972 | GTEST_LOCK_EXCLUDED_(mutex_) { |
| 4973 | #ifdef GTEST_HAS_ABSL |
| 4974 | std::string result; |
| 4975 | |
| 4976 | if (max_depth <= 0) { |
| 4977 | return result; |
| 4978 | } |
| 4979 | |
| 4980 | max_depth = std::min(max_depth, kMaxStackTraceDepth); |
| 4981 | |
| 4982 | std::vector<void*> raw_stack(max_depth); |
| 4983 | // Skips the frames requested by the caller, plus this function. |
| 4984 | const int raw_stack_size = |
| 4985 | absl::GetStackTrace(&raw_stack[0], max_depth, skip_count + 1); |
| 4986 | |
| 4987 | void* caller_frame = nullptr; |
| 4988 | { |
| 4989 | MutexLock lock(&mutex_); |
| 4990 | caller_frame = caller_frame_; |
| 4991 | } |
| 4992 | |
| 4993 | for (int i = 0; i < raw_stack_size; ++i) { |
| 4994 | if (raw_stack[i] == caller_frame && |
| 4995 | !GTEST_FLAG_GET(show_internal_stack_frames)) { |
| 4996 | // Add a marker to the trace and stop adding frames. |
| 4997 | absl::StrAppend(&result, kElidedFramesMarker, "\n" ); |
| 4998 | break; |
| 4999 | } |
| 5000 | |
| 5001 | char tmp[1024]; |
| 5002 | const char* symbol = "(unknown)" ; |
| 5003 | if (absl::Symbolize(raw_stack[i], tmp, sizeof(tmp))) { |
| 5004 | symbol = tmp; |
| 5005 | } |
| 5006 | |
| 5007 | char line[1024]; |
| 5008 | snprintf(line, sizeof(line), " %p: %s\n" , raw_stack[i], symbol); |
| 5009 | result += line; |
| 5010 | } |
| 5011 | |
| 5012 | return result; |
| 5013 | |
| 5014 | #else // !GTEST_HAS_ABSL |
| 5015 | static_cast<void>(max_depth); |
| 5016 | static_cast<void>(skip_count); |
| 5017 | return "" ; |
| 5018 | #endif // GTEST_HAS_ABSL |
| 5019 | } |
| 5020 | |
| 5021 | void OsStackTraceGetter::UponLeavingGTest() GTEST_LOCK_EXCLUDED_(mutex_) { |
| 5022 | #ifdef GTEST_HAS_ABSL |
| 5023 | void* caller_frame = nullptr; |
| 5024 | if (absl::GetStackTrace(&caller_frame, 1, 3) <= 0) { |
| 5025 | caller_frame = nullptr; |
| 5026 | } |
| 5027 | |
| 5028 | MutexLock lock(&mutex_); |
| 5029 | caller_frame_ = caller_frame; |
| 5030 | #endif // GTEST_HAS_ABSL |
| 5031 | } |
| 5032 | |
| 5033 | #ifdef GTEST_HAS_DEATH_TEST |
| 5034 | // A helper class that creates the premature-exit file in its |
| 5035 | // constructor and deletes the file in its destructor. |
| 5036 | class ScopedPrematureExitFile { |
| 5037 | public: |
| 5038 | explicit ScopedPrematureExitFile(const char* premature_exit_filepath) |
| 5039 | : premature_exit_filepath_( |
| 5040 | premature_exit_filepath ? premature_exit_filepath : "" ) { |
| 5041 | // If a path to the premature-exit file is specified... |
| 5042 | if (!premature_exit_filepath_.empty()) { |
| 5043 | // create the file with a single "0" character in it. I/O |
| 5044 | // errors are ignored as there's nothing better we can do and we |
| 5045 | // don't want to fail the test because of this. |
      FILE* pfile = posix::FOpen(premature_exit_filepath_.c_str(), "w");
      fwrite("0", 1, 1, pfile);
      fclose(pfile);
| 5049 | } |
| 5050 | } |
| 5051 | |
| 5052 | ~ScopedPrematureExitFile() { |
| 5053 | #ifndef GTEST_OS_ESP8266 |
| 5054 | if (!premature_exit_filepath_.empty()) { |
      int retval = remove(premature_exit_filepath_.c_str());
| 5056 | if (retval) { |
| 5057 | GTEST_LOG_(ERROR) << "Failed to remove premature exit filepath \"" |
| 5058 | << premature_exit_filepath_ << "\" with error " |
| 5059 | << retval; |
| 5060 | } |
| 5061 | } |
| 5062 | #endif |
| 5063 | } |
| 5064 | |
| 5065 | private: |
| 5066 | const std::string premature_exit_filepath_; |
| 5067 | |
| 5068 | ScopedPrematureExitFile(const ScopedPrematureExitFile&) = delete; |
| 5069 | ScopedPrematureExitFile& operator=(const ScopedPrematureExitFile&) = delete; |
| 5070 | }; |
| 5071 | #endif // GTEST_HAS_DEATH_TEST |
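
// Sketch of how a test runner might use the premature-exit-file protocol
// implemented by ScopedPrematureExitFile above (illustrative only; the path
// and RunTestBinary() are hypothetical):
//
//   setenv("TEST_PREMATURE_EXIT_FILE", "/tmp/premature_exit", 1);
//   const int exit_code = RunTestBinary();
//   if (access("/tmp/premature_exit", F_OK) == 0) {
//     // The test binary exited before RUN_ALL_TESTS() returned.
//   }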
| 5072 | |
| 5073 | } // namespace internal |
| 5074 | |
| 5075 | // class TestEventListeners |
| 5076 | |
| 5077 | TestEventListeners::TestEventListeners() |
| 5078 | : repeater_(new internal::TestEventRepeater()), |
| 5079 | default_result_printer_(nullptr), |
| 5080 | default_xml_generator_(nullptr) {} |
| 5081 | |
| 5082 | TestEventListeners::~TestEventListeners() { delete repeater_; } |
| 5083 | |
// Appends an event listener to the end of the list. Google Test assumes
// ownership of the listener (i.e. it will delete the listener when the
// test program finishes), unless the listener is later removed from the
// list with Release(), which transfers ownership back to the caller.
| 5088 | void TestEventListeners::Append(TestEventListener* listener) { |
| 5089 | repeater_->Append(listener); |
| 5090 | } |
| 5091 | |
| 5092 | // Removes the given event listener from the list and returns it. It then |
| 5093 | // becomes the caller's responsibility to delete the listener. Returns |
| 5094 | // NULL if the listener is not found in the list. |
| 5095 | TestEventListener* TestEventListeners::Release(TestEventListener* listener) { |
| 5096 | if (listener == default_result_printer_) |
| 5097 | default_result_printer_ = nullptr; |
| 5098 | else if (listener == default_xml_generator_) |
| 5099 | default_xml_generator_ = nullptr; |
| 5100 | return repeater_->Release(listener); |
| 5101 | } |
| 5102 | |
| 5103 | // Returns repeater that broadcasts the TestEventListener events to all |
| 5104 | // subscribers. |
| 5105 | TestEventListener* TestEventListeners::repeater() { return repeater_; } |
| 5106 | |
| 5107 | // Sets the default_result_printer attribute to the provided listener. |
| 5108 | // The listener is also added to the listener list and previous |
| 5109 | // default_result_printer is removed from it and deleted. The listener can |
| 5110 | // also be NULL in which case it will not be added to the list. Does |
| 5111 | // nothing if the previous and the current listener objects are the same. |
| 5112 | void TestEventListeners::SetDefaultResultPrinter(TestEventListener* listener) { |
| 5113 | if (default_result_printer_ != listener) { |
| 5114 | // It is an error to pass this method a listener that is already in the |
| 5115 | // list. |
    delete Release(default_result_printer_);
| 5117 | default_result_printer_ = listener; |
| 5118 | if (listener != nullptr) Append(listener); |
| 5119 | } |
| 5120 | } |
| 5121 | |
| 5122 | // Sets the default_xml_generator attribute to the provided listener. The |
| 5123 | // listener is also added to the listener list and previous |
| 5124 | // default_xml_generator is removed from it and deleted. The listener can |
| 5125 | // also be NULL in which case it will not be added to the list. Does |
| 5126 | // nothing if the previous and the current listener objects are the same. |
| 5127 | void TestEventListeners::SetDefaultXmlGenerator(TestEventListener* listener) { |
| 5128 | if (default_xml_generator_ != listener) { |
| 5129 | // It is an error to pass this method a listener that is already in the |
| 5130 | // list. |
    delete Release(default_xml_generator_);
| 5132 | default_xml_generator_ = listener; |
| 5133 | if (listener != nullptr) Append(listener); |
| 5134 | } |
| 5135 | } |
| 5136 | |
| 5137 | // Controls whether events will be forwarded by the repeater to the |
| 5138 | // listeners in the list. |
| 5139 | bool TestEventListeners::EventForwardingEnabled() const { |
| 5140 | return repeater_->forwarding_enabled(); |
| 5141 | } |
| 5142 | |
| 5143 | void TestEventListeners::SuppressEventForwarding(bool suppress) { |
| 5144 | repeater_->set_forwarding_enabled(!suppress); |
| 5145 | } |
| 5146 | |
| 5147 | // class UnitTest |
| 5148 | |
| 5149 | // Gets the singleton UnitTest object. The first time this method is |
| 5150 | // called, a UnitTest object is constructed and returned. Consecutive |
| 5151 | // calls will return the same object. |
| 5152 | // |
| 5153 | // We don't protect this under mutex_ as a user is not supposed to |
| 5154 | // call this before main() starts, from which point on the return |
| 5155 | // value will never change. |
| 5156 | UnitTest* UnitTest::GetInstance() { |
| 5157 | // CodeGear C++Builder insists on a public destructor for the |
| 5158 | // default implementation. Use this implementation to keep good OO |
| 5159 | // design with private destructor. |
| 5160 | |
| 5161 | #if defined(__BORLANDC__) |
| 5162 | static UnitTest* const instance = new UnitTest; |
| 5163 | return instance; |
| 5164 | #else |
| 5165 | static UnitTest instance; |
| 5166 | return &instance; |
| 5167 | #endif // defined(__BORLANDC__) |
| 5168 | } |
| 5169 | |
| 5170 | // Gets the number of successful test suites. |
| 5171 | int UnitTest::successful_test_suite_count() const { |
| 5172 | return impl()->successful_test_suite_count(); |
| 5173 | } |
| 5174 | |
| 5175 | // Gets the number of failed test suites. |
| 5176 | int UnitTest::failed_test_suite_count() const { |
| 5177 | return impl()->failed_test_suite_count(); |
| 5178 | } |
| 5179 | |
| 5180 | // Gets the number of all test suites. |
| 5181 | int UnitTest::total_test_suite_count() const { |
| 5182 | return impl()->total_test_suite_count(); |
| 5183 | } |
| 5184 | |
| 5185 | // Gets the number of all test suites that contain at least one test |
| 5186 | // that should run. |
| 5187 | int UnitTest::test_suite_to_run_count() const { |
| 5188 | return impl()->test_suite_to_run_count(); |
| 5189 | } |
| 5190 | |
| 5191 | // Legacy API is deprecated but still available |
| 5192 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 5193 | int UnitTest::successful_test_case_count() const { |
| 5194 | return impl()->successful_test_suite_count(); |
| 5195 | } |
| 5196 | int UnitTest::failed_test_case_count() const { |
| 5197 | return impl()->failed_test_suite_count(); |
| 5198 | } |
| 5199 | int UnitTest::total_test_case_count() const { |
| 5200 | return impl()->total_test_suite_count(); |
| 5201 | } |
| 5202 | int UnitTest::test_case_to_run_count() const { |
| 5203 | return impl()->test_suite_to_run_count(); |
| 5204 | } |
| 5205 | #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 5206 | |
| 5207 | // Gets the number of successful tests. |
| 5208 | int UnitTest::successful_test_count() const { |
| 5209 | return impl()->successful_test_count(); |
| 5210 | } |
| 5211 | |
| 5212 | // Gets the number of skipped tests. |
| 5213 | int UnitTest::skipped_test_count() const { |
| 5214 | return impl()->skipped_test_count(); |
| 5215 | } |
| 5216 | |
| 5217 | // Gets the number of failed tests. |
| 5218 | int UnitTest::failed_test_count() const { return impl()->failed_test_count(); } |
| 5219 | |
| 5220 | // Gets the number of disabled tests that will be reported in the XML report. |
| 5221 | int UnitTest::reportable_disabled_test_count() const { |
| 5222 | return impl()->reportable_disabled_test_count(); |
| 5223 | } |
| 5224 | |
| 5225 | // Gets the number of disabled tests. |
| 5226 | int UnitTest::disabled_test_count() const { |
| 5227 | return impl()->disabled_test_count(); |
| 5228 | } |
| 5229 | |
| 5230 | // Gets the number of tests to be printed in the XML report. |
| 5231 | int UnitTest::reportable_test_count() const { |
| 5232 | return impl()->reportable_test_count(); |
| 5233 | } |
| 5234 | |
| 5235 | // Gets the number of all tests. |
| 5236 | int UnitTest::total_test_count() const { return impl()->total_test_count(); } |
| 5237 | |
| 5238 | // Gets the number of tests that should run. |
| 5239 | int UnitTest::test_to_run_count() const { return impl()->test_to_run_count(); } |
| 5240 | |
| 5241 | // Gets the time of the test program start, in ms from the start of the |
| 5242 | // UNIX epoch. |
| 5243 | internal::TimeInMillis UnitTest::start_timestamp() const { |
| 5244 | return impl()->start_timestamp(); |
| 5245 | } |
| 5246 | |
| 5247 | // Gets the elapsed time, in milliseconds. |
| 5248 | internal::TimeInMillis UnitTest::elapsed_time() const { |
| 5249 | return impl()->elapsed_time(); |
| 5250 | } |
| 5251 | |
| 5252 | // Returns true if and only if the unit test passed (i.e. all test suites |
| 5253 | // passed). |
| 5254 | bool UnitTest::Passed() const { return impl()->Passed(); } |
| 5255 | |
| 5256 | // Returns true if and only if the unit test failed (i.e. some test suite |
| 5257 | // failed or something outside of all tests failed). |
| 5258 | bool UnitTest::Failed() const { return impl()->Failed(); } |
| 5259 | |
| 5260 | // Gets the i-th test suite among all the test suites. i can range from 0 to |
| 5261 | // total_test_suite_count() - 1. If i is not in that range, returns NULL. |
| 5262 | const TestSuite* UnitTest::GetTestSuite(int i) const { |
| 5263 | return impl()->GetTestSuite(i); |
| 5264 | } |
| 5265 | |
| 5266 | // Legacy API is deprecated but still available |
| 5267 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 5268 | const TestCase* UnitTest::GetTestCase(int i) const { |
| 5269 | return impl()->GetTestCase(i); |
| 5270 | } |
| 5271 | #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 5272 | |
| 5273 | // Returns the TestResult containing information on test failures and |
| 5274 | // properties logged outside of individual test suites. |
| 5275 | const TestResult& UnitTest::ad_hoc_test_result() const { |
| 5276 | return *impl()->ad_hoc_test_result(); |
| 5277 | } |
| 5278 | |
| 5279 | // Gets the i-th test suite among all the test suites. i can range from 0 to |
| 5280 | // total_test_suite_count() - 1. If i is not in that range, returns NULL. |
| 5281 | TestSuite* UnitTest::GetMutableTestSuite(int i) { |
| 5282 | return impl()->GetMutableSuiteCase(i); |
| 5283 | } |
| 5284 | |
| 5285 | // Returns the list of event listeners that can be used to track events |
| 5286 | // inside Google Test. |
| 5287 | TestEventListeners& UnitTest::listeners() { return *impl()->listeners(); } |
| 5288 | |
| 5289 | // Registers and returns a global test environment. When a test |
| 5290 | // program is run, all global test environments will be set-up in the |
| 5291 | // order they were registered. After all tests in the program have |
| 5292 | // finished, all global test environments will be torn-down in the |
| 5293 | // *reverse* order they were registered. |
| 5294 | // |
| 5295 | // The UnitTest object takes ownership of the given environment. |
| 5296 | // |
| 5297 | // We don't protect this under mutex_, as we only support calling it |
| 5298 | // from the main thread. |
| 5299 | Environment* UnitTest::AddEnvironment(Environment* env) { |
| 5300 | if (env == nullptr) { |
| 5301 | return nullptr; |
| 5302 | } |
| 5303 | |
  impl_->environments().push_back(env);
| 5305 | return env; |
| 5306 | } |
| 5307 | |
| 5308 | // Adds a TestPartResult to the current TestResult object. All Google Test |
| 5309 | // assertion macros (e.g. ASSERT_TRUE, EXPECT_EQ, etc) eventually call |
| 5310 | // this to report their results. The user code should use the |
| 5311 | // assertion macros instead of calling this directly. |
| 5312 | void UnitTest::AddTestPartResult(TestPartResult::Type result_type, |
| 5313 | const char* file_name, int line_number, |
| 5314 | const std::string& message, |
| 5315 | const std::string& os_stack_trace) |
| 5316 | GTEST_LOCK_EXCLUDED_(mutex_) { |
| 5317 | Message msg; |
| 5318 | msg << message; |
| 5319 | |
| 5320 | internal::MutexLock lock(&mutex_); |
| 5321 | if (!impl_->gtest_trace_stack().empty()) { |
    msg << "\n" << GTEST_NAME_ << " trace:";

    for (size_t i = impl_->gtest_trace_stack().size(); i > 0; --i) {
      const internal::TraceInfo& trace = impl_->gtest_trace_stack()[i - 1];
      msg << "\n"
          << internal::FormatFileLocation(trace.file, trace.line) << " "
          << trace.message;
| 5329 | } |
| 5330 | } |
| 5331 | |
| 5332 | if (os_stack_trace.c_str() != nullptr && !os_stack_trace.empty()) { |
| 5333 | msg << internal::kStackTraceMarker << os_stack_trace; |
| 5334 | } else { |
| 5335 | msg << "\n" ; |
| 5336 | } |
| 5337 | |
| 5338 | const TestPartResult result = TestPartResult( |
| 5339 | result_type, file_name, line_number, msg.GetString().c_str()); |
| 5340 | impl_->GetTestPartResultReporterForCurrentThread()->ReportTestPartResult( |
| 5341 | result); |
| 5342 | |
| 5343 | if (result_type != TestPartResult::kSuccess && |
| 5344 | result_type != TestPartResult::kSkip) { |
| 5345 | // gtest_break_on_failure takes precedence over |
| 5346 | // gtest_throw_on_failure. This allows a user to set the latter |
| 5347 | // in the code (perhaps in order to use Google Test assertions |
| 5348 | // with another testing framework) and specify the former on the |
| 5349 | // command line for debugging. |
| 5350 | if (GTEST_FLAG_GET(break_on_failure)) { |
| 5351 | #if defined(GTEST_OS_WINDOWS) && !defined(GTEST_OS_WINDOWS_PHONE) && \ |
| 5352 | !defined(GTEST_OS_WINDOWS_RT) |
| 5353 | // Using DebugBreak on Windows allows gtest to still break into a debugger |
| 5354 | // when a failure happens and both the --gtest_break_on_failure and |
| 5355 | // the --gtest_catch_exceptions flags are specified. |
| 5356 | DebugBreak(); |
| 5357 | #elif (!defined(__native_client__)) && \ |
| 5358 | ((defined(__clang__) || defined(__GNUC__)) && \ |
| 5359 | (defined(__x86_64__) || defined(__i386__))) |
| 5360 | // with clang/gcc we can achieve the same effect on x86 by invoking int3 |
| 5361 | asm("int3" ); |
| 5362 | #elif GTEST_HAS_BUILTIN(__builtin_trap) |
| 5363 | __builtin_trap(); |
| 5364 | #elif defined(SIGTRAP) |
| 5365 | raise(SIGTRAP); |
| 5366 | #else |
| 5367 | // Dereference nullptr through a volatile pointer to prevent the compiler |
| 5368 | // from removing. We use this rather than abort() or __builtin_trap() for |
| 5369 | // portability: some debuggers don't correctly trap abort(). |
| 5370 | *static_cast<volatile int*>(nullptr) = 1; |
| 5371 | #endif // GTEST_OS_WINDOWS |
| 5372 | } else if (GTEST_FLAG_GET(throw_on_failure)) { |
| 5373 | #if GTEST_HAS_EXCEPTIONS |
| 5374 | throw internal::GoogleTestFailureException(result); |
| 5375 | #else |
| 5376 | // We cannot call abort() as it generates a pop-up in debug mode |
| 5377 | // that cannot be suppressed in VC 7.1 or below. |
    exit(1);
| 5379 | #endif |
| 5380 | } |
| 5381 | } |
| 5382 | } |
| 5383 | |
| 5384 | // Adds a TestProperty to the current TestResult object when invoked from |
| 5385 | // inside a test, to current TestSuite's ad_hoc_test_result_ when invoked |
| 5386 | // from SetUpTestSuite or TearDownTestSuite, or to the global property set |
| 5387 | // when invoked elsewhere. If the result already contains a property with |
| 5388 | // the same key, the value will be updated. |
| 5389 | void UnitTest::RecordProperty(const std::string& key, |
| 5390 | const std::string& value) { |
  impl_->RecordProperty(TestProperty(key, value));
| 5392 | } |
| 5393 | |
| 5394 | // Runs all tests in this UnitTest object and prints the result. |
| 5395 | // Returns 0 if successful, or 1 otherwise. |
| 5396 | // |
| 5397 | // We don't protect this under mutex_, as we only support calling it |
| 5398 | // from the main thread. |
| 5399 | int UnitTest::Run() { |
| 5400 | #ifdef GTEST_HAS_DEATH_TEST |
| 5401 | const bool in_death_test_child_process = |
| 5402 | GTEST_FLAG_GET(internal_run_death_test).length() > 0; |
| 5403 | |
| 5404 | // Google Test implements this protocol for catching that a test |
| 5405 | // program exits before returning control to Google Test: |
| 5406 | // |
| 5407 | // 1. Upon start, Google Test creates a file whose absolute path |
| 5408 | // is specified by the environment variable |
| 5409 | // TEST_PREMATURE_EXIT_FILE. |
| 5410 | // 2. When Google Test has finished its work, it deletes the file. |
| 5411 | // |
| 5412 | // This allows a test runner to set TEST_PREMATURE_EXIT_FILE before |
| 5413 | // running a Google-Test-based test program and check the existence |
| 5414 | // of the file at the end of the test execution to see if it has |
| 5415 | // exited prematurely. |
| 5416 | |
| 5417 | // If we are in the child process of a death test, don't |
| 5418 | // create/delete the premature exit file, as doing so is unnecessary |
| 5419 | // and will confuse the parent process. Otherwise, create/delete |
| 5420 | // the file upon entering/leaving this function. If the program |
| 5421 | // somehow exits before this function has a chance to return, the |
| 5422 | // premature-exit file will be left undeleted, causing a test runner |
| 5423 | // that understands the premature-exit-file protocol to report the |
| 5424 | // test as having failed. |
| 5425 | const internal::ScopedPrematureExitFile premature_exit_file( |
| 5426 | in_death_test_child_process |
| 5427 | ? nullptr |
          : internal::posix::GetEnv("TEST_PREMATURE_EXIT_FILE"));
| 5429 | #else |
| 5430 | const bool in_death_test_child_process = false; |
| 5431 | #endif // GTEST_HAS_DEATH_TEST |
| 5432 | |
| 5433 | // Captures the value of GTEST_FLAG(catch_exceptions). This value will be |
| 5434 | // used for the duration of the program. |
| 5435 | impl()->set_catch_exceptions(GTEST_FLAG_GET(catch_exceptions)); |
| 5436 | |
| 5437 | #ifdef GTEST_OS_WINDOWS |
| 5438 | // Either the user wants Google Test to catch exceptions thrown by the |
| 5439 | // tests or this is executing in the context of death test child |
| 5440 | // process. In either case the user does not want to see pop-up dialogs |
| 5441 | // about crashes - they are expected. |
| 5442 | if (impl()->catch_exceptions() || in_death_test_child_process) { |
| 5443 | #if !defined(GTEST_OS_WINDOWS_MOBILE) && !defined(GTEST_OS_WINDOWS_PHONE) && \ |
| 5444 | !defined(GTEST_OS_WINDOWS_RT) |
| 5445 | // SetErrorMode doesn't exist on CE. |
| 5446 | SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOALIGNMENTFAULTEXCEPT | |
| 5447 | SEM_NOGPFAULTERRORBOX | SEM_NOOPENFILEERRORBOX); |
| 5448 | #endif // !GTEST_OS_WINDOWS_MOBILE |
| 5449 | |
| 5450 | #if (defined(_MSC_VER) || defined(GTEST_OS_WINDOWS_MINGW)) && \ |
| 5451 | !defined(GTEST_OS_WINDOWS_MOBILE) |
| 5452 | // Death test children can be terminated with _abort(). On Windows, |
| 5453 | // _abort() can show a dialog with a warning message. This forces the |
| 5454 | // abort message to go to stderr instead. |
| 5455 | _set_error_mode(_OUT_TO_STDERR); |
| 5456 | #endif |
| 5457 | |
| 5458 | #if defined(_MSC_VER) && !defined(GTEST_OS_WINDOWS_MOBILE) |
| 5459 | // In the debug version, Visual Studio pops up a separate dialog |
| 5460 | // offering a choice to debug the aborted program. We need to suppress |
| 5461 | // this dialog or it will pop up for every EXPECT/ASSERT_DEATH statement |
| 5462 | // executed. Google Test will notify the user of any unexpected |
| 5463 | // failure via stderr. |
| 5464 | if (!GTEST_FLAG_GET(break_on_failure)) |
| 5465 | _set_abort_behavior( |
| 5466 | 0x0, // Clear the following flags: |
| 5467 | _WRITE_ABORT_MSG | _CALL_REPORTFAULT); // pop-up window, core dump. |
| 5468 | |
| 5469 | // In debug mode, the Windows CRT can crash with an assertion over invalid |
| 5470 | // input (e.g. passing an invalid file descriptor). The default handling |
| 5471 | // for these assertions is to pop up a dialog and wait for user input. |
| 5472 | // Instead ask the CRT to dump such assertions to stderr non-interactively. |
| 5473 | if (!IsDebuggerPresent()) { |
| 5474 | (void)_CrtSetReportMode(_CRT_ASSERT, |
| 5475 | _CRTDBG_MODE_FILE | _CRTDBG_MODE_DEBUG); |
| 5476 | (void)_CrtSetReportFile(_CRT_ASSERT, _CRTDBG_FILE_STDERR); |
| 5477 | } |
| 5478 | #endif |
| 5479 | } |
| 5480 | #else |
| 5481 | (void)in_death_test_child_process; // Needed inside the #if block above |
| 5482 | #endif // GTEST_OS_WINDOWS |
| 5483 | |
  return internal::HandleExceptionsInMethodIfSupported(
             impl(), &internal::UnitTestImpl::RunAllTests,
             "auxiliary test code (environments or event listeners)")
| 5487 | ? 0 |
| 5488 | : 1; |
| 5489 | } |
| 5490 | |
| 5491 | #if GTEST_HAS_FILE_SYSTEM |
| 5492 | // Returns the working directory when the first TEST() or TEST_F() was |
| 5493 | // executed. |
| 5494 | const char* UnitTest::original_working_dir() const { |
| 5495 | return impl_->original_working_dir_.c_str(); |
| 5496 | } |
| 5497 | #endif // GTEST_HAS_FILE_SYSTEM |
| 5498 | |
| 5499 | // Returns the TestSuite object for the test that's currently running, |
| 5500 | // or NULL if no test is running. |
| 5501 | const TestSuite* UnitTest::current_test_suite() const |
| 5502 | GTEST_LOCK_EXCLUDED_(mutex_) { |
| 5503 | internal::MutexLock lock(&mutex_); |
| 5504 | return impl_->current_test_suite(); |
| 5505 | } |
| 5506 | |
| 5507 | // Legacy API is still available but deprecated |
| 5508 | #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ |
| 5509 | const TestCase* UnitTest::current_test_case() const |
| 5510 | GTEST_LOCK_EXCLUDED_(mutex_) { |
| 5511 | internal::MutexLock lock(&mutex_); |
| 5512 | return impl_->current_test_suite(); |
| 5513 | } |
| 5514 | #endif |
| 5515 | |
| 5516 | // Returns the TestInfo object for the test that's currently running, |
| 5517 | // or NULL if no test is running. |
| 5518 | const TestInfo* UnitTest::current_test_info() const |
| 5519 | GTEST_LOCK_EXCLUDED_(mutex_) { |
| 5520 | internal::MutexLock lock(&mutex_); |
| 5521 | return impl_->current_test_info(); |
| 5522 | } |
| 5523 | |
| 5524 | // Returns the random seed used at the start of the current test run. |
| 5525 | int UnitTest::random_seed() const { return impl_->random_seed(); } |
| 5526 | |
| 5527 | // Returns ParameterizedTestSuiteRegistry object used to keep track of |
| 5528 | // value-parameterized tests and instantiate and register them. |
| 5529 | internal::ParameterizedTestSuiteRegistry& |
| 5530 | UnitTest::parameterized_test_registry() GTEST_LOCK_EXCLUDED_(mutex_) { |
| 5531 | return impl_->parameterized_test_registry(); |
| 5532 | } |
| 5533 | |
| 5534 | // Creates an empty UnitTest. |
| 5535 | UnitTest::UnitTest() { impl_ = new internal::UnitTestImpl(this); } |
| 5536 | |
| 5537 | // Destructor of UnitTest. |
| 5538 | UnitTest::~UnitTest() { delete impl_; } |
| 5539 | |
| 5540 | // Pushes a trace defined by SCOPED_TRACE() on to the per-thread |
| 5541 | // Google Test trace stack. |
| 5542 | void UnitTest::PushGTestTrace(const internal::TraceInfo& trace) |
| 5543 | GTEST_LOCK_EXCLUDED_(mutex_) { |
| 5544 | internal::MutexLock lock(&mutex_); |
  impl_->gtest_trace_stack().push_back(trace);
| 5546 | } |
| 5547 | |
| 5548 | // Pops a trace from the per-thread Google Test trace stack. |
| 5549 | void UnitTest::PopGTestTrace() GTEST_LOCK_EXCLUDED_(mutex_) { |
| 5550 | internal::MutexLock lock(&mutex_); |
| 5551 | impl_->gtest_trace_stack().pop_back(); |
| 5552 | } |
| 5553 | |
| 5554 | namespace internal { |
| 5555 | |
| 5556 | UnitTestImpl::UnitTestImpl(UnitTest* parent) |
| 5557 | : parent_(parent), |
| 5558 | GTEST_DISABLE_MSC_WARNINGS_PUSH_(4355 /* using this in initializer */) |
| 5559 | default_global_test_part_result_reporter_(this), |
| 5560 | default_per_thread_test_part_result_reporter_(this), |
| 5561 | GTEST_DISABLE_MSC_WARNINGS_POP_() global_test_part_result_reporter_( |
| 5562 | &default_global_test_part_result_reporter_), |
| 5563 | per_thread_test_part_result_reporter_( |
| 5564 | &default_per_thread_test_part_result_reporter_), |
| 5565 | parameterized_test_registry_(), |
| 5566 | parameterized_tests_registered_(false), |
| 5567 | last_death_test_suite_(-1), |
| 5568 | current_test_suite_(nullptr), |
| 5569 | current_test_info_(nullptr), |
| 5570 | ad_hoc_test_result_(), |
| 5571 | os_stack_trace_getter_(nullptr), |
| 5572 | post_flag_parse_init_performed_(false), |
| 5573 | random_seed_(0), // Will be overridden by the flag before first use. |
| 5574 | random_(0), // Will be reseeded before first use. |
| 5575 | start_timestamp_(0), |
| 5576 | elapsed_time_(0), |
| 5577 | #ifdef GTEST_HAS_DEATH_TEST |
| 5578 | death_test_factory_(new DefaultDeathTestFactory), |
| 5579 | #endif |
| 5580 | // Will be overridden by the flag before first use. |
| 5581 | catch_exceptions_(false) { |
| 5582 | listeners()->SetDefaultResultPrinter(new PrettyUnitTestResultPrinter); |
| 5583 | } |
| 5584 | |
| 5585 | UnitTestImpl::~UnitTestImpl() { |
| 5586 | // Deletes every TestSuite. |
  ForEach(test_suites_, internal::Delete<TestSuite>);

  // Deletes every Environment.
  ForEach(environments_, internal::Delete<Environment>);
| 5591 | |
| 5592 | delete os_stack_trace_getter_; |
| 5593 | } |
| 5594 | |
| 5595 | // Adds a TestProperty to the current TestResult object when invoked in a |
// context of a test, to current test suite's ad_hoc_test_result when invoked
| 5597 | // from SetUpTestSuite/TearDownTestSuite, or to the global property set |
| 5598 | // otherwise. If the result already contains a property with the same key, |
| 5599 | // the value will be updated. |
| 5600 | void UnitTestImpl::RecordProperty(const TestProperty& test_property) { |
| 5601 | std::string xml_element; |
| 5602 | TestResult* test_result; // TestResult appropriate for property recording. |
| 5603 | |
| 5604 | if (current_test_info_ != nullptr) { |
| 5605 | xml_element = "testcase" ; |
| 5606 | test_result = &(current_test_info_->result_); |
| 5607 | } else if (current_test_suite_ != nullptr) { |
| 5608 | xml_element = "testsuite" ; |
| 5609 | test_result = &(current_test_suite_->ad_hoc_test_result_); |
| 5610 | } else { |
| 5611 | xml_element = "testsuites" ; |
| 5612 | test_result = &ad_hoc_test_result_; |
| 5613 | } |
| 5614 | test_result->RecordProperty(xml_element, test_property); |
| 5615 | } |
| 5616 | |
| 5617 | #ifdef GTEST_HAS_DEATH_TEST |
| 5618 | // Disables event forwarding if the control is currently in a death test |
| 5619 | // subprocess. Must not be called before InitGoogleTest. |
| 5620 | void UnitTestImpl::SuppressTestEventsIfInSubprocess() { |
| 5621 | if (internal_run_death_test_flag_ != nullptr) |
    listeners()->SuppressEventForwarding(true);
| 5623 | } |
| 5624 | #endif // GTEST_HAS_DEATH_TEST |
| 5625 | |
| 5626 | // Initializes event listeners performing XML output as specified by |
| 5627 | // UnitTestOptions. Must not be called before InitGoogleTest. |
| 5628 | void UnitTestImpl::ConfigureXmlOutput() { |
| 5629 | const std::string& output_format = UnitTestOptions::GetOutputFormat(); |
| 5630 | #if GTEST_HAS_FILE_SYSTEM |
| 5631 | if (output_format == "xml" ) { |
| 5632 | listeners()->SetDefaultXmlGenerator(new XmlUnitTestResultPrinter( |
| 5633 | UnitTestOptions::GetAbsolutePathToOutputFile().c_str())); |
| 5634 | } else if (output_format == "json" ) { |
| 5635 | listeners()->SetDefaultXmlGenerator(new JsonUnitTestResultPrinter( |
| 5636 | UnitTestOptions::GetAbsolutePathToOutputFile().c_str())); |
| 5637 | } else if (!output_format.empty()) { |
| 5638 | GTEST_LOG_(WARNING) << "WARNING: unrecognized output format \"" |
| 5639 | << output_format << "\" ignored." ; |
| 5640 | } |
| 5641 | #else |
| 5642 | if (!output_format.empty()) { |
| 5643 | GTEST_LOG_(ERROR) << "ERROR: alternative output formats require " |
| 5644 | << "GTEST_HAS_FILE_SYSTEM to be enabled" ; |
| 5645 | } |
| 5646 | #endif // GTEST_HAS_FILE_SYSTEM |
| 5647 | } |
| 5648 | |
| 5649 | #if GTEST_CAN_STREAM_RESULTS_ |
| 5650 | // Initializes event listeners for streaming test results in string form. |
| 5651 | // Must not be called before InitGoogleTest. |
| 5652 | void UnitTestImpl::ConfigureStreamingOutput() { |
| 5653 | const std::string& target = GTEST_FLAG_GET(stream_result_to); |
| 5654 | if (!target.empty()) { |
    const size_t pos = target.find(':');
    if (pos != std::string::npos) {
      listeners()->Append(
          new StreamingListener(target.substr(0, pos), target.substr(pos + 1)));
| 5659 | } else { |
| 5660 | GTEST_LOG_(WARNING) << "unrecognized streaming target \"" << target |
| 5661 | << "\" ignored." ; |
| 5662 | } |
| 5663 | } |
| 5664 | } |
| 5665 | #endif // GTEST_CAN_STREAM_RESULTS_ |
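
// Example (documented flag syntax; host and port are illustrative): passing
//
//   --gtest_stream_result_to=localhost:9000
//
// makes ConfigureStreamingOutput() above install a StreamingListener that
// connects to localhost:9000 and streams test events to it.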
| 5666 | |
| 5667 | // Performs initialization dependent upon flag values obtained in |
| 5668 | // ParseGoogleTestFlagsOnly. Is called from InitGoogleTest after the call to |
| 5669 | // ParseGoogleTestFlagsOnly. In case a user neglects to call InitGoogleTest |
| 5670 | // this function is also called from RunAllTests. Since this function can be |
| 5671 | // called more than once, it has to be idempotent. |
| 5672 | void UnitTestImpl::PostFlagParsingInit() { |
| 5673 | // Ensures that this function does not execute more than once. |
| 5674 | if (!post_flag_parse_init_performed_) { |
| 5675 | post_flag_parse_init_performed_ = true; |
| 5676 | |
| 5677 | #if defined(GTEST_CUSTOM_TEST_EVENT_LISTENER_) |
| 5678 | // Register to send notifications about key process state changes. |
| 5679 | listeners()->Append(new GTEST_CUSTOM_TEST_EVENT_LISTENER_()); |
| 5680 | #endif // defined(GTEST_CUSTOM_TEST_EVENT_LISTENER_) |
| 5681 | |
| 5682 | #ifdef GTEST_HAS_DEATH_TEST |
| 5683 | InitDeathTestSubprocessControlInfo(); |
| 5684 | SuppressTestEventsIfInSubprocess(); |
| 5685 | #endif // GTEST_HAS_DEATH_TEST |
| 5686 | |
| 5687 | // Registers parameterized tests. This makes parameterized tests |
| 5688 | // available to the UnitTest reflection API without running |
| 5689 | // RUN_ALL_TESTS. |
| 5690 | RegisterParameterizedTests(); |
| 5691 | |
| 5692 | // Configures listeners for XML output. This makes it possible for users |
| 5693 | // to shut down the default XML output before invoking RUN_ALL_TESTS. |
| 5694 | ConfigureXmlOutput(); |
| 5695 | |
| 5696 | if (GTEST_FLAG_GET(brief)) { |
| 5697 | listeners()->SetDefaultResultPrinter(new BriefUnitTestResultPrinter); |
| 5698 | } |
| 5699 | |
| 5700 | #if GTEST_CAN_STREAM_RESULTS_ |
| 5701 | // Configures listeners for streaming test results to the specified server. |
| 5702 | ConfigureStreamingOutput(); |
| 5703 | #endif // GTEST_CAN_STREAM_RESULTS_ |
| 5704 | |
| 5705 | #ifdef GTEST_HAS_ABSL |
| 5706 | if (GTEST_FLAG_GET(install_failure_signal_handler)) { |
| 5707 | absl::FailureSignalHandlerOptions options; |
| 5708 | absl::InstallFailureSignalHandler(options); |
| 5709 | } |
| 5710 | #endif // GTEST_HAS_ABSL |
| 5711 | } |
| 5712 | } |
| 5713 | |
| 5714 | // A predicate that checks the name of a TestSuite against a known |
| 5715 | // value. |
| 5716 | // |
| 5717 | // This is used for implementation of the UnitTest class only. We put |
| 5718 | // it in the anonymous namespace to prevent polluting the outer |
| 5719 | // namespace. |
| 5720 | // |
| 5721 | // TestSuiteNameIs is copyable. |
| 5722 | class TestSuiteNameIs { |
| 5723 | public: |
| 5724 | // Constructor. |
| 5725 | explicit TestSuiteNameIs(const std::string& name) : name_(name) {} |
| 5726 | |
| 5727 | // Returns true if and only if the name of test_suite matches name_. |
| 5728 | bool operator()(const TestSuite* test_suite) const { |
| 5729 | return test_suite != nullptr && |
           strcmp(test_suite->name(), name_.c_str()) == 0;
| 5731 | } |
| 5732 | |
| 5733 | private: |
| 5734 | std::string name_; |
| 5735 | }; |
| 5736 | |
| 5737 | // Finds and returns a TestSuite with the given name. If one doesn't |
| 5738 | // exist, creates one and returns it. It's the CALLER'S |
| 5739 | // RESPONSIBILITY to ensure that this function is only called WHEN THE |
| 5740 | // TESTS ARE NOT SHUFFLED. |
| 5741 | // |
| 5742 | // Arguments: |
| 5743 | // |
| 5744 | // test_suite_name: name of the test suite |
| 5745 | // type_param: the name of the test suite's type parameter, or NULL if |
| 5746 | // this is not a typed or a type-parameterized test suite. |
| 5747 | // set_up_tc: pointer to the function that sets up the test suite |
| 5748 | // tear_down_tc: pointer to the function that tears down the test suite |
| 5749 | TestSuite* UnitTestImpl::GetTestSuite( |
| 5750 | const char* test_suite_name, const char* type_param, |
| 5751 | internal::SetUpTestSuiteFunc set_up_tc, |
| 5752 | internal::TearDownTestSuiteFunc tear_down_tc) { |
| 5753 | // Can we find a TestSuite with the given name? |
| 5754 | const auto test_suite = |
      std::find_if(test_suites_.rbegin(), test_suites_.rend(),
                   TestSuiteNameIs(test_suite_name));
| 5757 | |
| 5758 | if (test_suite != test_suites_.rend()) return *test_suite; |
| 5759 | |
| 5760 | // No. Let's create one. |
| 5761 | auto* const new_test_suite = |
| 5762 | new TestSuite(test_suite_name, type_param, set_up_tc, tear_down_tc); |
| 5763 | |
| 5764 | const UnitTestFilter death_test_suite_filter(kDeathTestSuiteFilter); |
| 5765 | // Is this a death test suite? |
  if (death_test_suite_filter.MatchesName(test_suite_name)) {
    // Yes. Inserts the test suite after the last death test suite
    // defined so far. This only works when the test suites haven't
    // been shuffled. Otherwise we may end up running a death test
    // after a non-death test.
    ++last_death_test_suite_;
    test_suites_.insert(test_suites_.begin() + last_death_test_suite_,
                        new_test_suite);
  } else {
    // No. Appends to the end of the list.
    test_suites_.push_back(new_test_suite);
  }

  test_suite_indices_.push_back(static_cast<int>(test_suite_indices_.size()));
| 5780 | return new_test_suite; |
| 5781 | } |
| 5782 | |
| 5783 | // Helpers for setting up / tearing down the given environment. They |
| 5784 | // are for use in the ForEach() function. |
| 5785 | static void SetUpEnvironment(Environment* env) { env->SetUp(); } |
| 5786 | static void TearDownEnvironment(Environment* env) { env->TearDown(); } |
| 5787 | |
| 5788 | // Runs all tests in this UnitTest object, prints the result, and |
| 5789 | // returns true if all tests are successful. If any exception is |
| 5790 | // thrown during a test, the test is considered to be failed, but the |
| 5791 | // rest of the tests will still be run. |
| 5792 | // |
| 5793 | // When parameterized tests are enabled, it expands and registers |
| 5794 | // parameterized tests first in RegisterParameterizedTests(). |
| 5795 | // All other functions called from RunAllTests() may safely assume that |
| 5796 | // parameterized tests are ready to be counted and run. |
| 5797 | bool UnitTestImpl::RunAllTests() { |
| 5798 | // True if and only if Google Test is initialized before RUN_ALL_TESTS() is |
| 5799 | // called. |
| 5800 | const bool gtest_is_initialized_before_run_all_tests = GTestIsInitialized(); |
| 5801 | |
| 5802 | // Do not run any test if the --help flag was specified. |
| 5803 | if (g_help_flag) return true; |
| 5804 | |
| 5805 | // Repeats the call to the post-flag parsing initialization in case the |
| 5806 | // user didn't call InitGoogleTest. |
| 5807 | PostFlagParsingInit(); |
| 5808 | |
| 5809 | #if GTEST_HAS_FILE_SYSTEM |
| 5810 | // Even if sharding is not on, test runners may want to use the |
| 5811 | // GTEST_SHARD_STATUS_FILE to query whether the test supports the sharding |
| 5812 | // protocol. |
| 5813 | internal::WriteToShardStatusFileIfNeeded(); |
| 5814 | #endif // GTEST_HAS_FILE_SYSTEM |
| 5815 | |
| 5816 | // True if and only if we are in a subprocess for running a thread-safe-style |
| 5817 | // death test. |
| 5818 | bool in_subprocess_for_death_test = false; |
| 5819 | |
| 5820 | #ifdef GTEST_HAS_DEATH_TEST |
| 5821 | in_subprocess_for_death_test = (internal_run_death_test_flag_ != nullptr); |
| 5822 | #if defined(GTEST_EXTRA_DEATH_TEST_CHILD_SETUP_) |
| 5823 | if (in_subprocess_for_death_test) { |
| 5824 | GTEST_EXTRA_DEATH_TEST_CHILD_SETUP_(); |
| 5825 | } |
| 5826 | #endif // defined(GTEST_EXTRA_DEATH_TEST_CHILD_SETUP_) |
| 5827 | #endif // GTEST_HAS_DEATH_TEST |
| 5828 | |
  const bool should_shard = ShouldShard(kTestTotalShards, kTestShardIndex,
                                        in_subprocess_for_death_test);
| 5831 | |
| 5832 | // Compares the full test names with the filter to decide which |
| 5833 | // tests to run. |
| 5834 | const bool has_tests_to_run = |
| 5835 | FilterTests(shard_tests: should_shard ? HONOR_SHARDING_PROTOCOL |
| 5836 | : IGNORE_SHARDING_PROTOCOL) > 0; |
| 5837 | |
| 5838 | // Lists the tests and exits if the --gtest_list_tests flag was specified. |
| 5839 | if (GTEST_FLAG_GET(list_tests)) { |
| 5840 | // This must be called *after* FilterTests() has been called. |
| 5841 | ListTestsMatchingFilter(); |
| 5842 | return true; |
| 5843 | } |
| 5844 | |
| 5845 | random_seed_ = GetRandomSeedFromFlag(GTEST_FLAG_GET(random_seed)); |
| 5846 | |
| 5847 | // True if and only if at least one test has failed. |
| 5848 | bool failed = false; |
| 5849 | |
| 5850 | TestEventListener* repeater = listeners()->repeater(); |
| 5851 | |
| 5852 | start_timestamp_ = GetTimeInMillis(); |
  repeater->OnTestProgramStart(*parent_);
| 5854 | |
| 5855 | // How many times to repeat the tests? We don't want to repeat them |
| 5856 | // when we are inside the subprocess of a death test. |
| 5857 | const int repeat = in_subprocess_for_death_test ? 1 : GTEST_FLAG_GET(repeat); |
| 5858 | |
| 5859 | // Repeats forever if the repeat count is negative. |
| 5860 | const bool gtest_repeat_forever = repeat < 0; |
| 5861 | |
| 5862 | // Should test environments be set up and torn down for each repeat, or only |
| 5863 | // set up on the first and torn down on the last iteration? If there is no |
| 5864 | // "last" iteration because the tests will repeat forever, always recreate the |
| 5865 | // environments to avoid leaks in case one of the environments is using |
| 5866 | // resources that are external to this process. Without this check there would |
| 5867 | // be no way to clean up those external resources automatically. |
| 5868 | const bool recreate_environments_when_repeating = |
| 5869 | GTEST_FLAG_GET(recreate_environments_when_repeating) || |
| 5870 | gtest_repeat_forever; |
| 5871 | |
| 5872 | for (int i = 0; gtest_repeat_forever || i != repeat; i++) { |
| 5873 | // We want to preserve failures generated by ad-hoc test |
| 5874 | // assertions executed before RUN_ALL_TESTS(). |
| 5875 | ClearNonAdHocTestResult(); |
| 5876 | |
| 5877 | Timer timer; |
| 5878 | |
| 5879 | // Shuffles test suites and tests if requested. |
| 5880 | if (has_tests_to_run && GTEST_FLAG_GET(shuffle)) { |
      random()->Reseed(static_cast<uint32_t>(random_seed_));
| 5882 | // This should be done before calling OnTestIterationStart(), |
| 5883 | // such that a test event listener can see the actual test order |
| 5884 | // in the event. |
| 5885 | ShuffleTests(); |
| 5886 | } |
| 5887 | |
| 5888 | // Tells the unit test event listeners that the tests are about to start. |
    repeater->OnTestIterationStart(*parent_, i);
| 5890 | |
| 5891 | // Runs each test suite if there is at least one test to run. |
| 5892 | if (has_tests_to_run) { |
| 5893 | // Sets up all environments beforehand. If test environments aren't |
| 5894 | // recreated for each iteration, only do so on the first iteration. |
| 5895 | if (i == 0 || recreate_environments_when_repeating) { |
        repeater->OnEnvironmentsSetUpStart(*parent_);
        ForEach(environments_, SetUpEnvironment);
        repeater->OnEnvironmentsSetUpEnd(*parent_);
| 5899 | } |
| 5900 | |
| 5901 | // Runs the tests only if there was no fatal failure or skip triggered |
| 5902 | // during global set-up. |
| 5903 | if (Test::IsSkipped()) { |
| 5904 | // Emit diagnostics when global set-up calls skip, as it will not be |
| 5905 | // emitted by default. |
| 5906 | TestResult& test_result = |
| 5907 | *internal::GetUnitTestImpl()->current_test_result(); |
| 5908 | for (int j = 0; j < test_result.total_part_count(); ++j) { |
| 5909 | const TestPartResult& test_part_result = |
              test_result.GetTestPartResult(j);
          if (test_part_result.type() == TestPartResult::kSkip) {
            const std::string& result = test_part_result.message();
            printf("%s\n", result.c_str());
| 5914 | } |
| 5915 | } |
| 5916 | fflush(stdout); |
| 5917 | } else if (!Test::HasFatalFailure()) { |
| 5918 | for (int test_index = 0; test_index < total_test_suite_count(); |
| 5919 | test_index++) { |
          GetMutableSuiteCase(test_index)->Run();
          if (GTEST_FLAG_GET(fail_fast) &&
              GetMutableSuiteCase(test_index)->Failed()) {
            for (int j = test_index + 1; j < total_test_suite_count(); j++) {
              GetMutableSuiteCase(j)->Skip();
| 5925 | } |
| 5926 | break; |
| 5927 | } |
| 5928 | } |
| 5929 | } else if (Test::HasFatalFailure()) { |
| 5930 | // If there was a fatal failure during the global setup then we know we |
| 5931 | // aren't going to run any tests. Explicitly mark all of the tests as |
| 5932 | // skipped to make this obvious in the output. |
| 5933 | for (int test_index = 0; test_index < total_test_suite_count(); |
| 5934 | test_index++) { |
          GetMutableSuiteCase(test_index)->Skip();
| 5936 | } |
| 5937 | } |
| 5938 | |
| 5939 | // Tears down all environments in reverse order afterwards. If test |
| 5940 | // environments aren't recreated for each iteration, only do so on the |
| 5941 | // last iteration. |
| 5942 | if (i == repeat - 1 || recreate_environments_when_repeating) { |
        repeater->OnEnvironmentsTearDownStart(*parent_);
        std::for_each(environments_.rbegin(), environments_.rend(),
                      TearDownEnvironment);
        repeater->OnEnvironmentsTearDownEnd(*parent_);
| 5947 | } |
| 5948 | } |
| 5949 | |
| 5950 | elapsed_time_ = timer.Elapsed(); |
| 5951 | |
| 5952 | // Tells the unit test event listener that the tests have just finished. |
    repeater->OnTestIterationEnd(*parent_, i);
| 5954 | |
| 5955 | // Gets the result and clears it. |
| 5956 | if (!Passed()) { |
| 5957 | failed = true; |
| 5958 | } |
| 5959 | |
| 5960 | // Restores the original test order after the iteration. This |
| 5961 | // allows the user to quickly repro a failure that happens in the |
| 5962 | // N-th iteration without repeating the first (N - 1) iterations. |
| 5963 | // This is not enclosed in "if (GTEST_FLAG(shuffle)) { ... }", in |
| 5964 | // case the user somehow changes the value of the flag somewhere |
| 5965 | // (it's always safe to unshuffle the tests). |
| 5966 | UnshuffleTests(); |
| 5967 | |
| 5968 | if (GTEST_FLAG_GET(shuffle)) { |
| 5969 | // Picks a new random seed for each iteration. |
      random_seed_ = GetNextRandomSeed(random_seed_);
    }
  }

  repeater->OnTestProgramEnd(*parent_);
| 5975 | |
| 5976 | if (!gtest_is_initialized_before_run_all_tests) { |
    ColoredPrintf(
        GTestColor::kRed,
        "\nIMPORTANT NOTICE - DO NOT IGNORE:\n"
        "This test program did NOT call " GTEST_INIT_GOOGLE_TEST_NAME_
        "() before calling RUN_ALL_TESTS(). This is INVALID. Soon " GTEST_NAME_
        " will start to enforce the valid usage. "
        "Please fix it ASAP, or IT WILL START TO FAIL.\n"); // NOLINT
| 5984 | } |
| 5985 | |
| 5986 | return !failed; |
| 5987 | } |
| 5988 | |
| 5989 | #if GTEST_HAS_FILE_SYSTEM |
| 5990 | // Reads the GTEST_SHARD_STATUS_FILE environment variable, and creates the file |
| 5991 | // if the variable is present. If a file already exists at this location, this |
| 5992 | // function will write over it. If the variable is present, but the file cannot |
| 5993 | // be created, prints an error and exits. |
| 5994 | void WriteToShardStatusFileIfNeeded() { |
  const char* const test_shard_file = posix::GetEnv(kTestShardStatusFile);
  if (test_shard_file != nullptr) {
    FILE* const file = posix::FOpen(test_shard_file, "w");
    if (file == nullptr) {
      ColoredPrintf(GTestColor::kRed,
                    "Could not write to the test shard status file \"%s\" "
                    "specified by the %s environment variable.\n",
                    test_shard_file, kTestShardStatusFile);
      fflush(stdout);
      exit(EXIT_FAILURE);
    }
    fclose(file);
| 6007 | } |
| 6008 | } |
| 6009 | #endif // GTEST_HAS_FILE_SYSTEM |
| 6010 | |
| 6011 | // Checks whether sharding is enabled by examining the relevant |
| 6012 | // environment variable values. If the variables are present, |
| 6013 | // but inconsistent (i.e., shard_index >= total_shards), prints |
| 6014 | // an error and exits. If in_subprocess_for_death_test, sharding is |
| 6015 | // disabled because it must only be applied to the original test |
| 6016 | // process. Otherwise, we could filter out death tests we intended to execute. |
| 6017 | bool ShouldShard(const char* total_shards_env, const char* shard_index_env, |
| 6018 | bool in_subprocess_for_death_test) { |
| 6019 | if (in_subprocess_for_death_test) { |
| 6020 | return false; |
| 6021 | } |
| 6022 | |
  const int32_t total_shards = Int32FromEnvOrDie(total_shards_env, -1);
  const int32_t shard_index = Int32FromEnvOrDie(shard_index_env, -1);
| 6025 | |
| 6026 | if (total_shards == -1 && shard_index == -1) { |
| 6027 | return false; |
| 6028 | } else if (total_shards == -1 && shard_index != -1) { |
| 6029 | const Message msg = Message() << "Invalid environment variables: you have " |
| 6030 | << kTestShardIndex << " = " << shard_index |
| 6031 | << ", but have left " << kTestTotalShards |
| 6032 | << " unset.\n" ; |
| 6033 | ColoredPrintf(color: GTestColor::kRed, fmt: "%s" , msg.GetString().c_str()); |
| 6034 | fflush(stdout); |
| 6035 | exit(EXIT_FAILURE); |
| 6036 | } else if (total_shards != -1 && shard_index == -1) { |
| 6037 | const Message msg = Message() |
| 6038 | << "Invalid environment variables: you have " |
| 6039 | << kTestTotalShards << " = " << total_shards |
| 6040 | << ", but have left " << kTestShardIndex << " unset.\n" ; |
| 6041 | ColoredPrintf(color: GTestColor::kRed, fmt: "%s" , msg.GetString().c_str()); |
| 6042 | fflush(stdout); |
| 6043 | exit(EXIT_FAILURE); |
| 6044 | } else if (shard_index < 0 || shard_index >= total_shards) { |
| 6045 | const Message msg = |
| 6046 | Message() << "Invalid environment variables: we require 0 <= " |
| 6047 | << kTestShardIndex << " < " << kTestTotalShards |
| 6048 | << ", but you have " << kTestShardIndex << "=" << shard_index |
| 6049 | << ", " << kTestTotalShards << "=" << total_shards << ".\n" ; |
| 6050 | ColoredPrintf(color: GTestColor::kRed, fmt: "%s" , msg.GetString().c_str()); |
| 6051 | fflush(stdout); |
| 6052 | exit(EXIT_FAILURE); |
| 6053 | } |
| 6054 | |
| 6055 | return total_shards > 1; |
| 6056 | } |
| 6057 | |
| 6058 | // Parses the environment variable var as an Int32. If it is unset, |
| 6059 | // returns default_val. If it is not an Int32, prints an error |
| 6060 | // and aborts. |
| 6061 | int32_t Int32FromEnvOrDie(const char* var, int32_t default_val) { |
  const char* str_val = posix::GetEnv(var);
  if (str_val == nullptr) {
    return default_val;
  }

  int32_t result;
  if (!ParseInt32(Message() << "The value of environment variable " << var,
                  str_val, &result)) {
| 6070 | exit(EXIT_FAILURE); |
| 6071 | } |
| 6072 | return result; |
| 6073 | } |
| 6074 | |
| 6075 | // Given the total number of shards, the shard index, and the test id, |
| 6076 | // returns true if and only if the test should be run on this shard. The test id |
| 6077 | // is some arbitrary but unique non-negative integer assigned to each test |
| 6078 | // method. Assumes that 0 <= shard_index < total_shards. |
| 6079 | bool ShouldRunTestOnShard(int total_shards, int shard_index, int test_id) { |
| 6080 | return (test_id % total_shards) == shard_index; |
| 6081 | } |
| 6082 | |
| 6083 | // Compares the name of each test with the user-specified filter to |
| 6084 | // decide whether the test should be run, then records the result in |
| 6085 | // each TestSuite and TestInfo object. |
| 6086 | // If shard_tests == true, further filters tests based on sharding |
| 6087 | // variables in the environment - see |
| 6088 | // https://github.com/google/googletest/blob/main/docs/advanced.md |
| 6089 | // . Returns the number of tests that should run. |
| 6090 | int UnitTestImpl::FilterTests(ReactionToSharding shard_tests) { |
  const int32_t total_shards = shard_tests == HONOR_SHARDING_PROTOCOL
                                   ? Int32FromEnvOrDie(kTestTotalShards, -1)
                                   : -1;
  const int32_t shard_index = shard_tests == HONOR_SHARDING_PROTOCOL
                                  ? Int32FromEnvOrDie(kTestShardIndex, -1)
                                  : -1;
| 6097 | |
| 6098 | const PositiveAndNegativeUnitTestFilter gtest_flag_filter( |
| 6099 | GTEST_FLAG_GET(filter)); |
| 6100 | const UnitTestFilter disable_test_filter(kDisableTestFilter); |
| 6101 | // num_runnable_tests are the number of tests that will |
| 6102 | // run across all shards (i.e., match filter and are not disabled). |
| 6103 | // num_selected_tests are the number of tests to be run on |
| 6104 | // this shard. |
| 6105 | int num_runnable_tests = 0; |
| 6106 | int num_selected_tests = 0; |
| 6107 | for (auto* test_suite : test_suites_) { |
| 6108 | const std::string& test_suite_name = test_suite->name(); |
| 6109 | test_suite->set_should_run(false); |
| 6110 | |
| 6111 | for (size_t j = 0; j < test_suite->test_info_list().size(); j++) { |
| 6112 | TestInfo* const test_info = test_suite->test_info_list()[j]; |
| 6113 | const std::string test_name(test_info->name()); |
| 6114 | // A test is disabled if test suite name or test name matches |
| 6115 | // kDisableTestFilter. |
| 6116 | const bool is_disabled = |
          disable_test_filter.MatchesName(test_suite_name) ||
          disable_test_filter.MatchesName(test_name);
| 6119 | test_info->is_disabled_ = is_disabled; |
| 6120 | |
| 6121 | const bool matches_filter = |
| 6122 | gtest_flag_filter.MatchesTest(test_suite_name, test_name); |
| 6123 | test_info->matches_filter_ = matches_filter; |
| 6124 | |
| 6125 | const bool is_runnable = |
| 6126 | (GTEST_FLAG_GET(also_run_disabled_tests) || !is_disabled) && |
| 6127 | matches_filter; |
| 6128 | |
| 6129 | const bool is_in_another_shard = |
| 6130 | shard_tests != IGNORE_SHARDING_PROTOCOL && |
          !ShouldRunTestOnShard(total_shards, shard_index, num_runnable_tests);
| 6132 | test_info->is_in_another_shard_ = is_in_another_shard; |
| 6133 | const bool is_selected = is_runnable && !is_in_another_shard; |
| 6134 | |
| 6135 | num_runnable_tests += is_runnable; |
| 6136 | num_selected_tests += is_selected; |
| 6137 | |
| 6138 | test_info->should_run_ = is_selected; |
| 6139 | test_suite->set_should_run(test_suite->should_run() || is_selected); |
| 6140 | } |
| 6141 | } |
| 6142 | return num_selected_tests; |
| 6143 | } |
| 6144 | |
| 6145 | // Prints the given C-string on a single line by replacing all '\n' |
| 6146 | // characters with string "\\n". If the output takes more than |
| 6147 | // max_length characters, only prints the first max_length characters |
| 6148 | // and "...". |
| 6149 | static void PrintOnOneLine(const char* str, int max_length) { |
| 6150 | if (str != nullptr) { |
| 6151 | for (int i = 0; *str != '\0'; ++str) { |
| 6152 | if (i >= max_length) { |
        printf("...");
        break;
      }
      if (*str == '\n') {
        printf("\\n");
        i += 2;
      } else {
        printf("%c", *str);
| 6161 | ++i; |
| 6162 | } |
| 6163 | } |
| 6164 | } |
| 6165 | } |
| 6166 | |
| 6167 | // Prints the names of the tests matching the user-specified filter flag. |
| 6168 | void UnitTestImpl::ListTestsMatchingFilter() { |
| 6169 | // Print at most this many characters for each type/value parameter. |
| 6170 | const int kMaxParamLength = 250; |
| 6171 | |
| 6172 | for (auto* test_suite : test_suites_) { |
| 6173 | bool printed_test_suite_name = false; |
| 6174 | |
| 6175 | for (size_t j = 0; j < test_suite->test_info_list().size(); j++) { |
| 6176 | const TestInfo* const test_info = test_suite->test_info_list()[j]; |
| 6177 | if (test_info->matches_filter_) { |
| 6178 | if (!printed_test_suite_name) { |
| 6179 | printed_test_suite_name = true; |
printf("%s.", test_suite->name());
if (test_suite->type_param() != nullptr) {
printf(" # %s = ", kTypeParamLabel);
// We print the type parameter on a single line to make
// the output easy to parse by a program.
PrintOnOneLine(test_suite->type_param(), kMaxParamLength);
}
printf("\n");
}
printf(" %s", test_info->name());
if (test_info->value_param() != nullptr) {
printf(" # %s = ", kValueParamLabel);
// We print the value parameter on a single line to make the
// output easy to parse by a program.
PrintOnOneLine(test_info->value_param(), kMaxParamLength);
}
printf("\n");
| 6197 | } |
| 6198 | } |
| 6199 | } |
| 6200 | fflush(stdout); |
| 6201 | #if GTEST_HAS_FILE_SYSTEM |
| 6202 | const std::string& output_format = UnitTestOptions::GetOutputFormat(); |
if (output_format == "xml" || output_format == "json") {
FILE* fileout = OpenFileForWriting(
UnitTestOptions::GetAbsolutePathToOutputFile().c_str());
std::stringstream stream;
if (output_format == "xml") {
XmlUnitTestResultPrinter(
UnitTestOptions::GetAbsolutePathToOutputFile().c_str())
.PrintXmlTestsList(&stream, test_suites_);
} else if (output_format == "json") {
JsonUnitTestResultPrinter(
UnitTestOptions::GetAbsolutePathToOutputFile().c_str())
.PrintJsonTestList(&stream, test_suites_);
}
fprintf(fileout, "%s", StringStreamToString(&stream).c_str());
fclose(fileout);
| 6218 | } |
| 6219 | #endif // GTEST_HAS_FILE_SYSTEM |
| 6220 | } |
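
// Sample console output of the listing loop above (illustrative), with one
// plain suite and one value-parameterized suite, assuming kValueParamLabel
// is "GetParam()":
//   FooTest.
//     DoesThis
//     DoesThat
//   Seq/ParamTest.
//     Works/0  # GetParam() = 2
//     Works/1  # GetParam() = 4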
| 6221 | |
| 6222 | // Sets the OS stack trace getter. |
| 6223 | // |
| 6224 | // Does nothing if the input and the current OS stack trace getter are |
| 6225 | // the same; otherwise, deletes the old getter and makes the input the |
| 6226 | // current getter. |
| 6227 | void UnitTestImpl::set_os_stack_trace_getter( |
| 6228 | OsStackTraceGetterInterface* getter) { |
| 6229 | if (os_stack_trace_getter_ != getter) { |
| 6230 | delete os_stack_trace_getter_; |
| 6231 | os_stack_trace_getter_ = getter; |
| 6232 | } |
| 6233 | } |
| 6234 | |
| 6235 | // Returns the current OS stack trace getter if it is not NULL; |
| 6236 | // otherwise, creates an OsStackTraceGetter, makes it the current |
| 6237 | // getter, and returns it. |
| 6238 | OsStackTraceGetterInterface* UnitTestImpl::os_stack_trace_getter() { |
| 6239 | if (os_stack_trace_getter_ == nullptr) { |
| 6240 | #ifdef GTEST_OS_STACK_TRACE_GETTER_ |
| 6241 | os_stack_trace_getter_ = new GTEST_OS_STACK_TRACE_GETTER_; |
| 6242 | #else |
| 6243 | os_stack_trace_getter_ = new OsStackTraceGetter; |
| 6244 | #endif // GTEST_OS_STACK_TRACE_GETTER_ |
| 6245 | } |
| 6246 | |
| 6247 | return os_stack_trace_getter_; |
| 6248 | } |
| 6249 | |
| 6250 | // Returns the most specific TestResult currently running. |
| 6251 | TestResult* UnitTestImpl::current_test_result() { |
| 6252 | if (current_test_info_ != nullptr) { |
return &current_test_info_->result_;
}
if (current_test_suite_ != nullptr) {
return &current_test_suite_->ad_hoc_test_result_;
| 6257 | } |
| 6258 | return &ad_hoc_test_result_; |
| 6259 | } |
| 6260 | |
| 6261 | // Shuffles all test suites, and the tests within each test suite, |
| 6262 | // making sure that death tests are still run first. |
| 6263 | void UnitTestImpl::ShuffleTests() { |
| 6264 | // Shuffles the death test suites. |
ShuffleRange(random(), 0, last_death_test_suite_ + 1, &test_suite_indices_);

// Shuffles the non-death test suites.
ShuffleRange(random(), last_death_test_suite_ + 1,
static_cast<int>(test_suites_.size()), &test_suite_indices_);

// Shuffles the tests inside each test suite.
for (auto& test_suite : test_suites_) {
test_suite->ShuffleTests(random());
| 6274 | } |
| 6275 | } |
| 6276 | |
| 6277 | // Restores the test suites and tests to their order before the first shuffle. |
| 6278 | void UnitTestImpl::UnshuffleTests() { |
| 6279 | for (size_t i = 0; i < test_suites_.size(); i++) { |
| 6280 | // Unshuffles the tests in each test suite. |
| 6281 | test_suites_[i]->UnshuffleTests(); |
| 6282 | // Resets the index of each test suite. |
| 6283 | test_suite_indices_[i] = static_cast<int>(i); |
| 6284 | } |
| 6285 | } |
| 6286 | |
| 6287 | // Returns the current OS stack trace as an std::string. |
| 6288 | // |
| 6289 | // The maximum number of stack frames to be included is specified by |
| 6290 | // the gtest_stack_trace_depth flag. The skip_count parameter |
| 6291 | // specifies the number of top frames to be skipped, which doesn't |
| 6292 | // count against the number of frames to be included. |
| 6293 | // |
| 6294 | // For example, if Foo() calls Bar(), which in turn calls |
| 6295 | // GetCurrentOsStackTraceExceptTop(..., 1), Foo() will be included in |
| 6296 | // the trace but Bar() and GetCurrentOsStackTraceExceptTop() won't. |
| 6297 | GTEST_NO_INLINE_ GTEST_NO_TAIL_CALL_ std::string |
| 6298 | GetCurrentOsStackTraceExceptTop(int skip_count) { |
| 6299 | // We pass skip_count + 1 to skip this wrapper function in addition |
| 6300 | // to what the user really wants to skip. |
return GetUnitTestImpl()->CurrentOsStackTraceExceptTop(skip_count + 1);
| 6302 | } |
| 6303 | |
| 6304 | // Used by the GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_ macro to |
| 6305 | // suppress unreachable code warnings. |
| 6306 | namespace { |
| 6307 | class ClassUniqueToAlwaysTrue {}; |
| 6308 | } // namespace |
| 6309 | |
| 6310 | bool IsTrue(bool condition) { return condition; } |
| 6311 | |
| 6312 | bool AlwaysTrue() { |
| 6313 | #if GTEST_HAS_EXCEPTIONS |
| 6314 | // This condition is always false so AlwaysTrue() never actually throws, |
| 6315 | // but it makes the compiler think that it may throw. |
| 6316 | if (IsTrue(false)) throw ClassUniqueToAlwaysTrue(); |
| 6317 | #endif // GTEST_HAS_EXCEPTIONS |
| 6318 | return true; |
| 6319 | } |
| 6320 | |
| 6321 | // If *pstr starts with the given prefix, modifies *pstr to be right |
| 6322 | // past the prefix and returns true; otherwise leaves *pstr unchanged |
| 6323 | // and returns false. None of pstr, *pstr, and prefix can be NULL. |
| 6324 | bool SkipPrefix(const char* prefix, const char** pstr) { |
const size_t prefix_len = strlen(prefix);
if (strncmp(*pstr, prefix, prefix_len) == 0) {
| 6327 | *pstr += prefix_len; |
| 6328 | return true; |
| 6329 | } |
| 6330 | return false; |
| 6331 | } |
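
// For example (illustrative):
//   const char* s = "gtest_filter";
//   SkipPrefix("gtest_", &s);  // returns true; s now points to "filter"
//   SkipPrefix("gmock_", &s);  // returns false; s is unchanged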
| 6332 | |
| 6333 | // Parses a string as a command line flag. The string should have |
| 6334 | // the format "--flag=value". When def_optional is true, the "=value" |
| 6335 | // part can be omitted. |
| 6336 | // |
| 6337 | // Returns the value of the flag, or NULL if the parsing failed. |
| 6338 | static const char* ParseFlagValue(const char* str, const char* flag_name, |
| 6339 | bool def_optional) { |
| 6340 | // str and flag must not be NULL. |
| 6341 | if (str == nullptr || flag_name == nullptr) return nullptr; |
| 6342 | |
| 6343 | // The flag must start with "--" followed by GTEST_FLAG_PREFIX_. |
| 6344 | const std::string flag_str = |
std::string("--") + GTEST_FLAG_PREFIX_ + flag_name;
const size_t flag_len = flag_str.length();
if (strncmp(str, flag_str.c_str(), flag_len) != 0) return nullptr;
| 6348 | |
| 6349 | // Skips the flag name. |
| 6350 | const char* flag_end = str + flag_len; |
| 6351 | |
| 6352 | // When def_optional is true, it's OK to not have a "=value" part. |
| 6353 | if (def_optional && (flag_end[0] == '\0')) { |
| 6354 | return flag_end; |
| 6355 | } |
| 6356 | |
| 6357 | // If def_optional is true and there are more characters after the |
| 6358 | // flag name, or if def_optional is false, there must be a '=' after |
| 6359 | // the flag name. |
| 6360 | if (flag_end[0] != '=') return nullptr; |
| 6361 | |
| 6362 | // Returns the string after "=". |
| 6363 | return flag_end + 1; |
| 6364 | } |
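
// Illustrative behavior, assuming GTEST_FLAG_PREFIX_ is "gtest_":
//   ParseFlagValue("--gtest_output=xml", "output", false)     returns "xml"
//   ParseFlagValue("--gtest_list_tests", "list_tests", true)  returns ""
//   ParseFlagValue("--output=xml", "output", false)           returns NULL
// (the last call fails because the "--gtest_" prefix is missing).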
| 6365 | |
| 6366 | // Parses a string for a bool flag, in the form of either |
| 6367 | // "--flag=value" or "--flag". |
| 6368 | // |
| 6369 | // In the former case, the value is taken as true as long as it does |
| 6370 | // not start with '0', 'f', or 'F'. |
| 6371 | // |
| 6372 | // In the latter case, the value is taken as true. |
| 6373 | // |
| 6374 | // On success, stores the value of the flag in *value, and returns |
| 6375 | // true. On failure, returns false without changing *value. |
| 6376 | static bool ParseFlag(const char* str, const char* flag_name, bool* value) { |
| 6377 | // Gets the value of the flag as a string. |
const char* const value_str = ParseFlagValue(str, flag_name, true);
| 6379 | |
| 6380 | // Aborts if the parsing failed. |
| 6381 | if (value_str == nullptr) return false; |
| 6382 | |
| 6383 | // Converts the string value to a bool. |
| 6384 | *value = !(*value_str == '0' || *value_str == 'f' || *value_str == 'F'); |
| 6385 | return true; |
| 6386 | } |
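
// For example (illustrative), assuming GTEST_FLAG_PREFIX_ is "gtest_":
//   bool value = false;
//   ParseFlag("--gtest_break_on_failure", "break_on_failure", &value);
//   // returns true; value == true (omitting "=value" means true)
//   ParseFlag("--gtest_break_on_failure=0", "break_on_failure", &value);
//   // returns true; value == false (the value starts with '0')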
| 6387 | |
| 6388 | // Parses a string for an int32_t flag, in the form of "--flag=value". |
| 6389 | // |
| 6390 | // On success, stores the value of the flag in *value, and returns |
| 6391 | // true. On failure, returns false without changing *value. |
| 6392 | bool ParseFlag(const char* str, const char* flag_name, int32_t* value) { |
| 6393 | // Gets the value of the flag as a string. |
const char* const value_str = ParseFlagValue(str, flag_name, false);
| 6395 | |
| 6396 | // Aborts if the parsing failed. |
| 6397 | if (value_str == nullptr) return false; |
| 6398 | |
| 6399 | // Sets *value to the value of the flag. |
return ParseInt32(Message() << "The value of flag --" << flag_name, value_str,
value);
| 6402 | } |
| 6403 | |
| 6404 | // Parses a string for a string flag, in the form of "--flag=value". |
| 6405 | // |
| 6406 | // On success, stores the value of the flag in *value, and returns |
| 6407 | // true. On failure, returns false without changing *value. |
| 6408 | template <typename String> |
| 6409 | static bool ParseFlag(const char* str, const char* flag_name, String* value) { |
| 6410 | // Gets the value of the flag as a string. |
const char* const value_str = ParseFlagValue(str, flag_name, false);
| 6412 | |
| 6413 | // Aborts if the parsing failed. |
| 6414 | if (value_str == nullptr) return false; |
| 6415 | |
| 6416 | // Sets *value to the value of the flag. |
| 6417 | *value = value_str; |
| 6418 | return true; |
| 6419 | } |
| 6420 | |
| 6421 | // Determines whether a string has a prefix that Google Test uses for its |
| 6422 | // flags, i.e., starts with GTEST_FLAG_PREFIX_ or GTEST_FLAG_PREFIX_DASH_. |
| 6423 | // If Google Test detects that a command line flag has its prefix but is not |
| 6424 | // recognized, it will print its help message. Flags starting with |
// GTEST_FLAG_PREFIX_ followed by "internal_" are considered Google Test
| 6426 | // internal flags and do not trigger the help message. |
| 6427 | static bool HasGoogleTestFlagPrefix(const char* str) { |
return (SkipPrefix("--", &str) || SkipPrefix("-", &str) ||
SkipPrefix("/", &str)) &&
!SkipPrefix(GTEST_FLAG_PREFIX_ "internal_", &str) &&
(SkipPrefix(GTEST_FLAG_PREFIX_, &str) ||
SkipPrefix(GTEST_FLAG_PREFIX_DASH_, &str));
| 6433 | } |
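
// Illustrative results, assuming GTEST_FLAG_PREFIX_ is "gtest_" and
// GTEST_FLAG_PREFIX_DASH_ is "gtest-":
//   HasGoogleTestFlagPrefix("--gtest_foo")          -> true
//   HasGoogleTestFlagPrefix("/gtest-bar")           -> true
//   HasGoogleTestFlagPrefix("--gtest_internal_foo") -> false  (internal flag)
//   HasGoogleTestFlagPrefix("--other_flag")         -> false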
| 6434 | |
| 6435 | // Prints a string containing code-encoded text. The following escape |
| 6436 | // sequences can be used in the string to control the text color: |
| 6437 | // |
| 6438 | // @@ prints a single '@' character. |
| 6439 | // @R changes the color to red. |
| 6440 | // @G changes the color to green. |
| 6441 | // @Y changes the color to yellow. |
| 6442 | // @D changes to the default terminal text color. |
| 6443 | // |
| 6444 | static void PrintColorEncoded(const char* str) { |
| 6445 | GTestColor color = GTestColor::kDefault; // The current color. |
| 6446 | |
| 6447 | // Conceptually, we split the string into segments divided by escape |
| 6448 | // sequences. Then we print one segment at a time. At the end of |
| 6449 | // each iteration, the str pointer advances to the beginning of the |
| 6450 | // next segment. |
| 6451 | for (;;) { |
const char* p = strchr(str, '@');
if (p == nullptr) {
ColoredPrintf(color, "%s", str);
return;
}

ColoredPrintf(color, "%s", std::string(str, p).c_str());

const char ch = p[1];
str = p + 2;
if (ch == '@') {
ColoredPrintf(color, "@");
| 6464 | } else if (ch == 'D') { |
| 6465 | color = GTestColor::kDefault; |
| 6466 | } else if (ch == 'R') { |
| 6467 | color = GTestColor::kRed; |
| 6468 | } else if (ch == 'G') { |
| 6469 | color = GTestColor::kGreen; |
| 6470 | } else if (ch == 'Y') { |
| 6471 | color = GTestColor::kYellow; |
| 6472 | } else { |
| 6473 | --str; |
| 6474 | } |
| 6475 | } |
| 6476 | } |
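
// For example (illustrative):
//   PrintColorEncoded("@GPASSED@D 3 tests, @@caller@@host\n");
// prints "PASSED" in green, then " 3 tests, @caller@host" and a newline in
// the default terminal color.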
| 6477 | |
| 6478 | static const char kColorEncodedHelpMessage[] = |
| 6479 | "This program contains tests written using " GTEST_NAME_ |
| 6480 | ". You can use the\n" |
| 6481 | "following command line flags to control its behavior:\n" |
| 6482 | "\n" |
| 6483 | "Test Selection:\n" |
| 6484 | " @G--" GTEST_FLAG_PREFIX_ |
| 6485 | "list_tests@D\n" |
| 6486 | " List the names of all tests instead of running them. The name of\n" |
| 6487 | " TEST(Foo, Bar) is \"Foo.Bar\".\n" |
| 6488 | " @G--" GTEST_FLAG_PREFIX_ |
| 6489 | "filter=@YPOSITIVE_PATTERNS" |
| 6490 | "[@G-@YNEGATIVE_PATTERNS]@D\n" |
| 6491 | " Run only the tests whose name matches one of the positive patterns " |
| 6492 | "but\n" |
| 6493 | " none of the negative patterns. '?' matches any single character; " |
| 6494 | "'*'\n" |
| 6495 | " matches any substring; ':' separates two patterns.\n" |
| 6496 | " @G--" GTEST_FLAG_PREFIX_ |
| 6497 | "also_run_disabled_tests@D\n" |
| 6498 | " Run all disabled tests too.\n" |
| 6499 | "\n" |
| 6500 | "Test Execution:\n" |
| 6501 | " @G--" GTEST_FLAG_PREFIX_ |
| 6502 | "repeat=@Y[COUNT]@D\n" |
| 6503 | " Run the tests repeatedly; use a negative count to repeat forever.\n" |
| 6504 | " @G--" GTEST_FLAG_PREFIX_ |
| 6505 | "shuffle@D\n" |
| 6506 | " Randomize tests' orders on every iteration.\n" |
| 6507 | " @G--" GTEST_FLAG_PREFIX_ |
| 6508 | "random_seed=@Y[NUMBER]@D\n" |
| 6509 | " Random number seed to use for shuffling test orders (between 1 and\n" |
| 6510 | " 99999, or 0 to use a seed based on the current time).\n" |
| 6511 | " @G--" GTEST_FLAG_PREFIX_ |
| 6512 | "recreate_environments_when_repeating@D\n" |
| 6513 | " Sets up and tears down the global test environment on each repeat\n" |
| 6514 | " of the test.\n" |
| 6515 | "\n" |
| 6516 | "Test Output:\n" |
| 6517 | " @G--" GTEST_FLAG_PREFIX_ |
| 6518 | "color=@Y(@Gyes@Y|@Gno@Y|@Gauto@Y)@D\n" |
| 6519 | " Enable/disable colored output. The default is @Gauto@D.\n" |
| 6520 | " @G--" GTEST_FLAG_PREFIX_ |
| 6521 | "brief=1@D\n" |
| 6522 | " Only print test failures.\n" |
| 6523 | " @G--" GTEST_FLAG_PREFIX_ |
| 6524 | "print_time=0@D\n" |
| 6525 | " Don't print the elapsed time of each test.\n" |
| 6526 | " @G--" GTEST_FLAG_PREFIX_ |
| 6527 | "output=@Y(@Gjson@Y|@Gxml@Y)[@G:@YDIRECTORY_PATH@G" GTEST_PATH_SEP_ |
| 6528 | "@Y|@G:@YFILE_PATH]@D\n" |
| 6529 | " Generate a JSON or XML report in the given directory or with the " |
| 6530 | "given\n" |
| 6531 | " file name. @YFILE_PATH@D defaults to @Gtest_detail.xml@D.\n" |
| 6532 | #if GTEST_CAN_STREAM_RESULTS_ |
| 6533 | " @G--" GTEST_FLAG_PREFIX_ |
| 6534 | "stream_result_to=@YHOST@G:@YPORT@D\n" |
| 6535 | " Stream test results to the given server.\n" |
| 6536 | #endif // GTEST_CAN_STREAM_RESULTS_ |
| 6537 | "\n" |
| 6538 | "Assertion Behavior:\n" |
| 6539 | #if defined(GTEST_HAS_DEATH_TEST) && !defined(GTEST_OS_WINDOWS) |
| 6540 | " @G--" GTEST_FLAG_PREFIX_ |
| 6541 | "death_test_style=@Y(@Gfast@Y|@Gthreadsafe@Y)@D\n" |
| 6542 | " Set the default death test style.\n" |
| 6543 | #endif // GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS |
| 6544 | " @G--" GTEST_FLAG_PREFIX_ |
| 6545 | "break_on_failure@D\n" |
| 6546 | " Turn assertion failures into debugger break-points.\n" |
| 6547 | " @G--" GTEST_FLAG_PREFIX_ |
| 6548 | "throw_on_failure@D\n" |
| 6549 | " Turn assertion failures into C++ exceptions for use by an external\n" |
| 6550 | " test framework.\n" |
| 6551 | " @G--" GTEST_FLAG_PREFIX_ |
| 6552 | "catch_exceptions=0@D\n" |
| 6553 | " Do not report exceptions as test failures. Instead, allow them\n" |
| 6554 | " to crash the program or throw a pop-up (on Windows).\n" |
| 6555 | "\n" |
| 6556 | "Except for @G--" GTEST_FLAG_PREFIX_ |
| 6557 | "list_tests@D, you can alternatively set " |
| 6558 | "the corresponding\n" |
| 6559 | "environment variable of a flag (all letters in upper-case). For example, " |
| 6560 | "to\n" |
| 6561 | "disable colored text output, you can either specify " |
| 6562 | "@G--" GTEST_FLAG_PREFIX_ |
| 6563 | "color=no@D or set\n" |
| 6564 | "the @G" GTEST_FLAG_PREFIX_UPPER_ |
| 6565 | "COLOR@D environment variable to @Gno@D.\n" |
| 6566 | "\n" |
| 6567 | "For more information, please read the " GTEST_NAME_ |
| 6568 | " documentation at\n" |
| 6569 | "@G" GTEST_PROJECT_URL_ "@D. If you find a bug in " GTEST_NAME_ |
| 6570 | "\n" |
| 6571 | "(not one in your own code or tests), please report it to\n" |
"@G<" GTEST_DEV_EMAIL_ ">@D.\n";
| 6573 | |
| 6574 | static bool ParseGoogleTestFlag(const char* const arg) { |
| 6575 | #define GTEST_INTERNAL_PARSE_FLAG(flag_name) \ |
| 6576 | do { \ |
| 6577 | auto value = GTEST_FLAG_GET(flag_name); \ |
| 6578 | if (ParseFlag(arg, #flag_name, &value)) { \ |
| 6579 | GTEST_FLAG_SET(flag_name, value); \ |
| 6580 | return true; \ |
| 6581 | } \ |
| 6582 | } while (false) |
| 6583 | |
| 6584 | GTEST_INTERNAL_PARSE_FLAG(also_run_disabled_tests); |
| 6585 | GTEST_INTERNAL_PARSE_FLAG(break_on_failure); |
| 6586 | GTEST_INTERNAL_PARSE_FLAG(catch_exceptions); |
| 6587 | GTEST_INTERNAL_PARSE_FLAG(color); |
| 6588 | GTEST_INTERNAL_PARSE_FLAG(death_test_style); |
| 6589 | GTEST_INTERNAL_PARSE_FLAG(death_test_use_fork); |
| 6590 | GTEST_INTERNAL_PARSE_FLAG(fail_fast); |
| 6591 | GTEST_INTERNAL_PARSE_FLAG(filter); |
| 6592 | GTEST_INTERNAL_PARSE_FLAG(internal_run_death_test); |
| 6593 | GTEST_INTERNAL_PARSE_FLAG(list_tests); |
| 6594 | GTEST_INTERNAL_PARSE_FLAG(output); |
| 6595 | GTEST_INTERNAL_PARSE_FLAG(brief); |
| 6596 | GTEST_INTERNAL_PARSE_FLAG(print_time); |
| 6597 | GTEST_INTERNAL_PARSE_FLAG(print_utf8); |
| 6598 | GTEST_INTERNAL_PARSE_FLAG(random_seed); |
| 6599 | GTEST_INTERNAL_PARSE_FLAG(repeat); |
| 6600 | GTEST_INTERNAL_PARSE_FLAG(recreate_environments_when_repeating); |
| 6601 | GTEST_INTERNAL_PARSE_FLAG(shuffle); |
| 6602 | GTEST_INTERNAL_PARSE_FLAG(stack_trace_depth); |
| 6603 | GTEST_INTERNAL_PARSE_FLAG(stream_result_to); |
| 6604 | GTEST_INTERNAL_PARSE_FLAG(throw_on_failure); |
| 6605 | return false; |
| 6606 | } |
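
// For example (illustrative), assuming GTEST_FLAG_PREFIX_ is "gtest_":
// ParseGoogleTestFlag("--gtest_repeat=3") sets the repeat flag to 3 and
// returns true, while ParseGoogleTestFlag("--gtest_no_such_flag=1") returns
// false because no GTEST_INTERNAL_PARSE_FLAG() line above matches it.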
| 6607 | |
| 6608 | #if GTEST_USE_OWN_FLAGFILE_FLAG_ && GTEST_HAS_FILE_SYSTEM |
| 6609 | static void LoadFlagsFromFile(const std::string& path) { |
FILE* flagfile = posix::FOpen(path.c_str(), "r");
if (!flagfile) {
GTEST_LOG_(FATAL) << "Unable to open file \"" << GTEST_FLAG_GET(flagfile)
<< "\"";
}
std::string contents(ReadEntireFile(flagfile));
posix::FClose(flagfile);
std::vector<std::string> lines;
SplitString(contents, '\n', &lines);
| 6619 | for (size_t i = 0; i < lines.size(); ++i) { |
| 6620 | if (lines[i].empty()) continue; |
if (!ParseGoogleTestFlag(lines[i].c_str())) g_help_flag = true;
| 6622 | } |
| 6623 | } |
| 6624 | #endif // GTEST_USE_OWN_FLAGFILE_FLAG_ && GTEST_HAS_FILE_SYSTEM |
| 6625 | |
| 6626 | // Parses the command line for Google Test flags, without initializing |
| 6627 | // other parts of Google Test. The type parameter CharType can be |
| 6628 | // instantiated to either char or wchar_t. |
| 6629 | template <typename CharType> |
| 6630 | void ParseGoogleTestFlagsOnlyImpl(int* argc, CharType** argv) { |
| 6631 | std::string flagfile_value; |
| 6632 | for (int i = 1; i < *argc; i++) { |
| 6633 | const std::string arg_string = StreamableToString(argv[i]); |
| 6634 | const char* const arg = arg_string.c_str(); |
| 6635 | |
| 6636 | using internal::ParseFlag; |
| 6637 | |
| 6638 | bool remove_flag = false; |
| 6639 | if (ParseGoogleTestFlag(arg)) { |
| 6640 | remove_flag = true; |
| 6641 | #if GTEST_USE_OWN_FLAGFILE_FLAG_ && GTEST_HAS_FILE_SYSTEM |
} else if (ParseFlag(arg, "flagfile", &flagfile_value)) {
GTEST_FLAG_SET(flagfile, flagfile_value);
LoadFlagsFromFile(flagfile_value);
remove_flag = true;
#endif // GTEST_USE_OWN_FLAGFILE_FLAG_ && GTEST_HAS_FILE_SYSTEM
} else if (arg_string == "--help" || HasGoogleTestFlagPrefix(arg)) {
| 6648 | // Both help flag and unrecognized Google Test flags (excluding |
| 6649 | // internal ones) trigger help display. |
| 6650 | g_help_flag = true; |
| 6651 | } |
| 6652 | |
| 6653 | if (remove_flag) { |
| 6654 | // Shift the remainder of the argv list left by one. Note |
| 6655 | // that argv has (*argc + 1) elements, the last one always being |
| 6656 | // NULL. The following loop moves the trailing NULL element as |
| 6657 | // well. |
| 6658 | for (int j = i; j != *argc; j++) { |
| 6659 | argv[j] = argv[j + 1]; |
| 6660 | } |
| 6661 | |
| 6662 | // Decrements the argument count. |
| 6663 | (*argc)--; |
| 6664 | |
| 6665 | // We also need to decrement the iterator as we just removed |
| 6666 | // an element. |
| 6667 | i--; |
| 6668 | } |
| 6669 | } |
| 6670 | |
| 6671 | if (g_help_flag) { |
| 6672 | // We print the help here instead of in RUN_ALL_TESTS(), as the |
| 6673 | // latter may not be called at all if the user is using Google |
| 6674 | // Test with another testing framework. |
PrintColorEncoded(kColorEncodedHelpMessage);
| 6676 | } |
| 6677 | } |
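
// A sketch of the argv rewriting performed above (illustrative values,
// assuming GTEST_FLAG_PREFIX_ is "gtest_"):
//   before: argc == 3, argv == { "prog", "--gtest_filter=Foo.*", "--my_flag",
//                                NULL }
//   after:  argc == 2, argv == { "prog", "--my_flag", NULL }
// The recognized GoogleTest flag is removed (the trailing NULL shifts left
// with it); "--my_flag" is left for the embedding program and, lacking the
// GoogleTest prefix, does not trigger the help message.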
| 6678 | |
| 6679 | // Parses the command line for Google Test flags, without initializing |
| 6680 | // other parts of Google Test. This function updates argc and argv by removing |
| 6681 | // flags that are known to GoogleTest (including other user flags defined using |
| 6682 | // ABSL_FLAG if GoogleTest is built with GTEST_USE_ABSL). Other arguments |
| 6683 | // remain in place. Unrecognized flags are not reported and do not cause the |
| 6684 | // program to exit. |
| 6685 | void ParseGoogleTestFlagsOnly(int* argc, char** argv) { |
| 6686 | #ifdef GTEST_HAS_ABSL |
| 6687 | if (*argc <= 0) return; |
| 6688 | |
| 6689 | std::vector<char*> positional_args; |
| 6690 | std::vector<absl::UnrecognizedFlag> unrecognized_flags; |
| 6691 | absl::ParseAbseilFlagsOnly(*argc, argv, positional_args, unrecognized_flags); |
| 6692 | absl::flat_hash_set<absl::string_view> unrecognized; |
| 6693 | for (const auto& flag : unrecognized_flags) { |
| 6694 | unrecognized.insert(flag.flag_name); |
| 6695 | } |
| 6696 | absl::flat_hash_set<char*> positional; |
| 6697 | for (const auto& arg : positional_args) { |
| 6698 | positional.insert(arg); |
| 6699 | } |
| 6700 | |
| 6701 | int out_pos = 1; |
| 6702 | int in_pos = 1; |
| 6703 | for (; in_pos < *argc; ++in_pos) { |
| 6704 | char* arg = argv[in_pos]; |
| 6705 | absl::string_view arg_str(arg); |
if (absl::ConsumePrefix(&arg_str, "--")) {
| 6707 | // Flag-like argument. If the flag was unrecognized, keep it. |
| 6708 | // If it was a GoogleTest flag, remove it. |
| 6709 | if (unrecognized.contains(arg_str)) { |
| 6710 | argv[out_pos++] = argv[in_pos]; |
| 6711 | continue; |
| 6712 | } |
| 6713 | } |
| 6714 | |
| 6715 | if (arg_str.empty()) { |
| 6716 | ++in_pos; |
| 6717 | break; // '--' indicates that the rest of the arguments are positional |
| 6718 | } |
| 6719 | |
| 6720 | // Probably a positional argument. If it is in fact positional, keep it. |
| 6721 | // If it was a value for the flag argument, remove it. |
| 6722 | if (positional.contains(arg)) { |
| 6723 | argv[out_pos++] = arg; |
| 6724 | } |
| 6725 | } |
| 6726 | |
| 6727 | // The rest are positional args for sure. |
| 6728 | while (in_pos < *argc) { |
| 6729 | argv[out_pos++] = argv[in_pos++]; |
| 6730 | } |
| 6731 | |
| 6732 | *argc = out_pos; |
| 6733 | argv[out_pos] = nullptr; |
| 6734 | #else |
| 6735 | ParseGoogleTestFlagsOnlyImpl(argc, argv); |
| 6736 | #endif |
| 6737 | |
| 6738 | // Fix the value of *_NSGetArgc() on macOS, but if and only if |
| 6739 | // *_NSGetArgv() == argv |
| 6740 | // Only applicable to char** version of argv |
| 6741 | #ifdef GTEST_OS_MAC |
| 6742 | #ifndef GTEST_OS_IOS |
| 6743 | if (*_NSGetArgv() == argv) { |
| 6744 | *_NSGetArgc() = *argc; |
| 6745 | } |
| 6746 | #endif |
| 6747 | #endif |
| 6748 | } |
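
// In the GTEST_HAS_ABSL branch of the char** overload above, a hypothetical
// rewrite looks like this (GoogleTest flags are defined as ABSL_FLAGs in that
// configuration, so Abseil recognizes and strips them):
//   before: { "prog", "--gtest_repeat=2", "--unknown_flag", "positional",
//             NULL }
//   after:  { "prog", "--unknown_flag", "positional", NULL }
// Unrecognized flags and positional arguments are preserved in their original
// order; a flag's detached value would be dropped along with the flag itself.
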
| 6749 | void ParseGoogleTestFlagsOnly(int* argc, wchar_t** argv) { |
| 6750 | ParseGoogleTestFlagsOnlyImpl(argc, argv); |
| 6751 | } |
| 6752 | |
| 6753 | // The internal implementation of InitGoogleTest(). |
| 6754 | // |
| 6755 | // The type parameter CharType can be instantiated to either char or |
| 6756 | // wchar_t. |
| 6757 | template <typename CharType> |
| 6758 | void InitGoogleTestImpl(int* argc, CharType** argv) { |
| 6759 | // We don't want to run the initialization code twice. |
| 6760 | if (GTestIsInitialized()) return; |
| 6761 | |
| 6762 | if (*argc <= 0) return; |
| 6763 | |
| 6764 | g_argvs.clear(); |
| 6765 | for (int i = 0; i != *argc; i++) { |
| 6766 | g_argvs.push_back(StreamableToString(argv[i])); |
| 6767 | } |
| 6768 | |
| 6769 | #ifdef GTEST_HAS_ABSL |
| 6770 | absl::InitializeSymbolizer(g_argvs[0].c_str()); |
| 6771 | |
| 6772 | // When using the Abseil Flags library, set the program usage message to the |
| 6773 | // help message, but remove the color-encoding from the message first. |
| 6774 | absl::SetProgramUsageMessage(absl::StrReplaceAll( |
| 6775 | kColorEncodedHelpMessage, |
{{"@D", ""}, {"@R", ""}, {"@G", ""}, {"@Y", ""}, {"@@", "@"}}));
| 6777 | #endif // GTEST_HAS_ABSL |
| 6778 | |
| 6779 | ParseGoogleTestFlagsOnly(argc, argv); |
| 6780 | GetUnitTestImpl()->PostFlagParsingInit(); |
| 6781 | } |
| 6782 | |
| 6783 | } // namespace internal |
| 6784 | |
| 6785 | // Initializes Google Test. This must be called before calling |
| 6786 | // RUN_ALL_TESTS(). In particular, it parses a command line for the |
| 6787 | // flags that Google Test recognizes. Whenever a Google Test flag is |
| 6788 | // seen, it is removed from argv, and *argc is decremented. |
| 6789 | // |
| 6790 | // No value is returned. Instead, the Google Test flag variables are |
| 6791 | // updated. |
| 6792 | // |
| 6793 | // Calling the function for the second time has no user-visible effect. |
| 6794 | void InitGoogleTest(int* argc, char** argv) { |
| 6795 | #if defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) |
| 6796 | GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_(argc, argv); |
| 6797 | #else // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) |
| 6798 | internal::InitGoogleTestImpl(argc, argv); |
| 6799 | #endif // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) |
| 6800 | } |
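
// Typical usage (for illustration):
//
//   int main(int argc, char** argv) {
//     testing::InitGoogleTest(&argc, argv);
//     return RUN_ALL_TESTS();
//   }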
| 6801 | |
| 6802 | // This overloaded version can be used in Windows programs compiled in |
| 6803 | // UNICODE mode. |
| 6804 | void InitGoogleTest(int* argc, wchar_t** argv) { |
| 6805 | #if defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) |
| 6806 | GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_(argc, argv); |
| 6807 | #else // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) |
| 6808 | internal::InitGoogleTestImpl(argc, argv); |
| 6809 | #endif // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) |
| 6810 | } |
| 6811 | |
| 6812 | // This overloaded version can be used on Arduino/embedded platforms where |
| 6813 | // there is no argc/argv. |
| 6814 | void InitGoogleTest() { |
| 6815 | // Since Arduino doesn't have a command line, fake out the argc/argv arguments |
| 6816 | int argc = 1; |
const auto arg0 = "dummy";
| 6818 | char* argv0 = const_cast<char*>(arg0); |
| 6819 | char** argv = &argv0; |
| 6820 | |
| 6821 | #if defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) |
| 6822 | GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_(&argc, argv); |
| 6823 | #else // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) |
internal::InitGoogleTestImpl(&argc, argv);
| 6825 | #endif // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) |
| 6826 | } |
| 6827 | |
| 6828 | #if !defined(GTEST_CUSTOM_TEMPDIR_FUNCTION_) || \ |
| 6829 | !defined(GTEST_CUSTOM_SRCDIR_FUNCTION_) |
| 6830 | // Returns the value of the first environment variable that is set and contains |
| 6831 | // a non-empty string. If there are none, returns the "fallback" string. Adds |
// the directory-separator character as a suffix if not provided in the
| 6833 | // environment variable value. |
| 6834 | static std::string GetDirFromEnv( |
| 6835 | std::initializer_list<const char*> environment_variables, |
| 6836 | const char* fallback, char separator) { |
| 6837 | for (const char* variable_name : environment_variables) { |
const char* value = internal::posix::GetEnv(variable_name);
if (value != nullptr && value[0] != '\0') {
if (value[strlen(value) - 1] != separator) {
return std::string(value).append(1, separator);
| 6842 | } |
| 6843 | return value; |
| 6844 | } |
| 6845 | } |
| 6846 | return fallback; |
| 6847 | } |
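
// For example (illustrative): with TEST_TMPDIR unset and TMPDIR set to
// "/var/tmp", GetDirFromEnv({"TEST_TMPDIR", "TMPDIR"}, "/tmp/", '/') returns
// "/var/tmp/" (the separator is appended); with both variables unset it
// returns the fallback "/tmp/" unchanged.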
| 6848 | #endif |
| 6849 | |
| 6850 | std::string TempDir() { |
| 6851 | #if defined(GTEST_CUSTOM_TEMPDIR_FUNCTION_) |
| 6852 | return GTEST_CUSTOM_TEMPDIR_FUNCTION_(); |
| 6853 | #elif defined(GTEST_OS_WINDOWS) || defined(GTEST_OS_WINDOWS_MOBILE) |
return GetDirFromEnv({"TEST_TMPDIR", "TEMP"}, "\\temp\\", '\\');
#elif defined(GTEST_OS_LINUX_ANDROID)
return GetDirFromEnv({"TEST_TMPDIR", "TMPDIR"}, "/data/local/tmp/", '/');
#else
return GetDirFromEnv({"TEST_TMPDIR", "TMPDIR"}, "/tmp/", '/');
| 6859 | #endif |
| 6860 | } |
| 6861 | |
| 6862 | #if GTEST_HAS_FILE_SYSTEM && !defined(GTEST_CUSTOM_SRCDIR_FUNCTION_) |
| 6863 | // Returns the directory path (including terminating separator) of the current |
| 6864 | // executable as derived from argv[0]. |
| 6865 | static std::string GetCurrentExecutableDirectory() { |
| 6866 | internal::FilePath argv_0(internal::GetArgvs()[0]); |
| 6867 | return argv_0.RemoveFileName().string(); |
| 6868 | } |
| 6869 | #endif |
| 6870 | |
| 6871 | #if GTEST_HAS_FILE_SYSTEM |
| 6872 | std::string SrcDir() { |
| 6873 | #if defined(GTEST_CUSTOM_SRCDIR_FUNCTION_) |
| 6874 | return GTEST_CUSTOM_SRCDIR_FUNCTION_(); |
| 6875 | #elif defined(GTEST_OS_WINDOWS) || defined(GTEST_OS_WINDOWS_MOBILE) |
return GetDirFromEnv({"TEST_SRCDIR"}, GetCurrentExecutableDirectory().c_str(),
'\\');
#elif defined(GTEST_OS_LINUX_ANDROID)
return GetDirFromEnv({"TEST_SRCDIR"}, GetCurrentExecutableDirectory().c_str(),
'/');
#else
return GetDirFromEnv({"TEST_SRCDIR"}, GetCurrentExecutableDirectory().c_str(),
'/');
| 6884 | #endif |
| 6885 | } |
| 6886 | #endif |
| 6887 | |
| 6888 | // Class ScopedTrace |
| 6889 | |
| 6890 | // Pushes the given source file location and message onto a per-thread |
| 6891 | // trace stack maintained by Google Test. |
| 6892 | void ScopedTrace::PushTrace(const char* file, int line, std::string message) { |
| 6893 | internal::TraceInfo trace; |
| 6894 | trace.file = file; |
| 6895 | trace.line = line; |
trace.message.swap(message);
| 6897 | |
| 6898 | UnitTest::GetInstance()->PushGTestTrace(trace); |
| 6899 | } |
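
// For example (illustrative), SCOPED_TRACE("processing step 2") constructs a
// ScopedTrace whose constructor forwards to
//   PushTrace(__FILE__, __LINE__, "processing step 2");
// so any failure reported while that object is in scope includes the file,
// line, and message in its trace (the destructor below pops it again).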
| 6900 | |
| 6901 | // Pops the info pushed by the c'tor. |
| 6902 | ScopedTrace::~ScopedTrace() GTEST_LOCK_EXCLUDED_(&UnitTest::mutex_) { |
| 6903 | UnitTest::GetInstance()->PopGTestTrace(); |
| 6904 | } |
| 6905 | |
| 6906 | } // namespace testing |
| 6907 | |