//===-- sanitizer_stack_store_test.cpp --------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_stack_store.h"

#include <algorithm>
#include <numeric>
#include <vector>

#include "gtest/gtest.h"
#include "sanitizer_atomic.h"
#include "sanitizer_hash.h"
#include "sanitizer_stacktrace.h"

namespace __sanitizer {

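// Test fixture for StackStore. It relies on test-only access to StackStore
// internals (total_frames_, blocks_) to observe packing state from the tests.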
class StackStoreTest : public testing::Test {
 protected:
  void SetUp() override {}
  void TearDown() override { store_.TestOnlyUnmap(); }

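  // Invokes `fn` on up to `n` deterministic pseudo-random traces. Trace size
  // and tag are derived from a MurMur2 hash of the loop index, and the frames
  // are successive permutations of an iota sequence, so every run produces
  // the same sequence of traces. Stops early once a check fails.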
  template <typename Fn>
  void ForEachTrace(Fn fn, uptr n = 1000000) {
    std::vector<uptr> frames(kStackTraceMax);
    std::iota(frames.begin(), frames.end(), 0x100000);
    MurMur2HashBuilder h(0);
    for (uptr i = 0; i < n; ++i) {
      h.add(i);
      u32 size = h.get() % kStackTraceMax;
      h.add(i);
      uptr tag = h.get() % 256;
      StackTrace s(frames.data(), size, tag);
      if (!s.size && !s.tag)
        continue;
      fn(s);
      if (HasFailure())
        return;
      std::next_permutation(frames.begin(), frames.end());
    }
  }

  using BlockInfo = StackStore::BlockInfo;

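  // Reads the total number of frames stored so far from StackStore's
  // internal atomic counter.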
  uptr GetTotalFramesCount() const {
    return atomic_load_relaxed(&store_.total_frames_);
  }

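  // Counts blocks whose frames are all stored and are thus ready to be
  // packed; Stored(0) queries fullness without changing the stored count.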
  uptr CountReadyToPackBlocks() {
    uptr res = 0;
    for (BlockInfo& b : store_.blocks_) res += b.Stored(0);
    return res;
  }

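  // Counts blocks currently held in packed (compressed) form.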
  uptr CountPackedBlocks() const {
    uptr res = 0;
    for (const BlockInfo& b : store_.blocks_) res += b.IsPacked();
    return res;
  }

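  // Exposes StackStore's id-to-frame-offset mapping for the tests below.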
  uptr IdToOffset(StackStore::Id id) const { return store_.IdToOffset(id); }

  static constexpr uptr kBlockSizeFrames = StackStore::kBlockSizeFrames;
  static constexpr uptr kBlockSizeBytes = StackStore::kBlockSizeBytes;

  StackStore store_ = {};
};

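// Storing an empty trace must be a no-op: it returns the zero id and does
// not allocate any memory.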
TEST_F(StackStoreTest, Empty) {
  uptr before = store_.Allocated();
  uptr pack = 0;
  EXPECT_EQ(0u, store_.Store({}, &pack));
  uptr after = store_.Allocated();
  EXPECT_EQ(before, after);
}

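// Round-trip check: every stored trace must load back with the same size,
// tag, and frames.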
TEST_F(StackStoreTest, Basic) {
  std::vector<StackStore::Id> ids;
  ForEachTrace([&](const StackTrace& s) {
    uptr pack = 0;
    ids.push_back(store_.Store(s, &pack));
  });

  auto id = ids.begin();
  ForEachTrace([&](const StackTrace& s) {
    StackTrace trace = store_.Load(*(id++));
    EXPECT_EQ(s.size, trace.size);
    EXPECT_EQ(s.tag, trace.tag);
    EXPECT_EQ(std::vector<uptr>(s.trace, s.trace + s.size),
              std::vector<uptr>(trace.trace, trace.trace + trace.size));
  });
}

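// Allocated() should grow roughly in proportion to the stored frames and
// drop back to the baseline after TestOnlyUnmap().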
TEST_F(StackStoreTest, Allocated) {
  EXPECT_LE(store_.Allocated(), 0x100000u);
  std::vector<StackStore::Id> ids;
  ForEachTrace([&](const StackTrace& s) {
    uptr pack = 0;
    ids.push_back(store_.Store(s, &pack));
  });
  EXPECT_NEAR(store_.Allocated(), FIRST_32_SECOND_64(500000000u, 1000000000u),
              FIRST_32_SECOND_64(50000000u, 100000000u));
  store_.TestOnlyUnmap();
  EXPECT_LE(store_.Allocated(), 0x100000u);
}

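// Store() should report a block as ready to pack exactly when a stored trace
// crosses a kBlockSizeFrames boundary.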
TEST_F(StackStoreTest, ReadyToPack) {
  uptr next_pack = kBlockSizeFrames;
  uptr total_ready = 0;
  ForEachTrace(
      [&](const StackTrace& s) {
        uptr pack = 0;
        StackStore::Id id = store_.Store(s, &pack);
        uptr end_idx = IdToOffset(id) + 1 + s.size;
        if (end_idx >= next_pack) {
          EXPECT_EQ(1u, pack);
          next_pack += kBlockSizeFrames;
        } else {
          EXPECT_EQ(0u, pack);
        }
        total_ready += pack;
        EXPECT_EQ(CountReadyToPackBlocks(), total_ready);
      },
      100000);
  EXPECT_EQ(GetTotalFramesCount() / kBlockSizeFrames, total_ready);
}

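// Parameterized over the compression algorithm and the minimal compression
// ratio expected for the synthetic traces produced by ForEachTrace.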
struct StackStorePackTest : public StackStoreTest,
                            public ::testing::WithParamInterface<
                                std::pair<StackStore::Compression, uptr>> {};

INSTANTIATE_TEST_SUITE_P(
    PackUnpacks, StackStorePackTest,
    ::testing::ValuesIn({
        StackStorePackTest::ParamType(StackStore::Compression::Delta,
                                      FIRST_32_SECOND_64(2, 6)),
        StackStorePackTest::ParamType(StackStore::Compression::LZW,
                                      FIRST_32_SECOND_64(60, 125)),
    }));

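// Packs each block as soon as it becomes ready, checks that Pack() reports
// the exact number of bytes saved and meets the expected ratio, then unpacks
// everything via Load() and verifies the round trip.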
TEST_P(StackStorePackTest, PackUnpack) {
  std::vector<StackStore::Id> ids;
  StackStore::Compression type = GetParam().first;
  uptr expected_ratio = GetParam().second;
  ForEachTrace([&](const StackTrace& s) {
    uptr pack = 0;
    ids.push_back(store_.Store(s, &pack));
    if (pack) {
      uptr before = store_.Allocated();
      uptr diff = store_.Pack(type);
      uptr after = store_.Allocated();
      EXPECT_EQ(before - after, diff);
      EXPECT_LT(after, before);
      EXPECT_GE(kBlockSizeBytes / (kBlockSizeBytes - (before - after)),
                expected_ratio);
    }
  });
  uptr packed_blocks = CountPackedBlocks();
  // Unpack a random block.
  store_.Load(kBlockSizeFrames * 7 + 123);
  EXPECT_EQ(packed_blocks - 1, CountPackedBlocks());

  // Unpack all blocks.
  auto id = ids.begin();
  ForEachTrace([&](const StackTrace& s) {
    StackTrace trace = store_.Load(*(id++));
    EXPECT_EQ(s.size, trace.size);
    EXPECT_EQ(s.tag, trace.tag);
    EXPECT_EQ(std::vector<uptr>(s.trace, s.trace + s.size),
              std::vector<uptr>(trace.trace, trace.trace + trace.size));
  });
  EXPECT_EQ(0u, CountPackedBlocks());

  EXPECT_EQ(0u, store_.Pack(type));
  EXPECT_EQ(0u, CountPackedBlocks());
}

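// Fills blocks with hash-derived, effectively incompressible frames, so
// Pack() should fail to shrink any block and leave everything unpacked.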
TEST_P(StackStorePackTest, Failed) {
  MurMur2Hash64Builder h(0);
  StackStore::Compression type = GetParam().first;
  std::vector<uptr> frames(200);
  for (uptr i = 0; i < kBlockSizeFrames * 4 / frames.size(); ++i) {
    for (uptr& f : frames) {
      h.add(1);
      // Make it difficult to pack.
      f = h.get();
    }
    uptr pack = 0;
    store_.Store(StackTrace(frames.data(), frames.size()), &pack);
    if (pack)
      EXPECT_EQ(0u, store_.Pack(type));
  }

  EXPECT_EQ(0u, CountPackedBlocks());
}

}  // namespace __sanitizer