// Source: llvm/unittests/ProfileData/MemProfTest.cpp

#include "llvm/ProfileData/MemProf.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/DebugInfo/DIContext.h"
#include "llvm/DebugInfo/Symbolize/SymbolizableModule.h"
#include "llvm/IR/Value.h"
#include "llvm/Object/ObjectFile.h"
#include "llvm/ProfileData/MemProfData.inc"
#include "llvm/ProfileData/MemProfReader.h"
#include "llvm/Support/raw_ostream.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"

#include <initializer_list>

namespace {

using ::llvm::DIGlobal;
using ::llvm::DIInliningInfo;
using ::llvm::DILineInfo;
using ::llvm::DILineInfoSpecifier;
using ::llvm::DILocal;
using ::llvm::StringRef;
using ::llvm::memprof::CallStackId;
using ::llvm::memprof::CallStackMap;
using ::llvm::memprof::Frame;
using ::llvm::memprof::FrameId;
using ::llvm::memprof::IndexedAllocationInfo;
using ::llvm::memprof::IndexedMemProfRecord;
using ::llvm::memprof::MemInfoBlock;
using ::llvm::memprof::MemProfReader;
using ::llvm::memprof::MemProfRecord;
using ::llvm::memprof::MemProfSchema;
using ::llvm::memprof::Meta;
using ::llvm::memprof::PortableMemInfoBlock;
using ::llvm::memprof::RawMemProfReader;
using ::llvm::memprof::SegmentEntry;
using ::llvm::object::SectionedAddress;
using ::llvm::symbolize::SymbolizableModule;
using ::testing::Return;
using ::testing::SizeIs;

class MockSymbolizer : public SymbolizableModule {
public:
  MOCK_CONST_METHOD3(symbolizeInlinedCode,
                     DIInliningInfo(SectionedAddress, DILineInfoSpecifier,
                                    bool));
  // Most of the methods in the interface are unused. We only mock the
  // method that we expect to be called from the memprof reader.
  virtual DILineInfo symbolizeCode(SectionedAddress, DILineInfoSpecifier,
                                   bool) const {
    llvm_unreachable("unused");
  }
  virtual DIGlobal symbolizeData(SectionedAddress) const {
    llvm_unreachable("unused");
  }
  virtual std::vector<DILocal> symbolizeFrame(SectionedAddress) const {
    llvm_unreachable("unused");
  }
  virtual std::vector<SectionedAddress> findSymbol(StringRef Symbol,
                                                   uint64_t Offset) const {
    llvm_unreachable("unused");
  }
  virtual bool isWin32Module() const { llvm_unreachable("unused"); }
  virtual uint64_t getModulePreferredBase() const {
    llvm_unreachable("unused");
  }
};

struct MockInfo {
  std::string FunctionName;
  uint32_t Line;
  uint32_t StartLine;
  uint32_t Column;
  std::string FileName = "valid/path.cc";
};
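// Builds a DIInliningInfo from the given mock frames. Frames are listed in
// symbolizer order, i.e. innermost (most-inlined) frame first.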
DIInliningInfo makeInliningInfo(std::initializer_list<MockInfo> MockFrames) {
  DIInliningInfo Result;
  for (const auto &Item : MockFrames) {
    DILineInfo Frame;
    Frame.FunctionName = Item.FunctionName;
    Frame.Line = Item.Line;
    Frame.StartLine = Item.StartLine;
    Frame.Column = Item.Column;
    Frame.FileName = Item.FileName;
    Result.addFrame(Frame);
  }
  return Result;
}

llvm::SmallVector<SegmentEntry, 4> makeSegments() {
  llvm::SmallVector<SegmentEntry, 4> Result;
  // Mimic an entry for a non-position-independent executable.
  Result.emplace_back(0x0, 0x40000, 0x0);
  return Result;
}

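// The line info specifier the tests expect the reader to pass to the
// symbolizer: raw line values and linkage (mangled) function names.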
const DILineInfoSpecifier specifier() {
  return DILineInfoSpecifier(
      DILineInfoSpecifier::FileLineInfoKind::RawValue,
      DILineInfoSpecifier::FunctionNameKind::LinkageName);
}

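// Matches a Frame whose Function equals the GUID of FunctionName (and whose
// SymbolName, when kept, matches it verbatim) with the given line offset,
// column and inline bit.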
MATCHER_P4(FrameContains, FunctionName, LineOffset, Column, Inline, "") {
  const Frame &F = arg;

  const uint64_t ExpectedHash = IndexedMemProfRecord::getGUID(FunctionName);
  if (F.Function != ExpectedHash) {
    *result_listener << "Hash mismatch";
    return false;
  }
  if (F.SymbolName && *F.SymbolName != FunctionName) {
    *result_listener << "SymbolName mismatch\nWant: " << FunctionName
                     << "\nGot: " << *F.SymbolName;
    return false;
  }
  if (F.LineOffset == LineOffset && F.Column == Column &&
      F.IsInlineFrame == Inline) {
    return true;
  }
  *result_listener << "LineOffset, Column or Inline mismatch";
  return false;
}

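// Builds a schema with every MemInfoBlock field by expanding the
// MIBEntryDef.inc X-macro.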
MemProfSchema getFullSchema() {
  MemProfSchema Schema;
#define MIBEntryDef(NameTag, Name, Type) Schema.push_back(Meta::Name);
#include "llvm/ProfileData/MIBEntryDef.inc"
#undef MIBEntryDef
  return Schema;
}

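// End-to-end test: the raw reader symbolizes the mocked PCs, drops the
// allocator frame from the runtime, and attaches alloc-site and call-site
// data to the right functions.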
TEST(MemProf, FillsValue) {
  std::unique_ptr<MockSymbolizer> Symbolizer(new MockSymbolizer());

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x1000},
                                                specifier(), false))
      .Times(1) // Only once since we remember invalid PCs.
      .WillRepeatedly(Return(makeInliningInfo({
          {"new", 70, 57, 3, "memprof/memprof_new_delete.cpp"},
      })));

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x2000},
                                                specifier(), false))
      .Times(1) // Only once since we cache the result for future lookups.
      .WillRepeatedly(Return(makeInliningInfo({
          {"foo", 10, 5, 30},
          {"bar", 201, 150, 20},
      })));

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x3000},
                                                specifier(), false))
      .Times(1)
      .WillRepeatedly(Return(makeInliningInfo({
          {"xyz.llvm.123", 10, 5, 30},
          {"abc", 10, 5, 30},
      })));

  CallStackMap CSM;
  CSM[0x1] = {0x1000, 0x2000, 0x3000};

  llvm::MapVector<uint64_t, MemInfoBlock> Prof;
  Prof[0x1].AllocCount = 1;

  auto Seg = makeSegments();

  RawMemProfReader Reader(std::move(Symbolizer), Seg, Prof, CSM,
                          /*KeepName=*/true);

  llvm::DenseMap<llvm::GlobalValue::GUID, MemProfRecord> Records;
  for (const auto &Pair : Reader) {
    Records.insert({Pair.first, Pair.second});
  }

  // Mock program pseudocode and expected memprof record contents.
  //
  //                              AllocSite       CallSite
  // inline foo() { new(); }         Y               N
  // bar() { foo(); }                Y               Y
  // inline xyz() { bar(); }         N               Y
  // abc() { xyz(); }                N               Y

  // We expect 4 records. We attach alloc site data to foo and bar, i.e.,
  // all frames bottom-up until we find a non-inline frame. We attach call
  // site data to bar, xyz and abc.
  ASSERT_THAT(Records, SizeIs(4));

  // Check the memprof record for foo.
  const llvm::GlobalValue::GUID FooId = IndexedMemProfRecord::getGUID("foo");
  ASSERT_EQ(Records.count(FooId), 1U);
  const MemProfRecord &Foo = Records[FooId];
  ASSERT_THAT(Foo.AllocSites, SizeIs(1));
  EXPECT_EQ(Foo.AllocSites[0].Info.getAllocCount(), 1U);
  EXPECT_THAT(Foo.AllocSites[0].CallStack[0],
              FrameContains("foo", 5U, 30U, true));
  EXPECT_THAT(Foo.AllocSites[0].CallStack[1],
              FrameContains("bar", 51U, 20U, false));
  EXPECT_THAT(Foo.AllocSites[0].CallStack[2],
              FrameContains("xyz", 5U, 30U, true));
  EXPECT_THAT(Foo.AllocSites[0].CallStack[3],
              FrameContains("abc", 5U, 30U, false));
  EXPECT_TRUE(Foo.CallSites.empty());

  // Check the memprof record for bar.
  const llvm::GlobalValue::GUID BarId = IndexedMemProfRecord::getGUID("bar");
  ASSERT_EQ(Records.count(BarId), 1U);
  const MemProfRecord &Bar = Records[BarId];
  ASSERT_THAT(Bar.AllocSites, SizeIs(1));
  EXPECT_EQ(Bar.AllocSites[0].Info.getAllocCount(), 1U);
  EXPECT_THAT(Bar.AllocSites[0].CallStack[0],
              FrameContains("foo", 5U, 30U, true));
  EXPECT_THAT(Bar.AllocSites[0].CallStack[1],
              FrameContains("bar", 51U, 20U, false));
  EXPECT_THAT(Bar.AllocSites[0].CallStack[2],
              FrameContains("xyz", 5U, 30U, true));
  EXPECT_THAT(Bar.AllocSites[0].CallStack[3],
              FrameContains("abc", 5U, 30U, false));

  ASSERT_THAT(Bar.CallSites, SizeIs(1));
  ASSERT_THAT(Bar.CallSites[0], SizeIs(2));
  EXPECT_THAT(Bar.CallSites[0][0], FrameContains("foo", 5U, 30U, true));
  EXPECT_THAT(Bar.CallSites[0][1], FrameContains("bar", 51U, 20U, false));

  // Check the memprof record for xyz.
  const llvm::GlobalValue::GUID XyzId = IndexedMemProfRecord::getGUID("xyz");
  ASSERT_EQ(Records.count(XyzId), 1U);
  const MemProfRecord &Xyz = Records[XyzId];
  ASSERT_THAT(Xyz.CallSites, SizeIs(1));
  ASSERT_THAT(Xyz.CallSites[0], SizeIs(2));
  // Expect the entire frame even though in practice we only need the first
  // entry here.
  EXPECT_THAT(Xyz.CallSites[0][0], FrameContains("xyz", 5U, 30U, true));
  EXPECT_THAT(Xyz.CallSites[0][1], FrameContains("abc", 5U, 30U, false));

  // Check the memprof record for abc.
  const llvm::GlobalValue::GUID AbcId = IndexedMemProfRecord::getGUID("abc");
  ASSERT_EQ(Records.count(AbcId), 1U);
  const MemProfRecord &Abc = Records[AbcId];
  EXPECT_TRUE(Abc.AllocSites.empty());
  ASSERT_THAT(Abc.CallSites, SizeIs(1));
  ASSERT_THAT(Abc.CallSites[0], SizeIs(2));
  EXPECT_THAT(Abc.CallSites[0][0], FrameContains("xyz", 5U, 30U, true));
  EXPECT_THAT(Abc.CallSites[0][1], FrameContains("abc", 5U, 30U, false));
}

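// Round-trips a MemInfoBlock through PortableMemInfoBlock serialization and
// spot-checks a few of the deserialized fields.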
TEST(MemProf, PortableWrapper) {
  MemInfoBlock Info(/*size=*/16, /*access_count=*/7, /*alloc_timestamp=*/1000,
                    /*dealloc_timestamp=*/2000, /*alloc_cpu=*/3,
                    /*dealloc_cpu=*/4);

  const auto Schema = getFullSchema();
  PortableMemInfoBlock WriteBlock(Info);

  std::string Buffer;
  llvm::raw_string_ostream OS(Buffer);
  WriteBlock.serialize(Schema, OS);
  OS.flush();

  PortableMemInfoBlock ReadBlock(
      Schema, reinterpret_cast<const unsigned char *>(Buffer.data()));

  EXPECT_EQ(ReadBlock, WriteBlock);
  // Compare against the expected counts directly instead of reading
  // MemInfoBlock members: the MemInfoBlock struct is packed and the EXPECT_EQ
  // macros take references to their parameters, which would result in
  // unaligned accesses.
  EXPECT_EQ(1UL, ReadBlock.getAllocCount());
  EXPECT_EQ(7ULL, ReadBlock.getTotalAccessCount());
  EXPECT_EQ(3UL, ReadBlock.getAllocCpuId());
}

// Version0 and Version1 serialize IndexedMemProfRecord in the same format, so
// we share one test.
TEST(MemProf, RecordSerializationRoundTripVersion0And1) {
  const MemProfSchema Schema = getFullSchema();

  MemInfoBlock Info(/*size=*/16, /*access_count=*/7, /*alloc_timestamp=*/1000,
                    /*dealloc_timestamp=*/2000, /*alloc_cpu=*/3,
                    /*dealloc_cpu=*/4);

  llvm::SmallVector<llvm::SmallVector<FrameId>> AllocCallStacks = {
      {0x123, 0x345}, {0x123, 0x567}};

  llvm::SmallVector<llvm::SmallVector<FrameId>> CallSites = {{0x333, 0x777}};

  IndexedMemProfRecord Record;
  for (const auto &ACS : AllocCallStacks) {
    // Use the same info block for both allocation sites.
    Record.AllocSites.emplace_back(ACS, llvm::memprof::hashCallStack(ACS),
                                   Info);
  }
  Record.CallSites.assign(CallSites);
  for (const auto &CS : CallSites)
    Record.CallSiteIds.push_back(llvm::memprof::hashCallStack(CS));

  std::string Buffer;
  llvm::raw_string_ostream OS(Buffer);
  Record.serialize(Schema, OS, llvm::memprof::Version0);
  OS.flush();

  const IndexedMemProfRecord GotRecord = IndexedMemProfRecord::deserialize(
      Schema, reinterpret_cast<const unsigned char *>(Buffer.data()),
      llvm::memprof::Version0);

  EXPECT_EQ(Record, GotRecord);
}

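// Version2 references call stacks by CallStackId instead of storing frame
// vectors inline, so only the ids need to round-trip.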
TEST(MemProf, RecordSerializationRoundTripVersion2) {
  const MemProfSchema Schema = getFullSchema();

  MemInfoBlock Info(/*size=*/16, /*access_count=*/7, /*alloc_timestamp=*/1000,
                    /*dealloc_timestamp=*/2000, /*alloc_cpu=*/3,
                    /*dealloc_cpu=*/4);

  llvm::SmallVector<llvm::memprof::CallStackId> CallStackIds = {0x123, 0x456};

  llvm::SmallVector<llvm::memprof::CallStackId> CallSiteIds = {0x333, 0x444};

  IndexedMemProfRecord Record;
  for (const auto &CSId : CallStackIds) {
    // Use the same info block for both allocation sites.
    Record.AllocSites.emplace_back(llvm::SmallVector<FrameId>(), CSId, Info);
  }
  Record.CallSiteIds.assign(CallSiteIds);

  std::string Buffer;
  llvm::raw_string_ostream OS(Buffer);
  Record.serialize(Schema, OS, llvm::memprof::Version2);
  OS.flush();

  const IndexedMemProfRecord GotRecord = IndexedMemProfRecord::deserialize(
      Schema, reinterpret_cast<const unsigned char *>(Buffer.data()),
      llvm::memprof::Version2);

  EXPECT_EQ(Record, GotRecord);
}

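// Checks that frames which belong to the memprof runtime or fail to
// symbolize are filtered out, and that call stacks left empty as a result
// are dropped entirely.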
TEST(MemProf, SymbolizationFilter) {
  std::unique_ptr<MockSymbolizer> Symbolizer(new MockSymbolizer());

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x1000},
                                                specifier(), false))
      .Times(1) // Only once since we don't look up invalid PCs repeatedly.
      .WillRepeatedly(Return(makeInliningInfo({
          {"malloc", 70, 57, 3, "memprof/memprof_malloc_linux.cpp"},
      })));

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x2000},
                                                specifier(), false))
      .Times(1) // Only once since we don't look up invalid PCs repeatedly.
      .WillRepeatedly(Return(makeInliningInfo({
          {"new", 70, 57, 3, "memprof/memprof_new_delete.cpp"},
      })));

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x3000},
                                                specifier(), false))
      .Times(1) // Only once since we don't look up invalid PCs repeatedly.
      .WillRepeatedly(Return(makeInliningInfo({
          {DILineInfo::BadString, 0, 0, 0},
      })));

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x4000},
                                                specifier(), false))
      .Times(1)
      .WillRepeatedly(Return(makeInliningInfo({
          {"foo", 10, 5, 30, "memprof/memprof_test_file.cpp"},
      })));

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x5000},
                                                specifier(), false))
      .Times(1)
      .WillRepeatedly(Return(makeInliningInfo({
          // Depending on how the runtime was compiled, only the filename
          // may be present in the debug information.
          {"malloc", 70, 57, 3, "memprof_malloc_linux.cpp"},
      })));

  CallStackMap CSM;
  CSM[0x1] = {0x1000, 0x2000, 0x3000, 0x4000};
  // This entry should be dropped since all of its PCs are either not
  // symbolizable or belong to the runtime.
  CSM[0x2] = {0x1000, 0x2000, 0x5000};

  llvm::MapVector<uint64_t, MemInfoBlock> Prof;
  Prof[0x1].AllocCount = 1;
  Prof[0x2].AllocCount = 1;

  auto Seg = makeSegments();

  RawMemProfReader Reader(std::move(Symbolizer), Seg, Prof, CSM);

  llvm::SmallVector<MemProfRecord, 1> Records;
  for (const auto &KeyRecordPair : Reader) {
    Records.push_back(KeyRecordPair.second);
  }

  ASSERT_THAT(Records, SizeIs(1));
  ASSERT_THAT(Records[0].AllocSites, SizeIs(1));
  ASSERT_THAT(Records[0].AllocSites[0].CallStack, SizeIs(1));
  EXPECT_THAT(Records[0].AllocSites[0].CallStack[0],
              FrameContains("foo", 5U, 30U, false));
}

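// Checks the base reader with call stacks stored inline in the indexed
// record, resolved against a frame id to Frame map.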
TEST(MemProf, BaseMemProfReader) {
  llvm::DenseMap<FrameId, Frame> FrameIdMap;
  Frame F1(/*Hash=*/IndexedMemProfRecord::getGUID("foo"), /*LineOffset=*/20,
           /*Column=*/5, /*IsInlineFrame=*/true);
  Frame F2(/*Hash=*/IndexedMemProfRecord::getGUID("bar"), /*LineOffset=*/10,
           /*Column=*/2, /*IsInlineFrame=*/false);
  FrameIdMap.insert({F1.hash(), F1});
  FrameIdMap.insert({F2.hash(), F2});

  llvm::MapVector<llvm::GlobalValue::GUID, IndexedMemProfRecord> ProfData;
  IndexedMemProfRecord FakeRecord;
  MemInfoBlock Block;
  Block.AllocCount = 1U;
  Block.TotalAccessDensity = 4;
  Block.TotalLifetime = 200001;
  std::array<FrameId, 2> CallStack{F1.hash(), F2.hash()};
  FakeRecord.AllocSites.emplace_back(
      /*CS=*/CallStack, /*CSId=*/llvm::memprof::hashCallStack(CallStack),
      /*MB=*/Block);
  ProfData.insert({F1.hash(), FakeRecord});

  MemProfReader Reader(FrameIdMap, ProfData);

  llvm::SmallVector<MemProfRecord, 1> Records;
  for (const auto &KeyRecordPair : Reader) {
    Records.push_back(KeyRecordPair.second);
  }

  ASSERT_THAT(Records, SizeIs(1));
  ASSERT_THAT(Records[0].AllocSites, SizeIs(1));
  ASSERT_THAT(Records[0].AllocSites[0].CallStack, SizeIs(2));
  EXPECT_THAT(Records[0].AllocSites[0].CallStack[0],
              FrameContains("foo", 20U, 5U, true));
  EXPECT_THAT(Records[0].AllocSites[0].CallStack[1],
              FrameContains("bar", 10U, 2U, false));
}

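// Same as above, but the call stack is stored out of line and referenced by
// CallStackId through a separate CSId-to-frame-ids map.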
TEST(MemProf, BaseMemProfReaderWithCSIdMap) {
  llvm::DenseMap<FrameId, Frame> FrameIdMap;
  Frame F1(/*Hash=*/IndexedMemProfRecord::getGUID("foo"), /*LineOffset=*/20,
           /*Column=*/5, /*IsInlineFrame=*/true);
  Frame F2(/*Hash=*/IndexedMemProfRecord::getGUID("bar"), /*LineOffset=*/10,
           /*Column=*/2, /*IsInlineFrame=*/false);
  FrameIdMap.insert({F1.hash(), F1});
  FrameIdMap.insert({F2.hash(), F2});

  llvm::DenseMap<CallStackId, llvm::SmallVector<FrameId>> CSIdMap;
  llvm::SmallVector<FrameId> CallStack = {F1.hash(), F2.hash()};
  CallStackId CSId = llvm::memprof::hashCallStack(CallStack);
  CSIdMap.insert({CSId, CallStack});

  llvm::MapVector<llvm::GlobalValue::GUID, IndexedMemProfRecord> ProfData;
  IndexedMemProfRecord FakeRecord;
  MemInfoBlock Block;
  Block.AllocCount = 1U;
  Block.TotalAccessDensity = 4;
  Block.TotalLifetime = 200001;
  FakeRecord.AllocSites.emplace_back(
      /*CS=*/llvm::SmallVector<FrameId>(),
      /*CSId=*/llvm::memprof::hashCallStack(CallStack),
      /*MB=*/Block);
  ProfData.insert({F1.hash(), FakeRecord});

  MemProfReader Reader(FrameIdMap, CSIdMap, ProfData);

  llvm::SmallVector<MemProfRecord, 1> Records;
  for (const auto &KeyRecordPair : Reader) {
    Records.push_back(KeyRecordPair.second);
  }

  ASSERT_THAT(Records, SizeIs(1));
  ASSERT_THAT(Records[0].AllocSites, SizeIs(1));
  ASSERT_THAT(Records[0].AllocSites[0].CallStack, SizeIs(2));
  EXPECT_THAT(Records[0].AllocSites[0].CallStack[0],
              FrameContains("foo", 20U, 5U, true));
  EXPECT_THAT(Records[0].AllocSites[0].CallStack[1],
              FrameContains("bar", 10U, 2U, false));
}

TEST(MemProf, IndexedMemProfRecordToMemProfRecord) {
  // Verify that MemProfRecord can be constructed from IndexedMemProfRecord
  // with CallStackIds only.

  llvm::DenseMap<FrameId, Frame> FrameIdMap;
  Frame F1(1, 0, 0, false);
  Frame F2(2, 0, 0, false);
  Frame F3(3, 0, 0, false);
  Frame F4(4, 0, 0, false);
  FrameIdMap.insert({F1.hash(), F1});
  FrameIdMap.insert({F2.hash(), F2});
  FrameIdMap.insert({F3.hash(), F3});
  FrameIdMap.insert({F4.hash(), F4});

  llvm::DenseMap<CallStackId, llvm::SmallVector<FrameId>> CallStackIdMap;
  llvm::SmallVector<FrameId> CS1 = {F1.hash(), F2.hash()};
  llvm::SmallVector<FrameId> CS2 = {F1.hash(), F3.hash()};
  llvm::SmallVector<FrameId> CS3 = {F2.hash(), F3.hash()};
  llvm::SmallVector<FrameId> CS4 = {F2.hash(), F4.hash()};
  CallStackIdMap.insert({llvm::memprof::hashCallStack(CS1), CS1});
  CallStackIdMap.insert({llvm::memprof::hashCallStack(CS2), CS2});
  CallStackIdMap.insert({llvm::memprof::hashCallStack(CS3), CS3});
  CallStackIdMap.insert({llvm::memprof::hashCallStack(CS4), CS4});

  IndexedMemProfRecord IndexedRecord;
  IndexedAllocationInfo AI;
  AI.CSId = llvm::memprof::hashCallStack(CS1);
  IndexedRecord.AllocSites.push_back(AI);
  AI.CSId = llvm::memprof::hashCallStack(CS2);
  IndexedRecord.AllocSites.push_back(AI);
  IndexedRecord.CallSiteIds.push_back(llvm::memprof::hashCallStack(CS3));
  IndexedRecord.CallSiteIds.push_back(llvm::memprof::hashCallStack(CS4));

  bool CSIdMissing = false;
  bool FrameIdMissing = false;

  auto Callback = [&](CallStackId CSId) -> llvm::SmallVector<Frame> {
    llvm::SmallVector<Frame> CallStack;
    llvm::SmallVector<FrameId> FrameIds;

    auto Iter = CallStackIdMap.find(CSId);
    if (Iter == CallStackIdMap.end())
      CSIdMissing = true;
    else
      FrameIds = Iter->second;

    for (FrameId Id : FrameIds) {
      Frame F(0, 0, 0, false);
      auto Iter = FrameIdMap.find(Id);
      if (Iter == FrameIdMap.end())
        FrameIdMissing = true;
      else
        F = Iter->second;
      CallStack.push_back(F);
    }

    return CallStack;
  };

  MemProfRecord Record = IndexedRecord.toMemProfRecord(Callback);

  // Make sure that all lookups are successful.
  ASSERT_FALSE(CSIdMissing);
  ASSERT_FALSE(FrameIdMissing);

  // Verify the contents of Record.
  ASSERT_THAT(Record.AllocSites, SizeIs(2));
  ASSERT_THAT(Record.AllocSites[0].CallStack, SizeIs(2));
  EXPECT_EQ(Record.AllocSites[0].CallStack[0].hash(), F1.hash());
  EXPECT_EQ(Record.AllocSites[0].CallStack[1].hash(), F2.hash());
  ASSERT_THAT(Record.AllocSites[1].CallStack, SizeIs(2));
  EXPECT_EQ(Record.AllocSites[1].CallStack[0].hash(), F1.hash());
  EXPECT_EQ(Record.AllocSites[1].CallStack[1].hash(), F3.hash());
  ASSERT_THAT(Record.CallSites, SizeIs(2));
  ASSERT_THAT(Record.CallSites[0], SizeIs(2));
  EXPECT_EQ(Record.CallSites[0][0].hash(), F2.hash());
  EXPECT_EQ(Record.CallSites[0][1].hash(), F3.hash());
  ASSERT_THAT(Record.CallSites[1], SizeIs(2));
  EXPECT_EQ(Record.CallSites[1][0].hash(), F2.hash());
  EXPECT_EQ(Record.CallSites[1][1].hash(), F4.hash());
}
} // namespace
