//===-- ProfiledBinary.h - Binary decoder -----------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_TOOLS_LLVM_PROFGEN_PROFILEDBINARY_H
#define LLVM_TOOLS_LLVM_PROFGEN_PROFILEDBINARY_H

#include "CallContext.h"
#include "ErrorHandling.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/StringSet.h"
#include "llvm/DebugInfo/DWARF/DWARFContext.h"
#include "llvm/DebugInfo/Symbolize/Symbolize.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCDisassembler/MCDisassembler.h"
#include "llvm/MC/MCInst.h"
#include "llvm/MC/MCInstPrinter.h"
#include "llvm/MC/MCInstrAnalysis.h"
#include "llvm/MC/MCInstrInfo.h"
#include "llvm/MC/MCObjectFileInfo.h"
#include "llvm/MC/MCPseudoProbe.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/MC/MCSubtargetInfo.h"
#include "llvm/MC/MCTargetOptions.h"
#include "llvm/Object/ELFObjectFile.h"
#include "llvm/ProfileData/SampleProf.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Path.h"
#include "llvm/Transforms/IPO/SampleContextTracker.h"
#include <map>
#include <set>
#include <sstream>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

namespace llvm {
extern cl::opt<bool> EnableCSPreInliner;
extern cl::opt<bool> UseContextCostForPreInliner;
} // namespace llvm

using namespace llvm;
using namespace sampleprof;
using namespace llvm::object;

namespace llvm {
namespace sampleprof {

class ProfiledBinary;
class MissingFrameInferrer;

struct InstructionPointer {
  const ProfiledBinary *Binary;
  // Address of the current instruction within the executable segment of the
  // binary.
  uint64_t Address;
  // Index into the sorted code address array of the binary.
  uint64_t Index = 0;
  InstructionPointer(const ProfiledBinary *Binary, uint64_t Address,
                     bool RoundToNext = false);
  bool advance();
  bool backward();
  void update(uint64_t Addr);
};

// The special frame addresses.
enum SpecialFrameAddr {
  // Dummy root of the frame trie.
  DummyRoot = 0,
  // Represents all addresses outside of the current binary.
  // This is also used to indicate that the call stack should be truncated,
  // since this isn't a real call context the compiler will see.
  ExternalAddr = 1,
};

using RangesTy = std::vector<std::pair<uint64_t, uint64_t>>;

struct BinaryFunction {
  StringRef FuncName;
  // The end of each range is an exclusive bound.
  RangesTy Ranges;

  uint64_t getFuncSize() {
    uint64_t Sum = 0;
    for (auto &R : Ranges) {
      Sum += R.second - R.first;
    }
    return Sum;
  }
};

// Info about a function range. A function can be split into multiple
// non-contiguous ranges; each range corresponds to one FuncRange.
struct FuncRange {
  uint64_t StartAddress;
  // EndAddress is an exclusive bound.
  uint64_t EndAddress;
  // Function the range belongs to.
  BinaryFunction *Func;
  // Whether the start address is the real entry of the function.
  bool IsFuncEntry = false;

  StringRef getFuncName() { return Func->FuncName; }
};

// Prolog/epilog address tracker, used to filter out broken stack samples.
// Currently we use a heuristic size (two) to infer the prolog and epilog
// based on the start address and return address. In the future,
// we will switch to a DWARF CFI-based tracker.
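// Illustrative sketch (addresses are hypothetical): for a function whose
// first two instructions are at 0x1000 and 0x1004 and whose return
// instruction at 0x1020 is preceded by an instruction at 0x101c, the tracker
// adds 0x1000 and 0x1004 (prolog) and 0x101c and 0x1020 (epilog) to
// PrologEpilogSet.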
struct PrologEpilogTracker {
  // A set of prolog and epilog addresses. Used by virtual unwinding.
  std::unordered_set<uint64_t> PrologEpilogSet;
  ProfiledBinary *Binary;
  PrologEpilogTracker(ProfiledBinary *Bin) : Binary(Bin) {}

  // Take the first two instruction addresses of each function as its prolog.
  void
  inferPrologAddresses(std::map<uint64_t, FuncRange> &FuncStartAddressMap) {
    for (auto I : FuncStartAddressMap) {
      PrologEpilogSet.insert(I.first);
      InstructionPointer IP(Binary, I.first);
      if (!IP.advance())
        break;
      PrologEpilogSet.insert(IP.Address);
    }
  }

  // Take each return address and the instruction address before it as epilog.
  void inferEpilogAddresses(std::unordered_set<uint64_t> &RetAddrs) {
    for (auto Addr : RetAddrs) {
      PrologEpilogSet.insert(Addr);
      InstructionPointer IP(Binary, Addr);
      if (!IP.backward())
        break;
      PrologEpilogSet.insert(IP.Address);
    }
  }
};

// Track function byte size under different contexts (the outlined version as
// well as various inlined versions). It also provides query support to get
// function size with the best matching context, which is used to help the
// pre-inliner use accurate post-optimization size to make decisions.
// TODO: If an inlinee is completely optimized away, ideally we should have
// zero for its context size; currently we would miss such a context since it
// doesn't have instructions. To fix this, we need to mark all inlinees with
// an entry probe but without instructions as having zero size.
class BinarySizeContextTracker {
public:
  // Add an instruction with the given size to a context.
  void addInstructionForContext(const SampleContextFrameVector &Context,
                                uint32_t InstrSize);

  // Get function size with a specific context. When there's no exact match
  // for the given context, try to retrieve the size of that function from the
  // closest matching context.
  uint32_t getFuncSizeForContext(const ContextTrieNode *Context);

  // For inlinees that are fully optimized away, we can establish zero size
  // using their remaining probes.
  void trackInlineesOptimizedAway(MCPseudoProbeDecoder &ProbeDecoder);

  using ProbeFrameStack = SmallVector<std::pair<StringRef, uint32_t>>;
  void trackInlineesOptimizedAway(MCPseudoProbeDecoder &ProbeDecoder,
                                  MCDecodedPseudoProbeInlineTree &ProbeNode,
                                  ProbeFrameStack &Context);

  void dump() { RootContext.dumpTree(); }

private:
  // Root node of the context trie. Note that this is a reverse context trie,
  // with the callee as parent and the caller as child. This way we can
  // traverse from the root to find the best/longest matching context if an
  // exact match does not exist. It gives us the best possible estimate for a
  // function's post-inline, post-optimization byte size.
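  // Illustrative sketch (hypothetical frames): a context "main:1 @ foo:2 @
  // bar" is stored reversed, with "bar" as a child of the root, "foo" under
  // "bar", and "main" under "foo"; a query for "bar" with an unseen caller
  // chain then still finds the closest recorded context for "bar".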
  ContextTrieNode RootContext;
};

using AddressRange = std::pair<uint64_t, uint64_t>;

class ProfiledBinary {
  // Absolute path of the executable binary.
  std::string Path;
  // Path of the debug info binary.
  std::string DebugBinaryPath;
  // The target triple.
  Triple TheTriple;
  // Path used by the symbolizer; it should point to the binary with debug
  // info.
  StringRef SymbolizerPath;
  // Options used to configure the symbolizer.
  symbolize::LLVMSymbolizer::Options SymbolizerOpts;
  // The runtime base address that the first executable segment is loaded at.
  uint64_t BaseAddress = 0;
  // The runtime base address that the first loadable segment is loaded at.
  uint64_t FirstLoadableAddress = 0;
  // The preferred load address of each executable segment.
  std::vector<uint64_t> PreferredTextSegmentAddresses;
  // The file offset of each executable segment.
  std::vector<uint64_t> TextSegmentOffsets;

  // Multiple MC components used for disassembly.
  std::unique_ptr<const MCRegisterInfo> MRI;
  std::unique_ptr<const MCAsmInfo> AsmInfo;
  std::unique_ptr<const MCSubtargetInfo> STI;
  std::unique_ptr<const MCInstrInfo> MII;
  std::unique_ptr<MCDisassembler> DisAsm;
  std::unique_ptr<const MCInstrAnalysis> MIA;
  std::unique_ptr<MCInstPrinter> IPrinter;
  // A list of text sections sorted by start RVA and size. Used to check
  // whether a given RVA is a valid code address.
  std::set<std::pair<uint64_t, uint64_t>> TextSections;

  // A map from function name to BinaryFunction info.
  std::unordered_map<std::string, BinaryFunction> BinaryFunctions;

  // Lookup BinaryFunctions using the function name's MD5 hash. Needed if the
  // profile is using MD5.
  std::unordered_map<uint64_t, BinaryFunction *> HashBinaryFunctions;

  // A list of binary functions that have samples.
  std::unordered_set<const BinaryFunction *> ProfiledFunctions;

  // GUID to ELF symbol start address map.
  DenseMap<uint64_t, uint64_t> SymbolStartAddrs;

  // These sets are for temporary use in warning diagnosis.
  DenseSet<int64_t> AddrsWithMultipleSymbols;
  DenseSet<std::pair<uint64_t, uint64_t>> AddrsWithInvalidInstruction;

  // Start address to ELF symbol GUID map.
  std::unordered_multimap<uint64_t, uint64_t> StartAddrToSymMap;

  // An ordered map from a function's start address to its function range
  // info. Currently, to determine whether an ELF offset is the start of a
  // real function, we leverage the function range info from DWARF.
  std::map<uint64_t, FuncRange> StartAddrToFuncRangeMap;

  // Address to context location map. Used to expand the context.
  std::unordered_map<uint64_t, SampleContextFrameVector> AddressToLocStackMap;

  // Address to instruction size map. Also used for quick address lookup.
  std::unordered_map<uint64_t, uint64_t> AddressToInstSizeMap;

  // An array of the addresses of all instructions, sorted in increasing
  // order. The sorting is needed to quickly advance to the next or previous
  // instruction.
  std::vector<uint64_t> CodeAddressVec;
  // A set of call instruction addresses. Used by virtual unwinding.
  std::unordered_set<uint64_t> CallAddressSet;
  // A set of return instruction addresses. Used by virtual unwinding.
  std::unordered_set<uint64_t> RetAddressSet;
  // An ordered set of unconditional branch instruction addresses.
  std::set<uint64_t> UncondBranchAddrSet;
  // A set of branch instruction addresses.
  std::unordered_set<uint64_t> BranchAddressSet;

  // Estimate and track function prolog and epilog ranges.
  PrologEpilogTracker ProEpilogTracker;

  // Infer missing frames due to compiler optimizations such as tail call
  // elimination.
  std::unique_ptr<MissingFrameInferrer> MissingContextInferrer;

  // Track function sizes under different contexts.
  BinarySizeContextTracker FuncSizeTracker;

  // The symbolizer used to get inline context for an instruction.
  std::unique_ptr<symbolize::LLVMSymbolizer> Symbolizer;

  // String table owning function name strings created from the symbolizer.
  std::unordered_set<std::string> NameStrings;

  // A collection of functions to print disassembly for.
  StringSet<> DisassembleFunctionSet;

  // Pseudo probe decoder.
  MCPseudoProbeDecoder ProbeDecoder;

  // Function name to probe frame map for top-level outlined functions.
  StringMap<MCDecodedPseudoProbeInlineTree *> TopLevelProbeFrameMap;

  bool UsePseudoProbes = false;

  bool UseFSDiscriminator = false;

  // Whether we need to symbolize all instructions to get function context
  // size.
  bool TrackFuncContextSize = false;

  // Indicates whether the base loading address is parsed from the mmap event
  // or falls back to the preferred address.
  bool IsLoadedByMMap = false;
  // Used to avoid redundant warnings.
  bool MissingMMapWarned = false;

  bool IsCOFF = false;

  void setPreferredTextSegmentAddresses(const ObjectFile *O);

  template <class ELFT>
  void setPreferredTextSegmentAddresses(const ELFFile<ELFT> &Obj,
                                        StringRef FileName);
  void setPreferredTextSegmentAddresses(const COFFObjectFile *Obj,
                                        StringRef FileName);

  void checkPseudoProbe(const ELFObjectFileBase *Obj);

  void decodePseudoProbe(const ELFObjectFileBase *Obj);

  void
  checkUseFSDiscriminator(const ObjectFile *Obj,
                          std::map<SectionRef, SectionSymbolsTy> &AllSymbols);

  // Set up the disassembler and related components.
  void setUpDisassembler(const ObjectFile *Obj);
  symbolize::LLVMSymbolizer::Options getSymbolizerOpts() const;

  // Load debug info of subprograms from the DWARF section.
  void loadSymbolsFromDWARF(ObjectFile &Obj);

  // Load debug info from a DWARF unit.
  void loadSymbolsFromDWARFUnit(DWARFUnit &CompilationUnit);

  // Create the ELF symbol to start address mapping.
  void populateElfSymbolAddressList(const ELFObjectFileBase *O);

  // A function may be split into multiple non-contiguous address ranges. We
  // use this to set whether the start of a function range is the real entry
  // of the function, and also to mark non-function labels as non-entries.
  void setIsFuncEntry(FuncRange *FRange, StringRef RangeSymName);

  // Warn if no entry range exists in the function.
  void warnNoFuncEntry();

  /// Disassemble the text section and build various address maps.
  void disassemble(const ObjectFile *O);

  /// Helper function to disassemble the symbol and extract info for
  /// unwinding.
  bool dissassembleSymbol(std::size_t SI, ArrayRef<uint8_t> Bytes,
                          SectionSymbolsTy &Symbols, const SectionRef &Section);
  /// Symbolize a given instruction pointer and return a full call context.
  SampleContextFrameVector symbolize(const InstructionPointer &IP,
                                     bool UseCanonicalFnName = false,
                                     bool UseProbeDiscriminator = false);
  /// Decode the interesting parts of the binary and build internal data
  /// structures. On a high level, the parts of interest are:
  ///   1. Text sections, including the main code section and the PLT
  ///      entries that will be used to handle cross-module call transitions.
  ///   2. The .debug_line section, used by Dwarf-based profile generation.
  ///   3. Pseudo probe related sections, used by probe-based profile
  ///      generation.
  void load();

public:
  ProfiledBinary(const StringRef ExeBinPath, const StringRef DebugBinPath);
  ~ProfiledBinary();

  void decodePseudoProbe();

  StringRef getPath() const { return Path; }
  StringRef getName() const { return llvm::sys::path::filename(Path); }
  uint64_t getBaseAddress() const { return BaseAddress; }
  void setBaseAddress(uint64_t Address) { BaseAddress = Address; }

  bool isCOFF() const { return IsCOFF; }

  // Canonicalize to use preferred load address as base address.
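  // Illustrative sketch (hypothetical addresses): with a preferred base of
  // 0x400000 and a runtime BaseAddress of 0x7f0000000000, a sampled address
  // 0x7f0000001234 canonicalizes to 0x401234.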
  uint64_t canonicalizeVirtualAddress(uint64_t Address) {
    return Address - BaseAddress + getPreferredBaseAddress();
  }
  // Return the preferred load address for the first executable segment.
  uint64_t getPreferredBaseAddress() const {
    return PreferredTextSegmentAddresses[0];
  }
  // Return the preferred load address for the first loadable segment.
  uint64_t getFirstLoadableAddress() const { return FirstLoadableAddress; }
  // Return the file offset for the first executable segment.
  uint64_t getTextSegmentOffset() const { return TextSegmentOffsets[0]; }
  const std::vector<uint64_t> &getPreferredTextSegmentAddresses() const {
    return PreferredTextSegmentAddresses;
  }
  const std::vector<uint64_t> &getTextSegmentOffsets() const {
    return TextSegmentOffsets;
  }

  uint64_t getInstSize(uint64_t Address) const {
    auto I = AddressToInstSizeMap.find(Address);
    if (I == AddressToInstSizeMap.end())
      return 0;
    return I->second;
  }

  bool addressIsCode(uint64_t Address) const {
    return AddressToInstSizeMap.find(Address) != AddressToInstSizeMap.end();
  }

  bool addressIsCall(uint64_t Address) const {
    return CallAddressSet.count(Address);
  }
  bool addressIsReturn(uint64_t Address) const {
    return RetAddressSet.count(Address);
  }
  bool addressInPrologEpilog(uint64_t Address) const {
    return ProEpilogTracker.PrologEpilogSet.count(Address);
  }

  bool addressIsTransfer(uint64_t Address) {
    return BranchAddressSet.count(Address) || RetAddressSet.count(Address) ||
           CallAddressSet.count(Address);
  }

  bool rangeCrossUncondBranch(uint64_t Start, uint64_t End) {
    if (Start >= End)
      return false;
    auto R = UncondBranchAddrSet.lower_bound(Start);
    return R != UncondBranchAddrSet.end() && *R < End;
  }
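  // Illustrative example (hypothetical addresses): with an unconditional
  // branch recorded at 0x20, rangeCrossUncondBranch(0x10, 0x30) returns true,
  // since 0x20 is the first branch address >= 0x10 and it is < 0x30.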

  uint64_t getAddressforIndex(uint64_t Index) const {
    return CodeAddressVec[Index];
  }

  size_t getCodeAddrVecSize() const { return CodeAddressVec.size(); }

  bool usePseudoProbes() const { return UsePseudoProbes; }
  bool useFSDiscriminator() const { return UseFSDiscriminator; }
  // Get the index in CodeAddressVec for the given address. Since the address
  // may not be a valid code address, it is rounded up to the next valid code
  // address via a lower-bound operation.
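  // Illustrative example (hypothetical addresses): if CodeAddressVec is
  // {0x10, 0x14, 0x18}, getIndexForAddr(0x12) and getIndexForAddr(0x14) both
  // return 1 (the slot of 0x14).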
  uint32_t getIndexForAddr(uint64_t Address) const {
    auto Low = llvm::lower_bound(CodeAddressVec, Address);
    return Low - CodeAddressVec.begin();
  }

  uint64_t getCallAddrFromFrameAddr(uint64_t FrameAddr) const {
    if (FrameAddr == ExternalAddr)
      return ExternalAddr;
    auto I = getIndexForAddr(FrameAddr);
    FrameAddr = I ? getAddressforIndex(I - 1) : 0;
    if (FrameAddr && addressIsCall(FrameAddr))
      return FrameAddr;
    return 0;
  }
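  // Note (illustrative, hypothetical addresses): a frame address is the
  // return address of a call, so the call site is the instruction right
  // before it. For a call at 0x1000 whose following instruction is at 0x1005,
  // getCallAddrFromFrameAddr(0x1005) returns 0x1000.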

  FuncRange *findFuncRangeForStartAddr(uint64_t Address) {
    auto I = StartAddrToFuncRangeMap.find(Address);
    if (I == StartAddrToFuncRangeMap.end())
      return nullptr;
    return &I->second;
  }

  // Binary search for the function range that includes the given address.
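  // Illustrative example (hypothetical ranges): with ranges starting at
  // 0x1000 (ending at 0x1040) and at 0x2000 (ending at 0x2080),
  // findFuncRange(0x1030) returns the first range, while findFuncRange(0x1050)
  // returns nullptr because 0x1050 is past that range's exclusive end.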
  FuncRange *findFuncRange(uint64_t Address) {
    auto I = StartAddrToFuncRangeMap.upper_bound(Address);
    if (I == StartAddrToFuncRangeMap.begin())
      return nullptr;
    I--;

    if (Address >= I->second.EndAddress)
      return nullptr;

    return &I->second;
  }

  // Get all ranges of one function.
  RangesTy getRanges(uint64_t Address) {
    auto *FRange = findFuncRange(Address);
    // Ignore ranges that fall into the PLT section or a system library.
    if (!FRange)
      return RangesTy();

    return FRange->Func->Ranges;
  }

  const std::unordered_map<std::string, BinaryFunction> &
  getAllBinaryFunctions() {
    return BinaryFunctions;
  }

  std::unordered_set<const BinaryFunction *> &getProfiledFunctions() {
    return ProfiledFunctions;
  }

  void setProfiledFunctions(std::unordered_set<const BinaryFunction *> &Funcs) {
    ProfiledFunctions = Funcs;
  }

  BinaryFunction *getBinaryFunction(FunctionId FName) {
    if (FName.isStringRef()) {
      auto I = BinaryFunctions.find(FName.str());
      if (I == BinaryFunctions.end())
        return nullptr;
      return &I->second;
    }
    auto I = HashBinaryFunctions.find(FName.getHashCode());
    if (I == HashBinaryFunctions.end())
      return nullptr;
    return I->second;
  }

  uint32_t getFuncSizeForContext(const ContextTrieNode *ContextNode) {
    return FuncSizeTracker.getFuncSizeForContext(ContextNode);
  }

  void inferMissingFrames(const SmallVectorImpl<uint64_t> &Context,
                          SmallVectorImpl<uint64_t> &NewContext);

  // Load the symbols from the debug table and populate them into the symbol
  // list.
  void populateSymbolListFromDWARF(ProfileSymbolList &SymbolList);

  SampleContextFrameVector
  getFrameLocationStack(uint64_t Address, bool UseProbeDiscriminator = false) {
    InstructionPointer IP(this, Address);
    return symbolize(IP, SymbolizerOpts.UseSymbolTable, UseProbeDiscriminator);
  }

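  // Note: getCachedFrameLocationStack below memoizes symbolization results in
  // AddressToLocStackMap, so each address is symbolized at most once; the
  // emplace call inserts an empty entry, which is filled only on first
  // insertion.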
  const SampleContextFrameVector &
  getCachedFrameLocationStack(uint64_t Address,
                              bool UseProbeDiscriminator = false) {
    auto I = AddressToLocStackMap.emplace(Address, SampleContextFrameVector());
    if (I.second) {
      I.first->second = getFrameLocationStack(Address, UseProbeDiscriminator);
    }
    return I.first->second;
  }

  std::optional<SampleContextFrame> getInlineLeafFrameLoc(uint64_t Address) {
    const auto &Stack = getCachedFrameLocationStack(Address);
    if (Stack.empty())
      return {};
    return Stack.back();
  }

  void flushSymbolizer() { Symbolizer.reset(); }

  MissingFrameInferrer *getMissingContextInferrer() {
    return MissingContextInferrer.get();
  }

  // Compare the inline contexts of two addresses.
  bool inlineContextEqual(uint64_t Add1, uint64_t Add2);

  // Get the full context of the current stack with inline context filled in.
  // It will search the disassembly info stored in AddressToLocStackMap. The
  // result is used as the key of the function sample map.
  SampleContextFrameVector
  getExpandedContext(const SmallVectorImpl<uint64_t> &Stack,
                     bool &WasLeafInlined);
  // Go through the instructions in the given range and record their sizes for
  // the inline context.
  void computeInlinedContextSizeForRange(uint64_t StartAddress,
                                         uint64_t EndAddress);

  void computeInlinedContextSizeForFunc(const BinaryFunction *Func);

  const MCDecodedPseudoProbe *getCallProbeForAddr(uint64_t Address) const {
    return ProbeDecoder.getCallProbeForAddr(Address);
  }

  void getInlineContextForProbe(const MCDecodedPseudoProbe *Probe,
                                SampleContextFrameVector &InlineContextStack,
                                bool IncludeLeaf = false) const {
    SmallVector<MCPseduoProbeFrameLocation, 16> ProbeInlineContext;
    ProbeDecoder.getInlineContextForProbe(Probe, ProbeInlineContext,
                                          IncludeLeaf);
    for (uint32_t I = 0; I < ProbeInlineContext.size(); I++) {
      auto &Callsite = ProbeInlineContext[I];
      // Clear the current context for an unknown probe.
      if (Callsite.second == 0 && I != ProbeInlineContext.size() - 1) {
        InlineContextStack.clear();
        continue;
      }
      InlineContextStack.emplace_back(FunctionId(Callsite.first),
                                      LineLocation(Callsite.second, 0));
    }
  }
  const AddressProbesMap &getAddress2ProbesMap() const {
    return ProbeDecoder.getAddress2ProbesMap();
  }
  const MCPseudoProbeFuncDesc *getFuncDescForGUID(uint64_t GUID) {
    return ProbeDecoder.getFuncDescForGUID(GUID);
  }

  const MCPseudoProbeFuncDesc *
  getInlinerDescForProbe(const MCDecodedPseudoProbe *Probe) {
    return ProbeDecoder.getInlinerDescForProbe(Probe);
  }

  bool getTrackFuncContextSize() { return TrackFuncContextSize; }

  bool getIsLoadedByMMap() { return IsLoadedByMMap; }

  void setIsLoadedByMMap(bool Value) { IsLoadedByMMap = Value; }

  bool getMissingMMapWarned() { return MissingMMapWarned; }

  void setMissingMMapWarned(bool Value) { MissingMMapWarned = Value; }
};

} // end namespace sampleprof
} // end namespace llvm

#endif