#define DEBUG_TYPE "memprof"

template <class T = uint64_t> inline T alignedRead(const char *Ptr) {
  static_assert(std::is_pod<T>::value, "Not a pod type.");
  assert(reinterpret_cast<size_t>(Ptr) % sizeof(T) == 0 && "Unaligned Read");
  return *reinterpret_cast<const T *>(Ptr);
}
Error checkBuffer(const MemoryBuffer &Buffer) {
  if (Buffer.getBufferSize() == 0)
    return make_error<InstrProfError>(instrprof_error::empty_raw_profile);

  if (Buffer.getBufferSize() < sizeof(Header)) {
    return make_error<InstrProfError>(instrprof_error::truncated);
  }

  // The size of the buffer can be larger than a single header's TotalSize
  // since we allow repeated serialization of memprof profiles to the same
  // file.
  uint64_t TotalSize = 0;
  const char *Next = Buffer.getBufferStart();
  while (Next < Buffer.getBufferEnd()) {
    const auto *H = reinterpret_cast<const Header *>(Next);

    // Check whether the version in the header is one we support.
    bool IsSupported = false;
    for (auto SupportedVersion : MEMPROF_RAW_SUPPORTED_VERSIONS) {
      if (H->Version == SupportedVersion)
        IsSupported = true;
    }
    if (!IsSupported) {
      return make_error<InstrProfError>(instrprof_error::unsupported_version);
    }

    TotalSize += H->TotalSize;
    Next += H->TotalSize;
  }

  if (Buffer.getBufferSize() != TotalSize) {
    return make_error<InstrProfError>(instrprof_error::malformed);
  }
  return Error::success();
}
llvm::SmallVector<SegmentEntry> readSegmentEntries(const char *Ptr) {
  using namespace support;

  const uint64_t NumItemsToRead =
      endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
  llvm::SmallVector<SegmentEntry> Items;
  for (uint64_t I = 0; I < NumItemsToRead; I++) {
    Items.push_back(*reinterpret_cast<const SegmentEntry *>(
        Ptr + I * sizeof(SegmentEntry)));
  }
  return Items;
}
llvm::SmallVector<std::pair<uint64_t, MemInfoBlock>>
readMemInfoBlocksV3(const char *Ptr) {
  using namespace support;

  const uint64_t NumItemsToRead =
      endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);

  llvm::SmallVector<std::pair<uint64_t, MemInfoBlock>> Items;
  for (uint64_t I = 0; I < NumItemsToRead; I++) {
    const uint64_t Id =
        endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);

    // The V3 on-disk layout lacks the access histogram fields, so the fields
    // read past the end of the V3 record contain unrelated data; clear them.
    MemInfoBlock MIB = *reinterpret_cast<const MemInfoBlock *>(Ptr);
    MIB.AccessHistogramSize = 0;
    MIB.AccessHistogram = 0;

    Items.push_back({Id, MIB});
    // Only advance by the V3 on-disk MIB size.
    Ptr += MEMPROF_V3_MIB_SIZE;
  }
  return Items;
}
llvm::SmallVector<std::pair<uint64_t, MemInfoBlock>>
readMemInfoBlocksV4(const char *Ptr) {
  using namespace support;

  const uint64_t NumItemsToRead =
      endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);

  llvm::SmallVector<std::pair<uint64_t, MemInfoBlock>> Items;
  for (uint64_t I = 0; I < NumItemsToRead; I++) {
    const uint64_t Id =
        endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);
    MemInfoBlock MIB = *reinterpret_cast<const MemInfoBlock *>(Ptr);

    // Only increment by the size of the MIB here; the histogram entries that
    // follow are read via readNext, which advances Ptr implicitly.
    Ptr += sizeof(MemInfoBlock);

    if (MIB.AccessHistogramSize > 0) {
      MIB.AccessHistogram =
          (uintptr_t)malloc(MIB.AccessHistogramSize * sizeof(uint64_t));
    }

    for (uint64_t J = 0; J < MIB.AccessHistogramSize; J++) {
      ((uint64_t *)MIB.AccessHistogram)[J] =
          endian::readNext<uint64_t, llvm::endianness::little, unaligned>(Ptr);
    }
    Items.push_back({Id, MIB});
  }
  return Items;
}
CallStackMap readStackInfo(const char *Ptr) {
  using namespace support;

  const uint64_t NumItemsToRead =
      endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
  CallStackMap Items;

  for (uint64_t I = 0; I < NumItemsToRead; I++) {
    const uint64_t StackId =
        endian::readNext<uint64_t, llvm::endianness::little>(Ptr);
    const uint64_t NumPCs =
        endian::readNext<uint64_t, llvm::endianness::little>(Ptr);

    SmallVector<uint64_t> CallStack;
    CallStack.reserve(NumPCs);
    for (uint64_t J = 0; J < NumPCs; J++) {
      CallStack.push_back(
          endian::readNext<uint64_t, llvm::endianness::little>(Ptr));
    }

    Items[StackId] = CallStack;
  }
  return Items;
}
// Merges stack information from \p From into \p To. Returns true if any stack
// id observed previously maps to a different set of program counter addresses.
bool mergeStackMap(const CallStackMap &From, CallStackMap &To) {
  for (const auto &[Id, Stack] : From) {
    auto I = To.find(Id);
    if (I == To.end()) {
      To[Id] = Stack;
    } else if (Stack != I->second) {
      return true;
    }
  }
  return false;
}
Error report(Error E, const StringRef Context) {
  return joinErrors(createStringError(inconvertibleErrorCode(), Context),
                    std::move(E));
}

bool isRuntimePath(const StringRef Path) {
  const StringRef Filename = llvm::sys::path::filename(Path);
  // This list should be updated if new files with additional interceptors are
  // added to the memprof runtime.
  return Filename == "memprof_malloc_linux.cpp" ||
         Filename == "memprof_interceptors.cpp" ||
         Filename == "memprof_new_delete.cpp";
}
std::string getBuildIdString(const SegmentEntry &Entry) {
  // If the build id is unset, print a helpful string instead of all zeros.
  if (Entry.BuildIdSize == 0)
    return "<None>";

  std::string Str;
  raw_string_ostream OS(Str);
  for (size_t I = 0; I < Entry.BuildIdSize; I++) {
    OS << format_hex_no_prefix(Entry.BuildId[I], 2);
  }
  return OS.str();
}
MemProfReader::MemProfReader(
    llvm::DenseMap<FrameId, Frame> FrameIdMap,
    llvm::MapVector<GlobalValue::GUID, IndexedMemProfRecord> ProfData)
    : IdToFrame(std::move(FrameIdMap)),
      FunctionProfileData(std::move(ProfData)) {
  // Populate CSId for each allocation and callsite record while storing the
  // call stacks in CSIdToCallStack.
  for (auto &KV : FunctionProfileData) {
    IndexedMemProfRecord &Record = KV.second;
    for (auto &AS : Record.AllocSites) {
      CallStackId CSId = hashCallStack(AS.CallStack);
      AS.CSId = CSId;
      CSIdToCallStack.insert({CSId, AS.CallStack});
    }
    for (auto &CS : Record.CallSites) {
      CallStackId CSId = hashCallStack(CS);
      Record.CallSiteIds.push_back(CSId);
      CSIdToCallStack.insert({CSId, CS});
    }
  }
}
Expected<std::unique_ptr<RawMemProfReader>>
RawMemProfReader::create(const Twine &Path, const StringRef ProfiledBinary,
                         bool KeepName) {
  auto BufferOr = MemoryBuffer::getFileOrSTDIN(Path);
  if (std::error_code EC = BufferOr.getError())
    return report(errorCodeToError(EC), Path.getSingleStringRef());

  std::unique_ptr<MemoryBuffer> Buffer(BufferOr.get().release());
  return create(std::move(Buffer), ProfiledBinary, KeepName);
}

Expected<std::unique_ptr<RawMemProfReader>>
RawMemProfReader::create(std::unique_ptr<MemoryBuffer> Buffer,
                         const StringRef ProfiledBinary, bool KeepName) {
  if (Error E = checkBuffer(*Buffer))
    return report(std::move(E), Buffer->getBufferIdentifier());

  if (ProfiledBinary.empty()) {
    // Peek the build ids to print a helpful error message.
    const std::vector<std::string> BuildIds = peekBuildIds(Buffer.get());
    std::string ErrorMessage(
        R"(Path to profiled binary is empty, expected binary with one of the following build ids:
)");
    for (const auto &Id : BuildIds) {
      ErrorMessage += "\n BuildId: ";
      ErrorMessage += Id;
    }
    return report(
        make_error<StringError>(ErrorMessage, inconvertibleErrorCode()),
        /*Context=*/"");
  }

  auto BinaryOr = llvm::object::createBinary(ProfiledBinary);
  if (!BinaryOr) {
    return report(BinaryOr.takeError(), ProfiledBinary);
  }

  // Use new here since the constructor is private.
  std::unique_ptr<RawMemProfReader> Reader(
      new RawMemProfReader(std::move(BinaryOr.get()), KeepName));
  if (Error E = Reader->initialize(std::move(Buffer))) {
    return std::move(E);
  }
  return std::move(Reader);
}
// Free any leftover MIB access histograms that were not freed during merging.
RawMemProfReader::~RawMemProfReader() {
  for (auto &[_, MIB] : CallstackProfileData) {
    if (MemprofRawVersion >= 4ULL && MIB.AccessHistogramSize > 0) {
      free((void *)MIB.AccessHistogram);
    }
  }
}

bool RawMemProfReader::hasFormat(const StringRef Path) {
  auto BufferOr = MemoryBuffer::getFileOrSTDIN(Path);
  if (!BufferOr)
    return false;

  std::unique_ptr<MemoryBuffer> Buffer(BufferOr.get().release());
  return hasFormat(*Buffer);
}

bool RawMemProfReader::hasFormat(const MemoryBuffer &Buffer) {
  if (Buffer.getBufferSize() < sizeof(uint64_t))
    return false;
  // Aligned read to sanity check that the buffer was allocated with at least
  // 8b alignment.
  const uint64_t Magic = alignedRead(Buffer.getBufferStart());
  return Magic == MEMPROF_RAW_MAGIC_64;
}
void RawMemProfReader::printYAML(raw_ostream &OS) {
  uint64_t NumAllocFunctions = 0, NumMibInfo = 0;
  for (const auto &KV : FunctionProfileData) {
    const size_t NumAllocSites = KV.second.AllocSites.size();
    if (NumAllocSites > 0) {
      NumAllocFunctions++;
      NumMibInfo += NumAllocSites;
    }
  }

  OS << "MemprofProfile:\n";
  OS << "  Summary:\n";
  OS << "    Version: " << MemprofRawVersion << "\n";
  OS << "    NumSegments: " << SegmentInfo.size() << "\n";
  OS << "    NumMibInfo: " << NumMibInfo << "\n";
  OS << "    NumAllocFunctions: " << NumAllocFunctions << "\n";
  OS << "    NumStackOffsets: " << StackMap.size() << "\n";
  // Print out the segment information.
  OS << "  Segments:\n";
  for (const auto &Entry : SegmentInfo) {
    OS << "  -\n";
    OS << "    BuildId: " << getBuildIdString(Entry) << "\n";
    OS << "    Start: 0x" << llvm::utohexstr(Entry.Start) << "\n";
    OS << "    End: 0x" << llvm::utohexstr(Entry.End) << "\n";
    OS << "    Offset: 0x" << llvm::utohexstr(Entry.Offset) << "\n";
  }
  // Print out the merged contents of the profiles.
  OS << "  Records:\n";
  for (const auto &[GUID, Record] : *this) {
    OS << "  -\n";
    OS << "    FunctionGUID: " << GUID << "\n";
    Record.print(OS);
  }
}
Error RawMemProfReader::initialize(std::unique_ptr<MemoryBuffer> DataBuffer) {
  const StringRef FileName = Binary.getBinary()->getFileName();

  auto *ElfObject = dyn_cast<object::ELFObjectFileBase>(Binary.getBinary());
  if (!ElfObject) {
    return report(make_error<StringError>(Twine("Not an ELF file: "),
                                          inconvertibleErrorCode()),
                  FileName);
  }

  // Check whether the profiled binary was built with position independent code
  // (PIC) and perform sanity checks on the assumptions used for symbolization.
  auto *Elf64LEObject = llvm::cast<llvm::object::ELF64LEObjectFile>(ElfObject);
  const llvm::object::ELF64LEFile &ElfFile = Elf64LEObject->getELFFile();
  auto PHdrsOr = ElfFile.program_headers();
  if (!PHdrsOr)
    return report(
        make_error<StringError>(Twine("Could not read program headers: "),
                                inconvertibleErrorCode()),
        FileName);

  int NumExecutableSegments = 0;
  for (const auto &Phdr : *PHdrsOr) {
    if (Phdr.p_type == ELF::PT_LOAD && (Phdr.p_flags & ELF::PF_X)) {
      // We assume only one executable load segment in the binary to simplify
      // symbolization.
      if (++NumExecutableSegments > 1) {
        return report(
            make_error<StringError>(
                "Expect only one executable load segment in the binary",
                inconvertibleErrorCode()),
            FileName);
      }
      // The segment is always loaded at a page boundary; assume a 4K page size
      // for the machine the profile was collected on.
      PreferredTextSegmentAddress = Phdr.p_vaddr;
      assert(Phdr.p_vaddr == (Phdr.p_vaddr & ~(0x1000 - 1U)) &&
             "Expect p_vaddr to always be page aligned");
      assert(Phdr.p_offset == 0 && "Expect p_offset = 0 for symbolization.");
    }
  }

  auto Triple = ElfObject->makeTriple();
  if (!Triple.isX86())
    return report(make_error<StringError>(Twine("Unsupported target: ") +
                                              Triple.getArchName(),
                                          inconvertibleErrorCode()),
                  FileName);

  // Process the raw profile.
  if (Error E = readRawProfile(std::move(DataBuffer)))
    return E;

  if (Error E = setupForSymbolization())
    return E;

  auto *Object = cast<object::ObjectFile>(Binary.getBinary());
  std::unique_ptr<DIContext> Context = DWARFContext::create(
      *Object, DWARFContext::ProcessDebugRelocations::Process);

  auto SOFOr = symbolize::SymbolizableObjectFile::create(
      Object, std::move(Context), /*UntagAddresses=*/false);
  if (!SOFOr)
    return report(SOFOr.takeError(), FileName);
  auto Symbolizer = std::move(SOFOr.get());

  if (Error E = symbolizeAndFilterStackFrames(std::move(Symbolizer)))
    return E;

  return mapRawProfileToRecords();
}
Error RawMemProfReader::setupForSymbolization() {
  auto *Object = cast<object::ObjectFile>(Binary.getBinary());
  object::BuildIDRef BinaryId = object::getBuildID(Object);
  if (BinaryId.empty())
    return make_error<StringError>(Twine("No build id found in binary ") +
                                       Binary.getBinary()->getFileName(),
                                   inconvertibleErrorCode());

  int NumMatched = 0;
  for (const auto &Entry : SegmentInfo) {
    llvm::ArrayRef<uint8_t> SegmentId(Entry.BuildId, Entry.BuildIdSize);
    if (BinaryId == SegmentId) {
      // We assume only one text segment in the profiled binary to simplify
      // symbolization.
      if (++NumMatched > 1) {
        return make_error<StringError>(
            "We expect only one executable segment in the profiled binary",
            inconvertibleErrorCode());
      }
      ProfiledTextSegmentStart = Entry.Start;
      ProfiledTextSegmentEnd = Entry.End;
    }
  }
  assert(NumMatched != 0 && "No matching executable segments in segment info.");
  assert((PreferredTextSegmentAddress == 0 ||
          (PreferredTextSegmentAddress == ProfiledTextSegmentStart)) &&
         "Expect text segment address to be 0 or equal to profiled text "
         "segment start.");

  return Error::success();
}
Error RawMemProfReader::mapRawProfileToRecords() {
  // Hold a mapping from function to each callsite location we encounter within
  // it that is part of some dynamic allocation context. The location is stored
  // as a pointer to a symbolized list of inline frames.
  using LocationPtr = const llvm::SmallVector<FrameId> *;
  llvm::MapVector<GlobalValue::GUID, llvm::SetVector<LocationPtr>>
      PerFunctionCallSites;

  // Convert the raw profile callstack data into memprof records. While doing
  // so, keep track of related contexts so that we can fill these in later.
  for (const auto &[StackId, MIB] : CallstackProfileData) {
    auto It = StackMap.find(StackId);
    if (It == StackMap.end())
      return make_error<InstrProfError>(
          instrprof_error::hash_mismatch,
          "memprof callstack record does not contain id: " + Twine(StackId));

    // Construct the symbolized callstack.
    llvm::SmallVector<FrameId> Callstack;
    Callstack.reserve(It->getSecond().size());

    llvm::ArrayRef<uint64_t> Addresses = It->getSecond();
    for (size_t I = 0; I < Addresses.size(); I++) {
      const uint64_t Address = Addresses[I];
      assert(SymbolizedFrame.count(Address) > 0 &&
             "Address not found in SymbolizedFrame map");
      const SmallVector<FrameId> &Frames = SymbolizedFrame[Address];

      assert(!idToFrame(Frames.back()).IsInlineFrame &&
             "The last frame should not be inlined");

      // Record the callsites for each function. Skip the first frame of the
      // first address since it is the allocation site itself, which is
      // recorded as an alloc site.
      for (size_t J = 0; J < Frames.size(); J++) {
        if (I == 0 && J == 0)
          continue;
        const GlobalValue::GUID Guid = idToFrame(Frames[J]).Function;
        PerFunctionCallSites[Guid].insert(&Frames);
      }

      // Add all the frames to the current allocation callstack.
      Callstack.append(Frames.begin(), Frames.end());
    }

    CallStackId CSId = hashCallStack(Callstack);
    CSIdToCallStack.insert({CSId, Callstack});

    // Attach the memprof record to each function bottom-up, including the
    // first non-inline frame.
    for (size_t I = 0; /*terminated by the break below*/; I++) {
      const Frame &F = idToFrame(Callstack[I]);
      auto Result =
          FunctionProfileData.insert({F.Function, IndexedMemProfRecord()});
      IndexedMemProfRecord &Record = Result.first->second;
      Record.AllocSites.emplace_back(Callstack, CSId, MIB);

      if (!F.IsInlineFrame)
        break;
    }
  }

  // Fill in the related callsites per function.
  for (const auto &[Id, Locs] : PerFunctionCallSites) {
    // Some functions may have only callsite data and no allocation data, so
    // insert a new entry for callsite data if needed.
    auto Result = FunctionProfileData.insert({Id, IndexedMemProfRecord()});
    IndexedMemProfRecord &Record = Result.first->second;
    for (LocationPtr Loc : Locs) {
      CallStackId CSId = hashCallStack(*Loc);
      CSIdToCallStack.insert({CSId, *Loc});
      Record.CallSites.push_back(*Loc);
      Record.CallSiteIds.push_back(CSId);
    }
  }

  verifyFunctionProfileData(FunctionProfileData);

  return Error::success();
}
Error RawMemProfReader::symbolizeAndFilterStackFrames(
    std::unique_ptr<llvm::symbolize::SymbolizableModule> Symbolizer) {
  // The specifier to use when symbolization is requested.
  const DILineInfoSpecifier Specifier(
      DILineInfoSpecifier::FileLineInfoKind::RawValue,
      DILineInfoSpecifier::FunctionNameKind::LinkageName);

  // For entries where all PCs in the callstack are discarded, we erase the
  // entry from the stack map.
  llvm::SmallVector<uint64_t> EntriesToErase;
  // Keep track of all previously discarded addresses so that we avoid invoking
  // the symbolizer for them again.
  llvm::DenseSet<uint64_t> AllVAddrsToDiscard;
  for (auto &Entry : StackMap) {
    for (const uint64_t VAddr : Entry.getSecond()) {
      // Skip addresses we have already symbolized and cached, or that we know
      // should be discarded.
      if (SymbolizedFrame.count(VAddr) > 0 ||
          AllVAddrsToDiscard.contains(VAddr))
        continue;

      Expected<DIInliningInfo> DIOr = Symbolizer->symbolizeInlinedCode(
          getModuleOffset(VAddr), Specifier, /*UseSymbolTable=*/false);
      if (!DIOr)
        return DIOr.takeError();
      DIInliningInfo DI = DIOr.get();

      // Drop frames which we can't symbolize or which belong to the runtime.
      if (DI.getFrame(0).FunctionName == DILineInfo::BadString ||
          isRuntimePath(DI.getFrame(0).FileName)) {
        AllVAddrsToDiscard.insert(VAddr);
        continue;
      }

      for (size_t I = 0, NumFrames = DI.getNumberOfFrames(); I < NumFrames;
           I++) {
        const auto &DIFrame = DI.getFrame(I);
        const uint64_t Guid =
            IndexedMemProfRecord::getGUID(DIFrame.FunctionName);
        const Frame F(Guid, DIFrame.Line - DIFrame.StartLine, DIFrame.Column,
                      // Only the last entry is not an inlined location.
                      I != NumFrames - 1);
        // Retain a mapping from the GUID to the canonical symbol name instead
        // of storing it in the frame object directly to reduce memory
        // overhead.
        if (KeepSymbolName) {
          StringRef CanonicalName =
              sampleprof::FunctionSamples::getCanonicalFnName(
                  DIFrame.FunctionName);
          GuidToSymbolName.insert({Guid, CanonicalName.str()});
        }

        const FrameId Hash = F.hash();
        IdToFrame.insert({Hash, F});
        SymbolizedFrame[VAddr].push_back(Hash);
      }
    }

    auto &CallStack = Entry.getSecond();
    llvm::erase_if(CallStack, [&AllVAddrsToDiscard](const uint64_t A) {
      return AllVAddrsToDiscard.contains(A);
    });
    if (CallStack.empty())
      EntriesToErase.push_back(Entry.getFirst());
  }

  // Drop the entries where the callstack is empty.
  for (const uint64_t Id : EntriesToErase) {
    StackMap.erase(Id);
    if (CallstackProfileData[Id].AccessHistogramSize > 0)
      free((void *)CallstackProfileData[Id].AccessHistogram);
    CallstackProfileData.erase(Id);
  }

  if (StackMap.empty())
    return make_error<InstrProfError>(
        instrprof_error::malformed,
        "no entries in callstack map after symbolization");

  return Error::success();
}
std::vector<std::string>
RawMemProfReader::peekBuildIds(MemoryBuffer *DataBuffer) {
  const char *Next = DataBuffer->getBufferStart();
  // Use a SetVector since a profile file may contain multiple raw profile
  // dumps, each with segment information. We want the build ids unique and in
  // the order they are stored in the profile.
  llvm::SetVector<std::string, std::vector<std::string>,
                  llvm::SmallSet<std::string, 10>>
      BuildIds;
  while (Next < DataBuffer->getBufferEnd()) {
    const auto *Header = reinterpret_cast<const memprof::Header *>(Next);

    const llvm::SmallVector<SegmentEntry> Entries =
        readSegmentEntries(Next + Header->SegmentOffset);

    for (const auto &Entry : Entries)
      BuildIds.insert(getBuildIdString(Entry));

    Next += Header->TotalSize;
  }
  return BuildIds.takeVector();
}
llvm::SmallVector<std::pair<uint64_t, MemInfoBlock>>
RawMemProfReader::readMemInfoBlocks(const char *Ptr) {
  if (MemprofRawVersion == 3ULL)
    return readMemInfoBlocksV3(Ptr);
  if (MemprofRawVersion == 4ULL)
    return readMemInfoBlocksV4(Ptr);
  llvm_unreachable(
      "Panic: Unsupported version number when reading MemInfoBlocks");
}
Error RawMemProfReader::readRawProfile(
    std::unique_ptr<MemoryBuffer> DataBuffer) {
  const char *Next = DataBuffer->getBufferStart();

  while (Next < DataBuffer->getBufferEnd()) {
    const auto *Header = reinterpret_cast<const memprof::Header *>(Next);

    // Set the reader version to the raw version of this profile. Support for
    // the version was already checked before creating the reader.
    MemprofRawVersion = Header->Version;

    // Read in the segment information and check that it is the same across all
    // profiles in this binary file.
    const llvm::SmallVector<SegmentEntry> Entries =
        readSegmentEntries(Next + Header->SegmentOffset);
    if (!SegmentInfo.empty() && SegmentInfo != Entries) {
      // We do not expect segment information to change when deserializing from
      // the same binary profile file. This can happen if dynamic libraries are
      // loaded/unloaded between profile dumping.
      return make_error<InstrProfError>(
          instrprof_error::malformed,
          "memprof raw profile has different segment information");
    }
    SegmentInfo.assign(Entries.begin(), Entries.end());

    // Read in the MemInfoBlocks. Merge them based on stack id - we assume that
    // raw profiles in the same binary file are from the same process so the
    // stackdepot ids are the same.
    for (const auto &[Id, MIB] : readMemInfoBlocks(Next + Header->MIBOffset)) {
      if (CallstackProfileData.count(Id)) {
        if (MemprofRawVersion >= 4ULL &&
            (CallstackProfileData[Id].AccessHistogramSize > 0 ||
             MIB.AccessHistogramSize > 0)) {
          uintptr_t ShorterHistogram;
          if (CallstackProfileData[Id].AccessHistogramSize >
              MIB.AccessHistogramSize)
            ShorterHistogram = MIB.AccessHistogram;
          else
            ShorterHistogram = CallstackProfileData[Id].AccessHistogram;
          CallstackProfileData[Id].Merge(MIB);
          free((void *)ShorterHistogram);
        } else {
          CallstackProfileData[Id].Merge(MIB);
        }
      } else {
        CallstackProfileData[Id] = MIB;
      }
    }

    // Read in the callstack for each id. For multiple raw profiles in the same
    // file, we expect the callstack to be the same for a unique id.
    const CallStackMap CSM = readStackInfo(Next + Header->StackOffset);
    if (StackMap.empty()) {
      StackMap = CSM;
    } else {
      if (mergeStackMap(CSM, StackMap))
        return make_error<InstrProfError>(
            instrprof_error::malformed,
            "memprof raw profile got different call stack for same id");
    }

    Next += Header->TotalSize;
  }

  return Error::success();
}
object::SectionedAddress
RawMemProfReader::getModuleOffset(const uint64_t VirtualAddress) {
  if (VirtualAddress > ProfiledTextSegmentStart &&
      VirtualAddress <= ProfiledTextSegmentEnd) {
    // For PIE binaries the preferred address is zero, so adjust the virtual
    // address by the start of the profiled text segment. For non-PIE binaries
    // the preferred and profiled addresses are equal and this is a no-op.
    const uint64_t AdjustedAddress =
        VirtualAddress + PreferredTextSegmentAddress - ProfiledTextSegmentStart;
    return object::SectionedAddress{AdjustedAddress};
  }
  // Addresses outside the profiled text segment are not adjusted.
  return object::SectionedAddress{VirtualAddress};
}
Error RawMemProfReader::readNextRecord(
    GuidMemProfRecordPair &GuidRecord,
    std::function<const Frame(const FrameId)> Callback) {
  // Create a new callback for the record iterator so that the symbol name can
  // be attached when the reader was initialized with KeepSymbolName = true.
  auto IdToFrameCallback = [this](const FrameId Id) {
    Frame F = this->idToFrame(Id);
    if (!this->KeepSymbolName)
      return F;
    auto Iter = this->GuidToSymbolName.find(F.Function);
    assert(Iter != this->GuidToSymbolName.end());
    F.SymbolName = std::make_unique<std::string>(Iter->getSecond());
    return F;
  };
  return MemProfReader::readNextRecord(GuidRecord, IdToFrameCallback);
}
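
// Illustrative usage sketch (not part of this file): how a client might drive
// RawMemProfReader based on the interface defined above. The file paths below
// are hypothetical placeholders.
//
//   auto ReaderOr = llvm::memprof::RawMemProfReader::create(
//       /*Path=*/"memprof.profraw", /*ProfiledBinary=*/"a.out",
//       /*KeepName=*/true);
//   if (!ReaderOr)
//     llvm::report_fatal_error(ReaderOr.takeError());
//   std::unique_ptr<llvm::memprof::RawMemProfReader> Reader =
//       std::move(*ReaderOr);
//   Reader->printYAML(llvm::outs());
//   for (const auto &[GUID, Record] : *Reader) {
//     // Consume each symbolized MemProfRecord keyed by function GUID.
//   }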