// ProfOStream::patch back-patches previously reserved slots, either by
// seeking in the underlying file stream or by splicing bytes into the
// in-memory string buffer.
void ProfOStream::patch(ArrayRef<PatchItem> P) {
  using namespace support;

  if (IsFDOStream) {
    raw_fd_ostream &FDOStream = static_cast<raw_fd_ostream &>(OS);
    const uint64_t LastPos = FDOStream.tell();
    for (const auto &K : P) {
      FDOStream.seek(K.Pos);
      for (uint64_t Elem : K.D)
        write(Elem);
    }
    // Reset the stream to the last position after patching so that users
    // don't accidentally overwrite data.
    FDOStream.seek(LastPos);
  } else {
    raw_string_ostream &SOStream = static_cast<raw_string_ostream &>(OS);
    std::string &Data = SOStream.str(); // with flush
    for (const auto &K : P) {
      for (int I = 0, E = K.D.size(); I != E; I++) {
        uint64_t Bytes =
            endian::byte_swap<uint64_t, llvm::endianness::little>(K.D[I]);
        Data.replace(K.Pos + I * sizeof(uint64_t), sizeof(uint64_t),
                     (const char *)&Bytes, sizeof(uint64_t));
      }
    }
  }
}
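// --- Illustrative aside (not part of InstrProfWriter.cpp) ---
// A minimal standalone sketch of the reserve-then-back-patch pattern above,
// assuming only the C++ standard library: append a placeholder, remember its
// offset, and splice in the little-endian bytes once the value is known.
#include <cstdint>
#include <string>

static void patchU64LE(std::string &Buf, size_t Pos, uint64_t V) {
  char Bytes[sizeof(uint64_t)];
  for (unsigned I = 0; I < sizeof(uint64_t); ++I)
    Bytes[I] = static_cast<char>((V >> (8 * I)) & 0xff); // Little-endian.
  Buf.replace(Pos, sizeof(uint64_t), Bytes, sizeof(uint64_t));
}

static std::string demoBackPatch() {
  std::string Buf;
  size_t SlotPos = Buf.size();
  Buf.append(sizeof(uint64_t), '\0');   // Reserve a placeholder slot.
  Buf += "payload";                     // Emit data of initially unknown size.
  patchU64LE(Buf, SlotPos, Buf.size()); // Back-patch the recorded offset.
  return Buf;
}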
static std::pair<offset_type, offset_type>
EmitKeyDataLength(raw_ostream &Out, key_type_ref K, data_type_ref V) {
  using namespace support;

  endian::Writer LE(Out, llvm::endianness::little);

  offset_type N = K.size();
  LE.write<offset_type>(N);

  offset_type M = 0;
  for (const auto &ProfileData : *V) {
    const InstrProfRecord &ProfRecord = ProfileData.second;
    M += sizeof(uint64_t); // The function hash
    M += sizeof(uint64_t); // The size of the Counts vector
    M += ProfRecord.Counts.size() * sizeof(uint64_t);
    // ...
    // Value data
    M += ValueProfData::getSize(ProfileData.second);
  }
  LE.write<offset_type>(M);

  return std::make_pair(N, M);
}

// ...

void EmitData(raw_ostream &Out, key_type_ref, data_type_ref V, offset_type) {
  using namespace support;

  endian::Writer LE(Out, ValueProfDataEndianness);
  for (const auto &ProfileData : *V) {
    const InstrProfRecord &ProfRecord = ProfileData.second;
    // ...
    LE.write<uint64_t>(ProfileData.first); // Function hash
    LE.write<uint64_t>(ProfRecord.Counts.size());
    for (uint64_t I : ProfRecord.Counts)
      LE.write<uint64_t>(I);
    // ...
    // Write value data
    std::unique_ptr<ValueProfData> VDataPtr =
        ValueProfData::serializeFrom(ProfileData.second);
    uint32_t S = VDataPtr->getSize();
    VDataPtr->swapBytesFromHost(ValueProfDataEndianness);
    Out.write((const char *)VDataPtr.get(), S);
  }
}
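// --- Illustrative aside (not part of InstrProfWriter.cpp) ---
// OnDiskChainedHashTableGenerator asks the trait for the key and data lengths
// up front (EmitKeyDataLength) and then streams the payloads (EmitKey /
// EmitData), so each bucket entry is self-describing. A hedged sketch of that
// framing, assuming hypothetical 32-bit little-endian length fields:
#include <cstdint>
#include <string>

static void emitEntry(std::string &Out, const std::string &Key,
                      const std::string &Data) {
  auto WriteU32LE = [&Out](uint32_t V) {
    for (unsigned I = 0; I < 4; ++I)
      Out.push_back(static_cast<char>((V >> (8 * I)) & 0xff));
  };
  WriteU32LE(Key.size());  // What EmitKeyDataLength reports for the key.
  WriteU32LE(Data.size()); // What it reports for the data.
  Out += Key;              // EmitKey's payload.
  Out += Data;             // EmitData's payload.
}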
InstrProfWriter::InstrProfWriter(
    bool Sparse, uint64_t TemporalProfTraceReservoirSize,
    uint64_t MaxTemporalProfTraceLength, bool WritePrevVersion,
    memprof::IndexedVersion MemProfVersionRequested, bool MemProfFullSchema,
    bool MemprofGenerateRandomHotness,
    unsigned MemprofGenerateRandomHotnessSeed)
    : Sparse(Sparse), MaxTemporalProfTraceLength(MaxTemporalProfTraceLength),
      TemporalProfTraceReservoirSize(TemporalProfTraceReservoirSize),
      InfoObj(new InstrProfRecordWriterTrait()),
      WritePrevVersion(WritePrevVersion),
      MemProfVersionRequested(MemProfVersionRequested),
      MemProfFullSchema(MemProfFullSchema),
      MemprofGenerateRandomHotness(MemprofGenerateRandomHotness) {
  // Set up the random number seed if requested.
  if (MemprofGenerateRandomHotness) {
    unsigned seed = MemprofGenerateRandomHotnessSeed
                        ? MemprofGenerateRandomHotnessSeed
                        : std::time(nullptr);
    errs() << "random hotness seed = " << seed << "\n";
    std::srand(seed);
  }
}
void InstrProfWriter::setOutputSparse(bool Sparse) {
  this->Sparse = Sparse;
}
void InstrProfWriter::overlapRecord(NamedInstrProfRecord &&Other,
                                    OverlapStats &Overlap,
                                    OverlapStats &FuncLevelOverlap,
                                    const OverlapFuncFilters &FuncFilter) {
  auto Name = Other.Name;
  auto Hash = Other.Hash;
  Other.accumulateCounts(FuncLevelOverlap.Test);
  if (!FunctionData.contains(Name)) {
    Overlap.addOneUnique(FuncLevelOverlap.Test);
    return;
  }
  // ...
  auto &ProfileDataMap = FunctionData[Name];
  bool NewFunc;
  ProfilingData::iterator Where;
  std::tie(Where, NewFunc) =
      ProfileDataMap.insert(std::make_pair(Hash, InstrProfRecord()));
  if (NewFunc) {
    Overlap.addOneMismatch(FuncLevelOverlap.Test);
    return;
  }
  InstrProfRecord &Dest = Where->second;
  // ...
  Dest.overlap(Other, Overlap, FuncLevelOverlap, FuncFilter.ValueCutoff);
}

void InstrProfWriter::addRecord(StringRef Name, uint64_t Hash,
                                InstrProfRecord &&I, uint64_t Weight,
                                function_ref<void(Error)> Warn) {
  auto &ProfileDataMap = FunctionData[Name];

  bool NewFunc;
  ProfilingData::iterator Where;
  std::tie(Where, NewFunc) =
      ProfileDataMap.insert(std::make_pair(Hash, InstrProfRecord()));
  InstrProfRecord &Dest = Where->second;

  auto MapWarn = [&](instrprof_error E) { Warn(make_error<InstrProfError>(E)); };

  if (NewFunc) {
    // We've never seen a function with this name and hash; add it.
    Dest = std::move(I);
    if (Weight > 1)
      Dest.scale(Weight, 1, MapWarn);
  } else {
    // We're updating a function we've seen before.
    Dest.merge(I, Weight, MapWarn);
  }

  Dest.sortValueData();
}
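// --- Illustrative aside (not part of InstrProfWriter.cpp) ---
// Conceptually, merging with a weight behaves like a saturating
// Dest += Weight * Src on each counter; InstrProfRecord::merge and ::scale
// saturate and report overflow through the warning callback instead of
// wrapping. A standalone sketch of that arithmetic:
#include <cstdint>
#include <limits>

static uint64_t mergeCounter(uint64_t Dest, uint64_t Src, uint64_t Weight,
                             bool &Overflowed) {
  // Saturate rather than wrap on overflow, mirroring the warning path.
  if (Weight != 0 && Src > std::numeric_limits<uint64_t>::max() / Weight) {
    Overflowed = true;
    return std::numeric_limits<uint64_t>::max();
  }
  uint64_t Scaled = Src * Weight;
  if (Dest > std::numeric_limits<uint64_t>::max() - Scaled) {
    Overflowed = true;
    return std::numeric_limits<uint64_t>::max();
  }
  return Dest + Scaled;
}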
void InstrProfWriter::addMemProfRecord(
    const Function::GUID Id, const memprof::IndexedMemProfRecord &Record) {
  auto NewRecord = Record;
  // If requested, replace the hotness of each allocation site with a random
  // value; used for testing.
  if (MemprofGenerateRandomHotness) {
    for (auto &Alloc : NewRecord.AllocSites) {
      // A maximal access density with zero lifetime reads as "not cold".
      uint64_t NewTLAD = std::numeric_limits<uint64_t>::max();
      uint64_t NewTL = 0;
      bool IsCold = std::rand() % 2;
      if (IsCold) {
        // A zero access density with maximal lifetime reads as "cold".
        NewTLAD = 0;
        NewTL = std::numeric_limits<uint64_t>::max();
      }
      Alloc.Info.setTotalLifetimeAccessDensity(NewTLAD);
      Alloc.Info.setTotalLifetime(NewTL);
    }
  }
  auto [Iter, Inserted] = MemProfData.Records.insert({Id, NewRecord});
  // If we inserted a new record, we are done; otherwise merge into the
  // existing record for this GUID.
  if (Inserted)
    return;
  memprof::IndexedMemProfRecord &Existing = Iter->second;
  Existing.merge(NewRecord);
}
bool InstrProfWriter::addMemProfFrame(const memprof::FrameId Id,
                                      const memprof::Frame &Frame,
                                      function_ref<void(Error)> Warn) {
  auto [Iter, Inserted] = MemProfData.Frames.insert({Id, Frame});
  // Merging is only supported when the Frame -> Id mapping is consistent
  // across profiles; on a mismatch, warn and bail out.
  if (!Inserted && Iter->second != Frame) {
    Warn(make_error<InstrProfError>(instrprof_error::malformed,
                                    "frame to id mapping mismatch"));
    return false;
  }
  return true;
}

bool InstrProfWriter::addMemProfCallStack(
    const memprof::CallStackId CSId,
    const llvm::SmallVector<memprof::FrameId> &CallStack,
    function_ref<void(Error)> Warn) {
  auto [Iter, Inserted] = MemProfData.CallStacks.insert({CSId, CallStack});
  if (!Inserted && Iter->second != CallStack) {
    Warn(make_error<InstrProfError>(instrprof_error::malformed,
                                    "call stack to id mapping mismatch"));
    return false;
  }
  return true;
}
bool InstrProfWriter::addMemProfData(memprof::IndexedMemProfData Incoming,
                                     function_ref<void(Error)> Warn) {
  if (MemProfData.Frames.empty())
    MemProfData.Frames = std::move(Incoming.Frames);
  else
    for (const auto &[Id, F] : Incoming.Frames)
      if (!addMemProfFrame(Id, F, Warn))
        return false;

  if (MemProfData.CallStacks.empty())
    MemProfData.CallStacks = std::move(Incoming.CallStacks);
  else
    for (const auto &[CSId, CS] : Incoming.CallStacks)
      if (!addMemProfCallStack(CSId, CS, Warn))
        return false;

  // Add one record at a time if randomization is requested.
  if (MemProfData.Records.empty() && !MemprofGenerateRandomHotness)
    MemProfData.Records = std::move(Incoming.Records);
  else
    for (const auto &[GUID, Record] : Incoming.Records)
      addMemProfRecord(GUID, Record);

  return true;
}
void InstrProfWriter::addTemporalProfileTrace(TemporalProfTraceTy Trace) {
  assert(Trace.FunctionNameRefs.size() <= MaxTemporalProfTraceLength);
  assert(!Trace.FunctionNameRefs.empty());
  if (TemporalProfTraceStreamSize < TemporalProfTraceReservoirSize) {
    // Simply append the trace if we have not yet hit our reservoir size limit.
    TemporalProfTraces.push_back(std::move(Trace));
  } else {
    // Otherwise, replace a random trace in the stream.
    std::uniform_int_distribution<uint64_t> Distribution(
        0, TemporalProfTraceStreamSize);
    uint64_t RandomIndex = Distribution(RNG);
    if (RandomIndex < TemporalProfTraces.size())
      TemporalProfTraces[RandomIndex] = std::move(Trace);
  }
  ++TemporalProfTraceStreamSize;
}
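// --- Illustrative aside (not part of InstrProfWriter.cpp) ---
// addTemporalProfileTrace is reservoir sampling (Algorithm R): once the
// reservoir is full, the (N+1)th stream element replaces a random slot with
// probability Capacity/(N+1), so every stream element is retained with equal
// probability. A standalone sketch:
#include <cstdint>
#include <random>
#include <vector>

template <typename T> class Reservoir {
  std::vector<T> Samples;
  uint64_t StreamSize = 0;
  uint64_t Capacity;
  std::mt19937_64 RNG;

public:
  explicit Reservoir(uint64_t Capacity) : Capacity(Capacity) {}

  void add(T Item) {
    if (StreamSize < Capacity) {
      Samples.push_back(std::move(Item)); // Reservoir not yet full.
    } else {
      // Replace a random slot with probability Capacity / (StreamSize + 1).
      std::uniform_int_distribution<uint64_t> Dist(0, StreamSize);
      uint64_t Index = Dist(RNG);
      if (Index < Samples.size())
        Samples[Index] = std::move(Item);
    }
    ++StreamSize;
  }
};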
void InstrProfWriter::addTemporalProfileTraces(
    SmallVectorImpl<TemporalProfTraceTy> &SrcTraces, uint64_t SrcStreamSize) {
  for (auto &Trace : SrcTraces)
    if (Trace.FunctionNameRefs.size() > MaxTemporalProfTraceLength)
      Trace.FunctionNameRefs.resize(MaxTemporalProfTraceLength);
  llvm::erase_if(SrcTraces, [](auto &T) { return T.FunctionNameRefs.empty(); });
  // Assume that the source has the same reservoir size as the destination to
  // avoid needing to record it in the indexed profile format.
  bool IsDestSampled =
      (TemporalProfTraceStreamSize > TemporalProfTraceReservoirSize);
  bool IsSrcSampled = (SrcStreamSize > TemporalProfTraceReservoirSize);
  if (!IsDestSampled && IsSrcSampled) {
    // If one of the streams is sampled, ensure that it belongs to Dest.
    std::swap(TemporalProfTraces, SrcTraces);
    std::swap(TemporalProfTraceStreamSize, SrcStreamSize);
    std::swap(IsDestSampled, IsSrcSampled);
  }
  if (!IsSrcSampled) {
    // If the source stream is not sampled, we add each source trace normally.
    for (auto &Trace : SrcTraces)
      addTemporalProfileTrace(std::move(Trace));
    return;
  }
  // Otherwise, find the reservoir slots that would have been overwritten if
  // we had added the whole source stream.
  SmallSetVector<uint64_t, 8> IndicesToReplace;
  for (uint64_t I = 0; I < SrcStreamSize; I++) {
    std::uniform_int_distribution<uint64_t> Distribution(
        0, TemporalProfTraceStreamSize);
    uint64_t RandomIndex = Distribution(RNG);
    if (RandomIndex < TemporalProfTraces.size())
      IndicesToReplace.insert(RandomIndex);
    ++TemporalProfTraceStreamSize;
  }
  // Then fill those slots with a random sample of the source traces.
  llvm::shuffle(SrcTraces.begin(), SrcTraces.end(), RNG);
  for (const auto &[Index, Trace] : llvm::zip(IndicesToReplace, SrcTraces))
    TemporalProfTraces[Index] = std::move(Trace);
}
void InstrProfWriter::mergeRecordsFromWriter(InstrProfWriter &&IPW,
                                             function_ref<void(Error)> Warn) {
  for (auto &I : IPW.FunctionData)
    for (auto &Func : I.getValue())
      addRecord(I.getKey(), Func.first, std::move(Func.second), 1, Warn);

  BinaryIds.reserve(BinaryIds.size() + IPW.BinaryIds.size());
  for (auto &I : IPW.BinaryIds)
    addBinaryIds(I);

  addTemporalProfileTraces(IPW.TemporalProfTraces,
                           IPW.TemporalProfTraceStreamSize);

  MemProfData.Frames.reserve(IPW.MemProfData.Frames.size());
  for (auto &[FrameId, Frame] : IPW.MemProfData.Frames) {
    // If we weren't able to add the frame mappings then it doesn't make sense
    // to try to merge the records from this profile.
    if (!addMemProfFrame(FrameId, Frame, Warn))
      return;
  }

  MemProfData.CallStacks.reserve(IPW.MemProfData.CallStacks.size());
  for (auto &[CSId, CallStack] : IPW.MemProfData.CallStacks) {
    if (!addMemProfCallStack(CSId, CallStack, Warn))
      return;
  }

  MemProfData.Records.reserve(IPW.MemProfData.Records.size());
  for (auto &[GUID, Record] : IPW.MemProfData.Records) {
    addMemProfRecord(GUID, Record);
  }
}
bool InstrProfWriter::shouldEncodeData(const ProfilingData &PD) {
  if (!Sparse)
    return true;
  for (const auto &Func : PD) {
    const InstrProfRecord &IPR = Func.second;
    if (llvm::any_of(IPR.Counts, [](uint64_t Count) { return Count > 0; }))
      return true;
    if (llvm::any_of(IPR.BitmapBytes, [](uint8_t Byte) { return Byte > 0; }))
      return true;
  }
  return false;
}
static void setSummary(IndexedInstrProf::Summary *TheSummary,
                       ProfileSummary &PS) {
  using namespace IndexedInstrProf;
  const SummaryEntryVector &Res = PS.getDetailedSummary();
  TheSummary->NumSummaryFields = Summary::NumKinds;
  TheSummary->NumCutoffEntries = Res.size();
  TheSummary->set(Summary::MaxFunctionCount, PS.getMaxFunctionCount());
  TheSummary->set(Summary::MaxBlockCount, PS.getMaxCount());
  TheSummary->set(Summary::MaxInternalBlockCount, PS.getMaxInternalCount());
  TheSummary->set(Summary::TotalBlockCount, PS.getTotalCount());
  TheSummary->set(Summary::TotalNumBlocks, PS.getNumCounts());
  TheSummary->set(Summary::TotalNumFunctions, PS.getNumFunctions());
  for (unsigned I = 0; I < Res.size(); I++)
    TheSummary->setEntry(I, Res[I]);
}
static void writeMemProfSchema(ProfOStream &OS,
                               const memprof::MemProfSchema &Schema) {
  OS.write(static_cast<uint64_t>(Schema.size()));
  for (const auto Id : Schema)
    OS.write(static_cast<uint64_t>(Id));
}
static uint64_t writeMemProfRecords(
    ProfOStream &OS,
    llvm::MapVector<GlobalValue::GUID, memprof::IndexedMemProfRecord>
        &MemProfRecordData,
    memprof::MemProfSchema *Schema, memprof::IndexedVersion Version,
    llvm::DenseMap<memprof::CallStackId, memprof::LinearCallStackId>
        *MemProfCallStackIndexes = nullptr) {
  memprof::RecordWriterTrait RecordWriter(Schema, Version,
                                          MemProfCallStackIndexes);
  OnDiskChainedHashTableGenerator<memprof::RecordWriterTrait>
      RecordTableGenerator;
  for (auto &[GUID, Record] : MemProfRecordData) {
    // Insert the key (func hash) and value (memprof record).
    RecordTableGenerator.insert(GUID, Record, RecordWriter);
  }
  // Release the memory of this MapVector as it is no longer needed.
  MemProfRecordData.clear();

  // The table generator is not used after Emit, so it is safe for Emit to
  // consume the inserted records.
  return RecordTableGenerator.Emit(OS.OS, RecordWriter);
}
static uint64_t writeMemProfFrames(
    ProfOStream &OS,
    llvm::MapVector<memprof::FrameId, memprof::Frame> &MemProfFrameData) {
  OnDiskChainedHashTableGenerator<memprof::FrameWriterTrait>
      FrameTableGenerator;
  for (auto &[FrameId, Frame] : MemProfFrameData) {
    // Insert the key (frame id) and value (frame contents).
    FrameTableGenerator.insert(FrameId, Frame);
  }
  // Release the memory of this MapVector as it is no longer needed.
  MemProfFrameData.clear();

  return FrameTableGenerator.Emit(OS.OS);
}
static llvm::DenseMap<memprof::FrameId, memprof::LinearFrameId>
writeMemProfFrameArray(
    ProfOStream &OS,
    llvm::MapVector<memprof::FrameId, memprof::Frame> &MemProfFrameData,
    llvm::DenseMap<memprof::FrameId, memprof::FrameStat> &FrameHistogram) {
  // Mappings from FrameIds to array indexes.
  llvm::DenseMap<memprof::FrameId, memprof::LinearFrameId> MemProfFrameIndexes;

  // Compute the order in which we serialize Frames. The order does not matter
  // for correctness, but frequently used Frames serialized next to each other
  // give better cache utilization during deserialization. Ties are broken by
  // the sum of positions within call stacks (frames that tend to appear
  // earlier come first), and finally by FrameId for output stability.
  std::vector<std::pair<memprof::FrameId, const memprof::Frame *>> FrameIdOrder;
  FrameIdOrder.reserve(MemProfFrameData.size());
  for (const auto &[Id, Frame] : MemProfFrameData)
    FrameIdOrder.emplace_back(Id, &Frame);
  assert(MemProfFrameData.size() == FrameIdOrder.size());
  llvm::sort(FrameIdOrder,
             [&](const std::pair<memprof::FrameId, const memprof::Frame *> &L,
                 const std::pair<memprof::FrameId, const memprof::Frame *> &R) {
               const auto &SL = FrameHistogram[L.first];
               const auto &SR = FrameHistogram[R.first];
               // Popular FrameIds should come first.
               if (SL.Count != SR.Count)
                 return SL.Count > SR.Count;
               // If equally popular, prefer the one that tends to appear
               // earlier in call stacks.
               if (SL.PositionSum != SR.PositionSum)
                 return SL.PositionSum < SR.PositionSum;
               // Compare FrameIds for sort stability.
               return L.first < R.first;
             });

  // Serialize all frames while creating mappings from linear IDs to FrameIds.
  uint64_t Index = 0;
  MemProfFrameIndexes.reserve(FrameIdOrder.size());
  for (const auto &[Id, F] : FrameIdOrder) {
    F->serialize(OS.OS);
    MemProfFrameIndexes.insert({Id, Index});
    ++Index;
  }

  // Release the memory of this MapVector as it is no longer needed.
  MemProfFrameData.clear();

  return MemProfFrameIndexes;
}
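// --- Illustrative aside (not part of InstrProfWriter.cpp) ---
// The payoff of the ordering above is locality: the most frequently
// referenced items sit at the front of the serialized array and are addressed
// by position. A standalone sketch of assigning linear ids by frequency:
#include <algorithm>
#include <cstdint>
#include <unordered_map>
#include <utility>
#include <vector>

static std::unordered_map<uint64_t, uint32_t>
assignLinearIds(std::vector<std::pair<uint64_t, uint64_t>> IdAndCount) {
  // Most frequently referenced ids first; ties broken by id for stability.
  std::sort(IdAndCount.begin(), IdAndCount.end(),
            [](const auto &L, const auto &R) {
              if (L.second != R.second)
                return L.second > R.second;
              return L.first < R.first;
            });
  std::unordered_map<uint64_t, uint32_t> LinearId;
  for (uint32_t Index = 0; Index < IdAndCount.size(); ++Index)
    LinearId[IdAndCount[Index].first] = Index;
  return LinearId;
}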
static uint64_t writeMemProfCallStacks(
    ProfOStream &OS,
    llvm::MapVector<memprof::CallStackId, llvm::SmallVector<memprof::FrameId>>
        &MemProfCallStackData) {
  OnDiskChainedHashTableGenerator<memprof::CallStackWriterTrait>
      CallStackTableGenerator;
  for (auto &[CSId, CallStack] : MemProfCallStackData)
    CallStackTableGenerator.insert(CSId, CallStack);
  // Release the memory of this vector as it is no longer needed.
  MemProfCallStackData.clear();

  return CallStackTableGenerator.Emit(OS.OS);
}
static llvm::DenseMap<memprof::CallStackId, memprof::LinearCallStackId>
writeMemProfCallStackArray(
    ProfOStream &OS,
    llvm::MapVector<memprof::CallStackId, llvm::SmallVector<memprof::FrameId>>
        &MemProfCallStackData,
    llvm::DenseMap<memprof::FrameId, memprof::LinearFrameId>
        &MemProfFrameIndexes,
    llvm::DenseMap<memprof::FrameId, memprof::FrameStat> &FrameHistogram,
    unsigned &NumElements) {
  llvm::DenseMap<memprof::CallStackId, memprof::LinearCallStackId>
      MemProfCallStackIndexes;

  memprof::CallStackRadixTreeBuilder<memprof::FrameId> Builder;
  Builder.build(std::move(MemProfCallStackData), &MemProfFrameIndexes,
                FrameHistogram);
  for (auto I : Builder.getRadixArray())
    OS.write32(I);
  NumElements = Builder.getRadixArray().size();
  MemProfCallStackIndexes = Builder.takeCallStackPos();

  // Release the memory of this vector as it is no longer needed.
  MemProfCallStackData.clear();

  return MemProfCallStackIndexes;
}
static Error writeMemProfV2(ProfOStream &OS,
                            memprof::IndexedMemProfData &MemProfData,
                            bool MemProfFullSchema) {
  OS.write(memprof::Version2);
  uint64_t HeaderUpdatePos = OS.tell();
  // Reserve space for the five header offsets, back-patched below.
  for (int I = 0; I < 5; I++)
    OS.write(0ULL);

  auto Schema = memprof::getHotColdSchema();
  if (MemProfFullSchema)
    Schema = memprof::getFullSchema();
  writeMemProfSchema(OS, Schema);

  uint64_t RecordTableOffset =
      writeMemProfRecords(OS, MemProfData.Records, &Schema, memprof::Version2);

  uint64_t FramePayloadOffset = OS.tell();
  uint64_t FrameTableOffset = writeMemProfFrames(OS, MemProfData.Frames);

  uint64_t CallStackPayloadOffset = OS.tell();
  uint64_t CallStackTableOffset =
      writeMemProfCallStacks(OS, MemProfData.CallStacks);

  uint64_t Header[] = {
      RecordTableOffset,      FramePayloadOffset,   FrameTableOffset,
      CallStackPayloadOffset, CallStackTableOffset,
  };
  OS.patch({{HeaderUpdatePos, Header}});

  return Error::success();
}
static Error writeMemProfV3(ProfOStream &OS,
                            memprof::IndexedMemProfData &MemProfData,
                            bool MemProfFullSchema) {
  OS.write(memprof::Version3);
  uint64_t HeaderUpdatePos = OS.tell();
  // Reserve space for the three header offsets, back-patched below.
  for (int I = 0; I < 3; I++)
    OS.write(0ULL);

  auto Schema = memprof::getHotColdSchema();
  if (MemProfFullSchema)
    Schema = memprof::getFullSchema();
  writeMemProfSchema(OS, Schema);

  llvm::DenseMap<memprof::FrameId, memprof::FrameStat> FrameHistogram =
      memprof::computeFrameHistogram<memprof::FrameId>(MemProfData.CallStacks);
  assert(MemProfData.Frames.size() == FrameHistogram.size());

  llvm::DenseMap<memprof::FrameId, memprof::LinearFrameId> MemProfFrameIndexes =
      writeMemProfFrameArray(OS, MemProfData.Frames, FrameHistogram);

  uint64_t CallStackPayloadOffset = OS.tell();
  unsigned NumElements = 0;
  llvm::DenseMap<memprof::CallStackId, memprof::LinearCallStackId>
      MemProfCallStackIndexes =
          writeMemProfCallStackArray(OS, MemProfData.CallStacks,
                                     MemProfFrameIndexes, FrameHistogram,
                                     NumElements);

  uint64_t RecordPayloadOffset = OS.tell();
  uint64_t RecordTableOffset =
      writeMemProfRecords(OS, MemProfData.Records, &Schema, memprof::Version3,
                          &MemProfCallStackIndexes);

  // The reader computes the number of elements in the call stack array from
  // the gap between these two offsets; verify they line up.
  assert(CallStackPayloadOffset +
             NumElements * sizeof(memprof::LinearFrameId) ==
         RecordPayloadOffset);

  uint64_t Header[] = {
      CallStackPayloadOffset,
      RecordPayloadOffset,
      RecordTableOffset,
  };
  OS.patch({{HeaderUpdatePos, Header}});

  return Error::success();
}
static Error writeMemProf(ProfOStream &OS,
                          memprof::IndexedMemProfData &MemProfData,
                          memprof::IndexedVersion MemProfVersionRequested,
                          bool MemProfFullSchema) {
  switch (MemProfVersionRequested) {
  case memprof::Version2:
    return writeMemProfV2(OS, MemProfData, MemProfFullSchema);
  case memprof::Version3:
    return writeMemProfV3(OS, MemProfData, MemProfFullSchema);
  }

  return make_error<InstrProfError>(
      instrprof_error::unsupported_version,
      formatv("MemProf version {} not supported; "
              "requires version between {} and {}, inclusive",
              MemProfVersionRequested, memprof::MinimumSupportedVersion,
              memprof::MaximumSupportedVersion));
}
uint64_t InstrProfWriter::writeHeader(const IndexedInstrProf::Header &Header,
                                      const bool WritePrevVersion,
                                      ProfOStream &OS) {
  // Only write out the first four fields.
  for (int I = 0; I < 4; I++)
    OS.write(reinterpret_cast<const uint64_t *>(&Header)[I]);

  // Remember the offset of the remaining fields to allow back-patching later.
  auto BackPatchStartOffset = OS.tell();

  // Reserve the space for back-patching later.
  OS.write(0); // HashOffset
  OS.write(0); // MemProfOffset
  OS.write(0); // BinaryIdOffset
  OS.write(0); // TemporalProfTracesOffset
  if (!WritePrevVersion)
    OS.write(0); // VTableNamesOffset

  return BackPatchStartOffset;
}
Error InstrProfWriter::writeBinaryIds(ProfOStream &OS) {
  // The binary id section consists of:
  // 1. uint64_t BinaryIdsSectionSize
  // 2. a list of binary ids, each of which is:
  //    a. uint64_t BinaryIdLength
  //    b. uint8_t BinaryIdData
  //    c. uint8_t Padding (if necessary)
  // Remove duplicate binary ids.
  llvm::sort(BinaryIds);
  BinaryIds.erase(llvm::unique(BinaryIds), BinaryIds.end());

  uint64_t BinaryIdsSectionSize = 0;
  for (const auto &BI : BinaryIds) {
    // Increment by the size of the binary id length field.
    BinaryIdsSectionSize += sizeof(uint64_t);
    // Increment by the binary id data length, aligned to 8 bytes.
    BinaryIdsSectionSize += alignToPowerOf2(BI.size(), sizeof(uint64_t));
  }
  // Write the binary ids section size.
  OS.write(BinaryIdsSectionSize);

  for (const auto &BI : BinaryIds) {
    uint64_t BILen = BI.size();
    // Write the binary id length.
    OS.write(BILen);
    // Write the binary id data.
    for (unsigned K = 0; K < BILen; K++)
      OS.writeByte(BI[K]);
    // Write padding if necessary.
    uint64_t PaddingSize = alignToPowerOf2(BILen, sizeof(uint64_t)) - BILen;
    for (unsigned K = 0; K < PaddingSize; K++)
      OS.writeByte(0);
  }

  return Error::success();
}
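// --- Illustrative aside (not part of InstrProfWriter.cpp) ---
// Worked example of the section-size arithmetic above: a 20-byte build id
// occupies 8 bytes for its length field plus alignToPowerOf2(20, 8) = 24
// bytes of data-plus-padding, 32 bytes total, so the reader can always
// advance in uint64_t-sized steps. A sketch of that per-entry size:
static uint64_t binaryIdEntrySize(uint64_t BILen) {
  return sizeof(uint64_t) + alignToPowerOf2(BILen, sizeof(uint64_t));
}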
Error InstrProfWriter::writeVTableNames(ProfOStream &OS) {
  std::vector<std::string> VTableNameStrs;
  for (StringRef VTableName : VTableNames.keys())
    VTableNameStrs.push_back(VTableName.str());

  std::string CompressedVTableNames;
  if (!VTableNameStrs.empty())
    if (Error E = collectGlobalObjectNameStrings(
            VTableNameStrs, compression::zlib::isAvailable(),
            CompressedVTableNames))
      return E;

  const uint64_t CompressedStringLen = CompressedVTableNames.length();

  // Record the length of the compressed string.
  OS.write(CompressedStringLen);

  // Write the chars of the compressed string.
  for (auto &c : CompressedVTableNames)
    OS.writeByte(static_cast<uint8_t>(c));

  // Pad up to a multiple of 8; the reader consumes bytes according to
  // CompressedStringLen.
  const uint64_t PaddedLength = alignTo(CompressedStringLen, 8);
  for (uint64_t K = CompressedStringLen; K < PaddedLength; K++)
    OS.writeByte(0);

  return Error::success();
}
Error InstrProfWriter::writeImpl(ProfOStream &OS) {
  using namespace IndexedInstrProf;
  using namespace support;

  OnDiskChainedHashTableGenerator<InstrProfRecordWriterTrait> Generator;

  InstrProfSummaryBuilder ISB(ProfileSummaryBuilder::DefaultCutoffs);
  InfoObj->SummaryBuilder = &ISB;
  InstrProfSummaryBuilder CSISB(ProfileSummaryBuilder::DefaultCutoffs);
  InfoObj->CSSummaryBuilder = &CSISB;

  // Populate the hash table generator in a deterministic order.
  SmallVector<std::pair<StringRef, const ProfilingData *>> OrderedData;
  for (const auto &I : FunctionData)
    if (shouldEncodeData(I.getValue()))
      OrderedData.emplace_back(I.getKey(), &I.getValue());
  llvm::sort(OrderedData, less_first());
  for (const auto &I : OrderedData)
    Generator.insert(I.first, I.second);

  // Write the header.
  IndexedInstrProf::Header Header;
  Header.Version = WritePrevVersion
                       ? IndexedInstrProf::ProfVersion::Version11
                       : IndexedInstrProf::ProfVersion::CurrentVersion;
  if (static_cast<bool>(ProfileKind & InstrProfKind::IRInstrumentation))
    Header.Version |= VARIANT_MASK_IR_PROF;
  if (static_cast<bool>(ProfileKind & InstrProfKind::ContextSensitive))
    Header.Version |= VARIANT_MASK_CSIR_PROF;
  if (static_cast<bool>(ProfileKind &
                        InstrProfKind::FunctionEntryInstrumentation))
    Header.Version |= VARIANT_MASK_INSTR_ENTRY;
  if (static_cast<bool>(ProfileKind &
                        InstrProfKind::LoopEntriesInstrumentation))
    Header.Version |= VARIANT_MASK_INSTR_LOOP_ENTRIES;
  if (static_cast<bool>(ProfileKind & InstrProfKind::SingleByteCoverage))
    Header.Version |= VARIANT_MASK_BYTE_COVERAGE;
  if (static_cast<bool>(ProfileKind & InstrProfKind::FunctionEntryOnly))
    Header.Version |= VARIANT_MASK_FUNCTION_ENTRY_ONLY;
  if (static_cast<bool>(ProfileKind & InstrProfKind::MemProf))
    Header.Version |= VARIANT_MASK_MEMPROF;
  if (static_cast<bool>(ProfileKind & InstrProfKind::TemporalProfile))
    Header.Version |= VARIANT_MASK_TEMPORAL_PROF;

  const uint64_t BackPatchStartOffset =
      writeHeader(Header, WritePrevVersion, OS);

  // Reserve space for the profile summary data, patched below.
  uint32_t NumEntries = ProfileSummaryBuilder::DefaultCutoffs.size();
  uint32_t SummarySize = Summary::getSize(Summary::NumKinds, NumEntries);
  uint64_t SummaryOffset = OS.tell();
  for (unsigned I = 0; I < SummarySize / sizeof(uint64_t); I++)
    OS.write(0);
  uint64_t CSSummaryOffset = 0;
  uint64_t CSSummarySize = 0;
  if (static_cast<bool>(ProfileKind & InstrProfKind::ContextSensitive)) {
    CSSummaryOffset = OS.tell();
    CSSummarySize = SummarySize / sizeof(uint64_t);
    for (unsigned I = 0; I < CSSummarySize; I++)
      OS.write(0);
  }

  // Write the hash table.
  uint64_t HashTableStart = Generator.Emit(OS.OS, *InfoObj);

  // Write the MemProf profile data if we have it.
  uint64_t MemProfSectionStart = 0;
  if (static_cast<bool>(ProfileKind & InstrProfKind::MemProf)) {
    MemProfSectionStart = OS.tell();
    if (auto E = writeMemProf(OS, MemProfData, MemProfVersionRequested,
                              MemProfFullSchema))
      return E;
  }

  uint64_t BinaryIdSectionStart = OS.tell();
  if (auto E = writeBinaryIds(OS))
    return E;

  uint64_t VTableNamesSectionStart = OS.tell();
  if (!WritePrevVersion)
    if (Error E = writeVTableNames(OS))
      return E;

  uint64_t TemporalProfTracesSectionStart = 0;
  if (static_cast<bool>(ProfileKind & InstrProfKind::TemporalProfile)) {
    TemporalProfTracesSectionStart = OS.tell();
    OS.write(TemporalProfTraces.size());
    OS.write(TemporalProfTraceStreamSize);
    for (auto &Trace : TemporalProfTraces) {
      OS.write(Trace.Weight);
      OS.write(Trace.FunctionNameRefs.size());
      for (auto &NameRef : Trace.FunctionNameRefs)
        OS.write(NameRef);
    }
  }

  // Allocate space for data to be serialized out.
  std::unique_ptr<IndexedInstrProf::Summary> TheSummary =
      IndexedInstrProf::allocSummary(SummarySize);
  // Compute the summary and copy the data into the structure to be
  // serialized out (to disk or buffer).
  std::unique_ptr<ProfileSummary> PS = ISB.getSummary();
  setSummary(TheSummary.get(), *PS);
  InfoObj->SummaryBuilder = nullptr;

  // For the context-sensitive summary.
  std::unique_ptr<IndexedInstrProf::Summary> TheCSSummary = nullptr;
  if (static_cast<bool>(ProfileKind & InstrProfKind::ContextSensitive)) {
    TheCSSummary = IndexedInstrProf::allocSummary(SummarySize);
    std::unique_ptr<ProfileSummary> CSPS = CSISB.getSummary();
    setSummary(TheCSSummary.get(), *CSPS);
  }
  InfoObj->CSSummaryBuilder = nullptr;

  SmallVector<uint64_t, 8> HeaderOffsets = {HashTableStart, MemProfSectionStart,
                                            BinaryIdSectionStart,
                                            TemporalProfTracesSectionStart};
  if (!WritePrevVersion)
    HeaderOffsets.push_back(VTableNamesSectionStart);

  PatchItem PatchItems[] = {
      // Patch the header fields.
      {BackPatchStartOffset, HeaderOffsets},
      // Patch the summary data.
      {SummaryOffset,
       ArrayRef<uint64_t>(reinterpret_cast<uint64_t *>(TheSummary.get()),
                          SummarySize / sizeof(uint64_t))},
      {CSSummaryOffset,
       ArrayRef<uint64_t>(reinterpret_cast<uint64_t *>(TheCSSummary.get()),
                          CSSummarySize)}};
  OS.patch(PatchItems);

  for (const auto &I : FunctionData)
    for (const auto &F : I.getValue())
      if (Error E = validateRecord(F.second))
        return E;

  return Error::success();
}

Error InstrProfWriter::write(raw_fd_ostream &OS) {
  ProfOStream POS(OS);
  return writeImpl(POS);
}

Error InstrProfWriter::write(raw_string_ostream &OS) {
  ProfOStream POS(OS);
  return writeImpl(POS);
}
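// --- Illustrative aside (not part of InstrProfWriter.cpp) ---
// A hedged end-to-end usage sketch; the names and values are hypothetical.
// The writer accumulates records and serializes the indexed format into a
// MemoryBuffer via writeBuffer() (declared in InstrProfWriter.h).
void buildProfile() {
  InstrProfWriter Writer;
  NamedInstrProfRecord Record("main", /*Hash=*/0x1234, /*Counts=*/{1, 2});
  Writer.addRecord(std::move(Record), /*Weight=*/1,
                   [](Error E) { consumeError(std::move(E)); });
  std::unique_ptr<MemoryBuffer> Buffer = Writer.writeBuffer();
  // Buffer now holds the indexed profile bytes.
}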
static const char *ValueProfKindStr[] = {
#define VALUE_PROF_KIND(Enumerator, Value, Descr) #Enumerator,
#include "llvm/ProfileData/InstrProfData.inc"
};
Error InstrProfWriter::validateRecord(const InstrProfRecord &Func) {
  for (uint32_t VK = 0; VK <= IPVK_Last; VK++) {
    if (VK == IPVK_IndirectCallTarget || VK == IPVK_VTableTarget)
      continue;
    uint32_t NS = Func.getNumValueSites(VK);
    for (uint32_t S = 0; S < NS; S++) {
      DenseSet<uint64_t> SeenValues;
      for (const auto &V : Func.getValueArrayForSite(VK, S))
        if (!SeenValues.insert(V.Value).second)
          return make_error<InstrProfError>(instrprof_error::invalid_prof);
    }
  }

  return Error::success();
}
void InstrProfWriter::writeRecordInText(StringRef Name, uint64_t Hash,
                                        const InstrProfRecord &Func,
                                        InstrProfSymtab &Symtab,
                                        raw_fd_ostream &OS) {
  OS << Name << "\n";
  OS << "# Func Hash:\n" << Hash << "\n";
  OS << "# Num Counters:\n" << Func.Counts.size() << "\n";
  OS << "# Counter Values:\n";
  for (uint64_t Count : Func.Counts)
    OS << Count << "\n";

  if (Func.BitmapBytes.size() > 0) {
    OS << "# Num Bitmap Bytes:\n$" << Func.BitmapBytes.size() << "\n";
    OS << "# Bitmap Byte Values:\n";
    for (uint8_t Byte : Func.BitmapBytes) {
      OS << "0x";
      OS.write_hex(Byte);
      OS << "\n";
    }
    OS << "\n";
  }

  uint32_t NumValueKinds = Func.getNumValueKinds();
  if (!NumValueKinds) {
    OS << "\n";
    return;
  }

  OS << "# Num Value Kinds:\n" << Func.getNumValueKinds() << "\n";
  for (uint32_t VK = 0; VK < IPVK_Last + 1; VK++) {
    uint32_t NS = Func.getNumValueSites(VK);
    if (!NS)
      continue;
    OS << "# ValueKind = " << ValueProfKindStr[VK] << ":\n" << VK << "\n";
    OS << "# NumValueSites:\n" << NS << "\n";
    for (uint32_t S = 0; S < NS; S++) {
      auto VD = Func.getValueArrayForSite(VK, S);
      OS << VD.size() << "\n";
      for (const auto &V : VD) {
        if (VK == IPVK_IndirectCallTarget || VK == IPVK_VTableTarget)
          OS << Symtab.getFuncOrVarNameIfDefined(V.Value) << ":" << V.Count
             << "\n";
        else
          OS << V.Value << ":" << V.Count << "\n";
      }
    }
  }

  OS << "\n";
}
Error InstrProfWriter::writeText(raw_fd_ostream &OS) {
  // Check CS first, since it implies an IR-level profile.
  if (static_cast<bool>(ProfileKind & InstrProfKind::ContextSensitive))
    OS << "# CSIR level Instrumentation Flag\n:csir\n";
  else if (static_cast<bool>(ProfileKind & InstrProfKind::IRInstrumentation))
    OS << "# IR level Instrumentation Flag\n:ir\n";

  if (static_cast<bool>(ProfileKind &
                        InstrProfKind::FunctionEntryInstrumentation))
    OS << "# Always instrument the function entry block\n:entry_first\n";
  if (static_cast<bool>(ProfileKind &
                        InstrProfKind::LoopEntriesInstrumentation))
    OS << "# Always instrument the loop entry "
          "blocks\n:instrument_loop_entries\n";
  if (static_cast<bool>(ProfileKind & InstrProfKind::SingleByteCoverage))
    OS << "# Instrument block coverage\n:single_byte_coverage\n";
  InstrProfSymtab Symtab;

  using FuncPair = detail::DenseMapPair<uint64_t, InstrProfRecord>;
  using RecordType = std::pair<StringRef, FuncPair>;
  SmallVector<RecordType, 4> OrderedFuncData;

  for (const auto &I : FunctionData) {
    if (shouldEncodeData(I.getValue())) {
      if (Error E = Symtab.addFuncName(I.getKey()))
        return E;
      for (const auto &Func : I.getValue())
        OrderedFuncData.push_back(std::make_pair(I.getKey(), Func));
    }
  }

  for (const auto &VTableName : VTableNames)
    if (Error E = Symtab.addVTableName(VTableName.getKey()))
      return E;

  if (static_cast<bool>(ProfileKind & InstrProfKind::TemporalProfile))
    writeTextTemporalProfTraceData(OS, Symtab);

  llvm::sort(OrderedFuncData, [](const RecordType &A, const RecordType &B) {
    return std::tie(A.first, A.second.first) <
           std::tie(B.first, B.second.first);
  });

  for (const auto &record : OrderedFuncData) {
    const StringRef &Name = record.first;
    const FuncPair &Func = record.second;
    writeRecordInText(Name, Func.first, Func.second, Symtab, OS);
  }

  for (const auto &record : OrderedFuncData) {
    const FuncPair &Func = record.second;
    if (Error E = validateRecord(Func.second))
      return E;
  }

  return Error::success();
}
void InstrProfWriter::writeTextTemporalProfTraceData(raw_fd_ostream &OS,
                                                     InstrProfSymtab &Symtab) {
  OS << ":temporal_prof_traces\n";
  OS << "# Num Temporal Profile Traces:\n" << TemporalProfTraces.size() << "\n";
  OS << "# Temporal Profile Trace Stream Size:\n"
     << TemporalProfTraceStreamSize << "\n";
  for (auto &Trace : TemporalProfTraces) {
    OS << "# Weight:\n" << Trace.Weight << "\n";
    for (auto &NameRef : Trace.FunctionNameRefs)
      OS << Symtab.getFuncOrVarName(NameRef) << ",";
    OS << "\n";
  }
  OS << "\n";
}
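// --- Illustrative aside (not part of InstrProfWriter.cpp) ---
// With a single weight-1 trace visiting main, foo, then bar, the text header
// emitted above would look roughly like:
//
//   :temporal_prof_traces
//   # Num Temporal Profile Traces:
//   1
//   # Temporal Profile Trace Stream Size:
//   1
//   # Weight:
//   1
//   main,foo,bar,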