#include <unordered_map>

#define DEBUG_TYPE "debug-ata"

STATISTIC(NumDefsScanned, "Number of dbg locs that get scanned for removal");
STATISTIC(NumDefsRemoved, "Number of dbg locs removed");
STATISTIC(NumWedgesScanned, "Number of dbg wedges scanned");
STATISTIC(NumWedgesChanged, "Number of dbg wedges changed");
static cl::opt<unsigned>
    MaxNumBlocks("debug-ata-max-blocks", cl::init(10000),
                 cl::desc("Maximum num basic blocks before debug info dropped"),
                 cl::Hidden);
  static VariableID getEmptyKey() {
    return static_cast<VariableID>(Wrapped::getEmptyKey());
  }
  static VariableID getTombstoneKey() {
    return static_cast<VariableID>(Wrapped::getTombstoneKey());
  }
  static unsigned getHashValue(const VariableID &Val) {
    return Wrapped::getHashValue(static_cast<unsigned>(Val));
  }
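
// FunctionVarLocsBuilder: helper class used to build FunctionVarLocs, since
// that class isn't easy to modify. Location defs are grouped into
// per-instruction "wedges": the defs that come just before each instruction.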
  std::unordered_map<const Instruction *, SmallVector<VarLocInfo>>
      VarLocsBeforeInst;

  /// Get a variable from its \p ID.
  const DebugVariable &getVariable(VariableID ID) const {
    return Variables[static_cast<unsigned>(ID)];
  }

  /// Return ptr to wedge of defs or nullptr if no defs come just before \p
  /// Before.
  const SmallVectorImpl<VarLocInfo> *getWedge(const Instruction *Before) const {
    auto R = VarLocsBeforeInst.find(Before);
    if (R == VarLocsBeforeInst.end())
      return nullptr;
    return &R->second;
  }

  /// Replace the defs that come just before \p Before with \p Wedge.
  void setWedge(const Instruction *Before, SmallVector<VarLocInfo> &&Wedge) {
    VarLocsBeforeInst[Before] = std::move(Wedge);
  }

  // addVarLoc: append a def to the wedge of defs just before \p Before.
  VarLocsBeforeInst[Before].emplace_back(VarLoc);
  unsigned Counter = -1;
  OS << "=== Variables ===\n";
  // ...
    OS << "[" << Counter << "] " << V.getVariable()->getName();
    if (auto F = V.getFragment())
      OS << " bits [" << F->OffsetInBits << ", "
         << F->OffsetInBits + F->SizeInBits << ")";
    if (const auto *IA = V.getInlinedAt())
      OS << " inlined-at " << *IA;
  // ...
    OS << "DEF Var=[" << (unsigned)Loc.VariableID << "]"
       << " Expr=" << *Loc.Expr << " Values=(";
    for (auto *Op : Loc.Values.location_ops()) {
      errs() << Op->getName() << " ";
    }
  // ...
  OS << "=== Single location vars ===\n";
  // ...
  OS << "=== In-line variable defs ===";
  // ...
    OS << "\n" << BB.getName() << ":\n";
  for (const auto &VarLoc : Builder.SingleLocVars)
    VarLocRecords.emplace_back(VarLoc);
  SingleVarLocEnd = VarLocRecords.size();

  for (auto &P : Builder.VarLocsBeforeInst) {
    unsigned BlockStart = VarLocRecords.size();
    // ...
    unsigned BlockEnd = VarLocRecords.size();
    // Record the start and end indices of the wedge.
    if (BlockEnd != BlockStart)
      VarLocsBeforeInst[P.first] = {BlockStart, BlockEnd};
  }

  assert(Variables.empty() && "Expect clear before init");
  // Variable IDs are 1-based (0 is reserved), so reserve an extra slot and
  // insert a dummy entry first.
  Variables.reserve(Builder.Variables.size() + 1);
  Variables.push_back(DebugVariable(nullptr, std::nullopt, nullptr));
  Variables.append(Builder.Variables.begin(), Builder.Variables.end());

  // FunctionVarLocs::clear():
  VarLocRecords.clear();
  VarLocsBeforeInst.clear();
static std::pair<Value *, DIExpression *>
walkToAllocaAndPrependOffsetDeref(const DataLayout &DL, Value *Start,
                                  DIExpression *Expression) {
  APInt OffsetInBytes(DL.getTypeSizeInBits(Start->getType()), false);
  Value *End =
      Start->stripAndAccumulateInBoundsConstantOffsets(DL, OffsetInBytes);
  SmallVector<uint64_t, 3> Ops;
  if (OffsetInBytes.getBoolValue())
    Ops = {dwarf::DW_OP_plus_uconst, OffsetInBytes.getZExtValue()};
  // ...
}
static std::optional<int64_t>
getDerefOffsetInBytes(const DIExpression *DIExpr) {
  int64_t Offset = 0;
  const unsigned NumElements = DIExpr->getNumElements();
  const auto Elements = DIExpr->getElements();
  unsigned ExpectedDerefIdx = 0;
  // Extract the offset.
  if (NumElements > 2 && Elements[0] == dwarf::DW_OP_plus_uconst) {
    Offset = Elements[1];
    ExpectedDerefIdx = 2;
  } else if (NumElements > 3 && Elements[0] == dwarf::DW_OP_constu) {
    ExpectedDerefIdx = 3;
    if (Elements[2] == dwarf::DW_OP_plus)
      Offset = Elements[1];
    else if (Elements[2] == dwarf::DW_OP_minus)
      Offset = -Elements[1];
    else
      return std::nullopt;
  }

  // If that's all there is it means there's no deref.
  if (ExpectedDerefIdx >= NumElements)
    return std::nullopt;

  // The next element must be a deref.
  if (Elements[ExpectedDerefIdx] != dwarf::DW_OP_deref)
    return std::nullopt;

  // The deref must be the final operation, optionally followed by a fragment.
  if (NumElements == ExpectedDerefIdx + 1)
    return Offset;
  unsigned ExpectedFragFirstIdx = ExpectedDerefIdx + 1;
  unsigned ExpectedFragFinalIdx = ExpectedFragFirstIdx + 2;
  if (NumElements == ExpectedFragFinalIdx + 1 &&
      Elements[ExpectedFragFirstIdx] == dwarf::DW_OP_LLVM_fragment)
    return Offset;
  return std::nullopt;
}
static bool shouldCoalesceFragments(Function &F) {
  // ...
  return debuginfoShouldUseDebugInstrRef(
      Triple(F.getParent()->getTargetTriple()));
}
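
// MemLocFragmentFill: dataflow analysis that tracks, for each variable, which
// bit ranges (fragments) are known to be located in its stack home at each
// point. Fragment coverage is stored as interval maps keyed by a base-address
// ID; block live-ins are computed by meeting predecessor live-outs, and extra
// memory-location defs are inserted where coverage is re-established.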
class MemLocFragmentFill {
  Function &Fn;
  const DenseSet<DebugAggregate> *VarsWithStackSlot;
  bool CoalesceAdjacentFragments;

  // Map of bit intervals [start, stop) to a base-address ID.
  using FragsInMemMap = IntervalMap<OffsetInBitsTy, BaseAddress, /* ... */>;
  FragsInMemMap::Allocator IntervalMapAlloc;
  // ...

  struct FragMemLoc {
    // ...
    unsigned OffsetInBits;
    // ...
  };
  static bool intervalMapsAreEqual(const FragsInMemMap &A,
                                   const FragsInMemMap &B) {
    auto AIt = A.begin(), AEnd = A.end();
    auto BIt = B.begin(), BEnd = B.end();
    for (; AIt != AEnd; ++AIt, ++BIt) {
      // ...
      if (AIt.start() != BIt.start() || AIt.stop() != BIt.stop())
        return false; // Interval is different.
      // ...
    }
    // ...
  }
  static bool varFragMapsAreEqual(const VarFragMap &A, const VarFragMap &B) {
    if (A.size() != B.size())
      return false;
    for (const auto &APair : A) {
      auto BIt = B.find(APair.first);
      if (BIt == B.end())
        return false;
      if (!intervalMapsAreEqual(APair.second, BIt->second))
        return false;
    }
    return true;
  }
  std::string toString(unsigned BaseID) {
    // ...
    return Bases[BaseID].getVariableLocationOp(0)->getName().str();
  }

  std::string toString(FragsInMemMap::const_iterator It, bool Newline = true) {
    std::string String;
    std::stringstream S(String);
    if (It.valid())
      S << "[" << It.start() << ", " << It.stop() /* ... */;
    else
      S << "invalid iterator (end)";
    // ...
  }
  FragsInMemMap meetFragments(const FragsInMemMap &A, const FragsInMemMap &B) {
    FragsInMemMap Result(IntervalMapAlloc);
    for (auto AIt = A.begin(), AEnd = A.end(); AIt != AEnd; ++AIt) {
      // ...
      if (!B.overlaps(AIt.start(), AIt.stop()))
        continue;

      auto FirstOverlap = B.find(AIt.start());
      assert(FirstOverlap != B.end());
      bool IntersectStart = FirstOverlap.start() < AIt.start();
      LLVM_DEBUG(dbgs() /* ... */ << ", IntersectStart: " << IntersectStart
                        << "\n");

      auto LastOverlap = B.find(AIt.stop());
      bool IntersectEnd =
          LastOverlap != B.end() && LastOverlap.start() < AIt.stop();
      LLVM_DEBUG(dbgs() /* ... */ << ", IntersectEnd: " << IntersectEnd
                        << "\n");

      // Both ends of A's interval intersect the same interval of B.
      if (IntersectStart && IntersectEnd && FirstOverlap == LastOverlap) {
        // ...
        if (*AIt && *AIt == *FirstOverlap)
          Result.insert(AIt.start(), AIt.stop(), *AIt);
        // ...
      }

      auto Next = FirstOverlap;
      if (IntersectStart) {
        // ...
        if (*AIt && *AIt == *FirstOverlap)
          Result.insert(AIt.start(), FirstOverlap.stop(), *AIt);
        // ...
      }
      // ...
      if (*AIt && *AIt == *LastOverlap)
        Result.insert(LastOverlap.start(), AIt.stop(), *AIt);
      // ...
      while (Next != B.end() && Next.start() < AIt.stop() &&
             Next.stop() <= AIt.stop()) {
        LLVM_DEBUG(dbgs() << "- insert intersection of a and "
                          << toString(Next));
        if (*AIt && *AIt == *Next)
          Result.insert(Next.start(), Next.stop(), *Next);
        // ...
      }
    }
    return Result;
  }
  void meetVars(VarFragMap &A, const VarFragMap &B) {
    for (auto It = A.begin(), End = A.end(); It != End; ++It) {
      unsigned AVar = It->first;
      FragsInMemMap &AFrags = It->second;
      auto BIt = B.find(AVar);
      if (BIt == B.end()) {
        // Var has no bits defined in B; nothing to meet for it.
        // ...
        continue;
      }
      LLVM_DEBUG(dbgs() /* ... */ << Aggregates[AVar].first->getName()
                        << "\n");
      AFrags = meetFragments(AFrags, BIt->second);
    }
  }
    VarFragMap BBLiveIn;
    bool FirstMeet = true;
    for (const BasicBlock *Pred : predecessors(&BB)) {
      // Ignore preds that haven't been processed yet.
      if (!Visited.count(Pred))
        continue;
      auto PredLiveOut = LiveOut.find(Pred);
      // ...
      if (FirstMeet) {
        BBLiveIn = PredLiveOut->second;
        FirstMeet = false;
      } else {
        meetVars(BBLiveIn, PredLiveOut->second);
      }
      // ...
      if (BBLiveIn.size() == 0)
        break;
    }

    auto CurrentLiveInEntry = LiveIn.find(&BB);
    // First time visiting this block: take the computed live-in as-is.
    if (CurrentLiveInEntry == LiveIn.end()) {
      LiveIn[&BB] = std::move(BBLiveIn);
      return /*Changed=*/true;
    }
    // Otherwise only update (and report change) if the live-in set differs.
    if (!varFragMapsAreEqual(BBLiveIn, CurrentLiveInEntry->second)) {
      CurrentLiveInEntry->second = std::move(BBLiveIn);
      return /*Changed=*/true;
    }
    return /*Changed=*/false;
  void insertMemLoc(BasicBlock &BB, Instruction &Before, unsigned Var,
                    unsigned StartBit, unsigned EndBit, unsigned Base,
                    DebugLoc DL) {
    assert(StartBit < EndBit && "Cannot create fragment of size <= 0");
    // ...
    FragMemLoc Loc;
    Loc.OffsetInBits = StartBit;
    Loc.SizeInBits = EndBit - StartBit;
    assert(Base && "Expected a non-zero ID for Base address");
    // ...
    BBInsertBeforeMap[&BB][&Before].push_back(Loc);
    LLVM_DEBUG(dbgs() /* ... */ << " bits [" << StartBit << ", " << EndBit
                      << ")\n");
  }
  void coalesceFragments(BasicBlock &BB, Instruction &Before, unsigned Var,
                         unsigned StartBit, unsigned EndBit, unsigned Base,
                         DebugLoc DL, const FragsInMemMap &FragMap) {
    if (!CoalesceAdjacentFragments)
      return;
    // ...
    auto CoalescedFrag = FragMap.find(StartBit);
    // ...
    if (CoalescedFrag.start() == StartBit && CoalescedFrag.stop() == EndBit)
      return; // Fragment hasn't been coalesced.
    LLVM_DEBUG(dbgs() << "- Insert loc for bits " << CoalescedFrag.start()
                      << " to " << CoalescedFrag.stop() << "\n");
    insertMemLoc(BB, Before, Var, CoalescedFrag.start(), CoalescedFrag.stop(),
                 Base, DL);
  }
  void addDef(const VarLocInfo &VarLoc, Instruction &Before, BasicBlock &BB,
              VarFragMap &LiveSet) {
    // ...
    StartBit = Frag->OffsetInBits;
    EndBit = StartBit + Frag->SizeInBits;
    // ...
    const unsigned Base =
        DerefOffsetInBytes && *DerefOffsetInBytes * 8 == StartBit
            /* ... */;
    LLVM_DEBUG(dbgs() /* ... */ << StartBit << ", " << EndBit
                      << "): " << toString(Base));

    // ...
    auto FragIt = LiveSet.find(Var);

    // Var has no mapping in the LiveSet yet.
    if (FragIt == LiveSet.end()) {
      // ...
      auto P = LiveSet.try_emplace(Var, FragsInMemMap(IntervalMapAlloc));
      assert(P.second && "Var already in map?");
      // We just added this, so we can insert the fragment directly.
      P.first->second.insert(StartBit, EndBit, Base);
      return;
    }

    FragsInMemMap &FragMap = FragIt->second;

    // New fragment doesn't overlap any existing one: just insert it.
    if (!FragMap.overlaps(StartBit, EndBit)) {
      // ...
      FragMap.insert(StartBit, EndBit, Base);
      coalesceFragments(BB, Before, Var, StartBit, EndBit, Base, VarLoc.DL,
                        FragMap);
      return;
    }

    // There is at least one overlap.
    auto FirstOverlap = FragMap.find(StartBit);
    assert(FirstOverlap != FragMap.end());
    bool IntersectStart = FirstOverlap.start() < StartBit;

    auto LastOverlap = FragMap.find(EndBit);
    bool IntersectEnd = LastOverlap.valid() && LastOverlap.start() < EndBit;

    // Both ends of the new fragment intersect the same existing interval.
    if (IntersectStart && IntersectEnd && FirstOverlap == LastOverlap) {
      LLVM_DEBUG(dbgs() << "- Intersect single interval @ both ends\n");
      // Split the existing interval around the new fragment, re-issuing mem
      // locs for the still-covered parts.
      auto EndBitOfOverlap = FirstOverlap.stop();
      unsigned OverlapValue = FirstOverlap.value();

      FirstOverlap.setStop(StartBit);
      insertMemLoc(BB, Before, Var, FirstOverlap.start(), StartBit,
                   OverlapValue, VarLoc.DL);

      FragMap.insert(EndBit, EndBitOfOverlap, OverlapValue);
      insertMemLoc(BB, Before, Var, EndBit, EndBitOfOverlap, OverlapValue,
                   VarLoc.DL);

      FragMap.insert(StartBit, EndBit, Base);
      // ...
    } else {
      // ...
      if (IntersectStart) {
        // Trim the existing interval that overlaps the start.
        FirstOverlap.setStop(StartBit);
        insertMemLoc(BB, Before, Var, FirstOverlap.start(), StartBit,
                     *FirstOverlap, VarLoc.DL);
      }
      // ...
      // Trim the existing interval that overlaps the end.
      LastOverlap.setStart(EndBit);
      insertMemLoc(BB, Before, Var, EndBit, LastOverlap.stop(), *LastOverlap,
                   VarLoc.DL);
      // ...
      // Remove intervals entirely contained within the new fragment.
      auto It = FirstOverlap;
      // ...
      while (It.valid() && It.start() >= StartBit && It.stop() <= EndBit) {
        // ...
      }
      assert(!FragMap.overlaps(StartBit, EndBit));
      // ...
      FragMap.insert(StartBit, EndBit, Base);
    }

    coalesceFragments(BB, Before, Var, StartBit, EndBit, Base, VarLoc.DL,
                      FragMap);
  }
  void process(BasicBlock &BB, VarFragMap &LiveSet) {
    BBInsertBeforeMap[&BB].clear();
    for (auto &I : BB) {
      if (const auto *Locs = FnVarLocs->getWedge(&I)) {
        for (const VarLocInfo &Loc : *Locs) {
          addDef(Loc, I, *I.getParent(), LiveSet);
        }
      }
    }
  }
  MemLocFragmentFill(Function &Fn,
                     const DenseSet<DebugAggregate> *VarsWithStackSlot,
                     bool CoalesceAdjacentFragments)
      : Fn(Fn), VarsWithStackSlot(VarsWithStackSlot),
        CoalesceAdjacentFragments(CoalesceAdjacentFragments) {}
  void run(FunctionVarLocsBuilder *FnVarLocs) {
    // ...
    this->FnVarLocs = FnVarLocs;

    // Prepare for traversal: RPO-numbered worklists.
    std::priority_queue<unsigned int, std::vector<unsigned int>,
                        std::greater<unsigned int>>
        Worklist;
    std::priority_queue<unsigned int, std::vector<unsigned int>,
                        std::greater<unsigned int>>
        Pending;
    // ...
    unsigned int RPONumber = 0;
    for (auto RI = RPOT.begin(), RE = RPOT.end(); RI != RE; ++RI) {
      OrderToBB[RPONumber] = *RI;
      BBToOrder[*RI] = RPONumber;
      Worklist.push(RPONumber);
      ++RPONumber;
    }
    LiveIn.init(RPONumber);
    LiveOut.init(RPONumber);

    // Perform the traversal.
    // ...
    while (!Worklist.empty() || !Pending.empty()) {
      // ...
      while (!Worklist.empty()) {
        // ...
        bool InChanged = meet(*BB, Visited);
        // Always consider LiveIn changed on the first visit.
        InChanged |= Visited.insert(BB).second;
        if (InChanged) {
          LLVM_DEBUG(dbgs() << BB->getName()
                            << " has new InLocs, process it\n");
          // Mutate a copy of LiveIn while processing BB; after the terminator
          // it is the LiveOut set for BB.
          VarFragMap LiveSet = LiveIn[BB];

          process(*BB, LiveSet);

          // Relatively expensive check: has anything changed in LiveOut?
          if (!varFragMapsAreEqual(LiveOut[BB], LiveSet)) {
            LLVM_DEBUG(dbgs() /* ... */
                       << " has new OutLocs, add succs to worklist: [ ");
            LiveOut[BB] = std::move(LiveSet);
            for (auto I = succ_begin(BB), E = succ_end(BB); I != E; ++I) {
              if (OnPending.insert(*I).second) {
                // ...
                Pending.push(BBToOrder[*I]);
              }
            }
            // ...
          }
        }
      }
      Worklist.swap(Pending);
      assert(Pending.empty() && "Pending should be empty");
    }

    // Insert new location defs.
    for (auto &Pair : BBInsertBeforeMap) {
      InsertMap &Map = Pair.second;
      for (auto &Pair : Map) {
        auto *InsertBefore = Pair.first;
        assert(InsertBefore && "should never be null");
        auto FragMemLocs = Pair.second;
        // ...
        for (auto &FragMemLoc : FragMemLocs) {
          DIExpression *Expr = DIExpression::get(Ctx, std::nullopt);
          if (FragMemLoc.SizeInBits !=
              *Aggregates[FragMemLoc.Var].first->getSizeInBits())
            Expr = *DIExpression::createFragmentExpression(
                Expr, FragMemLoc.OffsetInBits, FragMemLoc.SizeInBits);
          // ...
          DebugVariable Var(Aggregates[FragMemLoc.Var].first, Expr,
                            FragMemLoc.DL.getInlinedAt());
          FnVarLocs->addVarLoc(InsertBefore, Var, Expr, FragMemLoc.DL,
                               Bases[FragMemLoc.Base]);
        }
      }
    }
  }
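
// AssignmentTrackingLowering: lowers assignment tracking (dbg.assign
// intrinsics and DIAssignID links) into per-instruction location defs. For
// each variable it tracks the last assignment to the stack home and the last
// assignment seen by the debug program, plus a LocKind (Mem, Val or None),
// and joins this state over control flow to decide whether a memory or value
// location should be emitted.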
class AssignmentTrackingLowering {
  // ...
  /// Where a variable's current value is (or might be) at a given point.
  enum class LocKind { Mem, Val, None };

  /// Record of the last assignment seen for a variable, or NoneOrPhi if it is
  /// unknown / a merge of different assignments.
  struct Assignment {
    enum S { Known, NoneOrPhi } Status;
    // ...
    bool isSameSourceAssignment(const Assignment &Other) const {
      // ...
    }
    void dump(raw_ostream &OS) {
      static const char *LUT[] = {"Known", "NoneOrPhi"};
      // ...
    }

    static Assignment make(DIAssignID *ID, DbgAssignIntrinsic *Source) {
      return Assignment(Known, ID, Source);
    }
    static Assignment makeFromMemDef(DIAssignID *ID) {
      return Assignment(Known, ID, nullptr);
    }
    static Assignment makeNoneOrPhi() {
      return Assignment(NoneOrPhi, nullptr, nullptr);
    }
    // ...
  };

  using UntaggedStoreAssignmentMap =
      DenseMap<const Instruction *,
               SmallVector<std::pair<VariableID, at::AssignmentInfo>>>;

  // ...
  unsigned TrackedVariablesVectorSize = 0;
  /// Untagged stores and the variables / assignment info recorded for them.
  UntaggedStoreAssignmentMap UntaggedStoreVars;
  /// Location defs to insert, keyed on the instruction to insert them before.
  InsertMap InsertBeforeMap;
  struct BlockInfo {
    // ...
    static bool mapsAreEqual(const BitVector &Mask, const AssignmentMap &A,
                             const AssignmentMap &B) {
      return llvm::all_of(Mask.set_bits(), [&](unsigned VarID) {
        return A[VarID].isSameSourceAssignment(B[VarID]);
      });
    }

    /// Dominating assignment to memory for each variable, indexed by
    /// VariableID.
    AssignmentMap StackHomeValue;
    /// Dominating assignment to each variable, indexed by VariableID.
    AssignmentMap DebugValue;
    // ...

    const AssignmentMap &getAssignmentMap(AssignmentKind Kind) const {
      switch (Kind) {
      case Stack:
        return StackHomeValue;
      case Debug:
        return DebugValue;
      }
      llvm_unreachable("Unknown AssignmentKind");
    }
    AssignmentMap &getAssignmentMap(AssignmentKind Kind) {
      return const_cast<AssignmentMap &>(
          const_cast<const BlockInfo *>(this)->getAssignmentMap(Kind));
    }

    bool isVariableTracked(VariableID Var) const {
      return VariableIDsInBlock[static_cast<unsigned>(Var)];
    }

    const Assignment &getAssignment(AssignmentKind Kind, VariableID Var) const {
      assert(isVariableTracked(Var) && "Var not tracked in block");
      return getAssignmentMap(Kind)[static_cast<unsigned>(Var)];
    }

    LocKind getLocKind(VariableID Var) const {
      assert(isVariableTracked(Var) && "Var not tracked in block");
      return LiveLoc[static_cast<unsigned>(Var)];
    }

    void setLocKind(VariableID Var, LocKind K) {
      VariableIDsInBlock.set(static_cast<unsigned>(Var));
      LiveLoc[static_cast<unsigned>(Var)] = K;
    }

    void setAssignment(AssignmentKind Kind, VariableID Var,
                       const Assignment &AV) {
      VariableIDsInBlock.set(static_cast<unsigned>(Var));
      getAssignmentMap(Kind)[static_cast<unsigned>(Var)] = AV;
    }

    bool hasAssignment(AssignmentKind Kind, VariableID Var,
                       const Assignment &AV) const {
      if (!isVariableTracked(Var))
        return false;
      return AV.isSameSourceAssignment(getAssignment(Kind, Var));
    }

    bool operator==(const BlockInfo &Other) const {
      return VariableIDsInBlock == Other.VariableIDsInBlock &&
             LiveLoc == Other.LiveLoc &&
             mapsAreEqual(VariableIDsInBlock, StackHomeValue,
                          Other.StackHomeValue) &&
             mapsAreEqual(VariableIDsInBlock, DebugValue, Other.DebugValue);
    }

    bool isValid() {
      return LiveLoc.size() == DebugValue.size() &&
             LiveLoc.size() == StackHomeValue.size();
    }

    /// Clear everything and re-initialise for \p NumVars variables.
    void init(int NumVars) {
      StackHomeValue.clear();
      // ...
      VariableIDsInBlock = BitVector(NumVars);
      StackHomeValue.insert(StackHomeValue.begin(), NumVars,
                            Assignment::makeNoneOrPhi());
      DebugValue.insert(DebugValue.begin(), NumVars,
                        Assignment::makeNoneOrPhi());
      LiveLoc.insert(LiveLoc.begin(), NumVars, LocKind::None);
    }
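
    // Dataflow join for BlockInfo: variables tracked in both inputs are
    // joined element-wise (LocKind via joinKind, assignments via
    // joinAssignment); the result's tracked-variable set is the union of both
    // inputs' sets.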
    /// Helper for join.
    template <typename ElmtType, typename FnInputType>
    static void joinElmt(int Index, SmallVector<ElmtType> &Target,
                         const SmallVector<ElmtType> &A,
                         const SmallVector<ElmtType> &B,
                         ElmtType (*Fn)(FnInputType, FnInputType)) {
      Target[Index] = Fn(A[Index], B[Index]);
    }

    static BlockInfo join(const BlockInfo &A, const BlockInfo &B, int NumVars) {
      BlockInfo Join;
      Join.init(NumVars);

      // Join only the variables tracked in both A and B.
      BitVector Intersect = A.VariableIDsInBlock;
      Intersect &= B.VariableIDsInBlock;

      for (auto VarID : Intersect.set_bits()) {
        joinElmt(VarID, Join.LiveLoc, A.LiveLoc, B.LiveLoc, joinKind);
        joinElmt(VarID, Join.DebugValue, A.DebugValue, B.DebugValue,
                 joinAssignment);
        joinElmt(VarID, Join.StackHomeValue, A.StackHomeValue, B.StackHomeValue,
                 joinAssignment);
      }

      // Track the union of the variable IDs seen in either block.
      Join.VariableIDsInBlock = A.VariableIDsInBlock;
      Join.VariableIDsInBlock |= B.VariableIDsInBlock;
      // ...
      return Join;
    }
  static LocKind joinKind(LocKind A, LocKind B);
  static Assignment joinAssignment(const Assignment &A, const Assignment &B);
  BlockInfo joinBlockInfo(const BlockInfo &A, const BlockInfo &B);

  /// Process the instructions in \p BB, updating \p LiveSet along the way.
  void process(BasicBlock &BB, BlockInfo *LiveSet);
  void processNonDbgInstruction(Instruction &I, BlockInfo *LiveSet);
  /// Process instructions with a DIAssignID attachment.
  void processTaggedInstruction(Instruction &I, BlockInfo *LiveSet);
  /// Process stores without a DIAssignID that still affect tracked variables.
  void processUntaggedInstruction(Instruction &I, BlockInfo *LiveSet);
  void processDbgValue(DbgValueInst &DVI, BlockInfo *LiveSet);

  /// Add an assignment to memory for the variable \p Var.
  void addMemDef(BlockInfo *LiveSet, VariableID Var, const Assignment &AV);
  /// Add an assignment to the debug program for the variable \p Var.
  void addDbgDef(BlockInfo *LiveSet, VariableID Var, const Assignment &AV);

  void setLocKind(BlockInfo *LiveSet, VariableID Var, LocKind K);
  LocKind getLocKind(BlockInfo *LiveSet, VariableID Var);
  bool hasVarWithAssignment(BlockInfo *LiveSet, BlockInfo::AssignmentKind Kind,
                            VariableID Var, const Assignment &AV);

public:
  AssignmentTrackingLowering(Function &Fn, const DataLayout &Layout,
                             const DenseSet<DebugAggregate> *VarsWithStackSlot)
      : Fn(Fn), Layout(Layout), VarsWithStackSlot(VarsWithStackSlot) {}
// ...
AssignmentTrackingLowering::getContainedFragments(VariableID Var) const {
  auto R = VarContains.find(Var);
  if (R == VarContains.end())
    return std::nullopt;
  return R->second;
}

void AssignmentTrackingLowering::touchFragment(VariableID Var) {
  VarsTouchedThisFrame.insert(Var);
}

void AssignmentTrackingLowering::setLocKind(BlockInfo *LiveSet, VariableID Var,
                                            LocKind K) {
  auto SetKind = [this](BlockInfo *LiveSet, VariableID Var, LocKind K) {
    LiveSet->setLocKind(Var, K);
    // ...
  };
  SetKind(LiveSet, Var, K);

  // Update the LocKind for all fragments contained within Var.
  for (VariableID Frag : getContainedFragments(Var))
    SetKind(LiveSet, Frag, K);
}

AssignmentTrackingLowering::LocKind
AssignmentTrackingLowering::getLocKind(BlockInfo *LiveSet, VariableID Var) {
  return LiveSet->getLocKind(Var);
}
void AssignmentTrackingLowering::addMemDef(BlockInfo *LiveSet, VariableID Var,
                                           const Assignment &AV) {
  LiveSet->setAssignment(BlockInfo::Stack, Var, AV);

  // Apply the same assignment to all fragments contained within Var, but
  // without a Source (Var's value can't be converted to a fragment value).
  Assignment FragAV = AV;
  FragAV.Source = nullptr;
  for (VariableID Frag : getContainedFragments(Var))
    LiveSet->setAssignment(BlockInfo::Stack, Frag, FragAV);
}

void AssignmentTrackingLowering::addDbgDef(BlockInfo *LiveSet, VariableID Var,
                                           const Assignment &AV) {
  LiveSet->setAssignment(BlockInfo::Debug, Var, AV);

  // Apply the same assignment (without a Source) to all contained fragments.
  Assignment FragAV = AV;
  FragAV.Source = nullptr;
  for (VariableID Frag : getContainedFragments(Var))
    LiveSet->setAssignment(BlockInfo::Debug, Frag, FragAV);
}

static DIAssignID *getIDFromInst(const Instruction &I) {
  return cast<DIAssignID>(I.getMetadata(LLVMContext::MD_DIAssignID));
}
bool AssignmentTrackingLowering::hasVarWithAssignment(
    BlockInfo *LiveSet, BlockInfo::AssignmentKind Kind, VariableID Var,
    const Assignment &AV) {
  if (!LiveSet->hasAssignment(Kind, Var, AV))
    return false;

  // Check all the fragments contained within Var as these will have been
  // written to whenever Var has.
  for (VariableID Frag : getContainedFragments(Var))
    if (!LiveSet->hasAssignment(Kind, Frag, AV))
      return false;
  return true;
}

const char *locStr(AssignmentTrackingLowering::LocKind Loc) {
  using LocKind = AssignmentTrackingLowering::LocKind;
  // ...
}
void AssignmentTrackingLowering::emitDbgValue(
    AssignmentTrackingLowering::LocKind Kind,
    const DbgVariableIntrinsic *Source, Instruction *After) {
  // ...
  auto Emit = [&](Metadata *Val, DIExpression *Expr) {
    // ...
    assert(InsertBefore && "Shouldn't be inserting after a terminator");
    // ...
    InsertBeforeMap[InsertBefore].push_back(VarLoc);
  };

  // NOTE: This block can mutate Kind.
  if (Kind == LocKind::Mem) {
    const auto *DAI = cast<DbgAssignIntrinsic>(Source);
    if (DAI->isKillAddress()) {
      // The address isn't valid so treat this as a non-memory def.
      Kind = LocKind::Val;
    } else {
      Value *Val = DAI->getAddress();
      // ...
      assert(!Expr->getFragmentInfo() &&
             "fragment info should be stored in value-expression only");
      // Copy the fragment info over from the value-expression.
      if (auto OptFragInfo = Source->getExpression()->getFragmentInfo()) {
        auto FragInfo = *OptFragInfo;
        Expr = *DIExpression::createFragmentExpression(
            Expr, FragInfo.OffsetInBits, FragInfo.SizeInBits);
      }
      // The address-expression has an implicit deref; add it now.
      std::tie(Val, Expr) =
          walkToAllocaAndPrependOffsetDeref(Layout, Val, Expr);
      // ...
    }
  }
  if (Kind == LocKind::Val) {
    // ...
  }
  if (Kind == LocKind::None) {
    // ...
  }
}
void AssignmentTrackingLowering::processNonDbgInstruction(
    Instruction &I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
  if (I.hasMetadata(LLVMContext::MD_DIAssignID))
    processTaggedInstruction(I, LiveSet);
  else
    processUntaggedInstruction(I, LiveSet);
}
void AssignmentTrackingLowering::processUntaggedInstruction(
    Instruction &I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
  // Interpret stack stores that are not tagged as an assignment in memory for
  // the variables associated with that address.
  assert(!I.hasMetadata(LLVMContext::MD_DIAssignID));
  auto It = UntaggedStoreVars.find(&I);
  if (It == UntaggedStoreVars.end())
    return; // No variables associated with the store destination.

  LLVM_DEBUG(dbgs() << "processUntaggedInstruction on UNTAGGED INST " << I
                    << "\n");
  // Iterate over the variables that this store affects, add a NoneOrPhi dbg
  // and mem def, set the LocKind to Mem, and emit a location def for each.
  for (auto [Var, Info] : It->second) {
    addMemDef(LiveSet, Var, Assignment::makeNoneOrPhi());
    addDbgDef(LiveSet, Var, Assignment::makeNoneOrPhi());
    setLocKind(LiveSet, Var, LocKind::Mem);

    // Build the dbg location def to insert: add fragment and offset to the
    // expression.
    // ...
    if (auto Frag = V.getFragment()) {
      auto R = DIExpression::createFragmentExpression(DIE, Frag->OffsetInBits,
                                                      Frag->SizeInBits);
      assert(R && "unexpected createFragmentExpression failure");
      // ...
    }
    SmallVector<uint64_t, 3> Ops;
    if (Info.OffsetInBits)
      Ops = {dwarf::DW_OP_plus_uconst, Info.OffsetInBits / 8};
    // ...
    assert(InsertBefore && "Shouldn't be inserting after a terminator");

    // Get a DILocation for this unrecorded assignment.
    const DILocation *DILoc = DILocation::get(
        Fn.getContext(), 0, 0, V.getVariable()->getScope(), InlinedAt);
    // ...
    InsertBeforeMap[InsertBefore].push_back(VarLoc);
  }
}
void AssignmentTrackingLowering::processTaggedInstruction(
    Instruction &I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
  // ...
  for (DbgAssignIntrinsic *DAI : at::getAssignmentMarkers(&I)) {
    // ...
    assert(VarsWithStackSlot->count(getAggregate(DAI)) &&
           "expected DAI's variable to have stack slot");
    // ...
    addMemDef(LiveSet, Var, AV);

    // If the last debug assignment matches the new stack assignment, the
    // memory location is valid: use it.
    if (hasVarWithAssignment(LiveSet, BlockInfo::Debug, Var, AV)) {
      LLVM_DEBUG(
          LiveSet->DebugValue[static_cast<unsigned>(Var)].dump(dbgs()));
      setLocKind(LiveSet, Var, LocKind::Mem);
      emitDbgValue(LocKind::Mem, DAI, &I);
      continue;
    }

    // They don't match: decide what to emit based on the previous LocKind.
    LocKind PrevLoc = getLocKind(LiveSet, Var);
    switch (PrevLoc) {
    case LocKind::Val: {
      // Not using the memory location; nothing to emit.
      setLocKind(LiveSet, Var, LocKind::Val);
    } break;
    case LocKind::Mem: {
      // The memory location we were using has been clobbered. Fall back to
      // the last known debug value, or terminate the location.
      Assignment DbgAV = LiveSet->getAssignment(BlockInfo::Debug, Var);
      if (DbgAV.Status == Assignment::NoneOrPhi) {
        setLocKind(LiveSet, Var, LocKind::None);
        emitDbgValue(LocKind::None, DAI, &I);
      } else {
        setLocKind(LiveSet, Var, LocKind::Val);
        if (DbgAV.Source)
          emitDbgValue(LocKind::Val, DbgAV.Source, &I);
        else
          emitDbgValue(LocKind::None, DAI, &I);
      }
    } break;
    case LocKind::None: {
      // ...
      setLocKind(LiveSet, Var, LocKind::None);
    } break;
    }
  }
}
void AssignmentTrackingLowering::processDbgAssign(DbgAssignIntrinsic &DAI,
                                                  BlockInfo *LiveSet) {
  // ...
  addDbgDef(LiveSet, Var, AV);
  // If the stack home also holds this assignment, the memory location is
  // usable (unless the address has been killed).
  if (hasVarWithAssignment(LiveSet, BlockInfo::Stack, Var, AV)) {
    LocKind Kind;
    if (DAI.isKillAddress()) {
      LLVM_DEBUG(
          dbgs()
          << "Val, Stack matches Debug program but address is killed\n";);
      Kind = LocKind::Val;
    } else {
      // ...
      Kind = LocKind::Mem;
    }
    setLocKind(LiveSet, Var, Kind);
    emitDbgValue(Kind, &DAI, &DAI);
  } else {
    // ...
    setLocKind(LiveSet, Var, LocKind::Val);
    emitDbgValue(LocKind::Val, &DAI, &DAI);
  }
}
void AssignmentTrackingLowering::processDbgValue(DbgValueInst &DVI,
                                                 BlockInfo *LiveSet) {
  // ...
  // We have no assignment ID, so mark the variable's assignment as NoneOrPhi
  // and emit a value location.
  Assignment AV = Assignment::makeNoneOrPhi();
  addDbgDef(LiveSet, Var, AV);

  LLVM_DEBUG(dbgs() /* ... */ << " -> Val, dbg.value override");
  setLocKind(LiveSet, Var, LocKind::Val);
  emitDbgValue(LocKind::Val, &DVI, &DVI);
}

static bool hasZeroSizedFragment(DbgVariableIntrinsic &DVI) {
  if (auto F = DVI.getExpression()->getFragmentInfo())
    return F->SizeInBits == 0;
  return false;
}
void AssignmentTrackingLowering::processDbgInstruction(
    DbgInfoIntrinsic &I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
  auto *DVI = dyn_cast<DbgVariableIntrinsic>(&I);
  if (!DVI)
    return;
  // ...
  if (auto *DAI = dyn_cast<DbgAssignIntrinsic>(&I))
    processDbgAssign(*DAI, LiveSet);
  else if (auto *DVI = dyn_cast<DbgValueInst>(&I))
    processDbgValue(*DVI, LiveSet);
}
void AssignmentTrackingLowering::resetInsertionPoint(Instruction &After) {
  // ...
  if (R == InsertBeforeMap.end())
    return;
  // ...
}

void AssignmentTrackingLowering::process(BasicBlock &BB, BlockInfo *LiveSet) {
  for (auto II = BB.begin(), EI = BB.end(); II != EI;) {
    assert(VarsTouchedThisFrame.empty());
    // Process a "frame": one non-debug instruction followed by the debug
    // intrinsics that come after it.
    if (!isa<DbgInfoIntrinsic>(&*II)) {
      if (II->isTerminator())
        break;
      resetInsertionPoint(*II);
      processNonDbgInstruction(*II, LiveSet);
      assert(LiveSet->isValid());
      ++II;
    }
    while (II != EI) {
      auto *Dbg = dyn_cast<DbgInfoIntrinsic>(&*II);
      if (!Dbg)
        break;
      resetInsertionPoint(*II);
      processDbgInstruction(*Dbg, LiveSet);
      assert(LiveSet->isValid());
      ++II;
    }

    // Record variables in this frame that are not stack-homed at this point.
    for (auto Var : VarsTouchedThisFrame) {
      LocKind Loc = getLocKind(LiveSet, Var);
      // ...
      if (Loc != LocKind::Mem) {
        // ...
        NotAlwaysStackHomed.insert(Aggr);
      }
    }
    VarsTouchedThisFrame.clear();
  }
}
AssignmentTrackingLowering::LocKind
AssignmentTrackingLowering::joinKind(LocKind A, LocKind B) {
  return A == B ? A : LocKind::None;
}
AssignmentTrackingLowering::Assignment
AssignmentTrackingLowering::joinAssignment(const Assignment &A,
                                           const Assignment &B) {
  // If either is NoneOrPhi, or the assignments differ, the join is NoneOrPhi
  // (joining two different values is a PHI).
  if (!A.isSameSourceAssignment(B))
    return Assignment::makeNoneOrPhi();
  if (A.Status == Assignment::NoneOrPhi)
    return Assignment::makeNoneOrPhi();

  // Source is used to look up the value + expression in the debug program if
  // the stack slot gets assigned a value earlier than expected.
  // ...
  auto JoinSource = [&]() -> DbgAssignIntrinsic * {
    if (A.Source == B.Source)
      return A.Source;
    if (A.Source == nullptr || B.Source == nullptr)
      return nullptr;
    if (A.Source->isIdenticalTo(B.Source))
      return A.Source;
    return nullptr;
  };
  DbgAssignIntrinsic *Source = JoinSource();
  assert(A.Status == B.Status && A.Status == Assignment::Known);
  // ...
  return Assignment::make(A.ID, Source);
}
AssignmentTrackingLowering::BlockInfo
AssignmentTrackingLowering::joinBlockInfo(const BlockInfo &A,
                                          const BlockInfo &B) {
  return BlockInfo::join(A, B, TrackedVariablesVectorSize);
}
bool AssignmentTrackingLowering::join(
    const BasicBlock &BB, const SmallPtrSet<BasicBlock *, 16> &Visited) {
  SmallVector<const BasicBlock *> VisitedPreds;
  // Ignore preds that haven't been processed yet (e.g. backedges).
  for (const BasicBlock *Pred : predecessors(&BB)) {
    if (Visited.count(Pred))
      VisitedPreds.push_back(Pred);
  }

  // No preds visited yet.
  if (VisitedPreds.empty()) {
    auto It = LiveIn.try_emplace(&BB, BlockInfo());
    bool DidInsert = It.second;
    if (DidInsert)
      It.first->second.init(TrackedVariablesVectorSize);
    return /*Changed=*/DidInsert;
  }

  // Exactly one visited pred: copy its LiveOut into BB's LiveIn.
  if (VisitedPreds.size() == 1) {
    const BlockInfo &PredLiveOut = LiveOut.find(VisitedPreds[0])->second;
    auto CurrentLiveInEntry = LiveIn.find(&BB);
    // If there's no entry, or the LiveIn set has changed, update it.
    if (CurrentLiveInEntry == LiveIn.end())
      LiveIn.insert(std::make_pair(&BB, PredLiveOut));
    else if (PredLiveOut != CurrentLiveInEntry->second)
      CurrentLiveInEntry->second = PredLiveOut;
    else
      return /*Changed=*/false;
    return /*Changed=*/true;
  }

  // More than one visited pred: join the LiveOuts pairwise.
  const BlockInfo &PredLiveOut0 = LiveOut.find(VisitedPreds[0])->second;
  const BlockInfo &PredLiveOut1 = LiveOut.find(VisitedPreds[1])->second;
  BlockInfo BBLiveIn = joinBlockInfo(PredLiveOut0, PredLiveOut1);
  // Join the LiveOuts of any remaining visited preds.
  // ...
    const auto &PredLiveOut = LiveOut.find(Pred);
    assert(PredLiveOut != LiveOut.end() &&
           "block should have been processed already");
    BBLiveIn = joinBlockInfo(std::move(BBLiveIn), PredLiveOut->second);
  // ...

  auto CurrentLiveInEntry = LiveIn.find(&BB);
  // If there's no entry, or the LiveIn set has changed, update it.
  if (CurrentLiveInEntry == LiveIn.end())
    LiveIn.insert(std::make_pair(&BB, std::move(BBLiveIn)));
  else if (BBLiveIn != CurrentLiveInEntry->second)
    CurrentLiveInEntry->second = std::move(BBLiveIn);
  else
    return /*Changed=*/false;
  return /*Changed=*/true;
}
/// Return true if \p A fully contains \p B.
static bool fullyContains(DIExpression::FragmentInfo A,
                          DIExpression::FragmentInfo B) {
  auto ALeft = A.OffsetInBits;
  auto BLeft = B.OffsetInBits;
  if (BLeft < ALeft)
    return false;

  auto ARight = ALeft + A.SizeInBits;
  auto BRight = BLeft + B.SizeInBits;
  if (BRight > ARight)
    return false;
  return true;
}
static std::optional<at::AssignmentInfo>
getUntaggedStoreAssignmentInfo(const Instruction &I, const DataLayout &Layout) {
  if (const auto *SI = dyn_cast<StoreInst>(&I))
    return at::getAssignmentInfo(Layout, SI);
  if (const auto *MI = dyn_cast<MemIntrinsic>(&I))
    return at::getAssignmentInfo(Layout, MI);
  // Alloca or non-store-like instruction.
  return std::nullopt;
}
static AssignmentTrackingLowering::OverlapMap buildOverlapMapAndRecordDeclares(
    Function &Fn, FunctionVarLocsBuilder *FnVarLocs,
    const DenseSet<DebugAggregate> &VarsWithStackSlot,
    AssignmentTrackingLowering::UntaggedStoreAssignmentMap &UntaggedStoreVars,
    unsigned &TrackedVariablesVectorSize) {
  // ...
  for (auto &BB : Fn) {
    for (auto &I : BB) {
      if (auto *DDI = dyn_cast<DbgDeclareInst>(&I)) {
        Declares.push_back(DDI);
      } else if (auto *DII = dyn_cast<DbgVariableIntrinsic>(&I)) {
        // ...
        if (!VarsWithStackSlot.contains(DA))
          continue;
        if (Seen.insert(DV).second)
          FragmentMap[DA].push_back(DV);
      } else if (auto Info = getUntaggedStoreAssignmentInfo(/* ... */)) {
        // Untagged store affecting a stack-slot variable.
        // ...
        std::optional<DIExpression::FragmentInfo> FragInfo;
        // Skip this assignment if the affected bits are outside of the
        // variable fragment.
        if (!at::calculateFragmentIntersect(
                I.getModule()->getDataLayout(), Info->Base,
                Info->OffsetInBits, Info->SizeInBits, DAI, FragInfo) ||
            (FragInfo && FragInfo->SizeInBits == 0))
          continue;
        // ...
        if (!VarsWithStackSlot.contains(DA))
          continue;
        // Cache this info for later.
        UntaggedStoreVars[&I].push_back(
            {FnVarLocs->insertVariable(DV), *Info});
        if (Seen.insert(DV).second)
          FragmentMap[DA].push_back(DV);
      }
    }
  }

  // Sort the fragments of each variable by fragment size.
  for (auto &Pair : FragmentMap) {
    // ...
    std::sort(Frags.begin(), Frags.end(),
              [](const DebugVariable &Next, const DebugVariable &Elmt) {
                return Elmt.getFragmentOrDefault().SizeInBits >
                       Next.getFragmentOrDefault().SizeInBits;
              });
  }

  // Build the overlap map: for each fragment, record the fragments it fully
  // contains.
  for (auto &Pair : FragmentMap) {
    auto &Frags = Pair.second;
    for (auto It = Frags.begin(), IEnd = Frags.end(); It != IEnd; ++It) {
      // ...
      for (; OtherIt != IEnd; ++OtherIt) {
        // ...
        Map[OtherVar].push_back(ThisVar);
      }
    }
  }

  // ...
  // Finally, add the declares as single-location variables.
  for (auto *DDI : Declares)
    FnVarLocs->addSingleLocVar(DebugVariable(DDI), DDI->getExpression(),
                               DDI->getDebugLoc(), DDI->getWrappedLocation());
  return Map;
}
bool AssignmentTrackingLowering::run(FunctionVarLocsBuilder *FnVarLocsBuilder) {
  if (Fn.size() > MaxNumBlocks) {
    LLVM_DEBUG(dbgs() /* ... */ << ": too many blocks (" << Fn.size()
                      << ")\n");
    at::deleteAll(&Fn);
    return false;
  }

  FnVarLocs = FnVarLocsBuilder;

  // Build the variable fragment overlap map and record dbg.declares.
  VarContains = buildOverlapMapAndRecordDeclares(
      Fn, FnVarLocs, *VarsWithStackSlot, UntaggedStoreVars,
      TrackedVariablesVectorSize);
  // Prepare for traversal: RPO-numbered worklists.
  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Worklist;
  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Pending;
  // ...
  unsigned int RPONumber = 0;
  for (auto RI = RPOT.begin(), RE = RPOT.end(); RI != RE; ++RI) {
    OrderToBB[RPONumber] = *RI;
    BBToOrder[*RI] = RPONumber;
    Worklist.push(RPONumber);
    ++RPONumber;
  }
  LiveIn.init(RPONumber);
  LiveOut.init(RPONumber);

  // Perform the traversal.
  // ...
  while (!Worklist.empty()) {
    // ...
    while (!Worklist.empty()) {
      // ...
      bool InChanged = join(*BB, Visited);
      // Always consider LiveIn changed on the first visit.
      InChanged |= Visited.insert(BB).second;
      if (InChanged) {
        // ...
        BlockInfo LiveSet = LiveIn[BB];

        // Process the instructions in the block.
        process(*BB, &LiveSet);

        // Relatively expensive check: has anything changed in LiveOut?
        if (LiveOut[BB] != LiveSet) {
          LLVM_DEBUG(dbgs() /* ... */
                     << " has new OutLocs, add succs to worklist: [ ");
          LiveOut[BB] = std::move(LiveSet);
          for (auto I = succ_begin(BB), E = succ_end(BB); I != E; ++I) {
            if (OnPending.insert(*I).second) {
              // ...
              Pending.push(BBToOrder[*I]);
            }
          }
          // ...
        }
      }
    }
    Worklist.swap(Pending);
    assert(Pending.empty() && "Pending should be empty");
  }
  // Record whether we inserted any intrinsics.
  bool InsertedAnyIntrinsics = false;

  // Identify and add defs for single-location variables: variables that are
  // always stack homed.
  for (const auto &Pair : InsertBeforeMap) {
    const auto &Vec = Pair.second;
    for (const VarLocInfo &VarLoc : Vec) {
      // ...
      // Skip this variable if it is known to not always be stack homed.
      if (NotAlwaysStackHomed.contains(Aggr))
        continue;
      // Skip complex cases (e.g. fragments split between allocas).
      // ...
        NotAlwaysStackHomed.insert(Aggr);
        continue;
      // ...
      if (AlwaysStackHomed.insert(Aggr).second) {
        // ...
        InsertedAnyIntrinsics = true;
      }
    }
  }

  // Insert the other DEFs.
  for (const auto &[InsertBefore, Vec] : InsertBeforeMap) {
    SmallVector<VarLocInfo> NewDefs;
    for (const VarLocInfo &VarLoc : Vec) {
      // ...
      // Skip variables that are always stack homed; a single-location def has
      // already been emitted for them.
      if (AlwaysStackHomed.contains(Aggr))
        continue;
      NewDefs.push_back(VarLoc);
      InsertedAnyIntrinsics = true;
    }
    FnVarLocs->setWedge(InsertBefore, std::move(NewDefs));
  }

  InsertedAnyIntrinsics |= emitPromotedVarLocs(FnVarLocs);

  return InsertedAnyIntrinsics;
}
bool AssignmentTrackingLowering::emitPromotedVarLocs(
    FunctionVarLocsBuilder *FnVarLocs) {
  bool InsertedAnyIntrinsics = false;
  // Go through every block, translating debug intrinsics for fully promoted
  // variables into FnVarLocs location defs.
  for (auto &BB : Fn) {
    for (auto &I : BB) {
      // Skip instructions other than dbg.values.
      auto *DVI = dyn_cast<DbgValueInst>(&I);
      if (!DVI)
        continue;
      // ...
      assert(InsertBefore && "Unexpected: debug intrinsics after a terminator");
      // ...
      InsertedAnyIntrinsics = true;
    }
  }
  return InsertedAnyIntrinsics;
}
/// Remove redundant definitions within sequences of consecutive location defs
/// using a backward scan.
static bool
removeRedundantDbgLocsUsingBackwardScan(const BasicBlock *BB,
                                        FunctionVarLocsBuilder &FnVarLocs) {
  bool Changed = false;
  // ...
  for (const Instruction &I : reverse(*BB)) {
    if (!isa<DbgVariableIntrinsic>(I)) {
      // Sequence of consecutive defs ended. Clear map for the next one.
      VariableDefinedBits.clear();
      continue;
    }

    // Get the location defs that start just before this instruction.
    const auto *Locs = FnVarLocs.getWedge(&I);
    if (!Locs)
      continue;

    NumWedgesScanned++;
    bool ChangedThisWedge = false;
    // The new pruned set of defs, reversed because we're scanning backwards.
    SmallVector<VarLocInfo> NewDefsReversed;

    for (auto RIt = Locs->rbegin(), REnd = Locs->rend(); RIt != REnd; ++RIt) {
      NumDefsScanned++;
      // ...
      uint64_t SizeInBits = Aggr.first->getSizeInBits().value_or(0);
      // If the size is unknown (0) then keep this location def to be safe.
      if (SizeInBits == 0) {
        NewDefsReversed.push_back(*RIt);
        continue;
      }

      // Only keep this def if it defines bits not already covered by a later
      // def in the same wedge.
      // ...
      bool FirstDefinition = InsertResult.second;
      BitVector &DefinedBits = InsertResult.first->second;

      DIExpression::FragmentInfo Fragment =
          RIt->Expr->getFragmentInfo().value_or(
              DIExpression::FragmentInfo(SizeInBits, 0));
      bool InvalidFragment = Fragment.endInBits() > SizeInBits;

      if (FirstDefinition || InvalidFragment ||
          DefinedBits.find_first_unset_in(Fragment.startInBits(),
                                          Fragment.endInBits()) != -1) {
        if (!InvalidFragment)
          DefinedBits.set(Fragment.startInBits(), Fragment.endInBits());
        NewDefsReversed.push_back(*RIt);
        continue;
      }

      // Redundant def: leaving it out of the rebuilt wedge removes it.
      ChangedThisWedge = true;
      NumDefsRemoved++;
    }

    // Un-reverse the defs and replace the wedge with the pruned version.
    if (ChangedThisWedge) {
      std::reverse(NewDefsReversed.begin(), NewDefsReversed.end());
      FnVarLocs.setWedge(&I, std::move(NewDefsReversed));
      NumWedgesChanged++;
      Changed = true;
    }
  }

  return Changed;
}
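
// Forward scan: remove a location def when it repeats the previous value and
// expression recorded for the same variable (the map key is built without
// fragment info, using std::nullopt).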
static bool
removeRedundantDbgLocsUsingForwardScan(const BasicBlock *BB,
                                       FunctionVarLocsBuilder &FnVarLocs) {
  bool Changed = false;
  // ...
  for (const Instruction &I : *BB) {
    const auto *Locs = FnVarLocs.getWedge(&I);
    if (!Locs)
      continue;

    NumWedgesScanned++;
    bool ChangedThisWedge = false;
    SmallVector<VarLocInfo> NewDefs;

    for (const VarLocInfo &Loc : *Locs) {
      NumDefsScanned++;
      DebugVariable Key(/* ... */,
                        std::nullopt, Loc.DL.getInlinedAt());
      auto VMI = VariableMap.find(Key);

      // Keep this def if it describes a new value or expression for the
      // variable.
      if (VMI == VariableMap.end() || VMI->second.first != Loc.Values ||
          VMI->second.second != Loc.Expr) {
        VariableMap[Key] = {Loc.Values, Loc.Expr};
        NewDefs.push_back(Loc);
        continue;
      }

      // Not inserting this Loc is the same as removing it.
      ChangedThisWedge = true;
      NumDefsRemoved++;
    }

    if (ChangedThisWedge) {
      FnVarLocs.setWedge(&I, std::move(NewDefs));
      NumWedgesChanged++;
      Changed = true;
    }
  }

  return Changed;
}
static bool
removeUndefDbgLocsFromEntryBlock(const BasicBlock *BB,
                                 FunctionVarLocsBuilder &FnVarLocs) {
  // ...
  auto DefineBits = [&VarsWithDef](DebugAggregate A, DebugVariable V) {
    VarsWithDef[A].insert(V.getFragmentOrDefault());
  };
  auto HasDefinedBits = [&VarsWithDef](DebugAggregate A, DebugVariable V) {
    auto FragsIt = VarsWithDef.find(A);
    if (FragsIt == VarsWithDef.end())
      return false;
    return llvm::any_of(FragsIt->second, [V](auto Frag) {
      return DIExpression::fragmentsOverlap(Frag, V.getFragmentOrDefault());
    });
  };

  bool Changed = false;
  for (const Instruction &I : *BB) {
    const auto *Locs = FnVarLocs.getWedge(&I);
    if (!Locs)
      continue;

    NumWedgesScanned++;
    bool ChangedThisWedge = false;
    SmallVector<VarLocInfo> NewDefs;

    for (const VarLocInfo &Loc : *Locs) {
      NumDefsScanned++;
      DebugAggregate Aggr{/* ... */,
                          Loc.DL.getInlinedAt()};
      // Remove undef entries that are encountered before any non-undef
      // intrinsics in the entry block.
      if (Loc.Values.isKillLocation(Loc.Expr) && !HasDefinedBits(Aggr, Var)) {
        // Not inserting this Loc is the same as removing it.
        NumDefsRemoved++;
        ChangedThisWedge = true;
        continue;
      }

      DefineBits(Aggr, Var);
      NewDefs.push_back(Loc);
    }

    if (ChangedThisWedge) {
      FnVarLocs.setWedge(&I, std::move(NewDefs));
      NumWedgesChanged++;
      Changed = true;
    }
  }
  return Changed;
}
static bool removeRedundantDbgLocs(const BasicBlock *BB,
                                   FunctionVarLocsBuilder &FnVarLocs) {
  bool MadeChanges = false;
  // ...
}

static DenseSet<DebugAggregate> findVarsWithStackSlot(Function &Fn) {
  // ...
  for (auto &BB : Fn) {
    for (auto &I : BB) {
      // ...
    }
  }
  // ...
}
  bool Changed = false;
  // ...
  {
    AssignmentTrackingLowering Pass(Fn, Layout, &VarsWithStackSlot);
    Changed = Pass.run(FnVarLocs);
  }

  if (Changed) {
    MemLocFragmentFill Pass(Fn, &VarsWithStackSlot,
                            shouldCoalesceFragments(Fn));
    Pass.run(FnVarLocs);
    // ...
  }
bool AssignmentTrackingAnalysis::runOnFunction(Function &F) {
  // ...
  LLVM_DEBUG(dbgs() << "AssignmentTrackingAnalysis run on " << F.getName()
                    << "\n");
  auto DL = std::make_unique<DataLayout>(F.getParent());
  // ...
  if (PrintResults && isFunctionInPrintList(F.getName()))
    Results->print(errs(), F);
  // ...
}
2579 "Assignment Tracking Analysis",
false,
true)