#include <unordered_map>

#define DEBUG_TYPE "debug-ata"

STATISTIC(NumDefsScanned, "Number of dbg locs that get scanned for removal");
STATISTIC(NumDefsRemoved, "Number of dbg locs removed");
STATISTIC(NumWedgesScanned, "Number of dbg wedges scanned");
STATISTIC(NumWedgesChanged, "Number of dbg wedges changed");

    cl::desc("Maximum num basic blocks before debug info dropped"),
  return static_cast<VariableID>(Wrapped::getEmptyKey());
  return static_cast<VariableID>(Wrapped::getTombstoneKey());
  return Wrapped::getHashValue(static_cast<unsigned>(Val));
  friend FunctionVarLocs;
  std::unordered_map<VarLocInsertPt, SmallVector<VarLocInfo>> VarLocsBeforeInst;

  return static_cast<VariableID>(Variables.insert(V));
  return Variables[static_cast<unsigned>(ID)];

  auto R = VarLocsBeforeInst.find(Before);
  if (R == VarLocsBeforeInst.end())
  VarLocsBeforeInst[Before] = std::move(Wedge);
  SingleLocVars.emplace_back(VarLoc);
  VarLocsBeforeInst[Before].emplace_back(VarLoc);
  unsigned Counter = -1;
  OS << "=== Variables ===\n";

    OS << "[" << Counter << "] " << V.getVariable()->getName();
    if (auto F = V.getFragment())
      OS << " bits [" << F->OffsetInBits << ", "
         << F->OffsetInBits + F->SizeInBits << ")";
    if (const auto *IA = V.getInlinedAt())
      OS << " inlined-at " << *IA;

    OS << "DEF Var=[" << (unsigned)Loc.VariableID << "]"
       << " Expr=" << *Loc.Expr << " Values=(";
    for (auto *Op : Loc.Values.location_ops()) {
      errs() << Op->getName() << " ";

  OS << "=== Single location vars ===\n";

  OS << "=== In-line variable defs ===";
    OS << "\n" << BB.getName() << ":\n";
  for (const auto &VarLoc : Builder.SingleLocVars)
    VarLocRecords.emplace_back(VarLoc);
  SingleVarLocEnd = VarLocRecords.size();

  for (auto &P : Builder.VarLocsBeforeInst) {
    unsigned BlockStart = VarLocRecords.size();
      auto It = Builder.VarLocsBeforeInst.find(&DVR);
      if (It == Builder.VarLocsBeforeInst.end())
        VarLocRecords.emplace_back(VarLoc);
      VarLocRecords.emplace_back(VarLoc);
    unsigned BlockEnd = VarLocRecords.size();
    if (BlockEnd != BlockStart)
      VarLocsBeforeInst[I] = {BlockStart, BlockEnd};

  assert(Variables.empty() && "Expect clear before init");
  Variables.reserve(Builder.Variables.size() + 1);
  Variables.push_back(DebugVariable(nullptr, std::nullopt, nullptr));
  Variables.append(Builder.Variables.begin(), Builder.Variables.end());

  VarLocRecords.clear();
  VarLocsBeforeInst.clear();
static std::pair<Value *, DIExpression *>
  APInt OffsetInBytes(DL.getTypeSizeInBits(Start->getType()), false);
  Start->stripAndAccumulateInBoundsConstantOffsets(DL, OffsetInBytes);
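// Editor's note (assumed behaviour, matching the helper's name): the function
// above walks Start back through constant in-bounds GEPs/bitcasts towards the
// base storage, accumulating the byte offset in OffsetInBytes so it can be
// folded into the DIExpression (e.g. as a DW_OP_plus_uconst ahead of a
// DW_OP_deref) rather than being lost.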
static std::optional<int64_t>
  unsigned ExpectedDerefIdx = 0;
  if (NumElements > 2 && Elements[0] == dwarf::DW_OP_plus_uconst) {
    ExpectedDerefIdx = 2;
  } else if (NumElements > 3 && Elements[0] == dwarf::DW_OP_constu) {
    ExpectedDerefIdx = 3;
    if (Elements[2] == dwarf::DW_OP_plus)
    else if (Elements[2] == dwarf::DW_OP_minus)

  if (ExpectedDerefIdx >= NumElements)
  if (Elements[ExpectedDerefIdx] != dwarf::DW_OP_deref)
  if (NumElements == ExpectedDerefIdx + 1)
  unsigned ExpectedFragFirstIdx = ExpectedDerefIdx + 1;
  unsigned ExpectedFragFinalIdx = ExpectedFragFirstIdx + 2;
  if (NumElements == ExpectedFragFinalIdx + 1 &&
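// Worked examples for the pattern above (illustrative, assumed):
//   {DW_OP_plus_uconst, 4, DW_OP_deref, ...}          -> offset +4
//   {DW_OP_constu, 4, DW_OP_plus,  DW_OP_deref, ...}  -> offset +4
//   {DW_OP_constu, 4, DW_OP_minus, DW_OP_deref, ...}  -> offset -4
// After the deref, the expression must either end or continue with a single
// fragment triple for the offset to be extracted.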
class MemLocFragmentFill {
  FunctionVarLocsBuilder *FnVarLocs;
  const DenseSet<DebugAggregate> *VarsWithStackSlot;
  bool CoalesceAdjacentFragments;

  using BaseAddress = unsigned;
  using OffsetInBitsTy = unsigned;
  using FragTraits = IntervalMapHalfOpenInfo<OffsetInBitsTy>;
  using FragsInMemMap = IntervalMap<
      OffsetInBitsTy, BaseAddress,
      IntervalMapImpl::NodeSizer<OffsetInBitsTy, BaseAddress>::LeafSize,
      FragTraits>;
  FragsInMemMap::Allocator IntervalMapAlloc;
  using VarFragMap = DenseMap<unsigned, FragsInMemMap>;

  UniqueVector<RawLocationWrapper> Bases;
  DenseMap<const BasicBlock *, VarFragMap> LiveIn;
  DenseMap<const BasicBlock *, VarFragMap> LiveOut;

  unsigned OffsetInBits;

  using InsertMap = MapVector<VarLocInsertPt, SmallVector<FragMemLoc>>;
  DenseMap<const BasicBlock *, InsertMap> BBInsertBeforeMap;
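  // Representation note (editor's summary, hedged): each tracked aggregate
  // variable maps to an IntervalMap keyed on bit offsets, where a half-open
  // interval [StartBit, EndBit) records the ID (an index into Bases) of the
  // base address currently describing those bits in memory. E.g. a 64-bit
  // variable with its low half at base #1 and high half at base #2 is
  // {[0, 32) -> 1, [32, 64) -> 2}.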
  static bool intervalMapsAreEqual(const FragsInMemMap &A,
                                   const FragsInMemMap &B) {
    auto AIt = A.begin(), AEnd = A.end();
    auto BIt = B.begin(), BEnd = B.end();
    for (; AIt != AEnd; ++AIt, ++BIt) {
      if (AIt.start() != BIt.start() || AIt.stop() != BIt.stop())
  static bool varFragMapsAreEqual(const VarFragMap &A, const VarFragMap &B) {
    if (A.size() != B.size())
    for (const auto &APair : A) {
      auto BIt = B.find(APair.first);
      if (!intervalMapsAreEqual(APair.second, BIt->second))
  std::string toString(unsigned BaseID) {
    return Bases[BaseID].getVariableLocationOp(0)->getName().str();

  std::string toString(FragsInMemMap::const_iterator It, bool Newline = true) {
    std::stringstream S(String);
      S << "[" << It.start() << ", " << It.stop()
      S << "invalid iterator (end)";
  FragsInMemMap meetFragments(const FragsInMemMap &A, const FragsInMemMap &B) {
    FragsInMemMap Result(IntervalMapAlloc);
    for (auto AIt = A.begin(), AEnd = A.end(); AIt != AEnd; ++AIt) {
      if (!B.overlaps(AIt.start(), AIt.stop()))

      auto FirstOverlap = B.find(AIt.start());
      assert(FirstOverlap != B.end());
      bool IntersectStart = FirstOverlap.start() < AIt.start();
                 << ", IntersectStart: " << IntersectStart << "\n");

      auto LastOverlap = B.find(AIt.stop());
          LastOverlap != B.end() && LastOverlap.start() < AIt.stop();
                 << ", IntersectEnd: " << IntersectEnd << "\n");

      if (IntersectStart && IntersectEnd && FirstOverlap == LastOverlap) {
        if (*AIt && *AIt == *FirstOverlap)
          Result.insert(AIt.start(), AIt.stop(), *AIt);

      auto Next = FirstOverlap;
      if (IntersectStart) {
        if (*AIt && *AIt == *FirstOverlap)
          Result.insert(AIt.start(), FirstOverlap.stop(), *AIt);

        if (*AIt && *AIt == *LastOverlap)
          Result.insert(LastOverlap.start(), AIt.stop(), *AIt);

      while (Next != B.end() && Next.start() < AIt.stop() &&
             Next.stop() <= AIt.stop()) {
                   << "- insert intersection of a and " << toString(Next));
        if (*AIt && *AIt == *Next)
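  // Meet example (illustrative): with A = {[0, 32) -> base 1} and
  // B = {[8, 32) -> base 1}, only bit ranges that both maps describe with the
  // same base survive, so the result is {[8, 32) -> base 1}; ranges mapped to
  // different bases, or present in only one input, are dropped.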
  void meetVars(VarFragMap &A, const VarFragMap &B) {
    for (auto It = A.begin(), End = A.end(); It != End; ++It) {
      unsigned AVar = It->first;
      FragsInMemMap &AFrags = It->second;
      auto BIt = B.find(AVar);
      if (BIt == B.end()) {
                 << Aggregates[AVar].first->getName() << "\n");

      AFrags = meetFragments(AFrags, BIt->second);
  bool meet(const BasicBlock &BB,
            const SmallPtrSet<BasicBlock *, 16> &Visited) {
    bool FirstMeet = true;
      if (!Visited.count(Pred))
      auto PredLiveOut = LiveOut.find(Pred);
        BBLiveIn = PredLiveOut->second;
        LLVM_DEBUG(dbgs() << "BBLiveIn = meet BBLiveIn, " << Pred->getName()
        meetVars(BBLiveIn, PredLiveOut->second);

    if (BBLiveIn.size() == 0)
      CurrentLiveInEntry->second = std::move(BBLiveIn);
    if (!varFragMapsAreEqual(BBLiveIn, CurrentLiveInEntry->second)) {
      CurrentLiveInEntry->second = std::move(BBLiveIn);
  void insertMemLoc(BasicBlock &BB, VarLocInsertPt Before, unsigned Var,
                    unsigned StartBit, unsigned EndBit, unsigned Base,
    assert(StartBit < EndBit && "Cannot create fragment of size <= 0");
    Loc.OffsetInBits = StartBit;
    Loc.SizeInBits = EndBit - StartBit;
    assert(Base && "Expected a non-zero ID for Base address");
    BBInsertBeforeMap[&BB][Before].push_back(Loc);
               << " bits [" << StartBit << ", " << EndBit << ")\n");
  void coalesceFragments(BasicBlock &BB, VarLocInsertPt Before, unsigned Var,
                         unsigned StartBit, unsigned EndBit, unsigned Base,
    if (!CoalesceAdjacentFragments)

    auto CoalescedFrag = FragMap.find(StartBit);
    if (CoalescedFrag.start() == StartBit && CoalescedFrag.stop() == EndBit)

    LLVM_DEBUG(dbgs() << "- Insert loc for bits " << CoalescedFrag.start()
                      << " to " << CoalescedFrag.stop() << "\n");
    insertMemLoc(BB, Before, Var, CoalescedFrag.start(), CoalescedFrag.stop(),
  void addDef(const VarLocInfo &VarLoc, VarLocInsertPt Before, BasicBlock &BB,
              VarFragMap &LiveSet) {
    const DIExpression *DIExpr = VarLoc.Expr;
      StartBit = Frag->OffsetInBits;
      EndBit = StartBit + Frag->SizeInBits;

    const unsigned Base =
        DerefOffsetInBytes && *DerefOffsetInBytes * 8 == StartBit
                      << StartBit << ", " << EndBit << "): " << toString(Base)

    auto FragIt = LiveSet.find(Var);
    if (FragIt == LiveSet.end()) {
      auto P = LiveSet.try_emplace(Var, FragsInMemMap(IntervalMapAlloc));
      assert(P.second && "Var already in map?");
      P.first->second.insert(StartBit, EndBit, Base);

    FragsInMemMap &FragMap = FragIt->second;
    if (!FragMap.overlaps(StartBit, EndBit)) {
      FragMap.insert(StartBit, EndBit, Base);
      coalesceFragments(BB, Before, Var, StartBit, EndBit, Base, VarLoc.DL,

    auto FirstOverlap = FragMap.find(StartBit);
    assert(FirstOverlap != FragMap.end());
    bool IntersectStart = FirstOverlap.start() < StartBit;

    auto LastOverlap = FragMap.find(EndBit);
    bool IntersectEnd = LastOverlap.valid() && LastOverlap.start() < EndBit;

    if (IntersectStart && IntersectEnd && FirstOverlap == LastOverlap) {
      LLVM_DEBUG(dbgs() << "- Intersect single interval @ both ends\n");
      auto EndBitOfOverlap = FirstOverlap.stop();
      unsigned OverlapValue = FirstOverlap.value();

      FirstOverlap.setStop(StartBit);
      insertMemLoc(BB, Before, Var, FirstOverlap.start(), StartBit,
                   OverlapValue, VarLoc.DL);

      FragMap.insert(EndBit, EndBitOfOverlap, OverlapValue);
      insertMemLoc(BB, Before, Var, EndBit, EndBitOfOverlap, OverlapValue,

      FragMap.insert(StartBit, EndBit, Base);

      if (IntersectStart) {
        FirstOverlap.setStop(StartBit);
        insertMemLoc(BB, Before, Var, FirstOverlap.start(), StartBit,
                     *FirstOverlap, VarLoc.DL);

        LastOverlap.setStart(EndBit);
        insertMemLoc(BB, Before, Var, EndBit, LastOverlap.stop(), *LastOverlap,

      auto It = FirstOverlap;
      while (It.valid() && It.start() >= StartBit && It.stop() <= EndBit) {

      assert(!FragMap.overlaps(StartBit, EndBit));
      FragMap.insert(StartBit, EndBit, Base);

    coalesceFragments(BB, Before, Var, StartBit, EndBit, Base, VarLoc.DL,
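    // Overlap handling above, summarised (editor's note): the new fragment
    // [StartBit, EndBit) either (a) lands in empty space and is inserted
    // directly, (b) punches a hole in one larger interval, which is split
    // with the surviving ends re-emitted via insertMemLoc, or (c) clips the
    // intervals hanging over its start/end and swallows intervals fully
    // inside it, before [StartBit, EndBit) -> Base is finally inserted.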
  bool skipVariable(const DILocalVariable *V) { return !V->getSizeInBits(); }

  void process(BasicBlock &BB, VarFragMap &LiveSet) {
    BBInsertBeforeMap[&BB].clear();
      for (DbgVariableRecord &DVR : filterDbgVars(I.getDbgRecordRange())) {
        if (const auto *Locs = FnVarLocs->getWedge(&DVR)) {
          for (const VarLocInfo &Loc : *Locs) {
            addDef(Loc, &DVR, *I.getParent(), LiveSet);
      if (const auto *Locs = FnVarLocs->getWedge(&I)) {
        for (const VarLocInfo &Loc : *Locs) {
          addDef(Loc, &I, *I.getParent(), LiveSet);
  MemLocFragmentFill(Function &Fn,
                     const DenseSet<DebugAggregate> *VarsWithStackSlot,
                     bool CoalesceAdjacentFragments)
      : Fn(Fn), VarsWithStackSlot(VarsWithStackSlot),
        CoalesceAdjacentFragments(CoalesceAdjacentFragments) {}
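  // Dataflow overview for run() below (editor's summary, hedged): blocks are
  // numbered in reverse post-order and pulled from two min-priority queues.
  // Worklist drives the current sweep; when a block's live-out fragment map
  // changes, its successors are pushed onto Pending, and the queues are
  // swapped so only the still-unstable part of the CFG is revisited until a
  // fixed point is reached, after which the recorded mem-locs are emitted.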
  void run(FunctionVarLocsBuilder *FnVarLocs) {
    this->FnVarLocs = FnVarLocs;

    ReversePostOrderTraversal<Function *> RPOT(&Fn);
    std::priority_queue<unsigned int, std::vector<unsigned int>,
                        std::greater<unsigned int>>
        Worklist;
    std::priority_queue<unsigned int, std::vector<unsigned int>,
                        std::greater<unsigned int>>
        Pending;
    DenseMap<unsigned int, BasicBlock *> OrderToBB;
    DenseMap<BasicBlock *, unsigned int> BBToOrder;
    unsigned int RPONumber = 0;
    for (BasicBlock *BB : RPOT) {
      OrderToBB[RPONumber] = BB;
      BBToOrder[BB] = RPONumber;
      Worklist.push(RPONumber);

    SmallPtrSet<BasicBlock *, 16> Visited;
    while (!Worklist.empty() || !Pending.empty()) {
      SmallPtrSet<BasicBlock *, 16> OnPending;
      while (!Worklist.empty()) {
        bool InChanged = meet(*BB, Visited);
        InChanged |= Visited.insert(BB).second;
                   << BB->getName() << " has new InLocs, process it\n");
          VarFragMap LiveSet = LiveIn[BB];
          process(*BB, LiveSet);
          if (!varFragMapsAreEqual(LiveOut[BB], LiveSet)) {
                       << " has new OutLocs, add succs to worklist: [ ");
            LiveOut[BB] = std::move(LiveSet);
              if (OnPending.insert(Succ).second) {
                Pending.push(BBToOrder[Succ]);

      Worklist.swap(Pending);
    assert(Pending.empty() && "Pending should be empty");
    for (auto &Pair : BBInsertBeforeMap) {
      InsertMap &Map = Pair.second;
      for (auto &Pair : Map) {
        auto InsertBefore = Pair.first;
        assert(InsertBefore && "should never be null");
        auto FragMemLocs = Pair.second;

        for (auto &FragMemLoc : FragMemLocs) {
          DIExpression *Expr = DIExpression::get(Ctx, {});
          if (FragMemLoc.SizeInBits !=
              *Aggregates[FragMemLoc.Var].first->getSizeInBits())
                Expr, FragMemLoc.OffsetInBits, FragMemLoc.SizeInBits);
                                        FragMemLoc.OffsetInBits / 8);
          DebugVariable Var(Aggregates[FragMemLoc.Var].first, Expr,
                            FragMemLoc.DL.getInlinedAt());
          FnVarLocs->addVarLoc(InsertBefore, Var, Expr, FragMemLoc.DL,
                               Bases[FragMemLoc.Base]);
class AssignmentTrackingLowering {
  enum class LocKind { Mem, Val, None };

    enum S { Known, NoneOrPhi } Status;
    DbgVariableRecord *Source = nullptr;

    bool isSameSourceAssignment(const Assignment &Other) const {
      return std::tie(Status, ID) == std::tie(Other.Status, Other.ID);

    void dump(raw_ostream &OS) {
      static const char *LUT[] = {"Known", "NoneOrPhi"};
      OS << LUT[Status] << "(id=";

    static Assignment make(DIAssignID *ID, DbgVariableRecord *Source) {
             "Cannot make an assignment from a non-assign DbgVariableRecord");
      return Assignment(Known, ID, Source);
    static Assignment makeFromMemDef(DIAssignID *ID) {
      return Assignment(Known, ID);
    static Assignment makeNoneOrPhi() { return Assignment(NoneOrPhi, nullptr); }

    Assignment() : Status(NoneOrPhi), ID(nullptr) {}
    Assignment(S Status, DIAssignID *ID) : Status(Status), ID(ID) {
    Assignment(S Status, DIAssignID *ID, DbgVariableRecord *Source)
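  // Lattice note (editor's summary): an Assignment is either Known, carrying
  // the DIAssignID of the store / dbg record that produced it, or NoneOrPhi,
  // the conservative "unknown or merged at a join" state. Two assignments
  // only count as the same source when both Status and ID match, so e.g.
  // Known(id=!1) meeting Known(id=!2) must degrade to NoneOrPhi.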
  using AssignmentMap = SmallVector<Assignment>;
  using LocMap = SmallVector<LocKind>;
  using OverlapMap = DenseMap<VariableID, SmallVector<VariableID>>;
  using UntaggedStoreAssignmentMap =
  using UnknownStoreAssignmentMap =
      DenseMap<const Instruction *, SmallVector<VariableID>>;

  unsigned TrackedVariablesVectorSize = 0;
  UntaggedStoreAssignmentMap UntaggedStoreVars;
  UnknownStoreAssignmentMap UnknownStoreVars;

  using InstInsertMap = MapVector<VarLocInsertPt, SmallVector<VarLocInfo>>;
  InstInsertMap InsertBeforeMap;
  void resetInsertionPoint(Instruction &After);
  void resetInsertionPoint(DbgVariableRecord &After);
  void emitDbgValue(LocKind Kind, DbgVariableRecord *, VarLocInsertPt After);

  static bool mapsAreEqual(const BitVector &Mask, const AssignmentMap &A,
                           const AssignmentMap &B) {
      return A[VarID].isSameSourceAssignment(B[VarID]);
    BitVector VariableIDsInBlock;
    AssignmentMap StackHomeValue;
    AssignmentMap DebugValue;

    const AssignmentMap &getAssignmentMap(AssignmentKind Kind) const {
        return StackHomeValue;

    AssignmentMap &getAssignmentMap(AssignmentKind Kind) {
      return const_cast<AssignmentMap &>(
          const_cast<const BlockInfo *>(this)->getAssignmentMap(Kind));

    bool isVariableTracked(VariableID Var) const {
      return VariableIDsInBlock[static_cast<unsigned>(Var)];

    const Assignment &getAssignment(AssignmentKind Kind, VariableID Var) const {
      assert(isVariableTracked(Var) && "Var not tracked in block");
      return getAssignmentMap(Kind)[static_cast<unsigned>(Var)];

      assert(isVariableTracked(Var) && "Var not tracked in block");
      return LiveLoc[static_cast<unsigned>(Var)];

      VariableIDsInBlock.set(static_cast<unsigned>(Var));
      LiveLoc[static_cast<unsigned>(Var)] = K;

    void setAssignment(AssignmentKind Kind, VariableID Var,
                       const Assignment &AV) {
      VariableIDsInBlock.set(static_cast<unsigned>(Var));
      getAssignmentMap(Kind)[static_cast<unsigned>(Var)] = AV;

    bool hasAssignment(AssignmentKind Kind, VariableID Var,
                       const Assignment &AV) const {
      if (!isVariableTracked(Var))
      return AV.isSameSourceAssignment(getAssignment(Kind, Var));

      return VariableIDsInBlock == Other.VariableIDsInBlock &&
             LiveLoc == Other.LiveLoc &&
             mapsAreEqual(VariableIDsInBlock, StackHomeValue,
                          Other.StackHomeValue) &&
             mapsAreEqual(VariableIDsInBlock, DebugValue, Other.DebugValue);

      return LiveLoc.size() == DebugValue.size() &&
             LiveLoc.size() == StackHomeValue.size();

    void init(int NumVars) {
      StackHomeValue.clear();
      VariableIDsInBlock = BitVector(NumVars);
      StackHomeValue.insert(StackHomeValue.begin(), NumVars,
                            Assignment::makeNoneOrPhi());
      DebugValue.insert(DebugValue.begin(), NumVars,
                        Assignment::makeNoneOrPhi());
      LiveLoc.insert(LiveLoc.begin(), NumVars, LocKind::None);
    template <typename ElmtType, typename FnInputType>
                         ElmtType (*Fn)(FnInputType, FnInputType)) {

    static BlockInfo join(const BlockInfo &A, const BlockInfo &B, int NumVars) {
      BitVector Intersect = A.VariableIDsInBlock;
      Intersect &= B.VariableIDsInBlock;

        joinElmt(VarID, Join.LiveLoc, A.LiveLoc, B.LiveLoc, joinKind);
        joinElmt(VarID, Join.DebugValue, A.DebugValue, B.DebugValue,
        joinElmt(VarID, Join.StackHomeValue, A.StackHomeValue, B.StackHomeValue,

      Join.VariableIDsInBlock = A.VariableIDsInBlock;
      Join.VariableIDsInBlock |= B.VariableIDsInBlock;
  const DataLayout &Layout;
  const DenseSet<DebugAggregate> *VarsWithStackSlot;
  FunctionVarLocsBuilder *FnVarLocs;
  DenseMap<const BasicBlock *, BlockInfo> LiveIn;
  DenseMap<const BasicBlock *, BlockInfo> LiveOut;

  DenseSet<VariableID> VarsTouchedThisFrame;
  DenseSet<DebugAggregate> NotAlwaysStackHomed;

  VariableID getVariableID(const DebugVariable &Var) {

  bool join(const BasicBlock &BB, const SmallPtrSet<BasicBlock *, 16> &Visited);

  static LocKind joinKind(LocKind A, LocKind B);
  static Assignment joinAssignment(const Assignment &A, const Assignment &B);
  BlockInfo joinBlockInfo(const BlockInfo &A, const BlockInfo &B);

  void process(BasicBlock &BB, BlockInfo *LiveSet);
  void processNonDbgInstruction(Instruction &I, BlockInfo *LiveSet);
  void processTaggedInstruction(Instruction &I, BlockInfo *LiveSet);
  void processUntaggedInstruction(Instruction &I, BlockInfo *LiveSet);
  void processUnknownStoreToVariable(Instruction &I, VariableID &Var,
                                     BlockInfo *LiveSet);
  void processDbgAssign(DbgVariableRecord *Assign, BlockInfo *LiveSet);
  void processDbgVariableRecord(DbgVariableRecord &DVR, BlockInfo *LiveSet);
  void processDbgValue(DbgVariableRecord *DbgValue, BlockInfo *LiveSet);

  void addMemDef(BlockInfo *LiveSet, VariableID Var, const Assignment &AV);
  void addDbgDef(BlockInfo *LiveSet, VariableID Var, const Assignment &AV);
  void setLocKind(BlockInfo *LiveSet, VariableID Var, LocKind K);
  LocKind getLocKind(BlockInfo *LiveSet, VariableID Var);
  bool hasVarWithAssignment(BlockInfo *LiveSet, BlockInfo::AssignmentKind Kind,
  bool emitPromotedVarLocs(FunctionVarLocsBuilder *FnVarLocs);

  AssignmentTrackingLowering(Function &Fn, const DataLayout &Layout,
                             const DenseSet<DebugAggregate> *VarsWithStackSlot)
      : Fn(Fn), Layout(Layout), VarsWithStackSlot(VarsWithStackSlot) {}
  bool run(FunctionVarLocs &FnVarLocs);
AssignmentTrackingLowering::getContainedFragments(VariableID Var) const {
  auto R = VarContains.find(Var);
  if (R == VarContains.end())

void AssignmentTrackingLowering::touchFragment(VariableID Var) {
  VarsTouchedThisFrame.insert(Var);

void AssignmentTrackingLowering::setLocKind(BlockInfo *LiveSet, VariableID Var,
  auto SetKind = [this](BlockInfo *LiveSet, VariableID Var, LocKind K) {
    LiveSet->setLocKind(Var, K);
  SetKind(LiveSet, Var, K);
  for (VariableID Frag : getContainedFragments(Var))
    SetKind(LiveSet, Frag, K);
AssignmentTrackingLowering::LocKind
AssignmentTrackingLowering::getLocKind(BlockInfo *LiveSet, VariableID Var) {
  return LiveSet->getLocKind(Var);

void AssignmentTrackingLowering::addMemDef(BlockInfo *LiveSet, VariableID Var,
                                           const Assignment &AV) {
  LiveSet->setAssignment(BlockInfo::Stack, Var, AV);

  Assignment FragAV = AV;
  FragAV.Source = nullptr;
  for (VariableID Frag : getContainedFragments(Var))
    LiveSet->setAssignment(BlockInfo::Stack, Frag, FragAV);

void AssignmentTrackingLowering::addDbgDef(BlockInfo *LiveSet, VariableID Var,
                                           const Assignment &AV) {
  LiveSet->setAssignment(BlockInfo::Debug, Var, AV);

  Assignment FragAV = AV;
  FragAV.Source = nullptr;
  for (VariableID Frag : getContainedFragments(Var))
    LiveSet->setAssignment(BlockInfo::Debug, Frag, FragAV);
         "Cannot get a DIAssignID from a non-assign DbgVariableRecord!");

bool AssignmentTrackingLowering::hasVarWithAssignment(
    BlockInfo *LiveSet, BlockInfo::AssignmentKind Kind, VariableID Var,
    const Assignment &AV) {
  if (!LiveSet->hasAssignment(Kind, Var, AV))

  for (VariableID Frag : getContainedFragments(Var))
    if (!LiveSet->hasAssignment(Kind, Frag, AV))

const char *locStr(AssignmentTrackingLowering::LocKind Loc) {
  using LocKind = AssignmentTrackingLowering::LocKind;
  if (!Next->hasDbgRecords())
  return &*Next->getDbgRecordRange().begin();

void AssignmentTrackingLowering::emitDbgValue(
    assert(InsertBefore && "Shouldn't be inserting after a terminator");
    InsertBeforeMap[InsertBefore].push_back(VarLoc);

  if (Kind == LocKind::Mem) {
    if (Assign->isKillAddress()) {
      Kind = LocKind::Val;
           "fragment info should be stored in value-expression only");
      if (auto OptFragInfo = Source->getExpression()->getFragmentInfo()) {
        auto FragInfo = *OptFragInfo;
            Expr, FragInfo.OffsetInBits, FragInfo.SizeInBits);
      std::tie(Val, Expr) =

  if (Kind == LocKind::Val) {
    Emit(Source->getRawLocation(), Source->getExpression());

  if (Kind == LocKind::None) {
    Emit(nullptr, Source->getExpression());
void AssignmentTrackingLowering::processNonDbgInstruction(
    Instruction &I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
  if (I.hasMetadata(LLVMContext::MD_DIAssignID))
    processTaggedInstruction(I, LiveSet);
    processUntaggedInstruction(I, LiveSet);

void AssignmentTrackingLowering::processUnknownStoreToVariable(
  addMemDef(LiveSet, Var, Assignment::makeNoneOrPhi());
  if (getLocKind(LiveSet, Var) != LocKind::Mem)

  Assignment DbgAV = LiveSet->getAssignment(BlockInfo::Debug, Var);
  if (DbgAV.Status != Assignment::NoneOrPhi && DbgAV.Source) {
               DbgAV.dump(dbgs()); dbgs() << "\n");
    setLocKind(LiveSet, Var, LocKind::Val);
    emitDbgValue(LocKind::Val, DbgAV.Source, &I);

  assert(InsertBefore && "Shouldn't be inserting after a terminator");
      Fn.getContext(), 0, 0, V.getVariable()->getScope(), InlinedAt);
  InsertBeforeMap[InsertBefore].push_back(VarLoc);
void AssignmentTrackingLowering::processUntaggedInstruction(
    Instruction &I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
  assert(!I.hasMetadata(LLVMContext::MD_DIAssignID));
  auto It = UntaggedStoreVars.find(&I);
  if (It == UntaggedStoreVars.end()) {
    if (auto UnhandledStoreIt = UnknownStoreVars.find(&I);
        UnhandledStoreIt != UnknownStoreVars.end()) {
      LLVM_DEBUG(dbgs() << "Processing untagged unknown store " << I << "\n");
      for (auto &Var : UnhandledStoreIt->second)
        processUnknownStoreToVariable(I, Var, LiveSet);

  LLVM_DEBUG(dbgs() << "processUntaggedInstruction on UNTAGGED INST " << I

  for (auto [Var, Info] : It->second) {
    addMemDef(LiveSet, Var, Assignment::makeNoneOrPhi());
    addDbgDef(LiveSet, Var, Assignment::makeNoneOrPhi());
    setLocKind(LiveSet, Var, LocKind::Mem);

    if (auto Frag = V.getFragment()) {
      assert(R && "unexpected createFragmentExpression failure");
    if (Info.OffsetInBits)
      Ops = {dwarf::DW_OP_plus_uconst, Info.OffsetInBits / 8};

    assert(InsertBefore && "Shouldn't be inserting after a terminator");
        Fn.getContext(), 0, 0, V.getVariable()->getScope(), InlinedAt);
    InsertBeforeMap[InsertBefore].push_back(VarLoc);
void AssignmentTrackingLowering::processTaggedInstruction(
    Instruction &I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
  if (LinkedDPAssigns.empty())
           "expected Assign's variable to have stack slot");
    addMemDef(LiveSet, Var, AV);

    if (hasVarWithAssignment(LiveSet, BlockInfo::Debug, Var, AV)) {
                 LiveSet->DebugValue[static_cast<unsigned>(Var)].dump(dbgs());
      setLocKind(LiveSet, Var, LocKind::Mem);
      emitDbgValue(LocKind::Mem, Assign, &I);

    LocKind PrevLoc = getLocKind(LiveSet, Var);
    case LocKind::Val: {
      setLocKind(LiveSet, Var, LocKind::Val);
    case LocKind::Mem: {
      Assignment DbgAV = LiveSet->getAssignment(BlockInfo::Debug, Var);
      if (DbgAV.Status == Assignment::NoneOrPhi) {
        setLocKind(LiveSet, Var, LocKind::None);
        emitDbgValue(LocKind::None, Assign, &I);
        setLocKind(LiveSet, Var, LocKind::Val);
        emitDbgValue(LocKind::Val, DbgAV.Source, &I);
        emitDbgValue(LocKind::None, Assign, &I);
    case LocKind::None: {
      setLocKind(LiveSet, Var, LocKind::None);
                                                BlockInfo *LiveSet) {
  Assignment AV = Assignment::make(getIDFromMarker(*DbgAssign), DbgAssign);
  addDbgDef(LiveSet, Var, AV);

  LLVM_DEBUG(dbgs() << "processDbgAssign on " << *DbgAssign << "\n";);
  if (hasVarWithAssignment(LiveSet, BlockInfo::Stack, Var, AV)) {
               << "Val, Stack matches Debug program but address is killed\n";);
      Kind = LocKind::Val;
      Kind = LocKind::Mem;
    setLocKind(LiveSet, Var, Kind);
    emitDbgValue(Kind, DbgAssign, DbgAssign);
    setLocKind(LiveSet, Var, LocKind::Val);
    emitDbgValue(LocKind::Val, DbgAssign, DbgAssign);
                                               BlockInfo *LiveSet) {
  Assignment AV = Assignment::makeNoneOrPhi();
  addDbgDef(LiveSet, Var, AV);
             << " -> Val, dbg.value override");
  setLocKind(LiveSet, Var, LocKind::Val);

  if (auto F = DbgValue.getExpression()->getFragmentInfo())
    return F->SizeInBits == 0;

void AssignmentTrackingLowering::processDbgVariableRecord(
    processDbgAssign(&DVR, LiveSet);
    processDbgValue(&DVR, LiveSet);
void AssignmentTrackingLowering::resetInsertionPoint(Instruction &After) {
  if (R == InsertBeforeMap.end())
  if (R == InsertBeforeMap.end())

void AssignmentTrackingLowering::process(BasicBlock &BB, BlockInfo *LiveSet) {
  bool ProcessedLeadingDbgRecords = !BB.begin()->hasDbgRecords();
  assert(VarsTouchedThisFrame.empty());

    if (ProcessedLeadingDbgRecords) {
      if (II->isTerminator())
      resetInsertionPoint(*II);
      processNonDbgInstruction(*II, LiveSet);
      assert(LiveSet->isValid());

    if (II != EI && II->hasDbgRecords()) {
        resetInsertionPoint(DVR);
        processDbgVariableRecord(DVR, LiveSet);
        assert(LiveSet->isValid());
    ProcessedLeadingDbgRecords = true;

  for (auto Var : VarsTouchedThisFrame) {
    LocKind Loc = getLocKind(LiveSet, Var);
    if (Loc != LocKind::Mem) {
      NotAlwaysStackHomed.insert(Aggr);
  VarsTouchedThisFrame.clear();
AssignmentTrackingLowering::LocKind
AssignmentTrackingLowering::joinKind(LocKind A, LocKind B) {
  return A == B ? A : LocKind::None;

AssignmentTrackingLowering::Assignment
AssignmentTrackingLowering::joinAssignment(const Assignment &A,
                                           const Assignment &B) {
  if (!A.isSameSourceAssignment(B))
    return Assignment::makeNoneOrPhi();
  if (A.Status == Assignment::NoneOrPhi)
    return Assignment::makeNoneOrPhi();

  if (A.Source == B.Source)
  if (!A.Source || !B.Source)
  if (A.Source->isEquivalentTo(*B.Source))

  assert(A.Status == B.Status && A.Status == Assignment::Known);
  return Assignment::make(A.ID, Source);
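// Join example (illustrative, assumed detail): joining Known(id=!7) with
// Known(id=!7) keeps the Known status and ID, and the Source chosen depends
// on whether the two source records are identical or equivalent; any ID
// mismatch or a NoneOrPhi input collapses the result to NoneOrPhi, matching
// the early returns above.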
AssignmentTrackingLowering::BlockInfo
AssignmentTrackingLowering::joinBlockInfo(const BlockInfo &A,
                                          const BlockInfo &B) {
  return BlockInfo::join(A, B, TrackedVariablesVectorSize);
bool AssignmentTrackingLowering::join(
    if (Visited.count(Pred))

  if (VisitedPreds.empty()) {
    bool DidInsert = It.second;
      It.first->second.init(TrackedVariablesVectorSize);

  if (VisitedPreds.size() == 1) {
    const BlockInfo &PredLiveOut = LiveOut.find(VisitedPreds[0])->second;
    if (PredLiveOut != CurrentLiveInEntry->second) {
      CurrentLiveInEntry->second = PredLiveOut;

  const BlockInfo &PredLiveOut0 = LiveOut.find(VisitedPreds[0])->second;
  const BlockInfo &PredLiveOut1 = LiveOut.find(VisitedPreds[1])->second;
  BlockInfo BBLiveIn = joinBlockInfo(PredLiveOut0, PredLiveOut1);
    const auto &PredLiveOut = LiveOut.find(Pred);
           "block should have been processed already");
    BBLiveIn = joinBlockInfo(std::move(BBLiveIn), PredLiveOut->second);

  auto CurrentLiveInEntry = LiveIn.find(&BB);
  if (CurrentLiveInEntry == LiveIn.end())
  else if (BBLiveIn != CurrentLiveInEntry->second)
    CurrentLiveInEntry->second = std::move(BBLiveIn);
  auto ALeft = A.OffsetInBits;
  auto BLeft = B.OffsetInBits;
  auto ARight = ALeft + A.SizeInBits;
  auto BRight = BLeft + B.SizeInBits;
  if (BRight > ARight)
static std::optional<at::AssignmentInfo>
  return std::nullopt;

  if (ID != Intrinsic::experimental_vp_strided_store &&
      ID != Intrinsic::masked_store && ID != Intrinsic::vp_scatter &&
      ID != Intrinsic::masked_scatter && ID != Intrinsic::vp_store &&
      ID != Intrinsic::masked_compressstore)
    AssignmentTrackingLowering::UntaggedStoreAssignmentMap &UntaggedStoreVars,
    AssignmentTrackingLowering::UnknownStoreAssignmentMap &UnknownStoreVars,
    unsigned &TrackedVariablesVectorSize) {
    if (Record->isDbgDeclare()) {
    if (!VarsWithStackSlot.contains(DA))
    if (Seen.insert(DV).second)
      FragmentMap[DA].push_back(DV);

  for (auto &BB : Fn) {
    for (auto &I : BB) {
        ProcessDbgRecord(&DVR);
        std::optional<DIExpression::FragmentInfo> FragInfo;
                I.getDataLayout(), Info->Base,
                Info->OffsetInBits, Info->SizeInBits, Assign, FragInfo) ||
            (FragInfo && FragInfo->SizeInBits == 0))

        FragInfo = Assign->getExpression()->getFragmentInfo();
                          Assign->getDebugLoc().getInlinedAt());
        if (!VarsWithStackSlot.contains(DA))

        UntaggedStoreVars[&I].push_back(
        if (Seen.insert(DV).second)
          FragmentMap[DA].push_back(DV);
        HandleDbgAssignForStore(DVR);

                          Assign->getDebugLoc().getInlinedAt());
        if (!VarsWithStackSlot.contains(DA))
        HandleDbgAssignForUnknownStore(DVR);
  for (auto &Pair : FragmentMap) {
    std::sort(Frags.begin(), Frags.end(),
                return Elmt.getFragmentOrDefault().SizeInBits >
                       Next.getFragmentOrDefault().SizeInBits;

  AssignmentTrackingLowering::OverlapMap Map;
  for (auto &Pair : FragmentMap) {
    auto &Frags = Pair.second;
    for (auto It = Frags.begin(), IEnd = Frags.end(); It != IEnd; ++It) {
      for (; OtherIt != IEnd; ++OtherIt) {
        Map[OtherVar].push_back(ThisVar);
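  // Overlap-map sketch (editor's note; containment direction assumed from
  // fullyContains): for an aggregate with location defs over bits [0, 64),
  // [0, 32) and [32, 64), the map ends up recording the whole-variable
  // fragment as containing the two halves, e.g. Map[[0,64)] = {[0,32),
  // [32,64)}, which getContainedFragments() later uses to forward a def of
  // the enclosing fragment to the fragments nested inside it.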
  for (auto *DVR : DPDeclares)
bool AssignmentTrackingLowering::run(FunctionVarLocsBuilder *FnVarLocsBuilder) {
               << ": too many blocks (" << Fn.size() << ")\n");

  FnVarLocs = FnVarLocsBuilder;
      Fn, FnVarLocs, *VarsWithStackSlot, UntaggedStoreVars, UnknownStoreVars,
      TrackedVariablesVectorSize);

  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Worklist;
  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Pending;
  unsigned int RPONumber = 0;
    OrderToBB[RPONumber] = BB;
    BBToOrder[BB] = RPONumber;
    Worklist.push(RPONumber);

  while (!Worklist.empty()) {
    while (!Worklist.empty()) {
      bool InChanged = join(*BB, Visited);
      InChanged |= Visited.insert(BB).second;
        BlockInfo LiveSet = LiveIn[BB];
        process(*BB, &LiveSet);
        if (LiveOut[BB] != LiveSet) {
                     << " has new OutLocs, add succs to worklist: [ ");
          LiveOut[BB] = std::move(LiveSet);
            if (OnPending.insert(Succ).second) {
              Pending.push(BBToOrder[Succ]);

    Worklist.swap(Pending);
  assert(Pending.empty() && "Pending should be empty");
  bool InsertedAnyIntrinsics = false;

  for (const auto &Pair : InsertBeforeMap) {
    auto &Vec = Pair.second;

    if (NotAlwaysStackHomed.contains(Aggr))

    NotAlwaysStackHomed.insert(Aggr);

    if (AlwaysStackHomed.insert(Aggr).second) {
      InsertedAnyIntrinsics = true;

  for (const auto &[InsertBefore, Vec] : InsertBeforeMap) {
      if (AlwaysStackHomed.contains(Aggr))
      InsertedAnyIntrinsics = true;
    FnVarLocs->setWedge(InsertBefore, std::move(NewDefs));

  InsertedAnyIntrinsics |= emitPromotedVarLocs(FnVarLocs);

  return InsertedAnyIntrinsics;
bool AssignmentTrackingLowering::emitPromotedVarLocs(
    FunctionVarLocsBuilder *FnVarLocs) {
  bool InsertedAnyIntrinsics = false;
    assert(InsertBefore && "Unexpected: debug intrinsics after a terminator");
    InsertedAnyIntrinsics = true;

  for (auto &BB : Fn) {
    for (auto &I : BB) {
        TranslateDbgRecord(&DVR);

  return InsertedAnyIntrinsics;
  VariableDefinedBytes.clear();

  auto HandleLocsForWedge = [&](auto *WedgePosition) {
    const auto *Locs = FnVarLocs.getWedge(WedgePosition);
    bool ChangedThisWedge = false;
    for (auto RIt = Locs->rbegin(), REnd = Locs->rend(); RIt != REnd; ++RIt) {
      uint64_t SizeInBits = Aggr.first->getSizeInBits().value_or(0);
      const uint64_t MaxSizeBytes = 2048;
      if (SizeInBytes == 0 || SizeInBytes > MaxSizeBytes) {

      bool FirstDefinition = InsertResult.second;
      BitVector &DefinedBytes = InsertResult.first->second;
          RIt->Expr->getFragmentInfo().value_or(
      bool InvalidFragment = Fragment.endInBits() > SizeInBits;
      uint64_t StartInBytes = Fragment.startInBits() / 8;

      if (FirstDefinition || InvalidFragment ||
        if (!InvalidFragment)
          DefinedBytes.set(StartInBytes, EndInBytes);
        ChangedThisWedge = true;

    if (ChangedThisWedge) {
      std::reverse(NewDefsReversed.begin(), NewDefsReversed.end());
      FnVarLocs.setWedge(WedgePosition, std::move(NewDefsReversed));

  HandleLocsForWedge(&I);
  HandleLocsForWedge(&DVR);
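// Backward-scan note (editor's summary): defs in a wedge are visited in
// reverse, and a def survives only if it covers at least one byte of its
// aggregate not already covered by a later def; fully shadowed defs are
// dropped. Because survivors are collected in reverse order,
// NewDefsReversed is flipped back before setWedge() installs it.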
  auto HandleLocsForWedge = [&](auto *WedgePosition) {
    const auto *Locs = FnVarLocs.getWedge(WedgePosition);
    bool ChangedThisWedge = false;
                        std::nullopt, Loc.DL.getInlinedAt());
      if (Inserted || VMI->second.first != Loc.Values ||
          VMI->second.second != Loc.Expr) {
        VMI->second = {Loc.Values, Loc.Expr};
        ChangedThisWedge = true;

    if (ChangedThisWedge) {
      FnVarLocs.setWedge(WedgePosition, std::move(NewDefs));

  HandleLocsForWedge(&DVR);
  HandleLocsForWedge(&I);
    VarsWithDef[A].insert(V.getFragmentOrDefault());

    auto FragsIt = VarsWithDef.find(A);
    if (FragsIt == VarsWithDef.end())
    return DIExpression::fragmentsOverlap(Frag, V.getFragmentOrDefault());

  auto HandleLocsForWedge = [&](auto *WedgePosition) {
    const auto *Locs = FnVarLocs.getWedge(WedgePosition);
    bool ChangedThisWedge = false;
                          Loc.DL.getInlinedAt()};
      if (Loc.Values.isKillLocation(Loc.Expr) && !HasDefinedBits(Aggr, Var)) {
        ChangedThisWedge = true;
      DefineBits(Aggr, Var);

    if (ChangedThisWedge) {
      FnVarLocs.setWedge(WedgePosition, std::move(NewDefs));

  HandleLocsForWedge(&DVR);
  HandleLocsForWedge(&I);
  bool MadeChanges = false;

  for (auto &BB : Fn) {
    for (auto &I : BB) {

  AssignmentTrackingLowering Pass(Fn, Layout, &VarsWithStackSlot);

  MemLocFragmentFill Pass(Fn, &VarsWithStackSlot,
  Pass.run(FnVarLocs);
  auto &DL = F.getDataLayout();

  LLVM_DEBUG(dbgs() << "AssignmentTrackingAnalysis run on " << F.getName()
  Results->init(Builder);
  Results->print(errs(), F);

                      "Assignment Tracking Analysis", false, true)