41#include <unordered_map>
44#define DEBUG_TYPE "debug-ata"
46STATISTIC(NumDefsScanned,
"Number of dbg locs that get scanned for removal");
47STATISTIC(NumDefsRemoved,
"Number of dbg locs removed");
48STATISTIC(NumWedgesScanned,
"Number of dbg wedges scanned");
49STATISTIC(NumWedgesChanged,
"Number of dbg wedges changed");
53 cl::desc(
"Maximum num basic blocks before debug info dropped"),
74 return static_cast<VariableID>(Wrapped::getEmptyKey());
77 return static_cast<VariableID>(Wrapped::getTombstoneKey());
80 return Wrapped::getHashValue(
static_cast<unsigned>(Val));
99 friend FunctionVarLocs;
103 std::unordered_map<VarLocInsertPt, SmallVector<VarLocInfo>> VarLocsBeforeInst;
112 return static_cast<VariableID>(Variables.insert(V));
117 return Variables[
static_cast<unsigned>(
ID)];
123 auto R = VarLocsBeforeInst.find(Before);
124 if (R == VarLocsBeforeInst.end())
131 VarLocsBeforeInst[Before] = std::move(Wedge);
140 VarLoc.
DL = std::move(
DL);
142 SingleLocVars.emplace_back(VarLoc);
151 VarLoc.
DL = std::move(
DL);
153 VarLocsBeforeInst[Before].emplace_back(VarLoc);
160 unsigned Counter = -1;
161 OS <<
"=== Variables ===\n";
168 OS <<
"[" << Counter <<
"] " << V.getVariable()->getName();
169 if (
auto F = V.getFragment())
170 OS <<
" bits [" <<
F->OffsetInBits <<
", "
171 <<
F->OffsetInBits +
F->SizeInBits <<
")";
172 if (
const auto *IA = V.getInlinedAt())
173 OS <<
" inlined-at " << *IA;
178 OS <<
"DEF Var=[" << (
unsigned)
Loc.VariableID <<
"]"
179 <<
" Expr=" << *
Loc.Expr <<
" Values=(";
180 for (
auto *
Op :
Loc.Values.location_ops()) {
181 errs() <<
Op->getName() <<
" ";
187 OS <<
"=== Single location vars ===\n";
194 OS <<
"=== In-line variable defs ===";
196 OS <<
"\n" << BB.getName() <<
":\n";
208 for (
const auto &VarLoc : Builder.SingleLocVars)
209 VarLocRecords.emplace_back(VarLoc);
211 SingleVarLocEnd = VarLocRecords.size();
217 for (
auto &
P : Builder.VarLocsBeforeInst) {
223 unsigned BlockStart = VarLocRecords.size();
230 auto It = Builder.VarLocsBeforeInst.find(&DVR);
231 if (It == Builder.VarLocsBeforeInst.end())
234 VarLocRecords.emplace_back(VarLoc);
237 VarLocRecords.emplace_back(VarLoc);
238 unsigned BlockEnd = VarLocRecords.size();
240 if (BlockEnd != BlockStart)
241 VarLocsBeforeInst[
I] = {BlockStart, BlockEnd};
245 assert(Variables.empty() &&
"Expect clear before init");
248 Variables.reserve(Builder.Variables.size() + 1);
249 Variables.push_back(
DebugVariable(
nullptr, std::nullopt,
nullptr));
250 Variables.append(Builder.Variables.begin(), Builder.Variables.end());
255 VarLocRecords.clear();
256 VarLocsBeforeInst.clear();
264static std::pair<Value *, DIExpression *>
267 APInt OffsetInBytes(
DL.getTypeSizeInBits(Start->getType()),
false);
269 Start->stripAndAccumulateInBoundsConstantOffsets(
DL, OffsetInBytes);
283static std::optional<int64_t>
288 unsigned ExpectedDerefIdx = 0;
290 if (NumElements > 2 && Elements[0] == dwarf::DW_OP_plus_uconst) {
292 ExpectedDerefIdx = 2;
293 }
else if (NumElements > 3 && Elements[0] == dwarf::DW_OP_constu) {
294 ExpectedDerefIdx = 3;
295 if (Elements[2] == dwarf::DW_OP_plus)
297 else if (Elements[2] == dwarf::DW_OP_minus)
304 if (ExpectedDerefIdx >= NumElements)
309 if (Elements[ExpectedDerefIdx] != dwarf::DW_OP_deref)
313 if (NumElements == ExpectedDerefIdx + 1)
315 unsigned ExpectedFragFirstIdx = ExpectedDerefIdx + 1;
316 unsigned ExpectedFragFinalIdx = ExpectedFragFirstIdx + 2;
317 if (NumElements == ExpectedFragFinalIdx + 1 &&
367class MemLocFragmentFill {
369 FunctionVarLocsBuilder *FnVarLocs;
370 const DenseSet<DebugAggregate> *VarsWithStackSlot;
371 bool CoalesceAdjacentFragments;
374 using BaseAddress = unsigned;
375 using OffsetInBitsTy = unsigned;
376 using FragTraits = IntervalMapHalfOpenInfo<OffsetInBitsTy>;
377 using FragsInMemMap = IntervalMap<
378 OffsetInBitsTy, BaseAddress,
379 IntervalMapImpl::NodeSizer<OffsetInBitsTy, BaseAddress>::LeafSize,
381 FragsInMemMap::Allocator IntervalMapAlloc;
382 using VarFragMap = DenseMap<unsigned, FragsInMemMap>;
386 UniqueVector<RawLocationWrapper> Bases;
388 DenseMap<const BasicBlock *, VarFragMap> LiveIn;
389 DenseMap<const BasicBlock *, VarFragMap> LiveOut;
394 unsigned OffsetInBits;
398 using InsertMap = MapVector<VarLocInsertPt, SmallVector<FragMemLoc>>;
405 DenseMap<const BasicBlock *, InsertMap> BBInsertBeforeMap;
407 static bool intervalMapsAreEqual(
const FragsInMemMap &
A,
408 const FragsInMemMap &
B) {
409 auto AIt =
A.
begin(), AEnd =
A.end();
410 auto BIt =
B.begin(), BEnd =
B.end();
411 for (; AIt != AEnd; ++AIt, ++BIt) {
414 if (AIt.start() != BIt.start() || AIt.stop() != BIt.stop())
423 static bool varFragMapsAreEqual(
const VarFragMap &
A,
const VarFragMap &
B) {
424 if (
A.size() !=
B.size())
426 for (
const auto &APair :
A) {
427 auto BIt =
B.find(APair.first);
430 if (!intervalMapsAreEqual(APair.second, BIt->second))
437 std::string
toString(
unsigned BaseID) {
439 return Bases[BaseID].getVariableLocationOp(0)->getName().str();
445 std::string
toString(FragsInMemMap::const_iterator It,
bool Newline =
true) {
447 std::stringstream S(
String);
449 S <<
"[" << It.start() <<
", " << It.stop()
452 S <<
"invalid iterator (end)";
459 FragsInMemMap meetFragments(
const FragsInMemMap &
A,
const FragsInMemMap &
B) {
460 FragsInMemMap
Result(IntervalMapAlloc);
461 for (
auto AIt =
A.begin(), AEnd =
A.end(); AIt != AEnd; ++AIt) {
468 if (!
B.overlaps(AIt.start(), AIt.stop()))
472 auto FirstOverlap =
B.find(AIt.start());
473 assert(FirstOverlap !=
B.end());
474 bool IntersectStart = FirstOverlap.start() < AIt.start();
476 <<
", IntersectStart: " << IntersectStart <<
"\n");
479 auto LastOverlap =
B.find(AIt.stop());
481 LastOverlap !=
B.end() && LastOverlap.start() < AIt.stop();
483 <<
", IntersectEnd: " << IntersectEnd <<
"\n");
486 if (IntersectStart && IntersectEnd && FirstOverlap == LastOverlap) {
494 if (*AIt && *AIt == *FirstOverlap)
495 Result.insert(AIt.start(), AIt.stop(), *AIt);
503 auto Next = FirstOverlap;
504 if (IntersectStart) {
507 if (*AIt && *AIt == *FirstOverlap)
508 Result.insert(AIt.start(), FirstOverlap.stop(), *AIt);
518 if (*AIt && *AIt == *LastOverlap)
519 Result.insert(LastOverlap.start(), AIt.stop(), *AIt);
528 while (
Next !=
B.end() &&
Next.start() < AIt.stop() &&
529 Next.stop() <= AIt.stop()) {
531 <<
"- insert intersection of a and " <<
toString(
Next));
532 if (*AIt && *AIt == *
Next)
542 void meetVars(VarFragMap &
A,
const VarFragMap &
B) {
546 for (
auto It =
A.begin(), End =
A.end(); It != End; ++It) {
547 unsigned AVar = It->first;
548 FragsInMemMap &AFrags = It->second;
549 auto BIt =
B.find(AVar);
550 if (BIt ==
B.end()) {
555 <<
Aggregates[AVar].first->getName() <<
"\n");
556 AFrags = meetFragments(AFrags, BIt->second);
560 bool meet(
const BasicBlock &BB,
561 const SmallPtrSet<BasicBlock *, 16> &Visited) {
566 bool FirstMeet =
true;
573 if (!Visited.
count(Pred))
576 auto PredLiveOut = LiveOut.
find(Pred);
581 BBLiveIn = PredLiveOut->second;
584 LLVM_DEBUG(
dbgs() <<
"BBLiveIn = meet BBLiveIn, " << Pred->getName()
586 meetVars(BBLiveIn, PredLiveOut->second);
592 if (BBLiveIn.size() == 0)
601 CurrentLiveInEntry->second = std::move(BBLiveIn);
607 if (!varFragMapsAreEqual(BBLiveIn, CurrentLiveInEntry->second)) {
609 CurrentLiveInEntry->second = std::move(BBLiveIn);
617 void insertMemLoc(BasicBlock &BB,
VarLocInsertPt Before,
unsigned Var,
618 unsigned StartBit,
unsigned EndBit,
unsigned Base,
620 assert(StartBit < EndBit &&
"Cannot create fragment of size <= 0");
625 Loc.OffsetInBits = StartBit;
626 Loc.SizeInBits = EndBit - StartBit;
627 assert(
Base &&
"Expected a non-zero ID for Base address");
630 BBInsertBeforeMap[&BB][Before].push_back(Loc);
632 <<
" bits [" << StartBit <<
", " << EndBit <<
")\n");
639 void coalesceFragments(BasicBlock &BB,
VarLocInsertPt Before,
unsigned Var,
640 unsigned StartBit,
unsigned EndBit,
unsigned Base,
642 if (!CoalesceAdjacentFragments)
649 auto CoalescedFrag = FragMap.find(StartBit);
651 if (CoalescedFrag.start() == StartBit && CoalescedFrag.stop() == EndBit)
654 LLVM_DEBUG(
dbgs() <<
"- Insert loc for bits " << CoalescedFrag.start()
655 <<
" to " << CoalescedFrag.stop() <<
"\n");
656 insertMemLoc(BB, Before, Var, CoalescedFrag.start(), CoalescedFrag.stop(),
660 void addDef(
const VarLocInfo &VarLoc,
VarLocInsertPt Before, BasicBlock &BB,
661 VarFragMap &LiveSet) {
674 const DIExpression *DIExpr = VarLoc.
Expr;
678 StartBit = Frag->OffsetInBits;
679 EndBit = StartBit + Frag->SizeInBits;
694 const unsigned Base =
695 DerefOffsetInBytes && *DerefOffsetInBytes * 8 == StartBit
699 << StartBit <<
", " << EndBit <<
"): " <<
toString(
Base)
706 auto FragIt = LiveSet.find(Var);
709 if (FragIt == LiveSet.end()) {
711 auto P = LiveSet.try_emplace(Var, FragsInMemMap(IntervalMapAlloc));
712 assert(
P.second &&
"Var already in map?");
714 P.first->second.insert(StartBit, EndBit,
Base);
719 FragsInMemMap &FragMap = FragIt->second;
722 if (!FragMap.overlaps(StartBit, EndBit)) {
724 FragMap.insert(StartBit, EndBit,
Base);
725 coalesceFragments(BB, Before, Var, StartBit, EndBit,
Base, VarLoc.
DL,
732 auto FirstOverlap = FragMap.find(StartBit);
733 assert(FirstOverlap != FragMap.end());
734 bool IntersectStart = FirstOverlap.start() < StartBit;
737 auto LastOverlap = FragMap.find(EndBit);
738 bool IntersectEnd = LastOverlap.valid() && LastOverlap.start() < EndBit;
741 if (IntersectStart && IntersectEnd && FirstOverlap == LastOverlap) {
742 LLVM_DEBUG(
dbgs() <<
"- Intersect single interval @ both ends\n");
750 auto EndBitOfOverlap = FirstOverlap.stop();
751 unsigned OverlapValue = FirstOverlap.value();
754 FirstOverlap.setStop(StartBit);
755 insertMemLoc(BB, Before, Var, FirstOverlap.start(), StartBit,
756 OverlapValue, VarLoc.
DL);
759 FragMap.insert(EndBit, EndBitOfOverlap, OverlapValue);
760 insertMemLoc(BB, Before, Var, EndBit, EndBitOfOverlap, OverlapValue,
764 FragMap.insert(StartBit, EndBit,
Base);
774 if (IntersectStart) {
777 FirstOverlap.setStop(StartBit);
778 insertMemLoc(BB, Before, Var, FirstOverlap.start(), StartBit,
779 *FirstOverlap, VarLoc.
DL);
788 LastOverlap.setStart(EndBit);
789 insertMemLoc(BB, Before, Var, EndBit, LastOverlap.stop(), *LastOverlap,
805 auto It = FirstOverlap;
808 while (It.valid() && It.start() >= StartBit && It.stop() <= EndBit) {
813 assert(!FragMap.overlaps(StartBit, EndBit));
815 FragMap.insert(StartBit, EndBit,
Base);
818 coalesceFragments(BB, Before, Var, StartBit, EndBit,
Base, VarLoc.
DL,
822 bool skipVariable(
const DILocalVariable *V) {
return !
V->getSizeInBits(); }
824 void process(BasicBlock &BB, VarFragMap &LiveSet) {
825 BBInsertBeforeMap[&BB].
clear();
827 for (DbgVariableRecord &DVR :
filterDbgVars(
I.getDbgRecordRange())) {
828 if (
const auto *Locs = FnVarLocs->
getWedge(&DVR)) {
829 for (
const VarLocInfo &Loc : *Locs) {
830 addDef(Loc, &DVR, *
I.getParent(), LiveSet);
834 if (
const auto *Locs = FnVarLocs->
getWedge(&
I)) {
835 for (
const VarLocInfo &Loc : *Locs) {
836 addDef(Loc, &
I, *
I.getParent(), LiveSet);
843 MemLocFragmentFill(Function &Fn,
844 const DenseSet<DebugAggregate> *VarsWithStackSlot,
845 bool CoalesceAdjacentFragments)
846 : Fn(Fn), VarsWithStackSlot(VarsWithStackSlot),
847 CoalesceAdjacentFragments(CoalesceAdjacentFragments) {}
869 void run(FunctionVarLocsBuilder *FnVarLocs) {
873 this->FnVarLocs = FnVarLocs;
877 ReversePostOrderTraversal<Function *> RPOT(&Fn);
878 std::priority_queue<unsigned int, std::vector<unsigned int>,
879 std::greater<unsigned int>>
881 std::priority_queue<unsigned int, std::vector<unsigned int>,
882 std::greater<unsigned int>>
884 DenseMap<unsigned int, BasicBlock *> OrderToBB;
885 DenseMap<BasicBlock *, unsigned int> BBToOrder;
887 unsigned int RPONumber = 0;
888 for (BasicBlock *BB : RPOT) {
889 OrderToBB[RPONumber] = BB;
890 BBToOrder[BB] = RPONumber;
891 Worklist.push(RPONumber);
907 SmallPtrSet<BasicBlock *, 16> Visited;
908 while (!Worklist.empty() || !Pending.empty()) {
912 SmallPtrSet<BasicBlock *, 16> OnPending;
914 while (!Worklist.empty()) {
918 bool InChanged = meet(*BB, Visited);
920 InChanged |= Visited.
insert(BB).second;
923 << BB->
getName() <<
" has new InLocs, process it\n");
927 VarFragMap LiveSet = LiveIn[BB];
930 process(*BB, LiveSet);
933 if (!varFragMapsAreEqual(LiveOut[BB], LiveSet)) {
935 <<
" has new OutLocs, add succs to worklist: [ ");
936 LiveOut[BB] = std::move(LiveSet);
938 if (OnPending.
insert(Succ).second) {
940 Pending.push(BBToOrder[Succ]);
947 Worklist.swap(Pending);
950 assert(Pending.empty() &&
"Pending should be empty");
954 for (
auto &Pair : BBInsertBeforeMap) {
955 InsertMap &
Map = Pair.second;
956 for (
auto &Pair : Map) {
957 auto InsertBefore = Pair.first;
958 assert(InsertBefore &&
"should never be null");
959 auto FragMemLocs = Pair.second;
962 for (
auto &FragMemLoc : FragMemLocs) {
963 DIExpression *Expr = DIExpression::get(Ctx, {});
964 if (FragMemLoc.SizeInBits !=
965 *
Aggregates[FragMemLoc.Var].first->getSizeInBits())
967 Expr, FragMemLoc.OffsetInBits, FragMemLoc.SizeInBits);
969 FragMemLoc.OffsetInBits / 8);
970 DebugVariable Var(
Aggregates[FragMemLoc.Var].first, Expr,
971 FragMemLoc.DL.getInlinedAt());
972 FnVarLocs->
addVarLoc(InsertBefore, Var, Expr, FragMemLoc.DL,
973 Bases[FragMemLoc.Base]);
983class AssignmentTrackingLowering {
1008 enum class LocKind { Mem, Val,
None };
1025 enum S { Known, NoneOrPhi } Status;
1030 DbgVariableRecord *
Source =
nullptr;
1032 bool isSameSourceAssignment(
const Assignment &
Other)
const {
1035 return std::tie(Status,
ID) == std::tie(
Other.Status,
Other.ID);
1037 void dump(raw_ostream &OS) {
1038 static const char *
LUT[] = {
"Known",
"NoneOrPhi"};
1039 OS <<
LUT[Status] <<
"(id=";
1052 static Assignment make(DIAssignID *
ID, DbgVariableRecord *Source) {
1054 "Cannot make an assignment from a non-assign DbgVariableRecord");
1055 return Assignment(Known,
ID, Source);
1057 static Assignment makeFromMemDef(DIAssignID *
ID) {
1058 return Assignment(Known,
ID);
1060 static Assignment makeNoneOrPhi() {
return Assignment(NoneOrPhi,
nullptr); }
1062 Assignment() : Status(NoneOrPhi),
ID(nullptr) {}
1063 Assignment(S Status, DIAssignID *
ID) : Status(Status),
ID(
ID) {
1067 Assignment(S Status, DIAssignID *
ID, DbgVariableRecord *Source)
1074 using AssignmentMap = SmallVector<Assignment>;
1075 using LocMap = SmallVector<LocKind>;
1076 using OverlapMap = DenseMap<VariableID, SmallVector<VariableID>>;
1077 using UntaggedStoreAssignmentMap =
1080 using UnknownStoreAssignmentMap =
1081 DenseMap<const Instruction *, SmallVector<VariableID>>;
1082 using EscapingCallVarsMap =
1089 unsigned TrackedVariablesVectorSize = 0;
1094 UntaggedStoreAssignmentMap UntaggedStoreVars;
1097 UnknownStoreAssignmentMap UnknownStoreVars;
1101 EscapingCallVarsMap EscapingCallVars;
1104 using InstInsertMap = MapVector<VarLocInsertPt, SmallVector<VarLocInfo>>;
1105 InstInsertMap InsertBeforeMap;
1108 void resetInsertionPoint(Instruction &After);
1109 void resetInsertionPoint(DbgVariableRecord &After);
1111 void emitDbgValue(LocKind Kind, DbgVariableRecord *,
VarLocInsertPt After);
1113 static bool mapsAreEqual(
const BitVector &Mask,
const AssignmentMap &
A,
1114 const AssignmentMap &
B) {
1116 return A[VarID].isSameSourceAssignment(B[VarID]);
1125 BitVector VariableIDsInBlock;
1128 AssignmentMap StackHomeValue;
1130 AssignmentMap DebugValue;
1145 const AssignmentMap &getAssignmentMap(AssignmentKind Kind)
const {
1148 return StackHomeValue;
1154 AssignmentMap &getAssignmentMap(AssignmentKind Kind) {
1155 return const_cast<AssignmentMap &
>(
1156 const_cast<const BlockInfo *
>(
this)->getAssignmentMap(Kind));
1159 bool isVariableTracked(
VariableID Var)
const {
1160 return VariableIDsInBlock[
static_cast<unsigned>(Var)];
1163 const Assignment &getAssignment(AssignmentKind Kind,
VariableID Var)
const {
1164 assert(isVariableTracked(Var) &&
"Var not tracked in block");
1165 return getAssignmentMap(Kind)[
static_cast<unsigned>(Var)];
1169 assert(isVariableTracked(Var) &&
"Var not tracked in block");
1170 return LiveLoc[
static_cast<unsigned>(Var)];
1176 VariableIDsInBlock.
set(
static_cast<unsigned>(Var));
1177 LiveLoc[
static_cast<unsigned>(Var)] = K;
1183 void setAssignment(AssignmentKind Kind,
VariableID Var,
1184 const Assignment &AV) {
1185 VariableIDsInBlock.
set(
static_cast<unsigned>(Var));
1186 getAssignmentMap(Kind)[
static_cast<unsigned>(Var)] = AV;
1192 bool hasAssignment(AssignmentKind Kind,
VariableID Var,
1193 const Assignment &AV)
const {
1194 if (!isVariableTracked(Var))
1196 return AV.isSameSourceAssignment(getAssignment(Kind, Var));
1202 return VariableIDsInBlock ==
Other.VariableIDsInBlock &&
1203 LiveLoc ==
Other.LiveLoc &&
1204 mapsAreEqual(VariableIDsInBlock, StackHomeValue,
1205 Other.StackHomeValue) &&
1206 mapsAreEqual(VariableIDsInBlock, DebugValue,
Other.DebugValue);
1210 return LiveLoc.size() == DebugValue.size() &&
1211 LiveLoc.size() == StackHomeValue.size();
1215 void init(
int NumVars) {
1216 StackHomeValue.clear();
1219 VariableIDsInBlock = BitVector(NumVars);
1220 StackHomeValue.insert(StackHomeValue.begin(), NumVars,
1221 Assignment::makeNoneOrPhi());
1222 DebugValue.insert(DebugValue.begin(), NumVars,
1223 Assignment::makeNoneOrPhi());
1224 LiveLoc.
insert(LiveLoc.
begin(), NumVars, LocKind::None);
1228 template <
typename ElmtType,
typename FnInputType>
1232 ElmtType (*Fn)(FnInputType, FnInputType)) {
1237 static BlockInfo
join(
const BlockInfo &
A,
const BlockInfo &
B,
int NumVars) {
1256 BitVector Intersect =
A.VariableIDsInBlock;
1257 Intersect &=
B.VariableIDsInBlock;
1260 joinElmt(
VarID, Join.LiveLoc,
A.LiveLoc,
B.LiveLoc, joinKind);
1261 joinElmt(
VarID, Join.DebugValue,
A.DebugValue,
B.DebugValue,
1263 joinElmt(
VarID, Join.StackHomeValue,
A.StackHomeValue,
B.StackHomeValue,
1267 Join.VariableIDsInBlock =
A.VariableIDsInBlock;
1268 Join.VariableIDsInBlock |=
B.VariableIDsInBlock;
1275 const DataLayout &Layout;
1276 const DenseSet<DebugAggregate> *VarsWithStackSlot;
1277 FunctionVarLocsBuilder *FnVarLocs;
1278 DenseMap<const BasicBlock *, BlockInfo> LiveIn;
1279 DenseMap<const BasicBlock *, BlockInfo> LiveOut;
1282 DenseSet<VariableID> VarsTouchedThisFrame;
1285 DenseSet<DebugAggregate> NotAlwaysStackHomed;
1287 VariableID getVariableID(
const DebugVariable &Var) {
1295 bool join(
const BasicBlock &BB,
const SmallPtrSet<BasicBlock *, 16> &Visited);
1315 static LocKind joinKind(LocKind
A, LocKind
B);
1316 static Assignment joinAssignment(
const Assignment &
A,
const Assignment &
B);
1317 BlockInfo joinBlockInfo(
const BlockInfo &
A,
const BlockInfo &
B);
1323 void process(BasicBlock &BB, BlockInfo *LiveSet);
1328 void processNonDbgInstruction(Instruction &
I, BlockInfo *LiveSet);
1331 void processTaggedInstruction(Instruction &
I, BlockInfo *LiveSet);
1334 void processUntaggedInstruction(Instruction &
I, BlockInfo *LiveSet);
1335 void processUnknownStoreToVariable(Instruction &
I,
VariableID &Var,
1336 BlockInfo *LiveSet);
1337 void processEscapingCall(Instruction &
I, BlockInfo *LiveSet);
1338 void processDbgAssign(DbgVariableRecord *Assign, BlockInfo *LiveSet);
1339 void processDbgVariableRecord(DbgVariableRecord &DVR, BlockInfo *LiveSet);
1340 void processDbgValue(DbgVariableRecord *DbgValue, BlockInfo *LiveSet);
1342 void addMemDef(BlockInfo *LiveSet,
VariableID Var,
const Assignment &AV);
1344 void addDbgDef(BlockInfo *LiveSet,
VariableID Var,
const Assignment &AV);
1348 void setLocKind(BlockInfo *LiveSet,
VariableID Var, LocKind K);
1351 LocKind getLocKind(BlockInfo *LiveSet,
VariableID Var);
1353 bool hasVarWithAssignment(BlockInfo *LiveSet, BlockInfo::AssignmentKind Kind,
1364 bool emitPromotedVarLocs(FunctionVarLocsBuilder *FnVarLocs);
1367 AssignmentTrackingLowering(Function &Fn,
const DataLayout &Layout,
1368 const DenseSet<DebugAggregate> *VarsWithStackSlot)
1369 : Fn(Fn), Layout(Layout), VarsWithStackSlot(VarsWithStackSlot) {}
1372 bool run(FunctionVarLocsBuilder *FnVarLocs);
1377AssignmentTrackingLowering::getContainedFragments(
VariableID Var)
const {
1378 auto R = VarContains.find(Var);
1379 if (R == VarContains.end())
1384void AssignmentTrackingLowering::touchFragment(
VariableID Var) {
1385 VarsTouchedThisFrame.insert(Var);
1388void AssignmentTrackingLowering::setLocKind(BlockInfo *LiveSet,
VariableID Var,
1390 auto SetKind = [
this](BlockInfo *LiveSet,
VariableID Var, LocKind
K) {
1391 LiveSet->setLocKind(Var, K);
1394 SetKind(LiveSet, Var, K);
1397 for (
VariableID Frag : getContainedFragments(Var))
1398 SetKind(LiveSet, Frag, K);
1401AssignmentTrackingLowering::LocKind
1402AssignmentTrackingLowering::getLocKind(BlockInfo *LiveSet,
VariableID Var) {
1403 return LiveSet->getLocKind(Var);
1406void AssignmentTrackingLowering::addMemDef(BlockInfo *LiveSet,
VariableID Var,
1407 const Assignment &AV) {
1408 LiveSet->setAssignment(BlockInfo::Stack, Var, AV);
1413 Assignment FragAV = AV;
1414 FragAV.Source =
nullptr;
1415 for (
VariableID Frag : getContainedFragments(Var))
1416 LiveSet->setAssignment(BlockInfo::Stack, Frag, FragAV);
1419void AssignmentTrackingLowering::addDbgDef(BlockInfo *LiveSet,
VariableID Var,
1420 const Assignment &AV) {
1421 LiveSet->setAssignment(BlockInfo::Debug, Var, AV);
1426 Assignment FragAV = AV;
1427 FragAV.Source =
nullptr;
1428 for (
VariableID Frag : getContainedFragments(Var))
1429 LiveSet->setAssignment(BlockInfo::Debug, Frag, FragAV);
1438 "Cannot get a DIAssignID from a non-assign DbgVariableRecord!");
1443bool AssignmentTrackingLowering::hasVarWithAssignment(
1444 BlockInfo *LiveSet, BlockInfo::AssignmentKind Kind,
VariableID Var,
1445 const Assignment &AV) {
1446 if (!LiveSet->hasAssignment(Kind, Var, AV))
1451 for (
VariableID Frag : getContainedFragments(Var))
1452 if (!LiveSet->hasAssignment(Kind, Frag, AV))
1458const char *
locStr(AssignmentTrackingLowering::LocKind
Loc) {
1459 using LocKind = AssignmentTrackingLowering::LocKind;
1480 if (!
Next->hasDbgRecords())
1482 return &*
Next->getDbgRecordRange().begin();
1490void AssignmentTrackingLowering::emitDbgValue(
1503 assert(InsertBefore &&
"Shouldn't be inserting after a terminator");
1512 InsertBeforeMap[InsertBefore].
push_back(VarLoc);
1516 if (Kind == LocKind::Mem) {
1521 if (
Assign->isKillAddress()) {
1523 Kind = LocKind::Val;
1528 "fragment info should be stored in value-expression only");
1531 if (
auto OptFragInfo =
Source->getExpression()->getFragmentInfo()) {
1532 auto FragInfo = *OptFragInfo;
1534 Expr, FragInfo.OffsetInBits, FragInfo.SizeInBits);
1537 std::tie(Val, Expr) =
1544 if (Kind == LocKind::Val) {
1545 Emit(
Source->getRawLocation(),
Source->getExpression());
1549 if (Kind == LocKind::None) {
1550 Emit(
nullptr,
Source->getExpression());
1555void AssignmentTrackingLowering::processNonDbgInstruction(
1556 Instruction &
I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
1557 if (
I.hasMetadata(LLVMContext::MD_DIAssignID))
1558 processTaggedInstruction(
I, LiveSet);
1560 processUntaggedInstruction(
I, LiveSet);
1565 processEscapingCall(
I, LiveSet);
1568void AssignmentTrackingLowering::processUnknownStoreToVariable(
1572 addMemDef(LiveSet, Var, Assignment::makeNoneOrPhi());
1575 if (getLocKind(LiveSet, Var) != LocKind::Mem)
1579 Assignment DbgAV = LiveSet->getAssignment(BlockInfo::Debug, Var);
1580 if (DbgAV.Status != Assignment::NoneOrPhi && DbgAV.Source) {
1582 DbgAV.dump(
dbgs());
dbgs() <<
"\n");
1583 setLocKind(LiveSet, Var, LocKind::Val);
1584 emitDbgValue(LocKind::Val, DbgAV.Source, &
I);
1590 assert(InsertBefore &&
"Shouldn't be inserting after a terminator");
1596 Fn.
getContext(), 0, 0,
V.getVariable()->getScope(), InlinedAt);
1604 InsertBeforeMap[InsertBefore].push_back(VarLoc);
1607void AssignmentTrackingLowering::processUntaggedInstruction(
1608 Instruction &
I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
1620 assert(!
I.hasMetadata(LLVMContext::MD_DIAssignID));
1621 auto It = UntaggedStoreVars.find(&
I);
1622 if (It == UntaggedStoreVars.end()) {
1629 if (
auto UnhandledStoreIt = UnknownStoreVars.find(&
I);
1630 UnhandledStoreIt != UnknownStoreVars.end()) {
1631 LLVM_DEBUG(
dbgs() <<
"Processing untagged unknown store " <<
I <<
"\n");
1632 for (
auto &Var : UnhandledStoreIt->second)
1633 processUnknownStoreToVariable(
I, Var, LiveSet);
1638 LLVM_DEBUG(
dbgs() <<
"processUntaggedInstruction on UNTAGGED INST " <<
I
1642 for (
auto [Var, Info] : It->second) {
1646 addMemDef(LiveSet, Var, Assignment::makeNoneOrPhi());
1647 addDbgDef(LiveSet, Var, Assignment::makeNoneOrPhi());
1648 setLocKind(LiveSet, Var, LocKind::Mem);
1656 if (
auto Frag =
V.getFragment()) {
1659 assert(R &&
"unexpected createFragmentExpression failure");
1663 if (
Info.OffsetInBits)
1664 Ops = {dwarf::DW_OP_plus_uconst,
Info.OffsetInBits / 8};
1671 assert(InsertBefore &&
"Shouldn't be inserting after a terminator");
1676 Fn.
getContext(), 0, 0,
V.getVariable()->getScope(), InlinedAt);
1685 InsertBeforeMap[InsertBefore].push_back(VarLoc);
1689void AssignmentTrackingLowering::processEscapingCall(
1690 Instruction &
I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
1691 auto It = EscapingCallVars.find(&
I);
1692 if (It == EscapingCallVars.end())
1699 for (
auto &[Var, Addr, AddrExpr] : It->second) {
1705 addMemDef(LiveSet, Var, Assignment::makeNoneOrPhi());
1706 addDbgDef(LiveSet, Var, Assignment::makeNoneOrPhi());
1707 setLocKind(LiveSet, Var, LocKind::Mem);
1711 <<
", setting LocKind to Mem\n");
1718 if (
auto Frag =
V.getFragment()) {
1721 assert(R &&
"unexpected createFragmentExpression failure");
1729 assert(InsertBefore &&
"Shouldn't be inserting after a terminator");
1733 Fn.
getContext(), 0, 0,
V.getVariable()->getScope(), InlinedAt);
1741 InsertBeforeMap[InsertBefore].push_back(VarLoc);
1745void AssignmentTrackingLowering::processTaggedInstruction(
1746 Instruction &
I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
1752 if (LinkedDPAssigns.empty())
1761 "expected Assign's variable to have stack slot");
1764 addMemDef(LiveSet, Var, AV);
1772 if (hasVarWithAssignment(LiveSet, BlockInfo::Debug, Var, AV)) {
1778 LiveSet->DebugValue[
static_cast<unsigned>(Var)].dump(
dbgs());
1780 setLocKind(LiveSet, Var, LocKind::Mem);
1781 emitDbgValue(LocKind::Mem, Assign, &
I);
1790 LocKind PrevLoc = getLocKind(LiveSet, Var);
1792 case LocKind::Val: {
1796 setLocKind(LiveSet, Var, LocKind::Val);
1798 case LocKind::Mem: {
1802 Assignment DbgAV = LiveSet->getAssignment(BlockInfo::Debug, Var);
1803 if (DbgAV.Status == Assignment::NoneOrPhi) {
1806 setLocKind(LiveSet, Var, LocKind::None);
1807 emitDbgValue(LocKind::None, Assign, &
I);
1811 setLocKind(LiveSet, Var, LocKind::Val);
1813 emitDbgValue(LocKind::Val, DbgAV.Source, &
I);
1816 emitDbgValue(LocKind::None, Assign, &
I);
1820 case LocKind::None: {
1824 setLocKind(LiveSet, Var, LocKind::None);
1831 BlockInfo *LiveSet) {
1838 Assignment AV = Assignment::make(
getIDFromMarker(*DbgAssign), DbgAssign);
1839 addDbgDef(LiveSet, Var, AV);
1841 LLVM_DEBUG(
dbgs() <<
"processDbgAssign on " << *DbgAssign <<
"\n";);
1847 if (hasVarWithAssignment(LiveSet, BlockInfo::Stack, Var, AV)) {
1855 <<
"Val, Stack matches Debug program but address is killed\n";);
1856 Kind = LocKind::Val;
1859 Kind = LocKind::Mem;
1861 setLocKind(LiveSet, Var, Kind);
1862 emitDbgValue(Kind, DbgAssign, DbgAssign);
1867 setLocKind(LiveSet, Var, LocKind::Val);
1868 emitDbgValue(LocKind::Val, DbgAssign, DbgAssign);
1873 BlockInfo *LiveSet) {
1886 Assignment AV = Assignment::makeNoneOrPhi();
1887 addDbgDef(LiveSet, Var, AV);
1891 <<
" -> Val, dbg.value override");
1893 setLocKind(LiveSet, Var, LocKind::Val);
1898 if (
auto F =
DbgValue.getExpression()->getFragmentInfo())
1899 return F->SizeInBits == 0;
1903void AssignmentTrackingLowering::processDbgVariableRecord(
1910 processDbgAssign(&DVR, LiveSet);
1912 processDbgValue(&DVR, LiveSet);
1915void AssignmentTrackingLowering::resetInsertionPoint(
Instruction &After) {
1918 if (R == InsertBeforeMap.end())
1924 if (R == InsertBeforeMap.end())
1929void AssignmentTrackingLowering::process(
BasicBlock &BB, BlockInfo *LiveSet) {
1932 bool ProcessedLeadingDbgRecords = !BB.
begin()->hasDbgRecords();
1934 assert(VarsTouchedThisFrame.empty());
1941 if (ProcessedLeadingDbgRecords) {
1946 if (
II->isTerminator())
1948 resetInsertionPoint(*
II);
1949 processNonDbgInstruction(*
II, LiveSet);
1950 assert(LiveSet->isValid());
1956 if (
II != EI &&
II->hasDbgRecords()) {
1961 resetInsertionPoint(DVR);
1962 processDbgVariableRecord(DVR, LiveSet);
1963 assert(LiveSet->isValid());
1966 ProcessedLeadingDbgRecords =
true;
1974 for (
auto Var : VarsTouchedThisFrame) {
1975 LocKind
Loc = getLocKind(LiveSet, Var);
1983 if (
Loc != LocKind::Mem) {
1986 NotAlwaysStackHomed.insert(Aggr);
1989 VarsTouchedThisFrame.clear();
1993AssignmentTrackingLowering::LocKind
1994AssignmentTrackingLowering::joinKind(LocKind
A, LocKind
B) {
1997 return A ==
B ?
A : LocKind::None;
2000AssignmentTrackingLowering::Assignment
2001AssignmentTrackingLowering::joinAssignment(
const Assignment &
A,
2002 const Assignment &
B) {
2009 if (!
A.isSameSourceAssignment(
B))
2010 return Assignment::makeNoneOrPhi();
2011 if (
A.Status == Assignment::NoneOrPhi)
2012 return Assignment::makeNoneOrPhi();
2028 if (
A.Source ==
B.Source)
2030 if (!
A.Source || !
B.Source)
2032 if (
A.Source->isEquivalentTo(*
B.Source))
2037 assert(
A.Status ==
B.Status &&
A.Status == Assignment::Known);
2039 return Assignment::make(
A.ID, Source);
2042AssignmentTrackingLowering::BlockInfo
2043AssignmentTrackingLowering::joinBlockInfo(
const BlockInfo &
A,
2044 const BlockInfo &
B) {
2045 return BlockInfo::join(
A,
B, TrackedVariablesVectorSize);
2048bool AssignmentTrackingLowering::join(
2060 if (Visited.
count(Pred))
2065 if (VisitedPreds.
empty()) {
2067 bool DidInsert = It.second;
2069 It.first->second.init(TrackedVariablesVectorSize);
2074 if (VisitedPreds.
size() == 1) {
2075 const BlockInfo &PredLiveOut = LiveOut.
find(VisitedPreds[0])->second;
2082 if (PredLiveOut != CurrentLiveInEntry->second) {
2083 CurrentLiveInEntry->second = PredLiveOut;
2091 const BlockInfo &PredLiveOut0 = LiveOut.
find(VisitedPreds[0])->second;
2092 const BlockInfo &PredLiveOut1 = LiveOut.
find(VisitedPreds[1])->second;
2093 BlockInfo BBLiveIn = joinBlockInfo(PredLiveOut0, PredLiveOut1);
2098 const auto &PredLiveOut = LiveOut.
find(Pred);
2100 "block should have been processed already");
2101 BBLiveIn = joinBlockInfo(std::move(BBLiveIn), PredLiveOut->second);
2105 auto CurrentLiveInEntry = LiveIn.
find(&BB);
2108 if (CurrentLiveInEntry == LiveIn.
end())
2110 else if (BBLiveIn != CurrentLiveInEntry->second)
2111 CurrentLiveInEntry->second = std::move(BBLiveIn);
2120 auto ALeft =
A.OffsetInBits;
2121 auto BLeft =
B.OffsetInBits;
2125 auto ARight = ALeft +
A.SizeInBits;
2126 auto BRight = BLeft +
B.SizeInBits;
2127 if (BRight > ARight)
2132static std::optional<at::AssignmentInfo>
2142 return std::nullopt;
2150 if (
ID != Intrinsic::experimental_vp_strided_store &&
2151 ID != Intrinsic::masked_store &&
ID != Intrinsic::vp_scatter &&
2152 ID != Intrinsic::masked_scatter &&
ID != Intrinsic::vp_store &&
2153 ID != Intrinsic::masked_compressstore)
2189 AssignmentTrackingLowering::UntaggedStoreAssignmentMap &UntaggedStoreVars,
2190 AssignmentTrackingLowering::UnknownStoreAssignmentMap &UnknownStoreVars,
2191 AssignmentTrackingLowering::EscapingCallVarsMap &EscapingCallVars,
2192 unsigned &TrackedVariablesVectorSize) {
2206 if (
Record->isDbgDeclare()) {
2212 if (!VarsWithStackSlot.
contains(DA))
2214 if (Seen.
insert(DV).second)
2215 FragmentMap[DA].push_back(DV);
2217 for (
auto &BB : Fn) {
2218 for (
auto &
I : BB) {
2220 ProcessDbgRecord(&DVR);
2224 std::optional<DIExpression::FragmentInfo> FragInfo;
2229 I.getDataLayout(), Info->Base,
2230 Info->OffsetInBits, Info->SizeInBits, Assign, FragInfo) ||
2231 (FragInfo && FragInfo->SizeInBits == 0))
2240 FragInfo = Assign->getExpression()->getFragmentInfo();
2244 Assign->getDebugLoc().getInlinedAt());
2246 if (!VarsWithStackSlot.
contains(DA))
2250 UntaggedStoreVars[&
I].push_back(
2253 if (Seen.
insert(DV).second)
2254 FragmentMap[DA].push_back(DV);
2257 HandleDbgAssignForStore(DVR);
2265 Assign->getDebugLoc().getInlinedAt());
2267 if (!VarsWithStackSlot.
contains(DA))
2274 HandleDbgAssignForUnknownStore(DVR);
2287 if (CB->onlyReadsMemory())
2291 for (
unsigned ArgIdx = 0; ArgIdx < CB->arg_size(); ++ArgIdx) {
2292 Value *Arg = CB->getArgOperand(ArgIdx);
2296 if (CB->paramHasAttr(ArgIdx, Attribute::ReadOnly) ||
2297 CB->paramHasAttr(ArgIdx, Attribute::ReadNone))
2300 if (CB->paramHasAttr(ArgIdx, Attribute::ByVal))
2315 if (!VarsWithStackSlot.
contains(DA))
2319 EscapingCallVars[&
I].push_back(
2328 for (
auto &Pair : FragmentMap) {
2330 std::sort(Frags.
begin(), Frags.
end(),
2332 return Elmt.getFragmentOrDefault().SizeInBits >
2333 Next.getFragmentOrDefault().SizeInBits;
2340 AssignmentTrackingLowering::OverlapMap Map;
2341 for (
auto &Pair : FragmentMap) {
2342 auto &Frags = Pair.second;
2343 for (
auto It = Frags.begin(), IEnd = Frags.end(); It != IEnd; ++It) {
2353 for (; OtherIt != IEnd; ++OtherIt) {
2357 Map[OtherVar].push_back(ThisVar);
2368 for (
auto *DVR : DPDeclares)
2375bool AssignmentTrackingLowering::run(FunctionVarLocsBuilder *FnVarLocsBuilder) {
2378 <<
": too many blocks (" << Fn.
size() <<
")\n");
2383 FnVarLocs = FnVarLocsBuilder;
2393 Fn, FnVarLocs, *VarsWithStackSlot, UntaggedStoreVars, UnknownStoreVars,
2394 EscapingCallVars, TrackedVariablesVectorSize);
2398 std::priority_queue<unsigned int, std::vector<unsigned int>,
2399 std::greater<unsigned int>>
2401 std::priority_queue<unsigned int, std::vector<unsigned int>,
2402 std::greater<unsigned int>>
2407 unsigned int RPONumber = 0;
2409 OrderToBB[RPONumber] = BB;
2410 BBToOrder[BB] = RPONumber;
2411 Worklist.push(RPONumber);
2429 while (!Worklist.empty()) {
2434 while (!Worklist.empty()) {
2438 bool InChanged =
join(*BB, Visited);
2440 InChanged |= Visited.
insert(BB).second;
2445 BlockInfo LiveSet = LiveIn[BB];
2448 process(*BB, &LiveSet);
2451 if (LiveOut[BB] != LiveSet) {
2453 <<
" has new OutLocs, add succs to worklist: [ ");
2454 LiveOut[BB] = std::move(LiveSet);
2456 if (OnPending.
insert(Succ).second) {
2458 Pending.push(BBToOrder[Succ]);
2465 Worklist.swap(Pending);
2468 assert(Pending.empty() &&
"Pending should be empty");
2474 bool InsertedAnyIntrinsics =
false;
2483 for (
const auto &Pair : InsertBeforeMap) {
2484 auto &Vec = Pair.second;
2490 if (NotAlwaysStackHomed.contains(Aggr))
2500 NotAlwaysStackHomed.insert(Aggr);
2509 if (AlwaysStackHomed.
insert(Aggr).second) {
2518 InsertedAnyIntrinsics =
true;
2524 for (
const auto &[InsertBefore, Vec] : InsertBeforeMap) {
2531 if (AlwaysStackHomed.
contains(Aggr))
2534 InsertedAnyIntrinsics =
true;
2537 FnVarLocs->
setWedge(InsertBefore, std::move(NewDefs));
2540 InsertedAnyIntrinsics |= emitPromotedVarLocs(FnVarLocs);
2542 return InsertedAnyIntrinsics;
2545bool AssignmentTrackingLowering::emitPromotedVarLocs(
2546 FunctionVarLocsBuilder *FnVarLocs) {
2547 bool InsertedAnyIntrinsics =
false;
2556 assert(InsertBefore &&
"Unexpected: debug intrinsics after a terminator");
2560 InsertedAnyIntrinsics =
true;
2562 for (
auto &BB : Fn) {
2563 for (
auto &
I : BB) {
2567 TranslateDbgRecord(&DVR);
2570 return InsertedAnyIntrinsics;
2590 VariableDefinedBytes.
clear();
2592 auto HandleLocsForWedge = [&](
auto *WedgePosition) {
2594 const auto *Locs = FnVarLocs.
getWedge(WedgePosition);
2599 bool ChangedThisWedge =
false;
2604 for (
auto RIt = Locs->rbegin(), REnd = Locs->rend(); RIt != REnd; ++RIt) {
2608 uint64_t SizeInBits = Aggr.first->getSizeInBits().value_or(0);
2612 const uint64_t MaxSizeBytes = 2048;
2614 if (SizeInBytes == 0 || SizeInBytes > MaxSizeBytes) {
2628 bool FirstDefinition = InsertResult.second;
2629 BitVector &DefinedBytes = InsertResult.first->second;
2632 RIt->Expr->getFragmentInfo().value_or(
2634 bool InvalidFragment = Fragment.endInBits() > SizeInBits;
2635 uint64_t StartInBytes = Fragment.startInBits() / 8;
2639 if (FirstDefinition || InvalidFragment ||
2641 if (!InvalidFragment)
2642 DefinedBytes.
set(StartInBytes, EndInBytes);
2649 ChangedThisWedge =
true;
2654 if (ChangedThisWedge) {
2655 std::reverse(NewDefsReversed.
begin(), NewDefsReversed.
end());
2656 FnVarLocs.
setWedge(WedgePosition, std::move(NewDefsReversed));
2661 HandleLocsForWedge(&
I);
2663 HandleLocsForWedge(&DVR);
2688 auto HandleLocsForWedge = [&](
auto *WedgePosition) {
2689 const auto *Locs = FnVarLocs.
getWedge(WedgePosition);
2694 bool ChangedThisWedge =
false;
2702 std::nullopt,
Loc.DL.getInlinedAt());
2707 if (Inserted || VMI->second.first !=
Loc.Values ||
2708 VMI->second.second !=
Loc.Expr) {
2709 VMI->second = {
Loc.Values,
Loc.Expr};
2715 ChangedThisWedge =
true;
2720 if (ChangedThisWedge) {
2721 FnVarLocs.
setWedge(WedgePosition, std::move(NewDefs));
2728 HandleLocsForWedge(&DVR);
2729 HandleLocsForWedge(&
I);
2754 VarsWithDef[
A].
insert(V.getFragmentOrDefault());
2760 auto FragsIt = VarsWithDef.
find(
A);
2761 if (FragsIt == VarsWithDef.
end())
2764 return DIExpression::fragmentsOverlap(Frag, V.getFragmentOrDefault());
2775 auto HandleLocsForWedge = [&](
auto *WedgePosition) {
2776 const auto *Locs = FnVarLocs.
getWedge(WedgePosition);
2781 bool ChangedThisWedge =
false;
2789 Loc.DL.getInlinedAt()};
2794 if (
Loc.Values.isKillLocation(
Loc.Expr) && !HasDefinedBits(Aggr, Var)) {
2797 ChangedThisWedge =
true;
2801 DefineBits(Aggr, Var);
2806 if (ChangedThisWedge) {
2807 FnVarLocs.
setWedge(WedgePosition, std::move(NewDefs));
2813 HandleLocsForWedge(&DVR);
2814 HandleLocsForWedge(&
I);
2822 bool MadeChanges =
false;
2836 for (
auto &BB : Fn) {
2837 for (
auto &
I : BB) {
2863 AssignmentTrackingLowering
Pass(Fn, Layout, &VarsWithStackSlot);
2868 MemLocFragmentFill
Pass(Fn, &VarsWithStackSlot,
2870 Pass.run(FnVarLocs);
2887 auto &
DL =
F.getDataLayout();
2911 LLVM_DEBUG(
dbgs() <<
"AssignmentTrackingAnalysis run on " <<
F.getName()
2921 Results->init(Builder);
2924 Results->print(
errs(),
F);
2936 "Assignment Tracking Analysis",
false,
true)
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
Function Alias Analysis Results
std::pair< const DILocalVariable *, const DILocation * > DebugAggregate
A whole (unfragmented) source variable.
VarLocInsertPt getNextNode(const DbgRecord *DVR)
static void analyzeFunction(Function &Fn, const DataLayout &Layout, FunctionVarLocsBuilder *FnVarLocs)
static std::pair< Value *, DIExpression * > walkToAllocaAndPrependOffsetDeref(const DataLayout &DL, Value *Start, DIExpression *Expression)
Walk backwards along constant GEPs and bitcasts to the base storage from Start as far as possible.
static DenseSet< DebugAggregate > findVarsWithStackSlot(Function &Fn)
static bool fullyContains(DIExpression::FragmentInfo A, DIExpression::FragmentInfo B)
Return true if A fully contains B.
static std::optional< at::AssignmentInfo > getUntaggedStoreAssignmentInfo(const Instruction &I, const DataLayout &Layout)
static bool removeUndefDbgLocsFromEntryBlock(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
static cl::opt< bool > PrintResults("print-debug-ata", cl::init(false), cl::Hidden)
Print the results of the analysis. Respects -filter-print-funcs.
const char * locStr(AssignmentTrackingLowering::LocKind Loc)
PointerUnion< const Instruction *, const DbgRecord * > VarLocInsertPt
static bool removeRedundantDbgLocsUsingForwardScan(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
Remove redundant location defs using a forward scan.
static bool removeRedundantDbgLocs(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
static cl::opt< bool > EnableMemLocFragFill("mem-loc-frag-fill", cl::init(true), cl::Hidden)
Option for debugging the pass, determines if the memory location fragment filling happens after gener...
static AssignmentTrackingLowering::OverlapMap buildOverlapMapAndRecordDeclares(Function &Fn, FunctionVarLocsBuilder *FnVarLocs, const DenseSet< DebugAggregate > &VarsWithStackSlot, AssignmentTrackingLowering::UntaggedStoreAssignmentMap &UntaggedStoreVars, AssignmentTrackingLowering::UnknownStoreAssignmentMap &UnknownStoreVars, AssignmentTrackingLowering::EscapingCallVarsMap &EscapingCallVars, unsigned &TrackedVariablesVectorSize)
Build a map of {Variable x: Variables y} where all variable fragments contained within the variable f...
static DIAssignID * getIDFromMarker(const DbgVariableRecord &DVR)
static DebugAggregate getAggregate(const DebugVariable &Var)
static bool hasZeroSizedFragment(DbgVariableRecord &DbgValue)
static DIAssignID * getIDFromInst(const Instruction &I)
AllocaInst * getUnknownStore(const Instruction &I, const DataLayout &Layout)
static std::optional< int64_t > getDerefOffsetInBytes(const DIExpression *DIExpr)
Extract the offset used in DIExpr.
static bool removeRedundantDbgLocsUsingBackwardScan(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
Remove redundant definitions within sequences of consecutive location defs.
static cl::opt< cl::boolOrDefault > CoalesceAdjacentFragmentsOpt("debug-ata-coalesce-frags", cl::Hidden)
Coalesce adjacent dbg locs describing memory locations that have contiguous fragments.
static cl::opt< unsigned > MaxNumBlocks("debug-ata-max-blocks", cl::init(10000), cl::desc("Maximum num basic blocks before debug info dropped"), cl::Hidden)
static bool shouldCoalesceFragments(Function &F)
This file implements the BitVector class.
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static ManagedStatic< cl::opt< bool, true >, CreateDebug > Debug
This file defines DenseMapInfo traits for DenseMap.
This file contains constants used for implementing Dwarf debug support.
Module.h This file contains the declarations for the Module class.
This header defines various interfaces for pass management in LLVM.
This file implements a coalescing interval map for small objects.
const AbstractManglingParser< Derived, Alloc >::OperatorInfo AbstractManglingParser< Derived, Alloc >::Ops[]
IntervalMap< SlotIndex, DbgVariableValue, 4 > LocMap
Map of where a user value is live to that value.
print mir2vec MIR2Vec Vocabulary Printer Pass
uint64_t IntrinsicInst * II
FunctionAnalysisManager FAM
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
This file builds on the ADT/GraphTraits.h file to build a generic graph post order iterator.
static bool isValid(const char C)
Returns true if C is a valid mangled character: <0-9a-zA-Z_>.
Scalar Replacement Of Aggregates
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
Helper class to build FunctionVarLocs, since that class isn't easy to modify.
void setWedge(VarLocInsertPt Before, SmallVector< VarLocInfo > &&Wedge)
Replace the defs that come just before \p Before with \p Wedge.
const SmallVectorImpl< VarLocInfo > * getWedge(VarLocInsertPt Before) const
Return ptr to wedge of defs or nullptr if no defs come just before \p Before.
unsigned getNumVariables() const
void addSingleLocVar(DebugVariable Var, DIExpression *Expr, DebugLoc DL, RawLocationWrapper R)
Add a def for a variable that is valid for its lifetime.
VariableID insertVariable(DebugVariable V)
Find or insert V and return the ID.
void addVarLoc(VarLocInsertPt Before, DebugVariable Var, DIExpression *Expr, DebugLoc DL, RawLocationWrapper R)
Add a def to the wedge of defs just before \p Before.
const DebugVariable & getVariable(VariableID ID) const
Get a variable from its ID.
Class recording the (high level) value of a variable.
Class for arbitrary precision integers.
uint64_t getZExtValue() const
Get zero extended value.
bool getBoolValue() const
Convert APInt to a boolean value.
an instruction to allocate memory on the stack
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
AssignmentTrackingAnalysis()
bool runOnFunction(Function &F) override
runOnFunction - Virtual method overridden by subclasses to do the per-function processing of the pass.
LLVM Basic Block Representation.
iterator begin()
Instruction iterator methods.
LLVM_ABI bool isEntryBlock() const
Return true if this is the entry block of the containing function.
int find_first_unset_in(unsigned Begin, unsigned End) const
find_first_unset_in - Returns the index of the first unset bit in the range [Begin,...
iterator_range< const_set_bits_iterator > set_bits() const
A structured debug information entry.
static LLVM_ABI DIExpression * append(const DIExpression *Expr, ArrayRef< uint64_t > Ops)
Append the opcodes Ops to DIExpr.
unsigned getNumElements() const
DbgVariableFragmentInfo FragmentInfo
LLVM_ABI bool startsWithDeref() const
Return whether the first element a DW_OP_deref.
static LLVM_ABI std::optional< FragmentInfo > getFragmentInfo(expr_op_iterator Start, expr_op_iterator End)
Retrieve the details of this fragment expression.
ArrayRef< uint64_t > getElements() const
static LLVM_ABI std::optional< DIExpression * > createFragmentExpression(const DIExpression *Expr, unsigned OffsetInBits, unsigned SizeInBits)
Create a DIExpression to describe one part of an aggregate variable that is fragmented across multipl...
static LLVM_ABI DIExpression * prepend(const DIExpression *Expr, uint8_t Flags, int64_t Offset=0)
Prepend DIExpr with a deref and offset operation and optionally turn it into a stack value or/and an ...
static LLVM_ABI DIExpression * prependOpcodes(const DIExpression *Expr, SmallVectorImpl< uint64_t > &Ops, bool StackValue=false, bool EntryValue=false)
Prepend DIExpr with the given opcodes and optionally turn it into a stack value.
LLVM_ABI std::optional< uint64_t > getSizeInBits() const
Determines the size of the variable's type.
StringRef getName() const
A parsed version of the target data layout string in and methods for querying it.
LLVM_ABI unsigned getIndexTypeSizeInBits(Type *Ty) const
The size in bits of the index used in GEP calculation for this type.
Instruction * MarkedInstr
Link back to the Instruction that owns this marker.
LLVM_ABI iterator_range< simple_ilist< DbgRecord >::iterator > getDbgRecordRange()
Produce a range over all the DbgRecords in this Marker.
Base class for non-instruction debug metadata records that have positions within IR.
DebugLoc getDebugLoc() const
Record of a variable value-assignment, aka a non instruction representation of the dbg....
LLVM_ABI Value * getAddress() const
LLVM_ABI bool isKillAddress() const
Check whether this kills the address component.
LLVM_ABI DIAssignID * getAssignID() const
DIExpression * getExpression() const
DILocalVariable * getVariable() const
Metadata * getRawLocation() const
Returns the metadata operand for the first location description.
DIExpression * getAddressExpression() const
Result run(Function &F, FunctionAnalysisManager &FAM)
PreservedAnalyses run(Function &F, FunctionAnalysisManager &FAM)
LLVM_ABI DILocation * getInlinedAt() const
Identifies a unique instance of a variable.
const DILocation * getInlinedAt() const
const DILocalVariable * getVariable() const
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
void reserve(size_type NumEntries)
Grow the densemap so that it can contain at least NumEntries items before resizing again.
Implements a dense probed hash-table based set.
Class representing an expression and its matching format.
Data structure describing the variable locations in a function.
void print(raw_ostream &OS, const Function &Fn) const
const VarLocInfo * locs_begin(const Instruction *Before) const
First variable location definition that comes before Before.
const VarLocInfo * single_locs_begin() const
const VarLocInfo * locs_end(const Instruction *Before) const
One past the last variable location definition that comes before Before.
const VarLocInfo * single_locs_end() const
One past the last single-location variable location definition.
void init(FunctionVarLocsBuilder &Builder)
const DataLayout & getDataLayout() const
Get the data layout of the module this function belongs to.
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
bool isTerminator() const
const_iterator begin() const
void insert(KeyT a, KeyT b, ValT y)
insert - Add a mapping of [a;b] to y, coalesce with adjacent intervals.
void clear()
clear - Remove all entries.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
void push_back(MachineInstr *MI)
Pass interface - Implemented by all 'passes'.
A discriminated union of two or more pointer types, with the discriminator in the low bit of the poin...
void * getOpaqueValue() const
static LLVM_ABI PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
Lightweight class that wraps the location operand metadata of a debug intrinsic.
Implements a dense probed hash-table based set with some number of buckets stored inline.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
bool isPointerTy() const
True if this is an instance of PointerType.
static LLVM_ABI IntegerType * getInt1Ty(LLVMContext &C)
UniqueVector - This class produces a sequential ID number (base 1) for each unique entry that is adde...
unsigned insert(const T &Entry)
insert - Append entry to the vector if it doesn't already exist.
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
bool contains(const_arg_type_t< ValueT > V) const
Check if the set contains the given element.
size_type count(const_arg_type_t< ValueT > V) const
Return 1 if the specified key is in the set, 0 otherwise.
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
DenseMap< FragmentOfVar, SmallVector< DIExpression::FragmentInfo, 1 > > OverlapMap
constexpr std::underlying_type_t< E > Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
unsigned ID
LLVM IR allows to use arbitrary numbers as calling convention identifiers.
@ Tail
Attempts to make calls as fast as possible while guaranteeing that tail call optimization can always b...
@ BasicBlock
Various leaf nodes.
LLVM_ABI void deleteAll(Function *F)
Remove all Assignment Tracking related intrinsics and metadata from F.
SmallVector< DbgVariableRecord * > getDVRAssignmentMarkers(const Instruction *Inst)
Return a range of dbg_assign records for which Inst performs the assignment they encode.
LLVM_ABI std::optional< AssignmentInfo > getAssignmentInfo(const DataLayout &DL, const MemIntrinsic *I)
LLVM_ABI bool calculateFragmentIntersect(const DataLayout &DL, const Value *Dest, uint64_t SliceOffsetInBits, uint64_t SliceSizeInBits, const DbgVariableRecord *DVRAssign, std::optional< DIExpression::FragmentInfo > &Result)
Calculate the fragment of the variable in DAI covered from (Dest + SliceOffsetInBits) to (Dest + S...
initializer< Ty > init(const Ty &Val)
@ DW_OP_LLVM_fragment
Only used in LLVM metadata.
PointerTypeMap run(const Module &M)
Compute the PointerTypeMap for the module M.
friend class Instruction
Iterator for Instructions in a `BasicBlock`.
This is an optimization pass for GlobalISel generic memory operations.
void dump(const SparseBitVector< ElementSize > &LHS, raw_ostream &out)
std::tuple< const DIScope *, const DIScope *, const DILocalVariable * > VarID
A unique key that represents a debug variable.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
Printable print(const GCNRegPressure &RP, const GCNSubtarget *ST=nullptr, unsigned DynamicVGPRBlockSize=0)
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
auto successors(const MachineBasicBlock *BB)
bool operator!=(uint64_t V1, const APInt &V2)
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
auto reverse(ContainerTy &&C)
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isFunctionInPrintList(StringRef FunctionName)
VariableID
Type wrapper for integer ID for Variables. 0 is reserved.
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
LLVM_ATTRIBUTE_VISIBILITY_DEFAULT AnalysisKey InnerAnalysisManagerProxy< AnalysisManagerT, IRUnitT, ExtraArgTs... >::Key
LLVM_ABI raw_fd_ostream & errs()
This returns a reference to a raw_ostream for standard error.
constexpr T divideCeil(U Numerator, V Denominator)
Returns the integer ceil(Numerator / Denominator).
std::string join(IteratorT Begin, IteratorT End, StringRef Separator)
Joins the strings in the range [Begin, End), adding Separator between the elements.
LLVM_ABI bool isAssignmentTrackingEnabled(const Module &M)
Return true if assignment tracking is enabled for module M.
FunctionAddr VTableAddr Next
DWARFExpression::Operation Op
ArrayRef(const T &OneElt) -> ArrayRef< T >
std::string toString(const APInt &I, unsigned Radix, bool Signed, bool formatAsCLiteral=false, bool UpperCase=true, bool InsertSeparators=false)
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
auto predecessors(const MachineBasicBlock *BB)
AnalysisManager< Function > FunctionAnalysisManager
Convenience typedef for the Function analysis manager.
LLVM_ABI const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=MaxLookupSearchDepth)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
static auto filterDbgVars(iterator_range< simple_ilist< DbgRecord >::iterator > R)
Filter the DbgRecord range to DbgVariableRecord types only and downcast.
bool debuginfoShouldUseDebugInstrRef(const Triple &T)
Implement std::hash so that hash_code can be used in STL containers.
A special type used by analysis passes to provide an address that identifies that particular analysis...
static VariableID getTombstoneKey()
static bool isEqual(const VariableID &LHS, const VariableID &RHS)
static unsigned getHashValue(const VariableID &Val)
static VariableID getEmptyKey()
DenseMapInfo< unsigned > Wrapped
An information struct used to provide DenseMap with the various necessary components for a given valu...
Variable location definition used by FunctionVarLocs.
RawLocationWrapper Values
llvm::VariableID VariableID
std::size_t operator()(const VarLocInsertPt &Arg) const