#include <unordered_map>

#define DEBUG_TYPE "debug-ata"

STATISTIC(NumDefsScanned, "Number of dbg locs that get scanned for removal");
STATISTIC(NumDefsRemoved, "Number of dbg locs removed");
STATISTIC(NumWedgesScanned, "Number of dbg wedges scanned");
STATISTIC(NumWedgesChanged, "Number of dbg wedges changed");
static cl::opt<unsigned>
    MaxNumBlocks("debug-ata-max-blocks", cl::init(10000),
                 cl::desc("Maximum num basic blocks before debug info dropped"),
                 cl::Hidden);
static VariableID getEmptyKey() { return static_cast<VariableID>(Wrapped::getEmptyKey()); }
static VariableID getTombstoneKey() { return static_cast<VariableID>(Wrapped::getTombstoneKey()); }
static unsigned getHashValue(const VariableID &Val) { return Wrapped::getHashValue(static_cast<unsigned>(Val)); }
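These members belong to the DenseMapInfo<VariableID> specialisation, which forwards to DenseMapInfo<unsigned> so the strongly-typed ID can key a DenseMap. A minimal standalone sketch of the same pattern (ExampleID and the surrounding test code are illustrative, not part of this file):

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseMapInfo.h"

enum class ExampleID : unsigned {}; // hypothetical strongly-typed ID

namespace llvm {
template <> struct DenseMapInfo<ExampleID> {
  using Wrapped = DenseMapInfo<unsigned>;
  static ExampleID getEmptyKey() {
    return static_cast<ExampleID>(Wrapped::getEmptyKey());
  }
  static ExampleID getTombstoneKey() {
    return static_cast<ExampleID>(Wrapped::getTombstoneKey());
  }
  static unsigned getHashValue(const ExampleID &Val) {
    return Wrapped::getHashValue(static_cast<unsigned>(Val));
  }
  static bool isEqual(const ExampleID &LHS, const ExampleID &RHS) {
    return LHS == RHS;
  }
};
} // namespace llvm

// With the specialisation visible, the enum can be used as a map key:
//   llvm::DenseMap<ExampleID, int> Counts;
//   Counts[static_cast<ExampleID>(1)] = 42;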
friend FunctionVarLocs;
std::unordered_map<VarLocInsertPt, SmallVector<VarLocInfo>> VarLocsBeforeInst;

// insertVariable: find or insert V and return its ID.
return static_cast<VariableID>(Variables.insert(V));
// getVariable: get a variable from its ID.
return Variables[static_cast<unsigned>(ID)];
// getWedge: return a pointer to the wedge of defs, or nullptr if none come just before Before.
auto R = VarLocsBeforeInst.find(Before);
if (R == VarLocsBeforeInst.end())
  return nullptr;
// setWedge: replace the defs that come just before Before with Wedge.
VarLocsBeforeInst[Before] = std::move(Wedge);
// addSingleLocVar: add a def for a variable that is valid for its lifetime.
SingleLocVars.emplace_back(VarLoc);
// addVarLoc: add a def to the wedge of defs just before Before.
VarLocsBeforeInst[Before].emplace_back(VarLoc);
// FunctionVarLocs::print:
unsigned Counter = -1;
OS << "=== Variables ===\n";
OS << "[" << Counter << "] " << V.getVariable()->getName();
if (auto F = V.getFragment())
  OS << " bits [" << F->OffsetInBits << ", "
     << F->OffsetInBits + F->SizeInBits << ")";
if (const auto *IA = V.getInlinedAt())
  OS << " inlined-at " << *IA;
OS << "DEF Var=[" << (unsigned)Loc.VariableID << "]"
   << " Expr=" << *Loc.Expr << " Values=(";
for (auto *Op : Loc.Values.location_ops()) {
  errs() << Op->getName() << " ";
OS << "=== Single location vars ===\n";
OS << "=== In-line variable defs ===";
OS << "\n" << BB.getName() << ":\n";
// FunctionVarLocs::init(FunctionVarLocsBuilder &Builder):
for (const auto &VarLoc : Builder.SingleLocVars)
  VarLocRecords.emplace_back(VarLoc);
SingleVarLocEnd = VarLocRecords.size();
for (auto &P : Builder.VarLocsBeforeInst) {
  unsigned BlockStart = VarLocRecords.size();
  auto It = Builder.VarLocsBeforeInst.find(&DVR);
  if (It == Builder.VarLocsBeforeInst.end())
  VarLocRecords.emplace_back(VarLoc);
  VarLocRecords.emplace_back(VarLoc);
  unsigned BlockEnd = VarLocRecords.size();
  if (BlockEnd != BlockStart)
    VarLocsBeforeInst[I] = {BlockStart, BlockEnd};
assert(Variables.empty() && "Expect clear before init");
Variables.reserve(Builder.Variables.size() + 1);
Variables.push_back(DebugVariable(nullptr, std::nullopt, nullptr));
Variables.append(Builder.Variables.begin(), Builder.Variables.end());

// FunctionVarLocs::clear:
VarLocRecords.clear();
VarLocsBeforeInst.clear();
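The class exposes its results through the iterator-style accessors listed in the reference section below (locs_begin/locs_end and single_locs_begin/single_locs_end). A minimal consumer sketch under those assumptions; dumpLocs and the header path are illustrative, not from the pass:

#include "llvm/CodeGen/AssignmentTrackingAnalysis.h"

void dumpLocs(const FunctionVarLocs &FnVarLocs, const Instruction *I) {
  // Locations that are valid for a variable's whole lifetime.
  for (const VarLocInfo *It = FnVarLocs.single_locs_begin(),
                        *End = FnVarLocs.single_locs_end();
       It != End; ++It) {
    // ... use It->VariableID, It->Expr, It->Values ...
  }
  // Location definitions that take effect just before instruction I.
  for (const VarLocInfo *It = FnVarLocs.locs_begin(I),
                        *End = FnVarLocs.locs_end(I);
       It != End; ++It) {
    // ... handle the wedge of defs before I ...
  }
}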
static std::pair<Value *, DIExpression *>
walkToAllocaAndPrependOffsetDeref(const DataLayout &DL, Value *Start,
                                  DIExpression *Expression) {
  APInt OffsetInBytes(DL.getTypeSizeInBits(Start->getType()), false);
  Start->stripAndAccumulateInBoundsConstantOffsets(DL, OffsetInBytes);
static std::optional<int64_t>
getDerefOffsetInBytes(const DIExpression *DIExpr) {
  unsigned ExpectedDerefIdx = 0;
  if (NumElements > 2 && Elements[0] == dwarf::DW_OP_plus_uconst) {
    ExpectedDerefIdx = 2;
  } else if (NumElements > 3 && Elements[0] == dwarf::DW_OP_constu) {
    ExpectedDerefIdx = 3;
    if (Elements[2] == dwarf::DW_OP_plus)
    else if (Elements[2] == dwarf::DW_OP_minus)
  if (ExpectedDerefIdx >= NumElements)
  if (Elements[ExpectedDerefIdx] != dwarf::DW_OP_deref)
  if (NumElements == ExpectedDerefIdx + 1)
  unsigned ExpectedFragFirstIdx = ExpectedDerefIdx + 1;
  unsigned ExpectedFragFinalIdx = ExpectedFragFirstIdx + 2;
  if (NumElements == ExpectedFragFinalIdx + 1 &&
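Read together, these checks accept a small family of expression prefixes ending in a dereference, optionally followed by a fragment. Illustrative examples of what the helper would extract (operands written symbolically):

//   (DW_OP_deref)                                -> offset 0
//   (DW_OP_plus_uconst, 16, DW_OP_deref)         -> offset 16
//   (DW_OP_constu, 16, DW_OP_plus, DW_OP_deref)  -> offset 16
//   (DW_OP_constu, 16, DW_OP_minus, DW_OP_deref) -> offset -16
// A trailing DW_OP_LLVM_fragment triple after the deref is also tolerated;
// any other shape yields std::nullopt.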
class MemLocFragmentFill {
  FunctionVarLocsBuilder *FnVarLocs;
  const DenseSet<DebugAggregate> *VarsWithStackSlot;
  bool CoalesceAdjacentFragments;

  using BaseAddress = unsigned;
  using OffsetInBitsTy = unsigned;
  using FragTraits = IntervalMapHalfOpenInfo<OffsetInBitsTy>;
  using FragsInMemMap = IntervalMap<
      OffsetInBitsTy, BaseAddress,
      IntervalMapImpl::NodeSizer<OffsetInBitsTy, BaseAddress>::LeafSize,
      FragTraits>;
  FragsInMemMap::Allocator IntervalMapAlloc;
  using VarFragMap = DenseMap<unsigned, FragsInMemMap>;

  UniqueVector<RawLocationWrapper> Bases;
  DenseMap<const BasicBlock *, VarFragMap> LiveIn;
  DenseMap<const BasicBlock *, VarFragMap> LiveOut;

  unsigned OffsetInBits; // FragMemLoc member

  using InsertMap = MapVector<VarLocInsertPt, SmallVector<FragMemLoc>>;
  DenseMap<const BasicBlock *, InsertMap> BBInsertBeforeMap;
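FragsInMemMap is a coalescing IntervalMap keyed by bit offsets, mapping half-open fragment ranges to a base-address ID from Bases. A minimal usage sketch of that data structure in isolation (the function and values are illustrative):

#include "llvm/ADT/IntervalMap.h"

using OffsetInBitsTy = unsigned;
using BaseAddress = unsigned;
using FragsInMemMap = llvm::IntervalMap<
    OffsetInBitsTy, BaseAddress,
    llvm::IntervalMapImpl::NodeSizer<OffsetInBitsTy, BaseAddress>::LeafSize,
    llvm::IntervalMapHalfOpenInfo<OffsetInBitsTy>>;

void example() {
  FragsInMemMap::Allocator Alloc;
  FragsInMemMap Frags(Alloc);
  // Bits [0, 32) of the variable live at base-address ID 1,
  // bits [32, 64) at base-address ID 2.
  Frags.insert(0, 32, 1);
  Frags.insert(32, 64, 2);
  // Half-open semantics: bit 32 belongs to the second interval only.
  bool Overlaps = Frags.overlaps(16, 40); // true
  (void)Overlaps;
}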
static bool intervalMapsAreEqual(const FragsInMemMap &A,
                                 const FragsInMemMap &B) {
  auto AIt = A.begin(), AEnd = A.end();
  auto BIt = B.begin(), BEnd = B.end();
  for (; AIt != AEnd; ++AIt, ++BIt) {
    if (AIt.start() != BIt.start() || AIt.stop() != BIt.stop())

static bool varFragMapsAreEqual(const VarFragMap &A, const VarFragMap &B) {
  if (A.size() != B.size())
  for (const auto &APair : A) {
    auto BIt = B.find(APair.first);
    if (!intervalMapsAreEqual(APair.second, BIt->second))
std::string toString(unsigned BaseID) {
  return Bases[BaseID].getVariableLocationOp(0)->getName().str();
}

std::string toString(FragsInMemMap::const_iterator It, bool Newline = true) {
  std::stringstream S(String);
    S << "[" << It.start() << ", " << It.stop()
    S << "invalid iterator (end)";
FragsInMemMap meetFragments(const FragsInMemMap &A, const FragsInMemMap &B) {
  FragsInMemMap Result(IntervalMapAlloc);
  for (auto AIt = A.begin(), AEnd = A.end(); AIt != AEnd; ++AIt) {
    if (!B.overlaps(AIt.start(), AIt.stop()))
    auto FirstOverlap = B.find(AIt.start());
    assert(FirstOverlap != B.end());
    bool IntersectStart = FirstOverlap.start() < AIt.start();
               << ", IntersectStart: " << IntersectStart << "\n");
    auto LastOverlap = B.find(AIt.stop());
        LastOverlap != B.end() && LastOverlap.start() < AIt.stop();
               << ", IntersectEnd: " << IntersectEnd << "\n");
    if (IntersectStart && IntersectEnd && FirstOverlap == LastOverlap) {
      if (*AIt && *AIt == *FirstOverlap)
        Result.insert(AIt.start(), AIt.stop(), *AIt);
    auto Next = FirstOverlap;
    if (IntersectStart) {
      if (*AIt && *AIt == *FirstOverlap)
        Result.insert(AIt.start(), FirstOverlap.stop(), *AIt);
    if (*AIt && *AIt == *LastOverlap)
      Result.insert(LastOverlap.start(), AIt.stop(), *AIt);
    while (Next != B.end() && Next.start() < AIt.stop() &&
           Next.stop() <= AIt.stop()) {
                 << "- insert intersection of a and " << toString(Next));
      if (*AIt && *AIt == *Next)
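The net effect of the meet is an intersection that keeps only the byte ranges on which both maps agree about the base address. For instance (illustrative values):

//   A: [0, 32) -> base 1,  [32, 64) -> base 2
//   B: [0, 64) -> base 1
//   meetFragments(A, B): [0, 32) -> base 1
// Bits [32, 64) disagree about the base, so they drop out of the result.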
void meetVars(VarFragMap &A, const VarFragMap &B) {
  for (auto It = A.begin(), End = A.end(); It != End; ++It) {
    unsigned AVar = It->first;
    FragsInMemMap &AFrags = It->second;
    auto BIt = B.find(AVar);
    if (BIt == B.end()) {
               << Aggregates[AVar].first->getName() << "\n");
    AFrags = meetFragments(AFrags, BIt->second);
bool meet(const BasicBlock &BB,
          const SmallPtrSet<BasicBlock *, 16> &Visited) {
  bool FirstMeet = true;
    if (!Visited.count(Pred))
    auto PredLiveOut = LiveOut.find(Pred);
      BBLiveIn = PredLiveOut->second;
      LLVM_DEBUG(dbgs() << "BBLiveIn = meet BBLiveIn, " << Pred->getName()
      meetVars(BBLiveIn, PredLiveOut->second);
  if (BBLiveIn.size() == 0)
    CurrentLiveInEntry->second = std::move(BBLiveIn);
  if (!varFragMapsAreEqual(BBLiveIn, CurrentLiveInEntry->second)) {
    CurrentLiveInEntry->second = std::move(BBLiveIn);
void insertMemLoc(BasicBlock &BB, VarLocInsertPt Before, unsigned Var,
                  unsigned StartBit, unsigned EndBit, unsigned Base,
  assert(StartBit < EndBit && "Cannot create fragment of size <= 0");
  Loc.OffsetInBits = StartBit;
  Loc.SizeInBits = EndBit - StartBit;
  assert(Base && "Expected a non-zero ID for Base address");
  BBInsertBeforeMap[&BB][Before].push_back(Loc);
             << " bits [" << StartBit << ", " << EndBit << ")\n");
void coalesceFragments(BasicBlock &BB, VarLocInsertPt Before, unsigned Var,
                       unsigned StartBit, unsigned EndBit, unsigned Base,
  if (!CoalesceAdjacentFragments)
  auto CoalescedFrag = FragMap.find(StartBit);
  if (CoalescedFrag.start() == StartBit && CoalescedFrag.stop() == EndBit)
  LLVM_DEBUG(dbgs() << "- Insert loc for bits " << CoalescedFrag.start()
                    << " to " << CoalescedFrag.stop() << "\n");
  insertMemLoc(BB, Before, Var, CoalescedFrag.start(), CoalescedFrag.stop(),
void addDef(const VarLocInfo &VarLoc, VarLocInsertPt Before, BasicBlock &BB,
            VarFragMap &LiveSet) {
  const DIExpression *DIExpr = VarLoc.Expr;
  StartBit = Frag->OffsetInBits;
  EndBit = StartBit + Frag->SizeInBits;
  const unsigned Base =
      DerefOffsetInBytes && *DerefOffsetInBytes * 8 == StartBit
                    << StartBit << ", " << EndBit << "): " << toString(Base)
  auto FragIt = LiveSet.find(Var);
  if (FragIt == LiveSet.end()) {
    auto P = LiveSet.try_emplace(Var, FragsInMemMap(IntervalMapAlloc));
    assert(P.second && "Var already in map?");
    P.first->second.insert(StartBit, EndBit, Base);
  FragsInMemMap &FragMap = FragIt->second;
  if (!FragMap.overlaps(StartBit, EndBit)) {
    FragMap.insert(StartBit, EndBit, Base);
    coalesceFragments(BB, Before, Var, StartBit, EndBit, Base, VarLoc.DL,
  auto FirstOverlap = FragMap.find(StartBit);
  assert(FirstOverlap != FragMap.end());
  bool IntersectStart = FirstOverlap.start() < StartBit;
  auto LastOverlap = FragMap.find(EndBit);
  bool IntersectEnd = LastOverlap.valid() && LastOverlap.start() < EndBit;
  if (IntersectStart && IntersectEnd && FirstOverlap == LastOverlap) {
    LLVM_DEBUG(dbgs() << "- Intersect single interval @ both ends\n");
    auto EndBitOfOverlap = FirstOverlap.stop();
    unsigned OverlapValue = FirstOverlap.value();
    FirstOverlap.setStop(StartBit);
    insertMemLoc(BB, Before, Var, FirstOverlap.start(), StartBit,
                 OverlapValue, VarLoc.DL);
    FragMap.insert(EndBit, EndBitOfOverlap, OverlapValue);
    insertMemLoc(BB, Before, Var, EndBit, EndBitOfOverlap, OverlapValue,
    FragMap.insert(StartBit, EndBit, Base);
  if (IntersectStart) {
    FirstOverlap.setStop(StartBit);
    insertMemLoc(BB, Before, Var, FirstOverlap.start(), StartBit,
                 *FirstOverlap, VarLoc.DL);
    LastOverlap.setStart(EndBit);
    insertMemLoc(BB, Before, Var, EndBit, LastOverlap.stop(), *LastOverlap,
  auto It = FirstOverlap;
  while (It.valid() && It.start() >= StartBit && It.stop() <= EndBit) {
  assert(!FragMap.overlaps(StartBit, EndBit));
  FragMap.insert(StartBit, EndBit, Base);
  coalesceFragments(BB, Before, Var, StartBit, EndBit, Base, VarLoc.DL,
bool skipVariable(const DILocalVariable *V) { return !V->getSizeInBits(); }
void process(BasicBlock &BB, VarFragMap &LiveSet) {
  BBInsertBeforeMap[&BB].clear();
    for (DbgVariableRecord &DVR : filterDbgVars(I.getDbgRecordRange())) {
      if (const auto *Locs = FnVarLocs->getWedge(&DVR)) {
        for (const VarLocInfo &Loc : *Locs) {
          addDef(Loc, &DVR, *I.getParent(), LiveSet);
    if (const auto *Locs = FnVarLocs->getWedge(&I)) {
      for (const VarLocInfo &Loc : *Locs) {
        addDef(Loc, &I, *I.getParent(), LiveSet);
MemLocFragmentFill(Function &Fn,
                   const DenseSet<DebugAggregate> *VarsWithStackSlot,
                   bool CoalesceAdjacentFragments)
    : Fn(Fn), VarsWithStackSlot(VarsWithStackSlot),
      CoalesceAdjacentFragments(CoalesceAdjacentFragments) {}
void run(FunctionVarLocsBuilder *FnVarLocs) {
  this->FnVarLocs = FnVarLocs;

  ReversePostOrderTraversal<Function *> RPOT(&Fn);
  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Worklist;
  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Pending;
  DenseMap<unsigned int, BasicBlock *> OrderToBB;
  DenseMap<BasicBlock *, unsigned int> BBToOrder;
  unsigned int RPONumber = 0;
  for (BasicBlock *BB : RPOT) {
    OrderToBB[RPONumber] = BB;
    BBToOrder[BB] = RPONumber;
    Worklist.push(RPONumber);

  SmallPtrSet<BasicBlock *, 16> Visited;
  while (!Worklist.empty() || !Pending.empty()) {
    SmallPtrSet<BasicBlock *, 16> OnPending;
    while (!Worklist.empty()) {
      bool InChanged = meet(*BB, Visited);
      InChanged |= Visited.insert(BB).second;
                        << BB->getName() << " has new InLocs, process it\n");
      VarFragMap LiveSet = LiveIn[BB];
      process(*BB, LiveSet);
      if (!varFragMapsAreEqual(LiveOut[BB], LiveSet)) {
                          << " has new OutLocs, add succs to worklist: [ ");
        LiveOut[BB] = std::move(LiveSet);
          if (OnPending.insert(Succ).second) {
            Pending.push(BBToOrder[Succ]);
    Worklist.swap(Pending);
  assert(Pending.empty() && "Pending should be empty");
  for (auto &Pair : BBInsertBeforeMap) {
    InsertMap &Map = Pair.second;
    for (auto &Pair : Map) {
      auto InsertBefore = Pair.first;
      assert(InsertBefore && "should never be null");
      auto FragMemLocs = Pair.second;
      for (auto &FragMemLoc : FragMemLocs) {
        DIExpression *Expr = DIExpression::get(Ctx, {});
        if (FragMemLoc.SizeInBits !=
            *Aggregates[FragMemLoc.Var].first->getSizeInBits())
              Expr, FragMemLoc.OffsetInBits, FragMemLoc.SizeInBits);
            FragMemLoc.OffsetInBits / 8);
        DebugVariable Var(Aggregates[FragMemLoc.Var].first, Expr,
                          FragMemLoc.DL.getInlinedAt());
        FnVarLocs->addVarLoc(InsertBefore, Var, Expr, FragMemLoc.DL,
                             Bases[FragMemLoc.Base]);
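Both this run() and AssignmentTrackingLowering::run further down drive their dataflow in the same shape: number the blocks in reverse post-order, drain a Worklist ordered by RPO number, push the successors of any block whose live-out changed onto Pending, and swap the two queues each round until a fixed point. A stripped-down sketch of that pattern in isolation (solve, ProcessBlock and Succs are illustrative names, not from this file):

#include <functional>
#include <queue>
#include <vector>

// Assumes blocks are numbered 0..NumBlocks-1 in reverse post-order and that
// ProcessBlock(B) returns true when B's out-state changed.
void solve(unsigned NumBlocks,
           const std::function<bool(unsigned)> &ProcessBlock,
           const std::function<std::vector<unsigned>(unsigned)> &Succs) {
  std::priority_queue<unsigned, std::vector<unsigned>, std::greater<unsigned>>
      Worklist, Pending;
  for (unsigned RPO = 0; RPO != NumBlocks; ++RPO)
    Worklist.push(RPO);

  while (!Worklist.empty() || !Pending.empty()) {
    while (!Worklist.empty()) {
      unsigned B = Worklist.top();
      Worklist.pop();
      if (ProcessBlock(B))
        for (unsigned S : Succs(B))
          Pending.push(S); // revisit changed successors next round
    }
    Worklist.swap(Pending);
  }
}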
class AssignmentTrackingLowering {
  enum class LocKind { Mem, Val, None };

  // struct Assignment:
  enum S { Known, NoneOrPhi } Status;
  DbgVariableRecord *Source = nullptr;

  bool isSameSourceAssignment(const Assignment &Other) const {
    return std::tie(Status, ID) == std::tie(Other.Status, Other.ID);
  void dump(raw_ostream &OS) {
    static const char *LUT[] = {"Known", "NoneOrPhi"};
    OS << LUT[Status] << "(id=";

  static Assignment make(DIAssignID *ID, DbgVariableRecord *Source) {
           "Cannot make an assignment from a non-assign DbgVariableRecord");
    return Assignment(Known, ID, Source);
  static Assignment makeFromMemDef(DIAssignID *ID) {
    return Assignment(Known, ID);
  static Assignment makeNoneOrPhi() { return Assignment(NoneOrPhi, nullptr); }
  Assignment() : Status(NoneOrPhi), ID(nullptr) {}
  Assignment(S Status, DIAssignID *ID) : Status(Status), ID(ID) {
  Assignment(S Status, DIAssignID *ID, DbgVariableRecord *Source)

  using AssignmentMap = SmallVector<Assignment>;
  using LocMap = SmallVector<LocKind>;
  using OverlapMap = DenseMap<VariableID, SmallVector<VariableID>>;
  using UntaggedStoreAssignmentMap =
  using UnknownStoreAssignmentMap =
      DenseMap<const Instruction *, SmallVector<VariableID>>;

  unsigned TrackedVariablesVectorSize = 0;
  UntaggedStoreAssignmentMap UntaggedStoreVars;
  UnknownStoreAssignmentMap UnknownStoreVars;

  using InstInsertMap = MapVector<VarLocInsertPt, SmallVector<VarLocInfo>>;
  InstInsertMap InsertBeforeMap;

  void resetInsertionPoint(Instruction &After);
  void resetInsertionPoint(DbgVariableRecord &After);
  void emitDbgValue(LocKind Kind, DbgVariableRecord *, VarLocInsertPt After);

  static bool mapsAreEqual(const BitVector &Mask, const AssignmentMap &A,
                           const AssignmentMap &B) {
      return A[VarID].isSameSourceAssignment(B[VarID]);
  BitVector VariableIDsInBlock;
  AssignmentMap StackHomeValue;
  AssignmentMap DebugValue;

  const AssignmentMap &getAssignmentMap(AssignmentKind Kind) const {
      return StackHomeValue;
  AssignmentMap &getAssignmentMap(AssignmentKind Kind) {
    return const_cast<AssignmentMap &>(
        const_cast<const BlockInfo *>(this)->getAssignmentMap(Kind));

  bool isVariableTracked(VariableID Var) const {
    return VariableIDsInBlock[static_cast<unsigned>(Var)];

  const Assignment &getAssignment(AssignmentKind Kind, VariableID Var) const {
    assert(isVariableTracked(Var) && "Var not tracked in block");
    return getAssignmentMap(Kind)[static_cast<unsigned>(Var)];

    assert(isVariableTracked(Var) && "Var not tracked in block");
    return LiveLoc[static_cast<unsigned>(Var)];

    VariableIDsInBlock.set(static_cast<unsigned>(Var));
    LiveLoc[static_cast<unsigned>(Var)] = K;

  void setAssignment(AssignmentKind Kind, VariableID Var,
                     const Assignment &AV) {
    VariableIDsInBlock.set(static_cast<unsigned>(Var));
    getAssignmentMap(Kind)[static_cast<unsigned>(Var)] = AV;

  bool hasAssignment(AssignmentKind Kind, VariableID Var,
                     const Assignment &AV) const {
    if (!isVariableTracked(Var))
    return AV.isSameSourceAssignment(getAssignment(Kind, Var));

    return VariableIDsInBlock == Other.VariableIDsInBlock &&
           LiveLoc == Other.LiveLoc &&
           mapsAreEqual(VariableIDsInBlock, StackHomeValue,
                        Other.StackHomeValue) &&
           mapsAreEqual(VariableIDsInBlock, DebugValue, Other.DebugValue);

    return LiveLoc.size() == DebugValue.size() &&
           LiveLoc.size() == StackHomeValue.size();

  void init(int NumVars) {
    StackHomeValue.clear();
    VariableIDsInBlock = BitVector(NumVars);
    StackHomeValue.insert(StackHomeValue.begin(), NumVars,
                          Assignment::makeNoneOrPhi());
    DebugValue.insert(DebugValue.begin(), NumVars,
                      Assignment::makeNoneOrPhi());
    LiveLoc.insert(LiveLoc.begin(), NumVars, LocKind::None);
  template <typename ElmtType, typename FnInputType>
                       ElmtType (*Fn)(FnInputType, FnInputType)) {

  static BlockInfo join(const BlockInfo &A, const BlockInfo &B, int NumVars) {
    BitVector Intersect = A.VariableIDsInBlock;
    Intersect &= B.VariableIDsInBlock;
      joinElmt(VarID, Join.LiveLoc, A.LiveLoc, B.LiveLoc, joinKind);
      joinElmt(VarID, Join.DebugValue, A.DebugValue, B.DebugValue,
      joinElmt(VarID, Join.StackHomeValue, A.StackHomeValue, B.StackHomeValue,
    Join.VariableIDsInBlock = A.VariableIDsInBlock;
    Join.VariableIDsInBlock |= B.VariableIDsInBlock;
  const DataLayout &Layout;
  const DenseSet<DebugAggregate> *VarsWithStackSlot;
  FunctionVarLocsBuilder *FnVarLocs;
  DenseMap<const BasicBlock *, BlockInfo> LiveIn;
  DenseMap<const BasicBlock *, BlockInfo> LiveOut;
  DenseSet<VariableID> VarsTouchedThisFrame;
  DenseSet<DebugAggregate> NotAlwaysStackHomed;

  VariableID getVariableID(const DebugVariable &Var) {

  bool join(const BasicBlock &BB, const SmallPtrSet<BasicBlock *, 16> &Visited);

  static LocKind joinKind(LocKind A, LocKind B);
  static Assignment joinAssignment(const Assignment &A, const Assignment &B);
  BlockInfo joinBlockInfo(const BlockInfo &A, const BlockInfo &B);

  void process(BasicBlock &BB, BlockInfo *LiveSet);
  void processNonDbgInstruction(Instruction &I, BlockInfo *LiveSet);
  void processTaggedInstruction(Instruction &I, BlockInfo *LiveSet);
  void processUntaggedInstruction(Instruction &I, BlockInfo *LiveSet);
  void processUnknownStoreToVariable(Instruction &I, VariableID &Var,
                                     BlockInfo *LiveSet);
  void processDbgAssign(DbgVariableRecord *Assign, BlockInfo *LiveSet);
  void processDbgVariableRecord(DbgVariableRecord &DVR, BlockInfo *LiveSet);
  void processDbgValue(DbgVariableRecord *DbgValue, BlockInfo *LiveSet);
  void addMemDef(BlockInfo *LiveSet, VariableID Var, const Assignment &AV);
  void addDbgDef(BlockInfo *LiveSet, VariableID Var, const Assignment &AV);
  void setLocKind(BlockInfo *LiveSet, VariableID Var, LocKind K);
  LocKind getLocKind(BlockInfo *LiveSet, VariableID Var);
  bool hasVarWithAssignment(BlockInfo *LiveSet, BlockInfo::AssignmentKind Kind,
  bool emitPromotedVarLocs(FunctionVarLocsBuilder *FnVarLocs);

  AssignmentTrackingLowering(Function &Fn, const DataLayout &Layout,
                             const DenseSet<DebugAggregate> *VarsWithStackSlot)
      : Fn(Fn), Layout(Layout), VarsWithStackSlot(VarsWithStackSlot) {}
  bool run(FunctionVarLocsBuilder *FnVarLocs);
AssignmentTrackingLowering::getContainedFragments(VariableID Var) const {
  auto R = VarContains.find(Var);
  if (R == VarContains.end())

void AssignmentTrackingLowering::touchFragment(VariableID Var) {
  VarsTouchedThisFrame.insert(Var);

void AssignmentTrackingLowering::setLocKind(BlockInfo *LiveSet, VariableID Var,
  auto SetKind = [this](BlockInfo *LiveSet, VariableID Var, LocKind K) {
    LiveSet->setLocKind(Var, K);
  SetKind(LiveSet, Var, K);
  for (VariableID Frag : getContainedFragments(Var))
    SetKind(LiveSet, Frag, K);

AssignmentTrackingLowering::LocKind
AssignmentTrackingLowering::getLocKind(BlockInfo *LiveSet, VariableID Var) {
  return LiveSet->getLocKind(Var);
void AssignmentTrackingLowering::addMemDef(BlockInfo *LiveSet, VariableID Var,
                                           const Assignment &AV) {
  LiveSet->setAssignment(BlockInfo::Stack, Var, AV);
  Assignment FragAV = AV;
  FragAV.Source = nullptr;
  for (VariableID Frag : getContainedFragments(Var))
    LiveSet->setAssignment(BlockInfo::Stack, Frag, FragAV);

void AssignmentTrackingLowering::addDbgDef(BlockInfo *LiveSet, VariableID Var,
                                           const Assignment &AV) {
  LiveSet->setAssignment(BlockInfo::Debug, Var, AV);
  Assignment FragAV = AV;
  FragAV.Source = nullptr;
  for (VariableID Frag : getContainedFragments(Var))
    LiveSet->setAssignment(BlockInfo::Debug, Frag, FragAV);

// getIDFromMarker:
         "Cannot get a DIAssignID from a non-assign DbgVariableRecord!");

bool AssignmentTrackingLowering::hasVarWithAssignment(
    BlockInfo *LiveSet, BlockInfo::AssignmentKind Kind, VariableID Var,
    const Assignment &AV) {
  if (!LiveSet->hasAssignment(Kind, Var, AV))
  for (VariableID Frag : getContainedFragments(Var))
    if (!LiveSet->hasAssignment(Kind, Frag, AV))
const char *locStr(AssignmentTrackingLowering::LocKind Loc) {
  using LocKind = AssignmentTrackingLowering::LocKind;

// getNextNode:
  if (!Next->hasDbgRecords())
  return &*Next->getDbgRecordRange().begin();
void AssignmentTrackingLowering::emitDbgValue(
  assert(InsertBefore && "Shouldn't be inserting after a terminator");
    InsertBeforeMap[InsertBefore].push_back(VarLoc);
  if (Kind == LocKind::Mem) {
    if (Assign->isKillAddress()) {
      Kind = LocKind::Val;
           "fragment info should be stored in value-expression only");
    if (auto OptFragInfo = Source->getExpression()->getFragmentInfo()) {
      auto FragInfo = *OptFragInfo;
          Expr, FragInfo.OffsetInBits, FragInfo.SizeInBits);
    std::tie(Val, Expr) =
  if (Kind == LocKind::Val) {
    Emit(Source->getRawLocation(), Source->getExpression());
  if (Kind == LocKind::None) {
    Emit(nullptr, Source->getExpression());
void AssignmentTrackingLowering::processNonDbgInstruction(
    Instruction &I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
  if (I.hasMetadata(LLVMContext::MD_DIAssignID))
    processTaggedInstruction(I, LiveSet);
    processUntaggedInstruction(I, LiveSet);

void AssignmentTrackingLowering::processUnknownStoreToVariable(
  addMemDef(LiveSet, Var, Assignment::makeNoneOrPhi());
  if (getLocKind(LiveSet, Var) != LocKind::Mem)
  Assignment DbgAV = LiveSet->getAssignment(BlockInfo::Debug, Var);
  if (DbgAV.Status != Assignment::NoneOrPhi && DbgAV.Source) {
               DbgAV.dump(dbgs()); dbgs() << "\n");
    setLocKind(LiveSet, Var, LocKind::Val);
    emitDbgValue(LocKind::Val, DbgAV.Source, &I);
  assert(InsertBefore && "Shouldn't be inserting after a terminator");
      Fn.getContext(), 0, 0, V.getVariable()->getScope(), InlinedAt);
  InsertBeforeMap[InsertBefore].push_back(VarLoc);
void AssignmentTrackingLowering::processUntaggedInstruction(
    Instruction &I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
  assert(!I.hasMetadata(LLVMContext::MD_DIAssignID));
  auto It = UntaggedStoreVars.find(&I);
  if (It == UntaggedStoreVars.end()) {
    if (auto UnhandledStoreIt = UnknownStoreVars.find(&I);
        UnhandledStoreIt != UnknownStoreVars.end()) {
      LLVM_DEBUG(dbgs() << "Processing untagged unknown store " << I << "\n");
      for (auto &Var : UnhandledStoreIt->second)
        processUnknownStoreToVariable(I, Var, LiveSet);

  LLVM_DEBUG(dbgs() << "processUntaggedInstruction on UNTAGGED INST " << I
  for (auto [Var, Info] : It->second) {
    addMemDef(LiveSet, Var, Assignment::makeNoneOrPhi());
    addDbgDef(LiveSet, Var, Assignment::makeNoneOrPhi());
    setLocKind(LiveSet, Var, LocKind::Mem);
    if (auto Frag = V.getFragment()) {
      assert(R && "unexpected createFragmentExpression failure");
    if (Info.OffsetInBits)
      Ops = {dwarf::DW_OP_plus_uconst, Info.OffsetInBits / 8};
    assert(InsertBefore && "Shouldn't be inserting after a terminator");
        Fn.getContext(), 0, 0, V.getVariable()->getScope(), InlinedAt);
    InsertBeforeMap[InsertBefore].push_back(VarLoc);
void AssignmentTrackingLowering::processTaggedInstruction(
    Instruction &I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
  if (LinkedDPAssigns.empty())
           "expected Assign's variable to have stack slot");
    addMemDef(LiveSet, Var, AV);
    if (hasVarWithAssignment(LiveSet, BlockInfo::Debug, Var, AV)) {
                 LiveSet->DebugValue[static_cast<unsigned>(Var)].dump(dbgs());
      setLocKind(LiveSet, Var, LocKind::Mem);
      emitDbgValue(LocKind::Mem, Assign, &I);
    LocKind PrevLoc = getLocKind(LiveSet, Var);
    case LocKind::Val: {
      setLocKind(LiveSet, Var, LocKind::Val);
    case LocKind::Mem: {
      Assignment DbgAV = LiveSet->getAssignment(BlockInfo::Debug, Var);
      if (DbgAV.Status == Assignment::NoneOrPhi) {
        setLocKind(LiveSet, Var, LocKind::None);
        emitDbgValue(LocKind::None, Assign, &I);
        setLocKind(LiveSet, Var, LocKind::Val);
        emitDbgValue(LocKind::Val, DbgAV.Source, &I);
        emitDbgValue(LocKind::None, Assign, &I);
    case LocKind::None: {
      setLocKind(LiveSet, Var, LocKind::None);
void AssignmentTrackingLowering::processDbgAssign(DbgVariableRecord *DbgAssign,
                                                  BlockInfo *LiveSet) {
  Assignment AV = Assignment::make(getIDFromMarker(*DbgAssign), DbgAssign);
  addDbgDef(LiveSet, Var, AV);

  LLVM_DEBUG(dbgs() << "processDbgAssign on " << *DbgAssign << "\n";);
  if (hasVarWithAssignment(LiveSet, BlockInfo::Stack, Var, AV)) {
               << "Val, Stack matches Debug program but address is killed\n";);
      Kind = LocKind::Val;
      Kind = LocKind::Mem;
    setLocKind(LiveSet, Var, Kind);
    emitDbgValue(Kind, DbgAssign, DbgAssign);
  setLocKind(LiveSet, Var, LocKind::Val);
  emitDbgValue(LocKind::Val, DbgAssign, DbgAssign);
void AssignmentTrackingLowering::processDbgValue(DbgVariableRecord *DbgValue,
                                                 BlockInfo *LiveSet) {
  Assignment AV = Assignment::makeNoneOrPhi();
  addDbgDef(LiveSet, Var, AV);
             << " -> Val, dbg.value override");
  setLocKind(LiveSet, Var, LocKind::Val);

// hasZeroSizedFragment:
  if (auto F = DbgValue.getExpression()->getFragmentInfo())
    return F->SizeInBits == 0;
void AssignmentTrackingLowering::processDbgVariableRecord(
    processDbgAssign(&DVR, LiveSet);
    processDbgValue(&DVR, LiveSet);

void AssignmentTrackingLowering::resetInsertionPoint(Instruction &After) {
  if (R == InsertBeforeMap.end())
  if (R == InsertBeforeMap.end())
void AssignmentTrackingLowering::process(BasicBlock &BB, BlockInfo *LiveSet) {
  bool ProcessedLeadingDbgRecords = !BB.begin()->hasDbgRecords();
  assert(VarsTouchedThisFrame.empty());
    if (ProcessedLeadingDbgRecords) {
      if (II->isTerminator())
      resetInsertionPoint(*II);
      processNonDbgInstruction(*II, LiveSet);
      assert(LiveSet->isValid());
    if (II != EI && II->hasDbgRecords()) {
        resetInsertionPoint(DVR);
        processDbgVariableRecord(DVR, LiveSet);
        assert(LiveSet->isValid());
    ProcessedLeadingDbgRecords = true;

  for (auto Var : VarsTouchedThisFrame) {
    LocKind Loc = getLocKind(LiveSet, Var);
    if (Loc != LocKind::Mem) {
      NotAlwaysStackHomed.insert(Aggr);
  VarsTouchedThisFrame.clear();
AssignmentTrackingLowering::LocKind
AssignmentTrackingLowering::joinKind(LocKind A, LocKind B) {
  return A == B ? A : LocKind::None;

AssignmentTrackingLowering::Assignment
AssignmentTrackingLowering::joinAssignment(const Assignment &A,
                                           const Assignment &B) {
  if (!A.isSameSourceAssignment(B))
    return Assignment::makeNoneOrPhi();
  if (A.Status == Assignment::NoneOrPhi)
    return Assignment::makeNoneOrPhi();
  if (A.Source == B.Source)
  if (!A.Source || !B.Source)
  if (A.Source->isEquivalentTo(*B.Source))
  assert(A.Status == B.Status && A.Status == Assignment::Known);
  return Assignment::make(A.ID, Source);

AssignmentTrackingLowering::BlockInfo
AssignmentTrackingLowering::joinBlockInfo(const BlockInfo &A,
                                          const BlockInfo &B) {
  return BlockInfo::join(A, B, TrackedVariablesVectorSize);
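The per-variable join is therefore a flat lattice. A couple of illustrative cases:

//   joinKind(Mem, Mem) == Mem          joinKind(Mem, Val) == None
//   joinAssignment(A, B) collapses to NoneOrPhi unless A and B describe the
//   same source assignment (same DIAssignID, both Known), in which case the
//   assignment survives the join.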
bool AssignmentTrackingLowering::join(
  if (Visited.count(Pred))
  if (VisitedPreds.empty()) {
    bool DidInsert = It.second;
    It.first->second.init(TrackedVariablesVectorSize);
  if (VisitedPreds.size() == 1) {
    const BlockInfo &PredLiveOut = LiveOut.find(VisitedPreds[0])->second;
    if (PredLiveOut != CurrentLiveInEntry->second) {
      CurrentLiveInEntry->second = PredLiveOut;
  const BlockInfo &PredLiveOut0 = LiveOut.find(VisitedPreds[0])->second;
  const BlockInfo &PredLiveOut1 = LiveOut.find(VisitedPreds[1])->second;
  BlockInfo BBLiveIn = joinBlockInfo(PredLiveOut0, PredLiveOut1);
    const auto &PredLiveOut = LiveOut.find(Pred);
           "block should have been processed already");
    BBLiveIn = joinBlockInfo(std::move(BBLiveIn), PredLiveOut->second);
  auto CurrentLiveInEntry = LiveIn.find(&BB);
  if (CurrentLiveInEntry == LiveIn.end())
  else if (BBLiveIn != CurrentLiveInEntry->second)
    CurrentLiveInEntry->second = std::move(BBLiveIn);

// fullyContains(A, B):
  auto ALeft = A.OffsetInBits;
  auto BLeft = B.OffsetInBits;
  auto ARight = ALeft + A.SizeInBits;
  auto BRight = BLeft + B.SizeInBits;
  if (BRight > ARight)
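A worked instance of the containment check (illustrative values):

//   A = {OffsetInBits: 0, SizeInBits: 64}, B = {OffsetInBits: 16, SizeInBits: 16}
//   B occupies bits [16, 32), which lies inside A's [0, 64), so
//   fullyContains(A, B) is true; with the arguments swapped it is false.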
static std::optional<at::AssignmentInfo>
getUntaggedStoreAssignmentInfo(const Instruction &I, const DataLayout &Layout) {
    return std::nullopt;

  if (ID != Intrinsic::experimental_vp_strided_store &&
      ID != Intrinsic::masked_store && ID != Intrinsic::vp_scatter &&
      ID != Intrinsic::masked_scatter && ID != Intrinsic::vp_store &&
      ID != Intrinsic::masked_compressstore)
static AssignmentTrackingLowering::OverlapMap buildOverlapMapAndRecordDeclares(
    Function &Fn, FunctionVarLocsBuilder *FnVarLocs,
    const DenseSet<DebugAggregate> &VarsWithStackSlot,
    AssignmentTrackingLowering::UntaggedStoreAssignmentMap &UntaggedStoreVars,
    AssignmentTrackingLowering::UnknownStoreAssignmentMap &UnknownStoreVars,
    unsigned &TrackedVariablesVectorSize) {
  if (Record->isDbgDeclare()) {
    if (!VarsWithStackSlot.contains(DA))
    if (Seen.insert(DV).second)
      FragmentMap[DA].push_back(DV);
  for (auto &BB : Fn) {
    for (auto &I : BB) {
        ProcessDbgRecord(&DVR);
      std::optional<DIExpression::FragmentInfo> FragInfo;
              I.getDataLayout(), Info->Base,
              Info->OffsetInBits, Info->SizeInBits, Assign, FragInfo) ||
          (FragInfo && FragInfo->SizeInBits == 0))
        FragInfo = Assign->getExpression()->getFragmentInfo();
                        Assign->getDebugLoc().getInlinedAt());
      if (!VarsWithStackSlot.contains(DA))
      UntaggedStoreVars[&I].push_back(
      if (Seen.insert(DV).second)
        FragmentMap[DA].push_back(DV);
        HandleDbgAssignForStore(DVR);
                        Assign->getDebugLoc().getInlinedAt());
      if (!VarsWithStackSlot.contains(DA))
        HandleDbgAssignForUnknownStore(DVR);

  for (auto &Pair : FragmentMap) {
    std::sort(Frags.begin(), Frags.end(),
                return Elmt.getFragmentOrDefault().SizeInBits >
                       Next.getFragmentOrDefault().SizeInBits;

  AssignmentTrackingLowering::OverlapMap Map;
  for (auto &Pair : FragmentMap) {
    auto &Frags = Pair.second;
    for (auto It = Frags.begin(), IEnd = Frags.end(); It != IEnd; ++It) {
      for (; OtherIt != IEnd; ++OtherIt) {
        Map[OtherVar].push_back(ThisVar);

  for (auto *DVR : DPDeclares)
bool AssignmentTrackingLowering::run(FunctionVarLocsBuilder *FnVarLocsBuilder) {
                      << ": too many blocks (" << Fn.size() << ")\n");
  FnVarLocs = FnVarLocsBuilder;
      Fn, FnVarLocs, *VarsWithStackSlot, UntaggedStoreVars, UnknownStoreVars,
      TrackedVariablesVectorSize);

  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Worklist;
  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Pending;
  unsigned int RPONumber = 0;
    OrderToBB[RPONumber] = BB;
    BBToOrder[BB] = RPONumber;
    Worklist.push(RPONumber);

  while (!Worklist.empty() || !Pending.empty()) {
    while (!Worklist.empty()) {
      bool InChanged = join(*BB, Visited);
      InChanged |= Visited.insert(BB).second;
      BlockInfo LiveSet = LiveIn[BB];
      process(*BB, &LiveSet);
      if (LiveOut[BB] != LiveSet) {
                          << " has new OutLocs, add succs to worklist: [ ");
        LiveOut[BB] = std::move(LiveSet);
          if (OnPending.insert(Succ).second) {
            Pending.push(BBToOrder[Succ]);
    Worklist.swap(Pending);
  assert(Pending.empty() && "Pending should be empty");

  bool InsertedAnyIntrinsics = false;
  for (const auto &Pair : InsertBeforeMap) {
    auto &Vec = Pair.second;
      if (NotAlwaysStackHomed.contains(Aggr))
        NotAlwaysStackHomed.insert(Aggr);
      if (AlwaysStackHomed.insert(Aggr).second) {
        InsertedAnyIntrinsics = true;
  for (const auto &[InsertBefore, Vec] : InsertBeforeMap) {
      if (AlwaysStackHomed.contains(Aggr))
      InsertedAnyIntrinsics = true;
    FnVarLocs->setWedge(InsertBefore, std::move(NewDefs));

  InsertedAnyIntrinsics |= emitPromotedVarLocs(FnVarLocs);
  return InsertedAnyIntrinsics;
bool AssignmentTrackingLowering::emitPromotedVarLocs(
    FunctionVarLocsBuilder *FnVarLocs) {
  bool InsertedAnyIntrinsics = false;
    assert(InsertBefore && "Unexpected: debug intrinsics after a terminator");
    InsertedAnyIntrinsics = true;
  for (auto &BB : Fn) {
    for (auto &I : BB) {
        TranslateDbgRecord(&DVR);
  return InsertedAnyIntrinsics;
// removeRedundantDbgLocsUsingBackwardScan:
  VariableDefinedBytes.clear();
  auto HandleLocsForWedge = [&](auto *WedgePosition) {
    const auto *Locs = FnVarLocs.getWedge(WedgePosition);
    bool ChangedThisWedge = false;
    for (auto RIt = Locs->rbegin(), REnd = Locs->rend(); RIt != REnd; ++RIt) {
      uint64_t SizeInBits = Aggr.first->getSizeInBits().value_or(0);
      const uint64_t MaxSizeBytes = 2048;
      if (SizeInBytes == 0 || SizeInBytes > MaxSizeBytes) {
      bool FirstDefinition = InsertResult.second;
      BitVector &DefinedBytes = InsertResult.first->second;
          RIt->Expr->getFragmentInfo().value_or(
      bool InvalidFragment = Fragment.endInBits() > SizeInBits;
      uint64_t StartInBytes = Fragment.startInBits() / 8;
      if (FirstDefinition || InvalidFragment ||
        if (!InvalidFragment)
          DefinedBytes.set(StartInBytes, EndInBytes);
      ChangedThisWedge = true;
    if (ChangedThisWedge) {
      std::reverse(NewDefsReversed.begin(), NewDefsReversed.end());
      FnVarLocs.setWedge(WedgePosition, std::move(NewDefsReversed));
  HandleLocsForWedge(&I);
  HandleLocsForWedge(&DVR);
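Per the helper's description below ("remove redundant definitions within sequences of consecutive location defs"), the bottom-up byte tracking drops an earlier def once every byte it defines has already been re-defined later in the sequence. For example (illustrative):

//   #dbg_value(..., !DIExpression(DW_OP_LLVM_fragment, 0, 32), ...)  ; removed
//   #dbg_value(..., !DIExpression(DW_OP_LLVM_fragment, 0, 64), ...)  ; kept
// Scanning in reverse, the 64-bit fragment marks bytes 0-7 as defined first,
// so the earlier 32-bit fragment defines no new bytes and is dropped.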
// removeRedundantDbgLocsUsingForwardScan:
  auto HandleLocsForWedge = [&](auto *WedgePosition) {
    const auto *Locs = FnVarLocs.getWedge(WedgePosition);
    bool ChangedThisWedge = false;
                         std::nullopt, Loc.DL.getInlinedAt());
      if (Inserted || VMI->second.first != Loc.Values ||
          VMI->second.second != Loc.Expr) {
        VMI->second = {Loc.Values, Loc.Expr};
        ChangedThisWedge = true;
    if (ChangedThisWedge) {
      FnVarLocs.setWedge(WedgePosition, std::move(NewDefs));
  HandleLocsForWedge(&DVR);
  HandleLocsForWedge(&I);
// removeUndefDbgLocsFromEntryBlock:
    VarsWithDef[A].insert(V.getFragmentOrDefault());
    auto FragsIt = VarsWithDef.find(A);
    if (FragsIt == VarsWithDef.end())
    return DIExpression::fragmentsOverlap(Frag, V.getFragmentOrDefault());
  auto HandleLocsForWedge = [&](auto *WedgePosition) {
    const auto *Locs = FnVarLocs.getWedge(WedgePosition);
    bool ChangedThisWedge = false;
                        Loc.DL.getInlinedAt()};
      if (Loc.Values.isKillLocation(Loc.Expr) && !HasDefinedBits(Aggr, Var)) {
        ChangedThisWedge = true;
      DefineBits(Aggr, Var);
    if (ChangedThisWedge) {
      FnVarLocs.setWedge(WedgePosition, std::move(NewDefs));
  HandleLocsForWedge(&DVR);
  HandleLocsForWedge(&I);
// removeRedundantDbgLocs:
  bool MadeChanges = false;

// findVarsWithStackSlot:
  for (auto &BB : Fn) {
    for (auto &I : BB) {

// analyzeFunction:
  AssignmentTrackingLowering Pass(Fn, Layout, &VarsWithStackSlot);
  MemLocFragmentFill Pass(Fn, &VarsWithStackSlot,
  Pass.run(FnVarLocs);

  auto &DL = F.getDataLayout();
  LLVM_DEBUG(dbgs() << "AssignmentTrackingAnalysis run on " << F.getName()
  Results->init(Builder);
    Results->print(errs(), F);

INITIALIZE_PASS(AssignmentTrackingAnalysis, DEBUG_TYPE,
                "Assignment Tracking Analysis", false, true)
std::pair< const DILocalVariable *, const DILocation * > DebugAggregate
A whole (unfragmented) source variable.
VarLocInsertPt getNextNode(const DbgRecord *DVR)
static void analyzeFunction(Function &Fn, const DataLayout &Layout, FunctionVarLocsBuilder *FnVarLocs)
static std::pair< Value *, DIExpression * > walkToAllocaAndPrependOffsetDeref(const DataLayout &DL, Value *Start, DIExpression *Expression)
Walk backwards along constant GEPs and bitcasts to the base storage from Start as far as possible.
static DenseSet< DebugAggregate > findVarsWithStackSlot(Function &Fn)
static AssignmentTrackingLowering::OverlapMap buildOverlapMapAndRecordDeclares(Function &Fn, FunctionVarLocsBuilder *FnVarLocs, const DenseSet< DebugAggregate > &VarsWithStackSlot, AssignmentTrackingLowering::UntaggedStoreAssignmentMap &UntaggedStoreVars, AssignmentTrackingLowering::UnknownStoreAssignmentMap &UnknownStoreVars, unsigned &TrackedVariablesVectorSize)
Build a map of {Variable x: Variables y} where all variable fragments contained within the variable f...
static bool fullyContains(DIExpression::FragmentInfo A, DIExpression::FragmentInfo B)
Return true if A fully contains B.
static std::optional< at::AssignmentInfo > getUntaggedStoreAssignmentInfo(const Instruction &I, const DataLayout &Layout)
static bool removeUndefDbgLocsFromEntryBlock(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
static cl::opt< bool > PrintResults("print-debug-ata", cl::init(false), cl::Hidden)
Print the results of the analysis. Respects -filter-print-funcs.
const char * locStr(AssignmentTrackingLowering::LocKind Loc)
PointerUnion< const Instruction *, const DbgRecord * > VarLocInsertPt
static bool removeRedundantDbgLocsUsingForwardScan(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
Remove redundant location defs using a forward scan.
static bool removeRedundantDbgLocs(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
static cl::opt< bool > EnableMemLocFragFill("mem-loc-frag-fill", cl::init(true), cl::Hidden)
Option for debugging the pass, determines if the memory location fragment filling happens after gener...
static DIAssignID * getIDFromMarker(const DbgVariableRecord &DVR)
static DebugAggregate getAggregate(const DebugVariable &Var)
static bool hasZeroSizedFragment(DbgVariableRecord &DbgValue)
static DIAssignID * getIDFromInst(const Instruction &I)
AllocaInst * getUnknownStore(const Instruction &I, const DataLayout &Layout)
static std::optional< int64_t > getDerefOffsetInBytes(const DIExpression *DIExpr)
Extract the offset used in DIExpr.
static bool removeRedundantDbgLocsUsingBackwardScan(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
Remove redundant definitions within sequences of consecutive location defs.
static cl::opt< cl::boolOrDefault > CoalesceAdjacentFragmentsOpt("debug-ata-coalesce-frags", cl::Hidden)
Coalesce adjacent dbg locs describing memory locations that have contiguous fragments.
static cl::opt< unsigned > MaxNumBlocks("debug-ata-max-blocks", cl::init(10000), cl::desc("Maximum num basic blocks before debug info dropped"), cl::Hidden)
static bool shouldCoalesceFragments(Function &F)
Helper class to build FunctionVarLocs, since that class isn't easy to modify.
void setWedge(VarLocInsertPt Before, SmallVector< VarLocInfo > &&Wedge)
Replace the defs that come just before \p Before with \p Wedge.
const SmallVectorImpl< VarLocInfo > * getWedge(VarLocInsertPt Before) const
Return ptr to wedge of defs or nullptr if no defs come just before \p Before.
unsigned getNumVariables() const
void addSingleLocVar(DebugVariable Var, DIExpression *Expr, DebugLoc DL, RawLocationWrapper R)
Add a def for a variable that is valid for its lifetime.
VariableID insertVariable(DebugVariable V)
Find or insert V and return the ID.
void addVarLoc(VarLocInsertPt Before, DebugVariable Var, DIExpression *Expr, DebugLoc DL, RawLocationWrapper R)
Add a def to the wedge of defs just before \p Before.
const DebugVariable & getVariable(VariableID ID) const
Get a variable from its ID.
AssignmentTrackingAnalysis()
bool runOnFunction(Function &F) override
runOnFunction - Virtual method overriden by subclasses to do the per-function processing of the pass.
Result run(Function &F, FunctionAnalysisManager &FAM)
PreservedAnalyses run(Function &F, FunctionAnalysisManager &FAM)
Data structure describing the variable locations in a function.
void print(raw_ostream &OS, const Function &Fn) const
const VarLocInfo * locs_begin(const Instruction *Before) const
First variable location definition that comes before Before.
const VarLocInfo * single_locs_begin() const
const VarLocInfo * locs_end(const Instruction *Before) const
One past the last variable location definition that comes before Before.
const VarLocInfo * single_locs_end() const
One past the last single-location variable location definition.
void init(FunctionVarLocsBuilder &Builder)
VariableID
Type wrapper for integer ID for Variables. 0 is reserved.
static VariableID getTombstoneKey()
static bool isEqual(const VariableID &LHS, const VariableID &RHS)
static unsigned getHashValue(const VariableID &Val)
static VariableID getEmptyKey()
DenseMapInfo< unsigned > Wrapped
An information struct used to provide DenseMap with the various necessary components for a given valu...
Variable location definition used by FunctionVarLocs.
RawLocationWrapper Values
llvm::VariableID VariableID
std::size_t operator()(const VarLocInsertPt &Arg) const