#define DEBUG_TYPE "memoryssa"
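// getPreviousDefRecursive: walk predecessors to find the MemoryAccess that
// defines the memory state on entry to BB, memoizing results per block in
// CachedPreviousDef so repeated queries stay cheap.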
auto Cached = CachedPreviousDef.find(BB);
if (Cached != CachedPreviousDef.end())
  return Cached->second;
VisitedBlocks.insert(BB);
MemoryAccess *Result = getPreviousDefFromEnd(Pred, CachedPreviousDef);
CachedPreviousDef.insert({BB, Result});
if (VisitedBlocks.count(BB)) {
CachedPreviousDef.insert({BB, Result});
if (VisitedBlocks.insert(BB).second) {
bool UniqueIncomingAccess = true;
auto *IncomingAccess = getPreviousDefFromEnd(Pred, CachedPreviousDef);
SingleAccess = IncomingAccess;
else if (IncomingAccess != SingleAccess)
  UniqueIncomingAccess = false;
PhiOps.push_back(IncomingAccess);
auto *Result = tryRemoveTrivialPhi(Phi, PhiOps);
if (Result == Phi && UniqueIncomingAccess && SingleAccess) {
  Result = SingleAccess;
} else if (Result == Phi && !(UniqueIncomingAccess && SingleAccess)) {
  Phi = MSSA->createMemoryPhi(BB);
  InsertedPHIs.push_back(Phi);
VisitedBlocks.erase(BB);
CachedPreviousDef.insert({BB, Result});
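// getPreviousDef: prefer a definition earlier in the same block
// (getPreviousDefInBlock); otherwise fall back to the recursive predecessor
// walk above.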
if (auto *LocalResult = getPreviousDefInBlock(MA))
return getPreviousDefRecursive(MA->getBlock(), CachedPreviousDef);
if (!isa<MemoryUse>(MA)) {
if (Iter != Defs->rend())
if (!isa<MemoryUse>(U))
  return cast<MemoryAccess>(&U);
CachedPreviousDef.insert({BB, &*Defs->rbegin()});
return &*Defs->rbegin();
return getPreviousDefRecursive(BB, CachedPreviousDef);
if (MemoryPhi *UsePhi = dyn_cast<MemoryPhi>(&*U))
  tryRemoveTrivialPhi(UsePhi);
assert(Phi && "Can only remove concrete Phi.");
return tryRemoveTrivialPhi(Phi, OperRange);
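// A MemoryPhi is trivial when every operand is either the Phi itself or one
// and the same other access; such a Phi can be replaced by that access.
// Conceptually: Phi(X, X, Phi) ==> X.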
template <class RangeType>
if (NonOptPhis.count(Phi))
if (Op == Phi || Op == Same)
Same = cast<MemoryAccess>(&*Op);
return recursePhi(Same);
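// The fragments below are presumably from insertUse: the new MemoryUse is
// wired to its nearest dominating def, any MemoryPhis created along the way
// are collected in InsertedPHIs, and with RenameUses set the downstream uses
// are renamed as well.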
InsertedPHIs.clear();
if (!RenameUses && !InsertedPHIs.empty()) {
assert((!Defs || (++Defs->begin() == Defs->end())) &&
       "Block may have only a Phi or no defs");
if (RenameUses && InsertedPHIs.size()) {
if (auto *MD = dyn_cast<MemoryDef>(FirstDef))
  FirstDef = MD->getDefiningAccess();
for (auto &MP : InsertedPHIs)
  if (MemoryPhi *Phi = cast_or_null<MemoryPhi>(MP))
assert(i != -1 && "Should have found the basic block in the phi");
InsertedPHIs.clear();
bool DefBeforeSameBlock = false;
!(isa<MemoryPhi>(DefBefore) &&
DefBeforeSameBlock = true;
if (DefBeforeSameBlock) {
return !isa<MemoryUse>(Usr) && Usr != MD;
unsigned NewPhiIndex = InsertedPHIs.size();
if (!DefBeforeSameBlock) {
for (const auto &VH : InsertedPHIs)
  if (const auto *RealPHI = cast_or_null<MemoryPhi>(VH))
    DefiningBlocks.insert(RealPHI->getBlock());
for (auto *BBIDF : IDFBlocks) {
  MPhi = MSSA->createMemoryPhi(BBIDF);
  NewInsertedPHIs.push_back(MPhi);
  ExistingPhis.insert(MPhi);
  NonOptPhis.insert(MPhi);
for (auto &MPhi : NewInsertedPHIs) {
  auto *BBIDF = MPhi->getBlock();
  MPhi->addIncoming(getPreviousDefFromEnd(Pred, CachedPreviousDef), Pred);
NewPhiIndex = InsertedPHIs.size();
for (auto &MPhi : NewInsertedPHIs) {
  InsertedPHIs.push_back(&*MPhi);
  FixupList.push_back(&*MPhi);
FixupList.push_back(MD);
unsigned NewPhiIndexEnd = InsertedPHIs.size();
while (!FixupList.empty()) {
  unsigned StartingPHISize = InsertedPHIs.size();
  fixupDefs(FixupList);
  FixupList.append(InsertedPHIs.begin() + StartingPHISize, InsertedPHIs.end());
unsigned NewPhiSize = NewPhiIndexEnd - NewPhiIndex;
tryRemoveTrivialPhis(ArrayRef<WeakVH>(&InsertedPHIs[NewPhiIndex], NewPhiSize));
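// Fragments below: with RenameUses set, uses are re-renamed starting from the
// new def's defining access and from each inserted/existing Phi block;
// fixupDefs then walks blocks dominated by the new accesses and retargets the
// first def found in each to the correct previous definition.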
if (auto *MD = dyn_cast<MemoryDef>(FirstDef))
for (auto &MP : InsertedPHIs) {
  MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(MP);
for (auto &MP : ExistingPhis) {
  MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(MP);
for (auto &Var : Vars) {
  MemoryAccess *NewDef = dyn_cast_or_null<MemoryAccess>(Var);
if (MemoryPhi *Phi = dyn_cast<MemoryPhi>(NewDef))
  NonOptPhis.erase(Phi);
if (++DefIter != Defs->end()) {
  cast<MemoryDef>(&*DefIter)->setDefiningAccess(NewDef);
Worklist.push_back(S);
while (!Worklist.empty()) {
  const BasicBlock *FixupBlock = Worklist.back();
auto *FirstDef = &*Defs->begin();
assert(!isa<MemoryPhi>(FirstDef) && "Should have already handled phi nodes!");
"Should have dominated the new access");
cast<MemoryDef>(FirstDef)->setDefiningAccess(getPreviousDef(FirstDef));
Worklist.push_back(S);
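// Edge removal: when a CFG edge into a block disappears (or duplicate edges
// are merged), the matching incoming entries are dropped from that block's
// MemoryPhi, and the Phi is deleted if that made it trivial.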
MPhi->unorderedDeleteIncomingBlock(From);
tryRemoveTrivialPhi(MPhi);
tryRemoveTrivialPhi(MPhi);
MA = cast<MemoryAccess>(Arg);
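// Cloning support (likely getNewDefiningAccessForClone): the defining access
// of a cloned instruction is remapped through VMap for instructions and
// MPhiMap for MemoryPhis; if the clone was simplified away, the walk falls
// back to an earlier def instead.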
bool CloneWasSimplified,
if (MemoryDef *DefMUD = dyn_cast<MemoryDef>(InsnDefining)) {
assert(DefMUDI && "Found MemoryUseOrDef with no Instruction.");
cast_or_null<Instruction>(VMap.lookup(DefMUDI))) {
if (!CloneWasSimplified)
  assert(InsnDefining && "Defining instruction cannot be nullptr.");
else if (!InsnDefining || isa<MemoryUse>(InsnDefining)) {
auto DefIt = DefMUD->getDefsIterator();
"Previous def must exist");
&*(--DefIt), VMap, MPhiMap, CloneWasSimplified, MSSA);
MemoryPhi *DefPhi = cast<MemoryPhi>(InsnDefining);
InsnDefining = NewDefPhi;
assert(InsnDefining && "Defining instruction cannot be nullptr.");
bool CloneWasSimplified) {
dyn_cast_or_null<Instruction>(VMap.lookup(Insn))) {
MPhiMap, CloneWasSimplified, MSSA),
CloneWasSimplified ? nullptr : MUD,
CloneWasSimplified ? false : true);
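// Presumably updatePhisWhenInsertingUniqueBackedgeBlock: a new MemoryPhi in
// the backedge block (BEBlock) gathers all non-preheader incoming values of
// MPhi, and MPhi is reduced to two operands: the preheader access and the new
// backedge Phi.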
auto *NewMPhi = MSSA->createMemoryPhi(BEBlock);
bool HasUniqueIncomingValue = true;
for (unsigned I = 0, E = MPhi->getNumIncomingValues(); I != E; ++I) {
if (IBB != Preheader) {
  NewMPhi->addIncoming(IV, IBB);
  if (HasUniqueIncomingValue) {
else if (UniqueValue != IV)
  HasUniqueIncomingValue = false;
auto *AccFromPreheader = MPhi->getIncomingValueForBlock(Preheader);
MPhi->setIncomingValue(0, AccFromPreheader);
MPhi->setIncomingBlock(0, Preheader);
for (unsigned I = MPhi->getNumIncomingValues() - 1; I >= 1; --I)
  MPhi->unorderedDeleteIncoming(I);
MPhi->addIncoming(NewMPhi, BEBlock);
tryRemoveTrivialPhi(NewMPhi);
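// FixPhiIncomingValues (cloned-loop update): each incoming value of the
// original Phi is translated to the cloned Phi through VMap / MPhiMap,
// skipping predecessors that have no clone when requested.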
bool IgnoreIncomingWithNoClones) {
assert(Phi && NewPhi && "Invalid Phi nodes.");
else if (IgnoreIncomingWithNoClones)
if (!NewPhiBBPreds.count(IncBB))
if (MemoryUseOrDef *IncMUD = dyn_cast<MemoryUseOrDef>(IncomingAccess)) {
assert(IncI && "Found MemoryUseOrDef with no Instruction.");
cast_or_null<Instruction>(VMap.lookup(IncI))) {
"MemoryUseOrDef cannot be null, all preds processed.");
NewPhi->addIncoming(IncMUD, IncBB);
MemoryPhi *IncPhi = cast<MemoryPhi>(IncomingAccess);
NewPhi->addIncoming(NewDefPhi, IncBB);
NewPhi->addIncoming(IncPhi, IncBB);
MPhiMap[Phi] = SingleAccess;
740 "Cloned block should have no accesses");
744 MemoryPhi *NewPhi = MSSA->createMemoryPhi(NewBlock);
745 MPhiMap[MPhi] = NewPhi;
748 cloneUsesAndDefs(
BB, NewBlock, VMap, MPhiMap);
751 for (
auto BB : llvm::concat<BasicBlock *const>(LoopBlocks, ExitBlocks))
754 for (
auto BB : llvm::concat<BasicBlock *const>(LoopBlocks, ExitBlocks))
757 FixPhiIncomingValues(MPhi, cast<MemoryPhi>(NewPhi));
MPhiMap[MPhi] = MPhi->getIncomingValueForBlock(P1);
cloneUsesAndDefs(BB, P1, VM, MPhiMap, true);
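// Exit-block updates for cloned loops: for every exit block that was cloned,
// DT.Insert updates are recorded for the new exit -> successor edges; the
// public overloads below adapt one or many VMaps to the private template
// helper.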
template <typename Iter>
void MemorySSAUpdater::privateUpdateExitBlocksForClonedLoop(
for (auto *Exit : ExitBlocks)
Updates.push_back({DT.Insert, NewExit, ExitSucc});
privateUpdateExitBlocksForClonedLoop(ExitBlocks, std::begin(Arr),
auto GetPtr = [&](const std::unique_ptr<ValueToValueMapTy> &I) {
using MappedIteratorType =
auto MapBegin = MappedIteratorType(VMaps.begin(), GetPtr);
auto MapEnd = MappedIteratorType(VMaps.end(), GetPtr);
privateUpdateExitBlocksForClonedLoop(ExitBlocks, MapBegin, MapEnd, DT);
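// Likely applyUpdates: CFG updates are split into insertions and deletions,
// and the reverse of each deletion is kept as well, apparently so the updater
// can still reason about the edges as they were before the deletions.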
for (auto &Update : Updates) {
  if (Update.getKind() == DT.Insert)
    InsertUpdates.push_back({DT.Insert, Update.getFrom(), Update.getTo()});
DeleteUpdates.push_back({DT.Delete, Update.getFrom(), Update.getTo()});
RevDeleteUpdates.push_back({DT.Insert, Update.getFrom(), Update.getTo()});
if (!DeleteUpdates.empty()) {
for (auto &Update : DeleteUpdates)
return &*(--Defs->end());
for (auto *Pi : GD->template getChildren</*InverseEdge=*/true>(BB)) {
if (IDom->getBlock() != BB) {
  BB = IDom->getBlock();
assert(Count == 1 && Pred && "Single predecessor expected.");
auto FindNearestCommonDominator =
for (auto *BB : BBSet)
auto GetNoLongerDomBlocks =
if (PrevIDom == CurrIDom)
BlocksPrevDom.push_back(PrevIDom);
if (UpIDom == CurrIDom)
BlocksPrevDom.push_back(UpIDom);
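// Likely applyInsertUpdates: PredMap records, for each affected block, which
// predecessors were just added and which existed before; EdgeCountMap counts
// parallel edges so Phis receive one incoming entry per edge.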
for (auto &Edge : Updates) {
  auto &AddedBlockSet = PredMap[BB].Added;
  AddedBlockSet.insert(Edge.getFrom());
for (auto &BBPredPair : PredMap) {
  auto *BB = BBPredPair.first;
  const auto &AddedBlockSet = BBPredPair.second.Added;
  auto &PrevBlockSet = BBPredPair.second.Prev;
  for (auto *Pi : GD->template getChildren</*InverseEdge=*/true>(BB)) {
    if (!AddedBlockSet.count(Pi))
      PrevBlockSet.insert(Pi);
    EdgeCountMap[{Pi, BB}]++;
if (PrevBlockSet.empty()) {
<< "Adding a predecessor to a block with no predecessors. "
   "This must be an edge added to a new, likely cloned, block. "
   "Its memory accesses must be already correct, assuming completed "
   "via the updateExitBlocksForClonedLoop API. "
   "Assert a single such edge is added so no phi addition or "
   "additional processing is required.\n");
assert(AddedBlockSet.size() == 1 &&
       "Can only handle adding one predecessor to a new block.");
for (auto *BB : NewBlocks)
for (auto &Edge : Updates) {
InsertedPhis.push_back(MSSA->createMemoryPhi(BB));
for (auto &BBPredPair : PredMap) {
  auto *BB = BBPredPair.first;
  const auto &PrevBlockSet = BBPredPair.second.Prev;
  const auto &AddedBlockSet = BBPredPair.second.Added;
  assert(!PrevBlockSet.empty() && "At least one previous predecessor must exist.");
for (auto *AddedPred : AddedBlockSet) {
  auto *DefPn = GetLastDef(AddedPred);
  assert(DefPn != nullptr && "Unable to find last definition.");
  LastDefAddedPred[AddedPred] = DefPn;
for (auto *Pred : AddedBlockSet) {
  auto *LastDefForPred = LastDefAddedPred[Pred];
  for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I)
auto *P1 = *PrevBlockSet.begin();
bool InsertPhi = false;
for (auto LastDefPredPair : LastDefAddedPred)
  if (DefP1 != LastDefPredPair.second) {
for (auto *Pred : AddedBlockSet) {
  auto *LastDefForPred = LastDefAddedPred[Pred];
  for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I)
for (auto *Pred : PrevBlockSet)
  for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I)
BasicBlock *PrevIDom = FindNearestCommonDominator(PrevBlockSet);
assert(PrevIDom && "Previous IDom should exist");
assert(NewIDom && "BB should have a new valid idom");
"New idom should dominate old idom");
GetNoLongerDomBlocks(PrevIDom, NewIDom, BlocksWithDefsToReplace);
tryRemoveTrivialPhis(InsertedPhis);
for (auto &VH : InsertedPhis)
  if (auto *MPhi = cast_or_null<MemoryPhi>(VH))
    BlocksToProcess.push_back(MPhi->getBlock());
if (!BlocksToProcess.empty()) {
BlocksToProcess.end());
IDFs.setDefiningBlocks(DefiningBlocks);
IDFs.calculate(IDFBlocks);
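// Blocks that received new Phis act as defining blocks for an IDF
// computation; MemoryPhis are then created on the iterated dominance frontier
// and their incoming values are filled in with GetLastDef.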
for (auto *BBIDF : IDFBlocks)
auto *IDFPhi = MSSA->createMemoryPhi(BBIDF);
InsertedPhis.push_back(IDFPhi);
PhisToFill.insert(IDFPhi);
for (auto *BBIDF : IDFBlocks) {
  assert(IDFPhi && "Phi must exist");
  if (!PhisToFill.count(IDFPhi)) {
for (unsigned I = 0, E = IDFPhi->getNumIncomingValues(); I < E; ++I)
  IDFPhi->setIncomingValue(I, GetLastDef(IDFPhi->getIncomingBlock(I)));
for (auto *Pi : GD->template getChildren</*InverseEdge=*/true>(BBIDF))
  IDFPhi->addIncoming(GetLastDef(Pi), Pi);
for (auto *BlockWithDefsToReplace : BlocksWithDefsToReplace) {
for (auto &DefToReplaceUses : *DefsList) {
  BasicBlock *DominatingBlock = DefToReplaceUses.getBlock();
E = DefToReplaceUses.use_end();
if (MemoryPhi *UsrPhi = dyn_cast<MemoryPhi>(Usr)) {
  BasicBlock *DominatedBlock = UsrPhi->getIncomingBlock(U);
  if (!DT.dominates(DominatingBlock, DominatedBlock))
    U.set(GetLastDef(DominatedBlock));
if (!DT.dominates(DominatingBlock, DominatedBlock)) {
assert(IDom && "Block must have a valid IDom.");
U.set(GetLastDef(IDom->getBlock()));
cast<MemoryUseOrDef>(Usr)->resetOptimized();
tryRemoveTrivialPhis(InsertedPhis);
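// moveTo (templated over the insertion point): every MemoryPhi that uses the
// access being moved is added to NonOptPhis so it is not optimized while the
// move is in flight.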
template <class WhereType>
for (auto *U : What->users())
  if (MemoryPhi *PhiUser = dyn_cast<MemoryPhi>(U))
    NonOptPhis.insert(PhiUser);
if (auto *MD = dyn_cast<MemoryDef>(What))
if (Where != MemorySSA::InsertionPlace::BeforeTerminator)
  return moveTo(What, BB, Where);
return moveTo(What, BB, MemorySSA::InsertionPlace::End);
assert(Start->getParent() == To && "Incorrect Start instruction");
auto *MUD = cast<MemoryUseOrDef>(FirstInNew);
auto NextIt = ++MUD->getIterator();
: cast<MemoryUseOrDef>(&*NextIt);
if (Defs && !Defs->empty())
  if (auto *Phi = dyn_cast<MemoryPhi>(&*Defs->begin()))
    tryRemoveTrivialPhi(Phi);
"To block is expected to be free of MemoryAccesses.");
moveAllAccesses(From, To, Start);
MPhi->setIncomingBlock(MPhi->getBasicBlockIndex(From), To);
1264 "From block is expected to have a single predecessor (To).");
1265 moveAllAccesses(
From, To, Start);
1268 MPhi->setIncomingBlock(MPhi->getBasicBlockIndex(
From), To);
bool IdenticalEdgesWereMerged) {
"Access list should be null for a new block.");
"Should have moved all predecessors.");
assert(!Preds.empty() && "Must be moving at least one predecessor to the "
                         "new immediate predecessor.");
MemoryPhi *NewPhi = MSSA->createMemoryPhi(New);
if (!IdenticalEdgesWereMerged)
"If identical edges were not merged, we cannot have duplicate "
"blocks in the predecessors");
NewPhi->addIncoming(MA, B);
if (!IdenticalEdgesWereMerged)
tryRemoveTrivialPhi(NewPhi);
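// Presumably removeMemoryAccess: all uses of MA are redirected to a
// replacement (its defining access, or a value derived from its Phi
// operands), optimized uses are reset, and Phis that became trivial are
// cleaned up.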
1310 "Trying to remove the live on entry def");
1314 if (
MemoryPhi *MP = dyn_cast<MemoryPhi>(MA)) {
1322 "We can't delete this memory phi");
1324 NewDefTarget = cast<MemoryUseOrDef>(MA)->getDefiningAccess();
1330 if (!isa<MemoryUse>(MA) && !MA->
use_empty()) {
1344 assert(NewDefTarget != MA &&
"Going into an infinite loop");
if (auto *MUD = dyn_cast<MemoryUseOrDef>(U.getUser()))
  MUD->resetOptimized();
U.set(NewDefTarget);
if (!PhisToCheck.empty()) {
PhisToCheck.clear();
unsigned PhisSize = PhisToOptimize.size();
while (PhisSize-- > 0)
cast_or_null<MemoryPhi>(PhisToOptimize.pop_back_val()))
tryRemoveTrivialPhi(MP);
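// Dead-block removal: for each surviving successor, the dead block's incoming
// entry is deleted from the successor's MemoryPhi, and Phis that became
// trivial as a result are removed afterwards.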
assert(TI && "Basic block expected to have a terminator instruction");
if (!DeadBlocks.count(Succ))
MP->unorderedDeleteIncomingBlock(BB);
tryRemoveTrivialPhi(MP);
void MemorySSAUpdater::tryRemoveTrivialPhis(ArrayRef<WeakVH> UpdatedPHIs) {
  for (auto &VH : UpdatedPHIs)
    if (auto *MPhi = cast_or_null<MemoryPhi>(VH))
      tryRemoveTrivialPhi(MPhi);
auto BBI = I->getIterator(), BBE = BB->end();
MPhi->unorderedDeleteIncomingBlock(BB);
UpdatedPHIs.push_back(MPhi);
tryRemoveTrivialPhis(UpdatedPHIs);
MPhi->unorderedDeleteIncomingBlock(BB);
UpdatedPHIs.push_back(MPhi);
tryRemoveTrivialPhis(UpdatedPHIs);
"New and old access must be in the same block");
"New and old access must be in the same block");