#define DEBUG_TYPE "infer-address-spaces"

using namespace llvm;
static cl::opt<bool> AssumeDefaultIsFlatAddressSpace(
    "assume-default-is-flat-addrspace", cl::init(false), cl::ReallyHidden,
    cl::desc("The default address space is assumed as the flat address space. "
             "This is mainly for testing purposes."));
// Sentinel meaning "no address space inferred yet" (the top of the lattice).
static const unsigned UninitializedAddressSpace =
    std::numeric_limits<unsigned>::max();
namespace {

using ValueToAddrSpaceMapTy = DenseMap<const Value *, unsigned>;
// Unlike ValueToAddrSpaceMapTy, which records an address space inferred on the
// *def* of a value, PredicatedAddrSpaceMapTy records an address space inferred
// on a particular *use* of a pointer, as proven by a dominating llvm.assume.
using PredicatedAddrSpaceMapTy =
    DenseMap<std::pair<const Value *, const Value *>, unsigned>;
using PostorderStackTy = llvm::SmallVector<PointerIntPair<Value *, 1, bool>, 4>;
class InferAddressSpaces : public FunctionPass {
  unsigned FlatAddrSpace = 0;

public:
  static char ID;

  InferAddressSpaces(unsigned AS) : FunctionPass(ID), FlatAddrSpace(AS) {
    initializeInferAddressSpacesPass(*PassRegistry::getPassRegistry());
  }

  // getAnalysisUsage override elided; runOnFunction is defined near the end of
  // this listing.
  bool runOnFunction(Function &F) override;
};
class InferAddressSpacesImpl {
  AssumptionCache &AC;
  const DominatorTree *DT = nullptr;
  const TargetTransformInfo *TTI = nullptr;
  const DataLayout *DL = nullptr;

  /// Target specific address space which uses of FLAT_ADDRESS should be
  /// converted to.
  unsigned FlatAddrSpace = 0;

  // Tries to update the address space of V. Returns true if the address space
  // changed.
  bool updateAddressSpace(const Value &V,
                          ValueToAddrSpaceMapTy &InferredAddrSpace,
                          PredicatedAddrSpaceMapTy &PredicatedAS) const;

  // Runs the data-flow analysis over all flat address expressions collected in
  // Postorder.
  void inferAddressSpaces(ArrayRef<WeakTrackingVH> Postorder,
                          ValueToAddrSpaceMapTy &InferredAddrSpace,
                          PredicatedAddrSpaceMapTy &PredicatedAS) const;

  bool isSafeToCastConstAddrSpace(Constant *C, unsigned NewAS) const;

  Value *cloneInstructionWithNewAddressSpace(
      Instruction *I, unsigned NewAddrSpace,
      const ValueToValueMapTy &ValueWithNewAddrSpace,
      const PredicatedAddrSpaceMapTy &PredicatedAS,
      SmallVectorImpl<const Use *> *PoisonUsesToFix) const;

  // Changes the flat address expressions in function F to point to specific
  // address spaces if InferredAddrSpace says so.
  bool rewriteWithNewAddressSpaces(
      ArrayRef<WeakTrackingVH> Postorder,
      const ValueToAddrSpaceMapTy &InferredAddrSpace,
      const PredicatedAddrSpaceMapTy &PredicatedAS, Function *F) const;

  void appendsFlatAddressExpressionToPostorderStack(
      Value *V, PostorderStackTy &PostorderStack,
      DenseSet<Value *> &Visited) const;

  bool rewriteIntrinsicOperands(IntrinsicInst *II, Value *OldV,
                                Value *NewV) const;
  void collectRewritableIntrinsicOperands(IntrinsicInst *II,
                                          PostorderStackTy &PostorderStack,
                                          DenseSet<Value *> &Visited) const;

  std::vector<WeakTrackingVH> collectFlatAddressExpressions(Function &F) const;

  Value *cloneValueWithNewAddressSpace(
      Value *V, unsigned NewAddrSpace,
      const ValueToValueMapTy &ValueWithNewAddrSpace,
      const PredicatedAddrSpaceMapTy &PredicatedAS,
      SmallVectorImpl<const Use *> *PoisonUsesToFix) const;

  unsigned joinAddressSpaces(unsigned AS1, unsigned AS2) const;

  unsigned getPredicatedAddrSpace(const Value &V, Value *Opnd) const;

public:
  InferAddressSpacesImpl(AssumptionCache &AC, const DominatorTree *DT,
                         const TargetTransformInfo *TTI, unsigned FlatAddrSpace)
      : AC(AC), DT(DT), TTI(TTI), FlatAddrSpace(FlatAddrSpace) {}

  bool run(Function &F);
};

} // end anonymous namespace
char InferAddressSpaces::ID = 0;

// Legacy pass registration (INITIALIZE_PASS_BEGIN/DEPENDENCY/END boilerplate)
// elided.
264 assert(Ty->isPtrOrPtrVectorTy());
265 PointerType *NPT = PointerType::get(Ty->getContext(), NewAddrSpace);
266 return Ty->getWithNewType(NPT);
// Checks whether (inttoptr (ptrtoint P)) is a no-op cast pair: both casts must
// be no-ops for the target, and the two pointer types must be in address
// spaces that the target declares interchangeable.
static bool isNoopPtrIntCastPair(const Operator *I2P, const DataLayout &DL,
                                 const TargetTransformInfo *TTI) {
  assert(I2P->getOpcode() == Instruction::IntToPtr);
  auto *P2I = dyn_cast<Operator>(I2P->getOperand(0));
  if (!P2I || P2I->getOpcode() != Instruction::PtrToInt)
    return false;

  unsigned P2IOp0AS = P2I->getOperand(0)->getType()->getPointerAddressSpace();
  unsigned I2PAS = I2P->getType()->getPointerAddressSpace();
  return CastInst::isNoopCast(Instruction::CastOps(I2P->getOpcode()),
                              I2P->getOperand(0)->getType(), I2P->getType(),
                              DL) &&
         CastInst::isNoopCast(Instruction::CastOps(P2I->getOpcode()),
                              P2I->getOperand(0)->getType(), P2I->getType(),
                              DL) &&
         (P2IOp0AS == I2PAS || TTI->isNoopAddrSpaceCast(P2IOp0AS, I2PAS));
}
// Returns true if V is an address expression whose address space can be
// inferred from its pointer operands.
static bool isAddressExpression(const Value &V, const DataLayout &DL,
                                const TargetTransformInfo *TTI) {
  const Operator *Op = dyn_cast<Operator>(&V);
  if (!Op)
    return false;

  switch (Op->getOpcode()) {
  case Instruction::PHI:
    assert(Op->getType()->isPtrOrPtrVectorTy());
    return true;
  case Instruction::BitCast:
  case Instruction::AddrSpaceCast:
  case Instruction::GetElementPtr:
    return true;
  case Instruction::Select:
    return Op->getType()->isPtrOrPtrVectorTy();
  case Instruction::Call: {
    const IntrinsicInst *II = dyn_cast<IntrinsicInst>(&V);
    return II && II->getIntrinsicID() == Intrinsic::ptrmask;
  }
  case Instruction::IntToPtr:
    return isNoopPtrIntCastPair(Op, DL, TTI);
  default:
    // The value is an address expression if it has an assumed address space.
    return TTI->getAssumedAddrSpace(&V) != UninitializedAddressSpace;
  }
}
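// Hedged examples (illustrative IR, not from this file): each value below is
// an address expression because its address space is fully determined by its
// pointer operands.
//
//   %gep = getelementptr i8, ptr %base, i64 16
//   %sel = select i1 %cond, ptr %a, ptr %b
//   %msk = call ptr @llvm.ptrmask.p0.i64(ptr %base, i64 -8)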
// Returns the pointer operands of V.
static SmallVector<Value *, 2>
getPointerOperands(const Value &V, const DataLayout &DL,
                   const TargetTransformInfo *TTI) {
  const Operator &Op = cast<Operator>(V);
  switch (Op.getOpcode()) {
  case Instruction::PHI: {
    auto IncomingValues = cast<PHINode>(Op).incoming_values();
    return {IncomingValues.begin(), IncomingValues.end()};
  }
  case Instruction::BitCast:
  case Instruction::AddrSpaceCast:
  case Instruction::GetElementPtr:
    return {Op.getOperand(0)};
  case Instruction::Select:
    return {Op.getOperand(1), Op.getOperand(2)};
  case Instruction::Call: {
    const IntrinsicInst &II = cast<IntrinsicInst>(Op);
    assert(II.getIntrinsicID() == Intrinsic::ptrmask &&
           "unexpected intrinsic call");
    return {II.getArgOperand(0)};
  }
  case Instruction::IntToPtr: {
    assert(isNoopPtrIntCastPair(&Op, DL, TTI));
    auto *P2I = cast<Operator>(Op.getOperand(0));
    return {P2I->getOperand(0)};
  }
  default:
    llvm_unreachable("Unexpected instruction type.");
  }
}
bool InferAddressSpacesImpl::rewriteIntrinsicOperands(IntrinsicInst *II,
                                                      Value *OldV,
                                                      Value *NewV) const {
  Module *M = II->getParent()->getParent()->getParent();
  switch (II->getIntrinsicID()) {
  case Intrinsic::objectsize:
  case Intrinsic::masked_load: {
    Type *DestTy = II->getType();
    Type *SrcTy = NewV->getType();
    Function *NewDecl =
        Intrinsic::getDeclaration(M, II->getIntrinsicID(), {DestTy, SrcTy});
    II->setArgOperand(0, NewV);
    II->setCalledFunction(NewDecl);
    return true;
  }
  case Intrinsic::ptrmask:
    // This is handled as an address expression, not as a use memory operation.
    return false;
  case Intrinsic::masked_gather: {
    Type *RetTy = II->getType();
    Type *NewPtrTy = NewV->getType();
    Function *NewDecl =
        Intrinsic::getDeclaration(M, II->getIntrinsicID(), {RetTy, NewPtrTy});
    II->setArgOperand(0, NewV);
    II->setCalledFunction(NewDecl);
    return true;
  }
  case Intrinsic::masked_store:
  case Intrinsic::masked_scatter: {
    Type *ValueTy = II->getOperand(0)->getType();
    Type *NewPtrTy = NewV->getType();
    Function *NewDecl =
        Intrinsic::getDeclaration(M, II->getIntrinsicID(), {ValueTy, NewPtrTy});
    II->setArgOperand(1, NewV);
    II->setCalledFunction(NewDecl);
    return true;
  }
  case Intrinsic::prefetch:
  case Intrinsic::is_constant: {
    Function *NewDecl =
        Intrinsic::getDeclaration(M, II->getIntrinsicID(), {NewV->getType()});
    II->setArgOperand(0, NewV);
    II->setCalledFunction(NewDecl);
    return true;
  }
  default: {
    Value *Rewrite = TTI->rewriteIntrinsicWithAddressSpace(II, OldV, NewV);
    if (!Rewrite)
      return false;
    if (Rewrite != II)
      II->replaceAllUsesWith(Rewrite);
    return true;
  }
  }
}
void InferAddressSpacesImpl::collectRewritableIntrinsicOperands(
    IntrinsicInst *II, PostorderStackTy &PostorderStack,
    DenseSet<Value *> &Visited) const {
  auto IID = II->getIntrinsicID();
  switch (IID) {
  case Intrinsic::ptrmask:
  case Intrinsic::objectsize:
    appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(0),
                                                 PostorderStack, Visited);
    break;
  case Intrinsic::is_constant: {
    Value *Ptr = II->getArgOperand(0);
    if (Ptr->getType()->isPtrOrPtrVectorTy()) {
      appendsFlatAddressExpressionToPostorderStack(Ptr, PostorderStack,
                                                   Visited);
    }
    break;
  }
  case Intrinsic::masked_load:
  case Intrinsic::masked_gather:
  case Intrinsic::prefetch:
    appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(0),
                                                 PostorderStack, Visited);
    break;
  case Intrinsic::masked_store:
  case Intrinsic::masked_scatter:
    appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(1),
                                                 PostorderStack, Visited);
    break;
  default:
    SmallVector<int, 2> OpIndexes;
    if (TTI->collectFlatAddressOperands(OpIndexes, IID)) {
      for (int Idx : OpIndexes) {
        appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(Idx),
                                                     PostorderStack, Visited);
      }
    }
    break;
  }
}
// If V is an unvisited flat address expression, appends V to PostorderStack
// and marks it as visited.
void InferAddressSpacesImpl::appendsFlatAddressExpressionToPostorderStack(
    Value *V, PostorderStackTy &PostorderStack,
    DenseSet<Value *> &Visited) const {
  assert(V->getType()->isPtrOrPtrVectorTy());

  // Generic addressing expressions may be hidden in nested constant
  // expressions.
  if (ConstantExpr *CE = dyn_cast<ConstantExpr>(V)) {
    if (isAddressExpression(*CE, *DL, TTI) && Visited.insert(CE).second)
      PostorderStack.emplace_back(CE, false);
    return;
  }

  if (V->getType()->getPointerAddressSpace() == FlatAddrSpace &&
      isAddressExpression(*V, *DL, TTI)) {
    if (Visited.insert(V).second) {
      PostorderStack.emplace_back(V, false);

      // Also explore constant-expression operands of V.
      Operator *Op = cast<Operator>(V);
      for (unsigned I = 0, E = Op->getNumOperands(); I != E; ++I) {
        if (ConstantExpr *CE = dyn_cast<ConstantExpr>(Op->getOperand(I))) {
          if (isAddressExpression(*CE, *DL, TTI) && Visited.insert(CE).second)
            PostorderStack.emplace_back(CE, false);
        }
      }
    }
  }
}
501std::vector<WeakTrackingVH>
502InferAddressSpacesImpl::collectFlatAddressExpressions(
Function &
F)
const {
505 PostorderStackTy PostorderStack;
509 auto PushPtrOperand = [&](
Value *
Ptr) {
510 appendsFlatAddressExpressionToPostorderStack(
Ptr, PostorderStack, Visited);
517 if (
auto *
GEP = dyn_cast<GetElementPtrInst>(&
I)) {
518 PushPtrOperand(
GEP->getPointerOperand());
519 }
else if (
auto *LI = dyn_cast<LoadInst>(&
I))
520 PushPtrOperand(LI->getPointerOperand());
521 else if (
auto *SI = dyn_cast<StoreInst>(&
I))
522 PushPtrOperand(
SI->getPointerOperand());
523 else if (
auto *RMW = dyn_cast<AtomicRMWInst>(&
I))
524 PushPtrOperand(RMW->getPointerOperand());
525 else if (
auto *CmpX = dyn_cast<AtomicCmpXchgInst>(&
I))
526 PushPtrOperand(CmpX->getPointerOperand());
527 else if (
auto *
MI = dyn_cast<MemIntrinsic>(&
I)) {
529 PushPtrOperand(
MI->getRawDest());
532 if (
auto *MTI = dyn_cast<MemTransferInst>(
MI))
533 PushPtrOperand(MTI->getRawSource());
534 }
else if (
auto *
II = dyn_cast<IntrinsicInst>(&
I))
535 collectRewritableIntrinsicOperands(
II, PostorderStack, Visited);
536 else if (
ICmpInst *Cmp = dyn_cast<ICmpInst>(&
I)) {
537 if (
Cmp->getOperand(0)->getType()->isPtrOrPtrVectorTy()) {
538 PushPtrOperand(
Cmp->getOperand(0));
539 PushPtrOperand(
Cmp->getOperand(1));
541 }
else if (
auto *ASC = dyn_cast<AddrSpaceCastInst>(&
I)) {
542 PushPtrOperand(ASC->getPointerOperand());
543 }
else if (
auto *I2P = dyn_cast<IntToPtrInst>(&
I)) {
545 PushPtrOperand(cast<Operator>(I2P->getOperand(0))->getOperand(0));
546 }
else if (
auto *RI = dyn_cast<ReturnInst>(&
I)) {
547 if (
auto *RV = RI->getReturnValue();
548 RV && RV->getType()->isPtrOrPtrVectorTy())
553 std::vector<WeakTrackingVH> Postorder;
554 while (!PostorderStack.empty()) {
555 Value *TopVal = PostorderStack.back().getPointer();
558 if (PostorderStack.back().getInt()) {
560 Postorder.push_back(TopVal);
561 PostorderStack.pop_back();
565 PostorderStack.back().setInt(
true);
569 appendsFlatAddressExpressionToPostorderStack(PtrOperand, PostorderStack,
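// Hedged example (illustrative IR): for the chain below, the returned
// postorder lists %p before %q, so operands are always processed before the
// expressions that use them.
//
//   %p = getelementptr float, ptr %base, i64 %i
//   %q = getelementptr float, ptr %p, i64 1
//   %v = load float, ptr %q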
581 const Use &OperandUse,
unsigned NewAddrSpace,
583 const PredicatedAddrSpaceMapTy &PredicatedAS,
589 if (
Constant *
C = dyn_cast<Constant>(Operand))
592 if (
Value *NewOperand = ValueWithNewAddrSpace.
lookup(Operand))
596 auto I = PredicatedAS.find(std::make_pair(Inst, Operand));
597 if (
I != PredicatedAS.end()) {
599 unsigned NewAS =
I->second;
602 NewI->insertBefore(Inst);
Value *InferAddressSpacesImpl::cloneInstructionWithNewAddressSpace(
    Instruction *I, unsigned NewAddrSpace,
    const ValueToValueMapTy &ValueWithNewAddrSpace,
    const PredicatedAddrSpaceMapTy &PredicatedAS,
    SmallVectorImpl<const Use *> *PoisonUsesToFix) const {
  Type *NewPtrType = getPtrOrVecOfPtrsWithNewAS(I->getType(), NewAddrSpace);

  if (I->getOpcode() == Instruction::AddrSpaceCast) {
    Value *Src = I->getOperand(0);
    // Because `I` is flat, the source address space must be specific.
    // Therefore, the inferred address space must be the source space.
    assert(Src->getType()->getPointerAddressSpace() == NewAddrSpace);
    if (Src->getType() != NewPtrType)
      return new BitCastInst(Src, NewPtrType);
    return Src;
  }

  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    // A ptrmask intrinsic produces a pointer-typed result, so handle calls
    // early.
    assert(II->getIntrinsicID() == Intrinsic::ptrmask);
    Value *NewPtr = operandWithNewAddressSpaceOrCreatePoison(
        II->getArgOperandUse(0), NewAddrSpace, ValueWithNewAddrSpace,
        PredicatedAS, PoisonUsesToFix);
    Value *Rewrite =
        TTI->rewriteIntrinsicWithAddressSpace(II, II->getArgOperand(0), NewPtr);
    if (Rewrite) {
      assert(Rewrite != II && "cannot modify this pointer operation in place");
      return Rewrite;
    }
    return nullptr;
  }

  unsigned AS = TTI->getAssumedAddrSpace(I);
  if (AS != UninitializedAddressSpace) {
    // For an instruction with an assumed address space, insert an
    // addrspacecast to make that assumption explicit.
    Type *NewPtrTy = getPtrOrVecOfPtrsWithNewAS(I->getType(), AS);
    auto *NewI = new AddrSpaceCastInst(I, NewPtrTy);
    NewI->insertAfter(I);
    NewI->setDebugLoc(I->getDebugLoc());
    return NewI;
  }

  // Computes the converted pointer operands.
  SmallVector<Value *, 4> NewPointerOperands;
  for (const Use &OperandUse : I->operands()) {
    if (!OperandUse.get()->getType()->isPtrOrPtrVectorTy())
      NewPointerOperands.push_back(nullptr);
    else
      NewPointerOperands.push_back(operandWithNewAddressSpaceOrCreatePoison(
          OperandUse, NewAddrSpace, ValueWithNewAddrSpace, PredicatedAS,
          PoisonUsesToFix));
  }

  switch (I->getOpcode()) {
  case Instruction::BitCast:
    return new BitCastInst(NewPointerOperands[0], NewPtrType);
  case Instruction::PHI: {
    assert(I->getType()->isPtrOrPtrVectorTy());
    PHINode *PHI = cast<PHINode>(I);
    PHINode *NewPHI = PHINode::Create(NewPtrType, PHI->getNumIncomingValues());
    for (unsigned Index = 0; Index < PHI->getNumIncomingValues(); ++Index) {
      unsigned OperandNo = PHINode::getOperandNumForIncomingValue(Index);
      NewPHI->addIncoming(NewPointerOperands[OperandNo],
                          PHI->getIncomingBlock(Index));
    }
    return NewPHI;
  }
  case Instruction::GetElementPtr: {
    GetElementPtrInst *GEP = cast<GetElementPtrInst>(I);
    GetElementPtrInst *NewGEP = GetElementPtrInst::Create(
        GEP->getSourceElementType(), NewPointerOperands[0],
        SmallVector<Value *, 4>(GEP->indices()));
    NewGEP->setIsInBounds(GEP->isInBounds());
    return NewGEP;
  }
  case Instruction::Select:
    assert(I->getType()->isPtrOrPtrVectorTy());
    return SelectInst::Create(I->getOperand(0), NewPointerOperands[1],
                              NewPointerOperands[2], "", nullptr, I);
  case Instruction::IntToPtr: {
    assert(isNoopPtrIntCastPair(cast<Operator>(I), *DL, TTI));
    Value *Src = cast<Operator>(I->getOperand(0))->getOperand(0);
    if (Src->getType() == NewPtrType)
      return Src;
    // The source may still need a cast if only its address space was inferred.
    return CastInst::CreatePointerBitCastOrAddrSpaceCast(Src, NewPtrType);
  }
  default:
    llvm_unreachable("Unexpected opcode");
  }
}
730 CE->getType()->isPtrOrPtrVectorTy()
734 if (CE->getOpcode() == Instruction::AddrSpaceCast) {
738 assert(CE->getOperand(0)->getType()->getPointerAddressSpace() ==
743 if (CE->getOpcode() == Instruction::BitCast) {
744 if (
Value *NewOperand = ValueWithNewAddrSpace.
lookup(CE->getOperand(0)))
749 if (CE->getOpcode() == Instruction::IntToPtr) {
751 Constant *Src = cast<ConstantExpr>(CE->getOperand(0))->getOperand(0);
752 assert(Src->getType()->getPointerAddressSpace() == NewAddrSpace);
766 if (
Value *NewOperand = ValueWithNewAddrSpace.
lookup(Operand)) {
768 NewOperands.
push_back(cast<Constant>(NewOperand));
771 if (
auto *CExpr = dyn_cast<ConstantExpr>(Operand))
773 CExpr, NewAddrSpace, ValueWithNewAddrSpace,
DL,
TTI)) {
775 NewOperands.
push_back(cast<Constant>(NewOperand));
787 if (CE->getOpcode() == Instruction::GetElementPtr) {
790 return CE->getWithOperands(NewOperands, TargetType,
false,
791 cast<GEPOperator>(CE)->getSourceElementType());
794 return CE->getWithOperands(NewOperands, TargetType);
Value *InferAddressSpacesImpl::cloneValueWithNewAddressSpace(
    Value *V, unsigned NewAddrSpace,
    const ValueToValueMapTy &ValueWithNewAddrSpace,
    const PredicatedAddrSpaceMapTy &PredicatedAS,
    SmallVectorImpl<const Use *> *PoisonUsesToFix) const {
  // All values in Postorder are flat address expressions.
  assert(V->getType()->getPointerAddressSpace() == FlatAddrSpace &&
         isAddressExpression(*V, *DL, TTI));

  if (Instruction *I = dyn_cast<Instruction>(V)) {
    Value *NewV = cloneInstructionWithNewAddressSpace(
        I, NewAddrSpace, ValueWithNewAddrSpace, PredicatedAS, PoisonUsesToFix);
    if (Instruction *NewI = dyn_cast_or_null<Instruction>(NewV)) {
      if (NewI->getParent() == nullptr) {
        NewI->insertBefore(I);
        NewI->takeName(I);
        NewI->setDebugLoc(I->getDebugLoc());
      }
    }
    return NewV;
  }

  return cloneConstantExprWithNewAddressSpace(
      cast<ConstantExpr>(V), NewAddrSpace, ValueWithNewAddrSpace, DL, TTI);
}
// Defines the join operation on the address space lattice.
unsigned InferAddressSpacesImpl::joinAddressSpaces(unsigned AS1,
                                                   unsigned AS2) const {
  if (AS1 == FlatAddrSpace || AS2 == FlatAddrSpace)
    return FlatAddrSpace;

  if (AS1 == UninitializedAddressSpace)
    return AS2;
  if (AS2 == UninitializedAddressSpace)
    return AS1;

  // The join of two different specific address spaces is flat.
  return (AS1 == AS2) ? AS1 : FlatAddrSpace;
}
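// Summary of the lattice implemented above (uninitialized = top, flat =
// bottom):
//
//   join(uninitialized, AS)       = AS
//   join(flat, AS)                = flat
//   join(AS, AS)                  = AS
//   join(AS1, AS2) with AS1 != AS2 = flat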
bool InferAddressSpacesImpl::run(Function &F) {
  DL = &F.getDataLayout();

  if (AssumeDefaultIsFlatAddressSpace)
    FlatAddrSpace = 0;

  if (FlatAddrSpace == UninitializedAddressSpace) {
    FlatAddrSpace = TTI->getFlatAddressSpace();
    if (FlatAddrSpace == UninitializedAddressSpace)
      return false;
  }

  // Collects all flat address expressions in postorder.
  std::vector<WeakTrackingVH> Postorder = collectFlatAddressExpressions(F);

  // Runs a data-flow analysis to refine the address spaces of every expression
  // in Postorder.
  ValueToAddrSpaceMapTy InferredAddrSpace;
  PredicatedAddrSpaceMapTy PredicatedAS;
  inferAddressSpaces(Postorder, InferredAddrSpace, PredicatedAS);

  // Changes the address spaces of the flat address expressions that are
  // inferred to point to a specific address space.
  return rewriteWithNewAddressSpaces(Postorder, InferredAddrSpace, PredicatedAS,
                                     &F);
}
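// Hedged end-to-end example (illustrative IR with AMDGPU-style numbering,
// where addrspace(3) is a specific space and the default space is flat): the
// pass infers that %p, and therefore the load, can use addrspace(3) directly.
//
//   %p   = addrspacecast ptr addrspace(3) %shared to ptr
//   %gep = getelementptr float, ptr %p, i64 %i
//   %v   = load float, ptr %gep
// becomes
//   %gep = getelementptr float, ptr addrspace(3) %shared, i64 %i
//   %v   = load float, ptr addrspace(3) %gep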
// Constants are tracked through RAUW via WeakTrackingVH, so nested constant
// expressions stay valid while the worklist runs.
void InferAddressSpacesImpl::inferAddressSpaces(
    ArrayRef<WeakTrackingVH> Postorder,
    ValueToAddrSpaceMapTy &InferredAddrSpace,
    PredicatedAddrSpaceMapTy &PredicatedAS) const {
  SetVector<Value *> Worklist(Postorder.begin(), Postorder.end());
  // Initially, all expressions are in the uninitialized address space.
  for (Value *V : Postorder)
    InferredAddrSpace[V] = UninitializedAddressSpace;

  while (!Worklist.empty()) {
    Value *V = Worklist.pop_back_val();

    // Try to update the address space of V according to the address spaces of
    // its operands.
    if (!updateAddressSpace(*V, InferredAddrSpace, PredicatedAS))
      continue;

    for (Value *User : V->users()) {
      // Skip if User is already in the worklist.
      if (Worklist.count(User))
        continue;

      auto Pos = InferredAddrSpace.find(User);
      // Our algorithm only updates the address spaces of flat address
      // expressions, which are those in InferredAddrSpace.
      if (Pos == InferredAddrSpace.end())
        continue;

      // updateAddressSpace only moves values down the lattice, so there is
      // nothing to do if User is already flat (the bottom element).
      if (Pos->second == FlatAddrSpace)
        continue;

      Worklist.insert(User);
    }
  }
}
unsigned InferAddressSpacesImpl::getPredicatedAddrSpace(const Value &V,
                                                        Value *Opnd) const {
  const Instruction *I = dyn_cast<Instruction>(&V);
  if (!I)
    return UninitializedAddressSpace;

  Opnd = Opnd->stripInBoundsOffsets();
  for (auto &AssumeVH : AC.assumptionsFor(Opnd)) {
    if (!AssumeVH)
      continue;
    CallInst *CI = cast<CallInst>(AssumeVH);
    if (!isValidAssumeForContext(CI, I, DT))
      continue;

    const Value *Ptr;
    unsigned AS;
    std::tie(Ptr, AS) = TTI->getPredicatedAddrSpace(CI->getArgOperand(0));
    if (Ptr)
      return AS;
  }

  return UninitializedAddressSpace;
}
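// Hedged example (illustrative IR; the predicate intrinsic shown is the
// AMDGPU one and is an assumption here, since this file is target-independent
// and delegates the recognition to TTI): a dominating llvm.assume lets the
// pass treat %p as LDS on that path.
//
//   %is.shared = call i1 @llvm.amdgcn.is.shared(ptr %p)
//   call void @llvm.assume(i1 %is.shared)
//   %v = load float, ptr %p   ; may be rewritten to addrspace(3)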
bool InferAddressSpacesImpl::updateAddressSpace(
    const Value &V, ValueToAddrSpaceMapTy &InferredAddrSpace,
    PredicatedAddrSpaceMapTy &PredicatedAS) const {
  assert(InferredAddrSpace.count(&V));

  LLVM_DEBUG(dbgs() << "Updating the address space of\n  " << V << '\n');

  // The new inferred address space equals the join of the address spaces of
  // all its pointer operands.
  unsigned NewAS = UninitializedAddressSpace;

  const Operator &Op = cast<Operator>(V);
  if (Op.getOpcode() == Instruction::Select) {
    Value *Src0 = Op.getOperand(1);
    Value *Src1 = Op.getOperand(2);

    auto I = InferredAddrSpace.find(Src0);
    unsigned Src0AS = (I != InferredAddrSpace.end())
                          ? I->second
                          : Src0->getType()->getPointerAddressSpace();

    auto J = InferredAddrSpace.find(Src1);
    unsigned Src1AS = (J != InferredAddrSpace.end())
                          ? J->second
                          : Src1->getType()->getPointerAddressSpace();

    auto *C0 = dyn_cast<Constant>(Src0);
    auto *C1 = dyn_cast<Constant>(Src1);

    // If one input is a constant, we may be able to constant-addrspacecast it.
    // Defer until the other input's address space is known.
    if ((C1 && Src0AS == UninitializedAddressSpace) ||
        (C0 && Src1AS == UninitializedAddressSpace))
      return false;

    if (C0 && isSafeToCastConstAddrSpace(C0, Src1AS))
      NewAS = Src1AS;
    else if (C1 && isSafeToCastConstAddrSpace(C1, Src0AS))
      NewAS = Src0AS;
    else
      NewAS = joinAddressSpaces(Src0AS, Src1AS);
  } else {
    unsigned AS = TTI->getAssumedAddrSpace(&V);
    if (AS != UninitializedAddressSpace) {
      // Use the assumed address space directly.
      NewAS = AS;
    } else {
      // Otherwise, infer the address space from its pointer operands.
      for (Value *PtrOperand : getPointerOperands(V, *DL, TTI)) {
        auto I = InferredAddrSpace.find(PtrOperand);
        unsigned OperandAS;
        if (I == InferredAddrSpace.end()) {
          OperandAS = PtrOperand->getType()->getPointerAddressSpace();
          if (OperandAS == FlatAddrSpace) {
            // Check AC for an assumption dominating V.
            unsigned AS = getPredicatedAddrSpace(V, PtrOperand);
            if (AS != UninitializedAddressSpace) {
              LLVM_DEBUG(dbgs()
                         << "  deduce operand AS from the predicate addrspace "
                         << AS << '\n');
              OperandAS = AS;
              // Record this use with the predicated AS.
              PredicatedAS[std::make_pair(&V, PtrOperand)] = OperandAS;
            }
          }
        } else
          OperandAS = I->second;

        // join(flat, *) = flat, so we can break if NewAS is already flat.
        NewAS = joinAddressSpaces(NewAS, OperandAS);
        if (NewAS == FlatAddrSpace)
          break;
      }
    }
  }

  unsigned OldAS = InferredAddrSpace.lookup(&V);
  assert(OldAS != FlatAddrSpace);
  if (OldAS == NewAS)
    return false;

  // The address space changed; the caller re-queues V's users, because their
  // address spaces may also need updating.
  LLVM_DEBUG(dbgs() << "  to " << NewAS << '\n');
  InferredAddrSpace[&V] = NewAS;
  return true;
}
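// Hedged example (illustrative IR, hypothetical value names): with
// %cast_of_local inferred as addrspace(3) and a null constant as the other
// select input, isSafeToCastConstAddrSpace lets the whole select be inferred
// as addrspace(3) instead of falling back to flat.
//
//   %p = select i1 %cond, ptr %cast_of_local, ptr null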
/// Replace operand OpIdx in Inst, if the value is the same as OldVal, with
/// NewVal.
static bool replaceOperandIfSame(Instruction *Inst, unsigned OpIdx,
                                 Value *OldVal, Value *NewVal) {
  Use &U = Inst->getOperandUse(OpIdx);
  if (U.get() == OldVal) {
    U.set(NewVal);
    return true;
  }
  return false;
}
1038template <
typename InstrType>
1040 InstrType *MemInstr,
unsigned AddrSpace,
1058 User *Inst,
unsigned AddrSpace,
1060 if (
auto *LI = dyn_cast<LoadInst>(Inst))
1063 if (
auto *SI = dyn_cast<StoreInst>(Inst))
1066 if (
auto *RMW = dyn_cast<AtomicRMWInst>(Inst))
1069 if (
auto *CmpX = dyn_cast<AtomicCmpXchgInst>(Inst))
/// Update memory intrinsic uses that require more complex processing than
/// simple memory instructions. These require re-mangling and may have multiple
/// pointer operands.
static bool handleMemIntrinsicPtrUse(MemIntrinsic *MI, Value *OldV,
                                     Value *NewV) {
  IRBuilder<> B(MI);
  MDNode *TBAA = MI->getMetadata(LLVMContext::MD_tbaa);
  MDNode *ScopeMD = MI->getMetadata(LLVMContext::MD_alias_scope);
  MDNode *NoAliasMD = MI->getMetadata(LLVMContext::MD_noalias);

  if (auto *MSI = dyn_cast<MemSetInst>(MI)) {
    B.CreateMemSet(NewV, MSI->getValue(), MSI->getLength(), MSI->getDestAlign(),
                   /*isVolatile=*/false, TBAA, ScopeMD, NoAliasMD);
  } else if (auto *MTI = dyn_cast<MemTransferInst>(MI)) {
    Value *Src = MTI->getRawSource();
    Value *Dest = MTI->getRawDest();

    // Be careful in case this is a self-to-self copy.
    if (Src == OldV)
      Src = NewV;
    if (Dest == OldV)
      Dest = NewV;

    if (isa<MemCpyInlineInst>(MTI)) {
      MDNode *TBAAStruct = MTI->getMetadata(LLVMContext::MD_tbaa_struct);
      B.CreateMemCpyInline(Dest, MTI->getDestAlign(), Src,
                           MTI->getSourceAlign(), MTI->getLength(),
                           /*isVolatile=*/false, TBAA, TBAAStruct, ScopeMD,
                           NoAliasMD);
    } else if (isa<MemCpyInst>(MTI)) {
      MDNode *TBAAStruct = MTI->getMetadata(LLVMContext::MD_tbaa_struct);
      B.CreateMemCpy(Dest, MTI->getDestAlign(), Src, MTI->getSourceAlign(),
                     MTI->getLength(), /*isVolatile=*/false, TBAA, TBAAStruct,
                     ScopeMD, NoAliasMD);
    } else {
      assert(isa<MemMoveInst>(MTI));
      B.CreateMemMove(Dest, MTI->getDestAlign(), Src, MTI->getSourceAlign(),
                      MTI->getLength(), /*isVolatile=*/false, TBAA, ScopeMD,
                      NoAliasMD);
    }
  } else
    llvm_unreachable("unhandled MemIntrinsic");

  MI->eraseFromParent();
  return true;
}
bool InferAddressSpacesImpl::isSafeToCastConstAddrSpace(Constant *C,
                                                        unsigned NewAS) const {
  assert(NewAS != UninitializedAddressSpace);

  unsigned SrcAS = C->getType()->getPointerAddressSpace();
  if (SrcAS == NewAS || isa<UndefValue>(C))
    return true;

  // Prevent illegal casts between different non-flat address spaces.
  if (SrcAS != FlatAddrSpace && NewAS != FlatAddrSpace)
    return false;

  if (isa<ConstantPointerNull>(C))
    return true;

  if (auto *Op = dyn_cast<Operator>(C)) {
    // If we already have a constant addrspacecast, it is safe to cast it off.
    if (Op->getOpcode() == Instruction::AddrSpaceCast)
      return isSafeToCastConstAddrSpace(cast<Constant>(Op->getOperand(0)),
                                        NewAS);

    if (Op->getOpcode() == Instruction::IntToPtr &&
        Op->getType()->getPointerAddressSpace() == FlatAddrSpace)
      return true;
  }

  return false;
}
/// Returns the next use after I whose user differs from I's user, so all
/// operands of one user are handled in a single step.
static Value::use_iterator skipToNextUser(Value::use_iterator I,
                                          Value::use_iterator End) {
  User *CurUser = I->getUser();
  ++I;
  while (I != End && I->getUser() == CurUser)
    ++I;
  return I;
}
bool InferAddressSpacesImpl::rewriteWithNewAddressSpaces(
    ArrayRef<WeakTrackingVH> Postorder,
    const ValueToAddrSpaceMapTy &InferredAddrSpace,
    const PredicatedAddrSpaceMapTy &PredicatedAS, Function *F) const {
  // For each address expression to be modified, creates a clone of it with its
  // pointer operands converted to the new address space. Since the pointer
  // operands are converted, the clone is naturally in the new address space by
  // construction.
  ValueToValueMapTy ValueWithNewAddrSpace;
  SmallVector<const Use *, 32> PoisonUsesToFix;
  for (Value *V : Postorder) {
    unsigned NewAddrSpace = InferredAddrSpace.lookup(V);

    // In some degenerate cases (e.g. invalid IR in unreachable code), we may
    // not even infer the value to have its original address space.
    if (NewAddrSpace == UninitializedAddressSpace)
      continue;

    if (V->getType()->getPointerAddressSpace() != NewAddrSpace) {
      Value *New =
          cloneValueWithNewAddressSpace(V, NewAddrSpace, ValueWithNewAddrSpace,
                                        PredicatedAS, &PoisonUsesToFix);
      if (New)
        ValueWithNewAddrSpace[V] = New;
    }
  }

  if (ValueWithNewAddrSpace.empty())
    return false;

  // Fixes all the poison uses generated while cloning.
  for (const Use *PoisonUse : PoisonUsesToFix) {
    User *V = PoisonUse->getUser();
    User *NewV = cast_or_null<User>(ValueWithNewAddrSpace.lookup(V));
    if (!NewV)
      continue;

    unsigned OperandNo = PoisonUse->getOperandNo();
    assert(isa<PoisonValue>(NewV->getOperand(OperandNo)));
    NewV->setOperand(OperandNo, ValueWithNewAddrSpace.lookup(PoisonUse->get()));
  }

  SmallVector<Instruction *, 16> DeadInstructions;
  ValueToValueMapTy VMap;
  ValueMapper VMapper(VMap, RF_NoModuleLevelChanges | RF_IgnoreMissingLocals);

  // Replaces the uses of the old address expressions with the new ones.
  for (const WeakTrackingVH &WVH : Postorder) {
    assert(WVH && "value was unexpectedly deleted");
    Value *V = WVH;
    Value *NewV = ValueWithNewAddrSpace.lookup(V);
    if (NewV == nullptr)
      continue;

    LLVM_DEBUG(dbgs() << "Replacing the uses of " << *V << "\n  with\n  "
                      << *NewV << '\n');

    if (Constant *C = dyn_cast<Constant>(V)) {
      Constant *Replace =
          ConstantExpr::getAddrSpaceCast(cast<Constant>(NewV), C->getType());
      if (C != Replace) {
        LLVM_DEBUG(dbgs() << "Inserting replacement const cast: " << Replace
                          << ": " << *Replace << '\n');
        SmallVector<User *, 16> WorkList;
        for (User *U : make_early_inc_range(C->users())) {
          if (auto *I = dyn_cast<Instruction>(U)) {
            if (I->getFunction() == F)
              I->replaceUsesOfWith(C, Replace);
          } else {
            WorkList.append(U->user_begin(), U->user_end());
          }
        }
        if (!WorkList.empty()) {
          // Walk users of users to remap uses reached through other constants.
          VMap[C] = Replace;
          DenseSet<User *> Visited{WorkList.begin(), WorkList.end()};
          while (!WorkList.empty()) {
            User *U = WorkList.pop_back_val();
            if (auto *I = dyn_cast<Instruction>(U)) {
              if (I->getFunction() == F)
                VMapper.remapInstruction(*I);
              continue;
            }
            for (User *U2 : U->users())
              if (Visited.insert(U2).second)
                WorkList.push_back(U2);
          }
        }
        V = Replace;
      }
    }

    Value::use_iterator I, E;
    for (I = V->use_begin(), E = V->use_end(); I != E;) {
      Use &U = *I;
      User *CurUser = U.getUser();

      // Some users may see the same pointer operand in multiple operands.
      // Skip to the next instruction.
      I = skipToNextUser(I, E);

      unsigned AddrSpace = V->getType()->getPointerAddressSpace();
      if (replaceIfSimplePointerUse(*TTI, CurUser, AddrSpace, V, NewV))
        continue;

      // Skip if the current user is the new value itself.
      if (CurUser == NewV)
        continue;

      if (auto *CurUserI = dyn_cast<Instruction>(CurUser);
          CurUserI && CurUserI->getFunction() != F)
        continue;

      // Handle more complex cases like intrinsics that need to be remangled.
      if (auto *MI = dyn_cast<MemIntrinsic>(CurUser)) {
        if (!MI->isVolatile() && handleMemIntrinsicPtrUse(MI, V, NewV))
          continue;
      }

      if (auto *II = dyn_cast<IntrinsicInst>(CurUser)) {
        if (rewriteIntrinsicOperands(II, V, NewV))
          continue;
      }

      if (isa<Instruction>(CurUser)) {
        if (ICmpInst *Cmp = dyn_cast<ICmpInst>(CurUser)) {
          // If we can infer that both pointers are in the same addrspace,
          // transform, e.g.
          //   %cmp = icmp eq float* %p, %q
          // into
          //   %cmp = icmp eq float addrspace(3)* %new_p, %new_q
          unsigned NewAS = NewV->getType()->getPointerAddressSpace();
          int SrcIdx = U.getOperandNo();
          int OtherIdx = (SrcIdx == 0) ? 1 : 0;
          Value *OtherSrc = Cmp->getOperand(OtherIdx);

          if (Value *OtherNewV = ValueWithNewAddrSpace.lookup(OtherSrc)) {
            if (OtherNewV->getType()->getPointerAddressSpace() == NewAS) {
              Cmp->setOperand(OtherIdx, OtherNewV);
              Cmp->setOperand(SrcIdx, NewV);
              continue;
            }
          }

          // Even if the type mismatches, we can cast the constant.
          if (auto *KOtherSrc = dyn_cast<Constant>(OtherSrc)) {
            if (isSafeToCastConstAddrSpace(KOtherSrc, NewAS)) {
              Cmp->setOperand(SrcIdx, NewV);
              Cmp->setOperand(OtherIdx, ConstantExpr::getAddrSpaceCast(
                                            KOtherSrc, NewV->getType()));
              continue;
            }
          }
        }

        if (AddrSpaceCastInst *ASC = dyn_cast<AddrSpaceCastInst>(CurUser)) {
          unsigned NewAS = NewV->getType()->getPointerAddressSpace();
          if (ASC->getDestAddressSpace() == NewAS) {
            ASC->replaceAllUsesWith(NewV);
            DeadInstructions.push_back(ASC);
            continue;
          }
        }

        // Otherwise, replaces the use with flat(NewV).
        if (Instruction *VInst = dyn_cast<Instruction>(V)) {
          // Don't create a copy of the original addrspacecast.
          if (U == V && isa<AddrSpaceCastInst>(V))
            continue;

          // Insert the addrspacecast after NewV.
          BasicBlock::iterator InsertPos;
          if (Instruction *NewVInst = dyn_cast<Instruction>(NewV))
            InsertPos = std::next(NewVInst->getIterator());
          else
            InsertPos = std::next(VInst->getIterator());

          while (isa<PHINode>(InsertPos))
            ++InsertPos;
          U.set(new AddrSpaceCastInst(NewV, V->getType(), "", InsertPos));
        } else {
          U.set(ConstantExpr::getAddrSpaceCast(cast<Constant>(NewV),
                                               V->getType()));
        }
      }
    }

    if (V->use_empty()) {
      if (Instruction *VInst = dyn_cast<Instruction>(V))
        DeadInstructions.push_back(VInst);
    }
  }

  for (Instruction *I : DeadInstructions)
    RecursivelyDeleteTriviallyDeadInstructions(I);

  return true;
}
bool InferAddressSpaces::runOnFunction(Function &F) {
  if (skipFunction(F))
    return false;

  auto *DTWP = getAnalysisIfAvailable<DominatorTreeWrapperPass>();
  DominatorTree *DT = DTWP ? &DTWP->getDomTree() : nullptr;
  return InferAddressSpacesImpl(
             getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F), DT,
             &getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F),
             FlatAddrSpace)
      .run(F);
}
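// Hedged usage sketch: creating the legacy pass with an explicit flat address
// space. createInferAddressSpacesPass defaults its argument to ~0u
// (uninitialized), in which case the target's TTI supplies the flat space; the
// pass-manager wiring below is illustrative, not taken from this file.
//
//   FunctionPass *P = createInferAddressSpacesPass(/*AddressSpace=*/0);
//   legacy::PassManager PM;
//   PM.add(P);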