136#define DEBUG_TYPE "infer-address-spaces"
142 cl::desc(
"The default address space is assumed as the flat address space. "
143 "This is mainly for test purpose."));
146 std::numeric_limits<unsigned>::max();
157using PredicatedAddrSpaceMapTy =
162 unsigned FlatAddrSpace = 0;
171 InferAddressSpaces(
unsigned AS) :
FunctionPass(
ID), FlatAddrSpace(AS) {
185class InferAddressSpacesImpl {
193 unsigned FlatAddrSpace = 0;
197 bool updateAddressSpace(
const Value &V,
198 ValueToAddrSpaceMapTy &InferredAddrSpace,
199 PredicatedAddrSpaceMapTy &PredicatedAS)
const;
204 ValueToAddrSpaceMapTy &InferredAddrSpace,
205 PredicatedAddrSpaceMapTy &PredicatedAS)
const;
207 bool isSafeToCastConstAddrSpace(
Constant *
C,
unsigned NewAS)
const;
209 Value *cloneInstructionWithNewAddressSpace(
212 const PredicatedAddrSpaceMapTy &PredicatedAS,
220 const ValueToAddrSpaceMapTy &InferredAddrSpace,
221 const PredicatedAddrSpaceMapTy &PredicatedAS,
224 void appendsFlatAddressExpressionToPostorderStack(
225 Value *V, PostorderStackTy &PostorderStack,
231 PostorderStackTy &PostorderStack,
234 std::vector<WeakTrackingVH> collectFlatAddressExpressions(
Function &
F)
const;
236 Value *cloneValueWithNewAddressSpace(
237 Value *V,
unsigned NewAddrSpace,
239 const PredicatedAddrSpaceMapTy &PredicatedAS,
241 unsigned joinAddressSpaces(
unsigned AS1,
unsigned AS2)
const;
243 unsigned getPredicatedAddrSpace(
const Value &V,
Value *Opnd)
const;
248 : AC(AC), DT(DT),
TTI(
TTI), FlatAddrSpace(FlatAddrSpace) {}
254char InferAddressSpaces::ID = 0;
264 assert(Ty->isPtrOrPtrVectorTy());
265 PointerType *NPT = PointerType::get(Ty->getContext(), NewAddrSpace);
266 return Ty->getWithNewType(NPT);
275 auto *P2I = dyn_cast<Operator>(I2P->
getOperand(0));
276 if (!P2I || P2I->getOpcode() != Instruction::PtrToInt)
292 unsigned P2IOp0AS = P2I->getOperand(0)->getType()->getPointerAddressSpace();
298 P2I->getOperand(0)->getType(), P2I->getType(),
312 switch (
Op->getOpcode()) {
313 case Instruction::PHI:
314 assert(
Op->getType()->isPtrOrPtrVectorTy());
316 case Instruction::BitCast:
317 case Instruction::AddrSpaceCast:
318 case Instruction::GetElementPtr:
320 case Instruction::Select:
321 return Op->getType()->isPtrOrPtrVectorTy();
322 case Instruction::Call: {
324 return II &&
II->getIntrinsicID() == Intrinsic::ptrmask;
326 case Instruction::IntToPtr:
341 switch (
Op.getOpcode()) {
342 case Instruction::PHI: {
343 auto IncomingValues = cast<PHINode>(
Op).incoming_values();
344 return {IncomingValues.begin(), IncomingValues.end()};
346 case Instruction::BitCast:
347 case Instruction::AddrSpaceCast:
348 case Instruction::GetElementPtr:
349 return {
Op.getOperand(0)};
350 case Instruction::Select:
351 return {
Op.getOperand(1),
Op.getOperand(2)};
352 case Instruction::Call: {
354 assert(
II.getIntrinsicID() == Intrinsic::ptrmask &&
355 "unexpected intrinsic call");
356 return {
II.getArgOperand(0)};
358 case Instruction::IntToPtr: {
360 auto *P2I = cast<Operator>(
Op.getOperand(0));
361 return {P2I->getOperand(0)};
368bool InferAddressSpacesImpl::rewriteIntrinsicOperands(
IntrinsicInst *
II,
371 Module *
M =
II->getParent()->getParent()->getParent();
373 switch (
II->getIntrinsicID()) {
374 case Intrinsic::objectsize: {
375 Type *DestTy =
II->getType();
379 II->setArgOperand(0, NewV);
380 II->setCalledFunction(NewDecl);
383 case Intrinsic::ptrmask:
386 case Intrinsic::masked_gather: {
391 II->setArgOperand(0, NewV);
392 II->setCalledFunction(NewDecl);
395 case Intrinsic::masked_scatter: {
396 Type *ValueTy =
II->getOperand(0)->getType();
400 II->setArgOperand(1, NewV);
401 II->setCalledFunction(NewDecl);
409 II->replaceAllUsesWith(Rewrite);
415void InferAddressSpacesImpl::collectRewritableIntrinsicOperands(
418 auto IID =
II->getIntrinsicID();
420 case Intrinsic::ptrmask:
421 case Intrinsic::objectsize:
422 appendsFlatAddressExpressionToPostorderStack(
II->getArgOperand(0),
423 PostorderStack, Visited);
425 case Intrinsic::masked_gather:
426 appendsFlatAddressExpressionToPostorderStack(
II->getArgOperand(0),
427 PostorderStack, Visited);
429 case Intrinsic::masked_scatter:
430 appendsFlatAddressExpressionToPostorderStack(
II->getArgOperand(1),
431 PostorderStack, Visited);
436 for (
int Idx : OpIndexes) {
437 appendsFlatAddressExpressionToPostorderStack(
II->getArgOperand(
Idx),
438 PostorderStack, Visited);
448void InferAddressSpacesImpl::appendsFlatAddressExpressionToPostorderStack(
449 Value *V, PostorderStackTy &PostorderStack,
451 assert(
V->getType()->isPtrOrPtrVectorTy());
458 PostorderStack.emplace_back(CE,
false);
463 if (
V->getType()->getPointerAddressSpace() == FlatAddrSpace &&
465 if (Visited.
insert(V).second) {
466 PostorderStack.emplace_back(V,
false);
472 PostorderStack.emplace_back(CE,
false);
481std::vector<WeakTrackingVH>
482InferAddressSpacesImpl::collectFlatAddressExpressions(
Function &
F)
const {
485 PostorderStackTy PostorderStack;
489 auto PushPtrOperand = [&](
Value *
Ptr) {
490 appendsFlatAddressExpressionToPostorderStack(
Ptr, PostorderStack, Visited);
497 if (
auto *
GEP = dyn_cast<GetElementPtrInst>(&
I)) {
498 PushPtrOperand(
GEP->getPointerOperand());
499 }
else if (
auto *LI = dyn_cast<LoadInst>(&
I))
500 PushPtrOperand(LI->getPointerOperand());
501 else if (
auto *SI = dyn_cast<StoreInst>(&
I))
502 PushPtrOperand(
SI->getPointerOperand());
503 else if (
auto *RMW = dyn_cast<AtomicRMWInst>(&
I))
504 PushPtrOperand(RMW->getPointerOperand());
505 else if (
auto *CmpX = dyn_cast<AtomicCmpXchgInst>(&
I))
506 PushPtrOperand(CmpX->getPointerOperand());
507 else if (
auto *
MI = dyn_cast<MemIntrinsic>(&
I)) {
509 PushPtrOperand(
MI->getRawDest());
512 if (
auto *MTI = dyn_cast<MemTransferInst>(
MI))
513 PushPtrOperand(MTI->getRawSource());
514 }
else if (
auto *
II = dyn_cast<IntrinsicInst>(&
I))
515 collectRewritableIntrinsicOperands(
II, PostorderStack, Visited);
516 else if (
ICmpInst *Cmp = dyn_cast<ICmpInst>(&
I)) {
517 if (
Cmp->getOperand(0)->getType()->isPtrOrPtrVectorTy()) {
518 PushPtrOperand(
Cmp->getOperand(0));
519 PushPtrOperand(
Cmp->getOperand(1));
521 }
else if (
auto *ASC = dyn_cast<AddrSpaceCastInst>(&
I)) {
522 PushPtrOperand(ASC->getPointerOperand());
523 }
else if (
auto *I2P = dyn_cast<IntToPtrInst>(&
I)) {
525 PushPtrOperand(cast<Operator>(I2P->getOperand(0))->getOperand(0));
526 }
else if (
auto *RI = dyn_cast<ReturnInst>(&
I)) {
527 if (
auto *RV = RI->getReturnValue();
528 RV && RV->getType()->isPtrOrPtrVectorTy())
533 std::vector<WeakTrackingVH> Postorder;
534 while (!PostorderStack.empty()) {
535 Value *TopVal = PostorderStack.back().getPointer();
538 if (PostorderStack.back().getInt()) {
540 Postorder.push_back(TopVal);
541 PostorderStack.pop_back();
545 PostorderStack.back().setInt(
true);
549 appendsFlatAddressExpressionToPostorderStack(PtrOperand, PostorderStack,
561 const Use &OperandUse,
unsigned NewAddrSpace,
563 const PredicatedAddrSpaceMapTy &PredicatedAS,
569 if (
Constant *
C = dyn_cast<Constant>(Operand))
572 if (
Value *NewOperand = ValueWithNewAddrSpace.
lookup(Operand))
576 auto I = PredicatedAS.find(std::make_pair(Inst, Operand));
577 if (
I != PredicatedAS.end()) {
579 unsigned NewAS =
I->second;
582 NewI->insertBefore(Inst);
603Value *InferAddressSpacesImpl::cloneInstructionWithNewAddressSpace(
606 const PredicatedAddrSpaceMapTy &PredicatedAS,
610 if (
I->getOpcode() == Instruction::AddrSpaceCast) {
611 Value *Src =
I->getOperand(0);
615 assert(Src->getType()->getPointerAddressSpace() == NewAddrSpace);
616 if (Src->getType() != NewPtrType)
624 assert(
II->getIntrinsicID() == Intrinsic::ptrmask);
626 II->getArgOperandUse(0), NewAddrSpace, ValueWithNewAddrSpace,
627 PredicatedAS, PoisonUsesToFix);
631 assert(Rewrite !=
II &&
"cannot modify this pointer operation in place");
644 NewI->insertAfter(
I);
645 NewI->setDebugLoc(
I->getDebugLoc());
651 for (
const Use &OperandUse :
I->operands()) {
652 if (!OperandUse.get()->getType()->isPtrOrPtrVectorTy())
656 OperandUse, NewAddrSpace, ValueWithNewAddrSpace, PredicatedAS,
660 switch (
I->getOpcode()) {
661 case Instruction::BitCast:
662 return new BitCastInst(NewPointerOperands[0], NewPtrType);
663 case Instruction::PHI: {
664 assert(
I->getType()->isPtrOrPtrVectorTy());
674 case Instruction::GetElementPtr: {
677 GEP->getSourceElementType(), NewPointerOperands[0],
682 case Instruction::Select:
683 assert(
I->getType()->isPtrOrPtrVectorTy());
685 NewPointerOperands[2],
"",
nullptr,
I);
686 case Instruction::IntToPtr: {
688 Value *Src = cast<Operator>(
I->getOperand(0))->getOperand(0);
689 if (Src->getType() == NewPtrType)
710 CE->getType()->isPtrOrPtrVectorTy()
714 if (CE->getOpcode() == Instruction::AddrSpaceCast) {
718 assert(CE->getOperand(0)->getType()->getPointerAddressSpace() ==
723 if (CE->getOpcode() == Instruction::BitCast) {
724 if (
Value *NewOperand = ValueWithNewAddrSpace.
lookup(CE->getOperand(0)))
729 if (CE->getOpcode() == Instruction::IntToPtr) {
731 Constant *Src = cast<ConstantExpr>(CE->getOperand(0))->getOperand(0);
732 assert(Src->getType()->getPointerAddressSpace() == NewAddrSpace);
746 if (
Value *NewOperand = ValueWithNewAddrSpace.
lookup(Operand)) {
748 NewOperands.
push_back(cast<Constant>(NewOperand));
751 if (
auto *CExpr = dyn_cast<ConstantExpr>(Operand))
753 CExpr, NewAddrSpace, ValueWithNewAddrSpace,
DL,
TTI)) {
755 NewOperands.
push_back(cast<Constant>(NewOperand));
767 if (CE->getOpcode() == Instruction::GetElementPtr) {
770 return CE->getWithOperands(NewOperands, TargetType,
false,
771 cast<GEPOperator>(CE)->getSourceElementType());
774 return CE->getWithOperands(NewOperands, TargetType);
782Value *InferAddressSpacesImpl::cloneValueWithNewAddressSpace(
783 Value *V,
unsigned NewAddrSpace,
785 const PredicatedAddrSpaceMapTy &PredicatedAS,
788 assert(
V->getType()->getPointerAddressSpace() == FlatAddrSpace &&
792 Value *NewV = cloneInstructionWithNewAddressSpace(
793 I, NewAddrSpace, ValueWithNewAddrSpace, PredicatedAS, PoisonUsesToFix);
794 if (
Instruction *NewI = dyn_cast_or_null<Instruction>(NewV)) {
795 if (NewI->getParent() ==
nullptr) {
796 NewI->insertBefore(
I);
798 NewI->setDebugLoc(
I->getDebugLoc());
805 cast<ConstantExpr>(V), NewAddrSpace, ValueWithNewAddrSpace,
DL,
TTI);
810unsigned InferAddressSpacesImpl::joinAddressSpaces(
unsigned AS1,
811 unsigned AS2)
const {
812 if (AS1 == FlatAddrSpace || AS2 == FlatAddrSpace)
813 return FlatAddrSpace;
821 return (AS1 == AS2) ? AS1 : FlatAddrSpace;
824bool InferAddressSpacesImpl::run(
Function &
F) {
825 DL = &
F.getDataLayout();
837 std::vector<WeakTrackingVH> Postorder = collectFlatAddressExpressions(
F);
841 ValueToAddrSpaceMapTy InferredAddrSpace;
842 PredicatedAddrSpaceMapTy PredicatedAS;
843 inferAddressSpaces(Postorder, InferredAddrSpace, PredicatedAS);
847 return rewriteWithNewAddressSpaces(Postorder, InferredAddrSpace, PredicatedAS,
853void InferAddressSpacesImpl::inferAddressSpaces(
855 ValueToAddrSpaceMapTy &InferredAddrSpace,
856 PredicatedAddrSpaceMapTy &PredicatedAS)
const {
859 for (
Value *V : Postorder)
862 while (!Worklist.empty()) {
863 Value *
V = Worklist.pop_back_val();
867 if (!updateAddressSpace(*V, InferredAddrSpace, PredicatedAS))
872 if (Worklist.count(
User))
875 auto Pos = InferredAddrSpace.find(
User);
878 if (Pos == InferredAddrSpace.end())
884 if (Pos->second == FlatAddrSpace)
887 Worklist.insert(
User);
892unsigned InferAddressSpacesImpl::getPredicatedAddrSpace(
const Value &V,
899 for (
auto &AssumeVH : AC.assumptionsFor(Opnd)) {
902 CallInst *CI = cast<CallInst>(AssumeVH);
916bool InferAddressSpacesImpl::updateAddressSpace(
917 const Value &V, ValueToAddrSpaceMapTy &InferredAddrSpace,
918 PredicatedAddrSpaceMapTy &PredicatedAS)
const {
919 assert(InferredAddrSpace.count(&V));
921 LLVM_DEBUG(
dbgs() <<
"Updating the address space of\n " << V <<
'\n');
928 if (
Op.getOpcode() == Instruction::Select) {
929 Value *Src0 =
Op.getOperand(1);
930 Value *Src1 =
Op.getOperand(2);
932 auto I = InferredAddrSpace.find(Src0);
933 unsigned Src0AS = (
I != InferredAddrSpace.end())
937 auto J = InferredAddrSpace.find(Src1);
938 unsigned Src1AS = (J != InferredAddrSpace.end())
942 auto *C0 = dyn_cast<Constant>(Src0);
943 auto *C1 = dyn_cast<Constant>(Src1);
952 if (C0 && isSafeToCastConstAddrSpace(C0, Src1AS))
954 else if (C1 && isSafeToCastConstAddrSpace(C1, Src0AS))
957 NewAS = joinAddressSpaces(Src0AS, Src1AS);
966 auto I = InferredAddrSpace.find(PtrOperand);
968 if (
I == InferredAddrSpace.end()) {
969 OperandAS = PtrOperand->getType()->getPointerAddressSpace();
970 if (OperandAS == FlatAddrSpace) {
972 unsigned AS = getPredicatedAddrSpace(V, PtrOperand);
975 <<
" deduce operand AS from the predicate addrspace "
979 PredicatedAS[std::make_pair(&V, PtrOperand)] = OperandAS;
983 OperandAS =
I->second;
986 NewAS = joinAddressSpaces(NewAS, OperandAS);
987 if (NewAS == FlatAddrSpace)
993 unsigned OldAS = InferredAddrSpace.lookup(&V);
994 assert(OldAS != FlatAddrSpace);
1001 InferredAddrSpace[&
V] = NewAS;
1011 Use &U,
unsigned AddrSpace) {
1012 User *Inst = U.getUser();
1013 unsigned OpNo = U.getOperandNo();
1014 bool VolatileIsAllowed =
false;
1015 if (
auto *
I = dyn_cast<Instruction>(Inst))
1018 if (
auto *LI = dyn_cast<LoadInst>(Inst))
1020 (VolatileIsAllowed || !LI->isVolatile());
1022 if (
auto *SI = dyn_cast<StoreInst>(Inst))
1024 (VolatileIsAllowed || !SI->isVolatile());
1026 if (
auto *RMW = dyn_cast<AtomicRMWInst>(Inst))
1028 (VolatileIsAllowed || !RMW->isVolatile());
1030 if (
auto *CmpX = dyn_cast<AtomicCmpXchgInst>(Inst))
1032 (VolatileIsAllowed || !CmpX->isVolatile());
1043 MDNode *TBAA =
MI->getMetadata(LLVMContext::MD_tbaa);
1044 MDNode *ScopeMD =
MI->getMetadata(LLVMContext::MD_alias_scope);
1045 MDNode *NoAliasMD =
MI->getMetadata(LLVMContext::MD_noalias);
1047 if (
auto *MSI = dyn_cast<MemSetInst>(
MI)) {
1048 B.CreateMemSet(NewV, MSI->getValue(), MSI->getLength(), MSI->getDestAlign(),
1050 TBAA, ScopeMD, NoAliasMD);
1051 }
else if (
auto *MTI = dyn_cast<MemTransferInst>(
MI)) {
1052 Value *Src = MTI->getRawSource();
1053 Value *Dest = MTI->getRawDest();
1062 if (isa<MemCpyInlineInst>(MTI)) {
1063 MDNode *TBAAStruct = MTI->getMetadata(LLVMContext::MD_tbaa_struct);
1064 B.CreateMemCpyInline(Dest, MTI->getDestAlign(), Src,
1065 MTI->getSourceAlign(), MTI->getLength(),
1067 TBAA, TBAAStruct, ScopeMD, NoAliasMD);
1068 }
else if (isa<MemCpyInst>(MTI)) {
1069 MDNode *TBAAStruct = MTI->getMetadata(LLVMContext::MD_tbaa_struct);
1070 B.CreateMemCpy(Dest, MTI->getDestAlign(), Src, MTI->getSourceAlign(),
1073 TBAA, TBAAStruct, ScopeMD, NoAliasMD);
1075 assert(isa<MemMoveInst>(MTI));
1076 B.CreateMemMove(Dest, MTI->getDestAlign(), Src, MTI->getSourceAlign(),
1079 TBAA, ScopeMD, NoAliasMD);
1084 MI->eraseFromParent();
1090bool InferAddressSpacesImpl::isSafeToCastConstAddrSpace(
Constant *
C,
1091 unsigned NewAS)
const {
1094 unsigned SrcAS =
C->getType()->getPointerAddressSpace();
1095 if (SrcAS == NewAS || isa<UndefValue>(
C))
1099 if (SrcAS != FlatAddrSpace && NewAS != FlatAddrSpace)
1102 if (isa<ConstantPointerNull>(
C))
1105 if (
auto *
Op = dyn_cast<Operator>(
C)) {
1108 if (
Op->getOpcode() == Instruction::AddrSpaceCast)
1109 return isSafeToCastConstAddrSpace(cast<Constant>(
Op->getOperand(0)),
1112 if (
Op->getOpcode() == Instruction::IntToPtr &&
1113 Op->getType()->getPointerAddressSpace() == FlatAddrSpace)
1122 User *CurUser =
I->getUser();
1125 while (
I !=
End &&
I->getUser() == CurUser)
1131bool InferAddressSpacesImpl::rewriteWithNewAddressSpaces(
1133 const ValueToAddrSpaceMapTy &InferredAddrSpace,
1134 const PredicatedAddrSpaceMapTy &PredicatedAS,
Function *
F)
const {
1141 for (
Value *V : Postorder) {
1142 unsigned NewAddrSpace = InferredAddrSpace.lookup(V);
1149 if (
V->getType()->getPointerAddressSpace() != NewAddrSpace) {
1151 cloneValueWithNewAddressSpace(V, NewAddrSpace, ValueWithNewAddrSpace,
1152 PredicatedAS, &PoisonUsesToFix);
1154 ValueWithNewAddrSpace[
V] =
New;
1158 if (ValueWithNewAddrSpace.
empty())
1162 for (
const Use *PoisonUse : PoisonUsesToFix) {
1163 User *
V = PoisonUse->getUser();
1164 User *NewV = cast_or_null<User>(ValueWithNewAddrSpace.
lookup(V));
1168 unsigned OperandNo = PoisonUse->getOperandNo();
1170 NewV->
setOperand(OperandNo, ValueWithNewAddrSpace.
lookup(PoisonUse->get()));
1179 assert(WVH &&
"value was unexpectedly deleted");
1182 if (NewV ==
nullptr)
1185 LLVM_DEBUG(
dbgs() <<
"Replacing the uses of " << *V <<
"\n with\n "
1188 if (
Constant *
C = dyn_cast<Constant>(V)) {
1192 LLVM_DEBUG(
dbgs() <<
"Inserting replacement const cast: " << Replace
1193 <<
": " << *Replace <<
'\n');
1196 if (
auto *
I = dyn_cast<Instruction>(U)) {
1197 if (
I->getFunction() ==
F)
1198 I->replaceUsesOfWith(
C, Replace);
1200 WorkList.
append(
U->user_begin(),
U->user_end());
1203 if (!WorkList.
empty()) {
1206 while (!WorkList.
empty()) {
1208 if (
auto *
I = dyn_cast<Instruction>(U)) {
1209 if (
I->getFunction() ==
F)
1210 VMapper.remapInstruction(*
I);
1213 for (
User *U2 :
U->users())
1214 if (Visited.
insert(U2).second)
1223 for (
I =
V->use_begin(), E =
V->use_end();
I != E;) {
1225 User *CurUser =
U.getUser();
1232 *
TTI, U,
V->getType()->getPointerAddressSpace())) {
1241 if (CurUser == NewV)
1244 if (
auto *CurUserI = dyn_cast<Instruction>(CurUser);
1245 CurUserI && CurUserI->getFunction() !=
F)
1249 if (
auto *
MI = dyn_cast<MemIntrinsic>(CurUser)) {
1254 if (
auto *
II = dyn_cast<IntrinsicInst>(CurUser)) {
1255 if (rewriteIntrinsicOperands(
II, V, NewV))
1259 if (isa<Instruction>(CurUser)) {
1260 if (
ICmpInst *Cmp = dyn_cast<ICmpInst>(CurUser)) {
1268 int SrcIdx =
U.getOperandNo();
1269 int OtherIdx = (SrcIdx == 0) ? 1 : 0;
1270 Value *OtherSrc =
Cmp->getOperand(OtherIdx);
1272 if (
Value *OtherNewV = ValueWithNewAddrSpace.
lookup(OtherSrc)) {
1273 if (OtherNewV->getType()->getPointerAddressSpace() == NewAS) {
1274 Cmp->setOperand(OtherIdx, OtherNewV);
1275 Cmp->setOperand(SrcIdx, NewV);
1281 if (
auto *KOtherSrc = dyn_cast<Constant>(OtherSrc)) {
1282 if (isSafeToCastConstAddrSpace(KOtherSrc, NewAS)) {
1283 Cmp->setOperand(SrcIdx, NewV);
1293 if (ASC->getDestAddressSpace() == NewAS) {
1294 ASC->replaceAllUsesWith(NewV);
1301 if (
Instruction *VInst = dyn_cast<Instruction>(V)) {
1303 if (U == V && isa<AddrSpaceCastInst>(V))
1308 if (
Instruction *NewVInst = dyn_cast<Instruction>(NewV))
1309 InsertPos = std::next(NewVInst->getIterator());
1311 InsertPos = std::next(VInst->getIterator());
1313 while (isa<PHINode>(InsertPos))
1326 if (
V->use_empty()) {
1338bool InferAddressSpaces::runOnFunction(
Function &
F) {
1339 if (skipFunction(
F))
1342 auto *DTWP = getAnalysisIfAvailable<DominatorTreeWrapperPass>();
1344 return InferAddressSpacesImpl(
1345 getAnalysis<AssumptionCacheTracker>().getAssumptionCache(
F), DT,
1346 &getAnalysis<TargetTransformInfoWrapperPass>().getTTI(
F),
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
Expand Atomic instructions
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
This file contains the declarations for the subclasses of Constant, which represent the different flavors of constant values that live in LLVM.
Returns the sub-type a function will return at a given Idx. Should correspond to the result type of an ExtractValue instruction executed with just that one unsigned Idx.
This file defines the DenseMap class.
This file defines the DenseSet and SmallDenseSet classes.
This defines the Use class.
static cl::opt< bool > AssumeDefaultIsFlatAddressSpace("assume-default-is-flat-addrspace", cl::init(false), cl::ReallyHidden, cl::desc("The default address space is assumed as the flat address space. " "This is mainly for test purpose."))
static bool isNoopPtrIntCastPair(const Operator *I2P, const DataLayout &DL, const TargetTransformInfo *TTI)
static bool isAddressExpression(const Value &V, const DataLayout &DL, const TargetTransformInfo *TTI)
static bool handleMemIntrinsicPtrUse(MemIntrinsic *MI, Value *OldV, Value *NewV)
Update memory intrinsic uses that require more complex processing than simple memory instructions.
static SmallVector< Value *, 2 > getPointerOperands(const Value &V, const DataLayout &DL, const TargetTransformInfo *TTI)
static Value * operandWithNewAddressSpaceOrCreatePoison(const Use &OperandUse, unsigned NewAddrSpace, const ValueToValueMapTy &ValueWithNewAddrSpace, const PredicatedAddrSpaceMapTy &PredicatedAS, SmallVectorImpl< const Use * > *PoisonUsesToFix)
static Value * cloneConstantExprWithNewAddressSpace(ConstantExpr *CE, unsigned NewAddrSpace, const ValueToValueMapTy &ValueWithNewAddrSpace, const DataLayout *DL, const TargetTransformInfo *TTI)
static bool isSimplePointerUseValidToReplace(const TargetTransformInfo &TTI, Use &U, unsigned AddrSpace)
returns true if U is the pointer operand of a memory instruction with a single pointer operand that can have its address space changed by simply mutating the use to a new value.
static Value::use_iterator skipToNextUser(Value::use_iterator I, Value::use_iterator End)
Infer address spaces: static Type * getPtrOrVecOfPtrsWithNewAS(Type *Ty, unsigned NewAddrSpace)
static const unsigned UninitializedAddressSpace
uint64_t IntrinsicInst * II
This header defines various interfaces for pass management in LLVM.
#define INITIALIZE_PASS_DEPENDENCY(depName)
#define INITIALIZE_PASS_END(passName, arg, name, cfg, analysis)
#define INITIALIZE_PASS_BEGIN(passName, arg, name, cfg, analysis)
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallVector class.
This class represents a conversion between pointers from one address space to another.
A container for analyses that lazily runs them and caches their results.
PassT::Result * getCachedResult(IRUnitT &IR) const
Get the cached result of an analysis pass for a given IR unit.
PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)
Get the result of an analysis pass for a given IR unit.
Represent the analysis usage information of a pass.
AnalysisUsage & addRequired()
AnalysisUsage & addPreserved()
Add the specified Pass class to the set of analyses preserved by this pass.
void setPreservesCFG()
This function should be called by the pass, iff they do not:
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
A function analysis which provides an AssumptionCache.
An immutable pass that tracks lazily created AssumptionCache objects.
A cache of @llvm.assume calls within a function.
static unsigned getPointerOperandIndex()
static unsigned getPointerOperandIndex()
InstListType::iterator iterator
Instruction iterators...
This class represents a no-op cast from one type to another.
Represents analyses that only rely on functions' control flow.
Value * getArgOperand(unsigned i) const
This class represents a function call, abstracting a target machine's calling convention.
static CastInst * CreatePointerBitCastOrAddrSpaceCast(Value *S, Type *Ty, const Twine &Name="", InsertPosition InsertBefore=nullptr)
Create a BitCast or an AddrSpaceCast cast instruction.
static bool isNoopCast(Instruction::CastOps Opcode, Type *SrcTy, Type *DstTy, const DataLayout &DL)
A no-op cast is one that can be effected without changing any bits.
A constant value that is initialized with an expression using other constant values.
static Constant * getAddrSpaceCast(Constant *C, Type *Ty, bool OnlyIfReduced=false)
static Constant * getBitCast(Constant *C, Type *Ty, bool OnlyIfReduced=false)
This is an important base class in LLVM.
This class represents an Operation in the Expression.
uint64_t getNumOperands() const
A parsed version of the target data layout string in and methods for querying it.
Implements a dense probed hash-table based set.
Analysis pass which computes a DominatorTree.
Legacy analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
FunctionPass class - This class is used to implement most global optimizations.
virtual bool runOnFunction(Function &F)=0
runOnFunction - Virtual method overriden by subclasses to do the per-function processing of the pass.
an instruction for type-safe pointer arithmetic to access elements of arrays and structs
static GetElementPtrInst * Create(Type *PointeeType, Value *Ptr, ArrayRef< Value * > IdxList, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
void setIsInBounds(bool b=true)
Set or clear the inbounds flag on this GEP instruction.
This instruction compares its operands according to the predicate given to the constructor.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
const DebugLoc & getDebugLoc() const
Return the debug location for this node as a DebugLoc.
A wrapper class for inspecting calls to intrinsic functions.
static unsigned getPointerOperandIndex()
This is the common base class for memset/memcpy/memmove.
A Module instance is used to store all the information related to an LLVM module.
This is a utility class that provides an abstraction for the common functionality between Instruction...
unsigned getOpcode() const
Return the opcode for this Instruction or ConstantExpr.
void addIncoming(Value *V, BasicBlock *BB)
Add an incoming value to the end of the PHI list.
static unsigned getOperandNumForIncomingValue(unsigned i)
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will have (use 0 if you really have no idea).
static PassRegistry * getPassRegistry()
getPassRegistry - Access the global registry object, which is automatically initialized at application startup.
virtual void getAnalysisUsage(AnalysisUsage &) const
getAnalysisUsage - This function should be overriden by passes that need analysis information to do t...
static PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
void preserveSet()
Mark an analysis set as preserved.
void preserve()
Mark an analysis as preserved.
static SelectInst * Create(Value *C, Value *S1, Value *S2, const Twine &NameStr="", InsertPosition InsertBefore=nullptr, Instruction *MDFrom=nullptr)
A vector that has set insertion semantics.
This class consists of common code factored out of the SmallVector class to reduce code duplication based on the SmallVector 'N' template parameter.
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
static unsigned getPointerOperandIndex()
Analysis pass providing the TargetTransformInfo.
The instances of the Type class are immutable: once they are created, they are never changed.
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
void setOperand(unsigned i, Value *Val)
Value * getOperand(unsigned i) const
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exists.
Context for (re-)mapping values (and metadata).
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
const Value * stripInBoundsOffsets(function_ref< void(const Value *)> Func=[](const Value *) {}) const
Strip off pointer casts and inbounds GEPs.
use_iterator_impl< Use > use_iterator
Value handle that is nullable, but tries to track the Value.
std::pair< iterator, bool > insert(const ValueT &V)
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
@ C
The default llvm calling convention, compatible with C.
unsigned ID
LLVM IR allows to use arbitrary numbers as calling convention identifiers.
Function * getDeclaration(Module *M, ID id, ArrayRef< Type * > Tys=std::nullopt)
Create or insert an LLVM Function declaration for an intrinsic, and return it.
initializer< Ty > init(const Ty &Val)
PointerTypeMap run(const Module &M)
Compute the PointerTypeMap for the module M.
This is an optimization pass for GlobalISel generic memory operations.
bool isValidAssumeForContext(const Instruction *I, const Instruction *CxtI, const DominatorTree *DT=nullptr, bool AllowEphemerals=false)
Return true if it is valid to use the assumptions provided by an assume intrinsic,...
bool RecursivelyDeleteTriviallyDeadInstructions(Value *V, const TargetLibraryInfo *TLI=nullptr, MemorySSAUpdater *MSSAU=nullptr, std::function< void(Value *)> AboutToDeleteCallback=std::function< void(Value *)>())
If the specified value is a trivially dead instruction, delete it.
void initializeInferAddressSpacesPass(PassRegistry &)
iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)
Make a range that does early increment to allow mutation of the underlying range without disrupting i...
@ RF_IgnoreMissingLocals
If this flag is set, the remapper ignores missing function-local entries (Argument,...
@ RF_NoModuleLevelChanges
If this flag is set, the remapper knows that only local values within a function (such as an instruct...
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
FunctionPass * createInferAddressSpacesPass(unsigned AddressSpace=~0u)
PreservedAnalyses run(Function &F, FunctionAnalysisManager &AM)