24#include "llvm/IR/IntrinsicsSPIRV.h"
31#include <unordered_set>
54#define GET_BuiltinGroup_DECL
55#include "SPIRVGenTables.inc"
60class SPIRVEmitIntrinsics
62 public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
63 SPIRVTargetMachine *TM =
nullptr;
64 SPIRVGlobalRegistry *GR =
nullptr;
66 bool TrackConstants =
true;
67 bool HaveFunPtrs =
false;
68 DenseMap<Instruction *, Constant *> AggrConsts;
69 DenseMap<Instruction *, Type *> AggrConstTypes;
70 DenseSet<Instruction *> AggrStores;
71 std::unordered_set<Value *> Named;
74 DenseMap<Function *, SmallVector<std::pair<unsigned, Type *>>> FDeclPtrTys;
77 bool CanTodoType =
true;
78 unsigned TodoTypeSz = 0;
79 DenseMap<Value *, bool> TodoType;
80 void insertTodoType(
Value *
Op) {
83 auto It = TodoType.try_emplace(
Op,
true);
89 auto It = TodoType.find(
Op);
90 if (It != TodoType.end() && It->second) {
98 auto It = TodoType.find(
Op);
99 return It != TodoType.end() && It->second;
103 std::unordered_set<Instruction *> TypeValidated;
106 enum WellKnownTypes { Event };
109 Type *deduceElementType(
Value *
I,
bool UnknownElemTypeI8);
110 Type *deduceElementTypeHelper(
Value *
I,
bool UnknownElemTypeI8);
111 Type *deduceElementTypeHelper(
Value *
I, std::unordered_set<Value *> &Visited,
112 bool UnknownElemTypeI8,
113 bool IgnoreKnownType =
false);
114 Type *deduceElementTypeByValueDeep(
Type *ValueTy,
Value *Operand,
115 bool UnknownElemTypeI8);
116 Type *deduceElementTypeByValueDeep(
Type *ValueTy,
Value *Operand,
117 std::unordered_set<Value *> &Visited,
118 bool UnknownElemTypeI8);
120 std::unordered_set<Value *> &Visited,
121 bool UnknownElemTypeI8);
123 bool UnknownElemTypeI8);
126 Type *deduceNestedTypeHelper(
User *
U,
bool UnknownElemTypeI8);
128 std::unordered_set<Value *> &Visited,
129 bool UnknownElemTypeI8);
133 SmallPtrSet<Instruction *, 4> *IncompleteRets,
134 const SmallPtrSet<Value *, 4> *AskOps =
nullptr,
135 bool IsPostprocessing =
false);
140 Type *reconstructType(
Value *
Op,
bool UnknownElemTypeI8,
141 bool IsPostprocessing);
146 bool UnknownElemTypeI8);
148 void insertAssignPtrTypeTargetExt(TargetExtType *AssignedType,
Value *
V,
151 Type *ExpectedElementType,
152 unsigned OperandToReplace,
155 bool shouldTryToAddMemAliasingDecoration(
Instruction *Inst);
157 void insertConstantsForFPFastMathDefault(
Module &
M);
158 void processGlobalValue(GlobalVariable &GV,
IRBuilder<> &
B);
163 std::unordered_set<Function *> &FVisited);
165 bool deduceOperandElementTypeCalledFunction(
167 Type *&KnownElemTy,
bool &Incomplete);
168 void deduceOperandElementTypeFunctionPointer(
170 Type *&KnownElemTy,
bool IsPostprocessing);
171 bool deduceOperandElementTypeFunctionRet(
172 Instruction *
I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
173 const SmallPtrSet<Value *, 4> *AskOps,
bool IsPostprocessing,
178 DenseMap<Function *, CallInst *> Ptrcasts);
180 DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
183 DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
184 void propagateElemTypeRec(
Value *
Op,
Type *PtrElemTy,
Type *CastElemTy,
185 DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
186 std::unordered_set<Value *> &Visited,
187 DenseMap<Function *, CallInst *> Ptrcasts);
195 GetElementPtrInst *simplifyZeroLengthArrayGepInst(GetElementPtrInst *
GEP);
198 bool postprocessTypes(
Module &
M);
199 bool processFunctionPointers(
Module &
M);
200 void parseFunDeclarations(
Module &
M);
202 void useRoundingMode(ConstrainedFPIntrinsic *FPI,
IRBuilder<> &
B);
218 bool walkLogicalAccessChain(
219 GetElementPtrInst &
GEP,
220 const std::function<
void(
Type *PointedType, uint64_t
Index)>
229 Type *getGEPType(GetElementPtrInst *
GEP);
236 Type *getGEPTypeLogical(GetElementPtrInst *
GEP);
238 Instruction *buildLogicalAccessChainFromGEP(GetElementPtrInst &
GEP);
242 SPIRVEmitIntrinsics(SPIRVTargetMachine *TM =
nullptr)
243 : ModulePass(ID), TM(TM) {}
246 Instruction *visitGetElementPtrInst(GetElementPtrInst &
I);
248 Instruction *visitInsertElementInst(InsertElementInst &
I);
249 Instruction *visitExtractElementInst(ExtractElementInst &
I);
251 Instruction *visitExtractValueInst(ExtractValueInst &
I);
255 Instruction *visitAtomicCmpXchgInst(AtomicCmpXchgInst &
I);
259 StringRef getPassName()
const override {
return "SPIRV emit intrinsics"; }
261 bool runOnModule(
Module &
M)
override;
263 void getAnalysisUsage(AnalysisUsage &AU)
const override {
264 ModulePass::getAnalysisUsage(AU);
273 return II->getIntrinsicID() == Intrinsic::experimental_convergence_entry ||
274 II->getIntrinsicID() == Intrinsic::experimental_convergence_loop ||
275 II->getIntrinsicID() == Intrinsic::experimental_convergence_anchor;
278bool expectIgnoredInIRTranslation(
const Instruction *
I) {
282 switch (
II->getIntrinsicID()) {
283 case Intrinsic::invariant_start:
284 case Intrinsic::spv_resource_handlefrombinding:
285 case Intrinsic::spv_resource_getpointer:
295 if (
II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
296 Value *V =
II->getArgOperand(0);
297 return getPointerRoot(V);
// Pass identification token; its address (not its value) uniquely identifies
// this pass to the legacy pass manager.
char SPIRVEmitIntrinsics::ID = 0;
328 B.SetInsertPoint(
I->getParent()->getFirstNonPHIOrDbgOrAlloca());
334 B.SetCurrentDebugLocation(
I->getDebugLoc());
335 if (
I->getType()->isVoidTy())
336 B.SetInsertPoint(
I->getNextNode());
338 B.SetInsertPoint(*
I->getInsertionPointAfterDef());
343 switch (Intr->getIntrinsicID()) {
344 case Intrinsic::invariant_start:
345 case Intrinsic::invariant_end:
353 if (
I->getType()->isTokenTy())
355 "does not support token type",
360 if (!
I->hasName() ||
I->getType()->isAggregateType() ||
361 expectIgnoredInIRTranslation(
I))
369 if (Name.starts_with(
"spv.mutated_callsite"))
371 if (Name.starts_with(
"spv.named_mutated_callsite"))
372 I->setName(Name.substr(Name.rfind(
'.') + 1));
377 std::vector<Value *> Args = {
380 B.CreateIntrinsic(Intrinsic::spv_assign_name, {
I->getType()}, Args);
383void SPIRVEmitIntrinsics::replaceAllUsesWith(
Value *Src,
Value *Dest,
387 if (isTodoType(Src)) {
390 insertTodoType(Dest);
394void SPIRVEmitIntrinsics::replaceAllUsesWithAndErase(
IRBuilder<> &
B,
399 std::string
Name = Src->hasName() ? Src->getName().str() :
"";
400 Src->eraseFromParent();
403 if (Named.insert(Dest).second)
428Type *SPIRVEmitIntrinsics::reconstructType(
Value *
Op,
bool UnknownElemTypeI8,
429 bool IsPostprocessing) {
444 if (UnknownElemTypeI8) {
445 if (!IsPostprocessing)
453CallInst *SPIRVEmitIntrinsics::buildSpvPtrcast(Function *
F,
Value *
Op,
461 B.SetInsertPointPastAllocas(OpA->getParent());
464 B.SetInsertPoint(
F->getEntryBlock().getFirstNonPHIOrDbgOrAlloca());
466 Type *OpTy =
Op->getType();
470 CallInst *PtrCasted =
471 B.CreateIntrinsic(Intrinsic::spv_ptrcast, {
Types},
Args);
476void SPIRVEmitIntrinsics::replaceUsesOfWithSpvPtrcast(
478 DenseMap<Function *, CallInst *> Ptrcasts) {
480 CallInst *PtrCastedI =
nullptr;
481 auto It = Ptrcasts.
find(
F);
482 if (It == Ptrcasts.
end()) {
483 PtrCastedI = buildSpvPtrcast(
F,
Op, ElemTy);
484 Ptrcasts[
F] = PtrCastedI;
486 PtrCastedI = It->second;
488 I->replaceUsesOfWith(
Op, PtrCastedI);
491void SPIRVEmitIntrinsics::propagateElemType(
493 DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
494 DenseMap<Function *, CallInst *> Ptrcasts;
496 for (
auto *U :
Users) {
499 if (!VisitedSubst.insert(std::make_pair(U,
Op)).second)
505 TypeValidated.find(UI) != TypeValidated.end())
506 replaceUsesOfWithSpvPtrcast(
Op, ElemTy, UI, Ptrcasts);
510void SPIRVEmitIntrinsics::propagateElemTypeRec(
512 DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
513 std::unordered_set<Value *> Visited;
514 DenseMap<Function *, CallInst *> Ptrcasts;
515 propagateElemTypeRec(
Op, PtrElemTy, CastElemTy, VisitedSubst, Visited,
516 std::move(Ptrcasts));
519void SPIRVEmitIntrinsics::propagateElemTypeRec(
521 DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
522 std::unordered_set<Value *> &Visited,
523 DenseMap<Function *, CallInst *> Ptrcasts) {
524 if (!Visited.insert(
Op).second)
527 for (
auto *U :
Users) {
530 if (!VisitedSubst.insert(std::make_pair(U,
Op)).second)
536 TypeValidated.find(UI) != TypeValidated.end())
537 replaceUsesOfWithSpvPtrcast(
Op, CastElemTy, UI, Ptrcasts);
545SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
Type *ValueTy,
Value *Operand,
546 bool UnknownElemTypeI8) {
547 std::unordered_set<Value *> Visited;
548 return deduceElementTypeByValueDeep(ValueTy, Operand, Visited,
552Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
553 Type *ValueTy,
Value *Operand, std::unordered_set<Value *> &Visited,
554 bool UnknownElemTypeI8) {
559 deduceElementTypeHelper(Operand, Visited, UnknownElemTypeI8))
570Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
571 Value *
Op, std::unordered_set<Value *> &Visited,
bool UnknownElemTypeI8) {
583 for (User *OpU :
Op->users()) {
585 if (
Type *Ty = deduceElementTypeHelper(Inst, Visited, UnknownElemTypeI8))
598 if ((DemangledName.
starts_with(
"__spirv_ocl_printf(") ||
607Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
Value *
I,
608 bool UnknownElemTypeI8) {
609 std::unordered_set<Value *> Visited;
610 return deduceElementTypeHelper(
I, Visited, UnknownElemTypeI8);
613void SPIRVEmitIntrinsics::maybeAssignPtrType(
Type *&Ty,
Value *
Op,
Type *RefTy,
614 bool UnknownElemTypeI8) {
616 if (!UnknownElemTypeI8)
623bool SPIRVEmitIntrinsics::walkLogicalAccessChain(
624 GetElementPtrInst &
GEP,
625 const std::function<
void(
Type *, uint64_t)> &OnLiteralIndexing,
626 const std::function<
void(
Type *,
Value *)> &OnDynamicIndexing) {
634 Value *Src = getPointerRoot(
GEP.getPointerOperand());
635 Type *CurType = deduceElementType(Src,
true);
644 OnDynamicIndexing(AT->getElementType(), Operand);
645 return AT ==
nullptr;
653 uint32_t EltTypeSize =
DL.getTypeSizeInBits(AT->getElementType()) / 8;
657 CurType = AT->getElementType();
658 OnLiteralIndexing(CurType, Index);
660 uint32_t StructSize =
DL.getTypeSizeInBits(ST) / 8;
663 const auto &STL =
DL.getStructLayout(ST);
664 unsigned Element = STL->getElementContainingOffset(
Offset);
665 Offset -= STL->getElementOffset(Element);
666 CurType =
ST->getElementType(Element);
667 OnLiteralIndexing(CurType, Element);
679SPIRVEmitIntrinsics::buildLogicalAccessChainFromGEP(GetElementPtrInst &
GEP) {
682 B.SetInsertPoint(&
GEP);
684 std::vector<Value *> Indices;
685 Indices.push_back(ConstantInt::get(
686 IntegerType::getInt32Ty(CurrF->
getContext()), 0,
false));
687 walkLogicalAccessChain(
689 [&Indices, &
B](
Type *EltType, uint64_t Index) {
691 ConstantInt::get(
B.getInt64Ty(), Index,
false));
694 uint32_t EltTypeSize =
DL.getTypeSizeInBits(EltType) / 8;
696 Offset, ConstantInt::get(
Offset->getType(), EltTypeSize,
698 Indices.push_back(Index);
703 Args.push_back(
B.getInt1(
GEP.isInBounds()));
704 Args.push_back(
GEP.getOperand(0));
706 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_gep, {
Types}, {
Args});
707 replaceAllUsesWithAndErase(
B, &
GEP, NewI);
711Type *SPIRVEmitIntrinsics::getGEPTypeLogical(GetElementPtrInst *
GEP) {
713 Type *CurType =
GEP->getResultElementType();
715 bool Interrupted = walkLogicalAccessChain(
716 *
GEP, [&CurType](
Type *EltType, uint64_t Index) { CurType = EltType; },
719 return Interrupted ?
GEP->getResultElementType() : CurType;
722Type *SPIRVEmitIntrinsics::getGEPType(GetElementPtrInst *
Ref) {
723 if (
Ref->getSourceElementType() ==
724 IntegerType::getInt8Ty(CurrF->
getContext()) &&
726 return getGEPTypeLogical(
Ref);
733 Ty =
Ref->getSourceElementType();
737 Ty =
Ref->getResultElementType();
742Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
743 Value *
I, std::unordered_set<Value *> &Visited,
bool UnknownElemTypeI8,
744 bool IgnoreKnownType) {
750 if (!IgnoreKnownType)
755 if (!Visited.insert(
I).second)
762 maybeAssignPtrType(Ty,
I,
Ref->getAllocatedType(), UnknownElemTypeI8);
764 Ty = getGEPType(
Ref);
769 KnownTy =
Op->getType();
771 maybeAssignPtrType(Ty,
I, ElemTy, UnknownElemTypeI8);
774 Ty = SPIRV::getOriginalFunctionType(*Fn);
777 Ty = deduceElementTypeByValueDeep(
779 Ref->getNumOperands() > 0 ?
Ref->getOperand(0) :
nullptr, Visited,
783 Type *RefTy = deduceElementTypeHelper(
Ref->getPointerOperand(), Visited,
785 maybeAssignPtrType(Ty,
I, RefTy, UnknownElemTypeI8);
787 maybeAssignPtrType(Ty,
I,
Ref->getDestTy(), UnknownElemTypeI8);
789 if (
Type *Src =
Ref->getSrcTy(), *Dest =
Ref->getDestTy();
791 Ty = deduceElementTypeHelper(
Ref->getOperand(0), Visited,
796 Ty = deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8);
800 Ty = deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8);
802 Type *BestTy =
nullptr;
804 DenseMap<Type *, unsigned> PhiTys;
805 for (
int i =
Ref->getNumIncomingValues() - 1; i >= 0; --i) {
806 Ty = deduceElementTypeByUsersDeep(
Ref->getIncomingValue(i), Visited,
813 if (It.first->second > MaxN) {
814 MaxN = It.first->second;
822 for (
Value *
Op : {
Ref->getTrueValue(),
Ref->getFalseValue()}) {
823 Ty = deduceElementTypeByUsersDeep(
Op, Visited, UnknownElemTypeI8);
828 static StringMap<unsigned> ResTypeByArg = {
832 {
"__spirv_GenericCastToPtr_ToGlobal", 0},
833 {
"__spirv_GenericCastToPtr_ToLocal", 0},
834 {
"__spirv_GenericCastToPtr_ToPrivate", 0},
835 {
"__spirv_GenericCastToPtrExplicit_ToGlobal", 0},
836 {
"__spirv_GenericCastToPtrExplicit_ToLocal", 0},
837 {
"__spirv_GenericCastToPtrExplicit_ToPrivate", 0}};
841 if (
II &&
II->getIntrinsicID() == Intrinsic::spv_resource_getpointer) {
843 if (HandleType->getTargetExtName() ==
"spirv.Image" ||
844 HandleType->getTargetExtName() ==
"spirv.SignedImage") {
845 for (User *U :
II->users()) {
850 }
else if (HandleType->getTargetExtName() ==
"spirv.VulkanBuffer") {
852 Ty = HandleType->getTypeParameter(0);
864 }
else if (
II &&
II->getIntrinsicID() ==
865 Intrinsic::spv_generic_cast_to_ptr_explicit) {
866 Ty = deduceElementTypeHelper(CI->getArgOperand(0), Visited,
868 }
else if (Function *CalledF = CI->getCalledFunction()) {
869 std::string DemangledName =
871 if (DemangledName.length() > 0)
872 DemangledName = SPIRV::lookupBuiltinNameHelper(DemangledName);
873 auto AsArgIt = ResTypeByArg.
find(DemangledName);
874 if (AsArgIt != ResTypeByArg.
end())
875 Ty = deduceElementTypeHelper(CI->getArgOperand(AsArgIt->second),
876 Visited, UnknownElemTypeI8);
883 if (Ty && !IgnoreKnownType) {
894Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(User *U,
895 bool UnknownElemTypeI8) {
896 std::unordered_set<Value *> Visited;
897 return deduceNestedTypeHelper(U,
U->getType(), Visited, UnknownElemTypeI8);
900Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
901 User *U,
Type *OrigTy, std::unordered_set<Value *> &Visited,
902 bool UnknownElemTypeI8) {
911 if (!Visited.insert(U).second)
917 for (
unsigned i = 0; i <
U->getNumOperands(); ++i) {
919 assert(
Op &&
"Operands should not be null.");
920 Type *OpTy =
Op->getType();
924 deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8))
931 Change |= Ty != OpTy;
939 if (
Value *
Op =
U->getNumOperands() > 0 ?
U->getOperand(0) :
nullptr) {
940 Type *OpTy = ArrTy->getElementType();
944 deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8))
951 Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
957 if (
Value *
Op =
U->getNumOperands() > 0 ?
U->getOperand(0) :
nullptr) {
958 Type *OpTy = VecTy->getElementType();
962 deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8))
969 Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
979Type *SPIRVEmitIntrinsics::deduceElementType(
Value *
I,
bool UnknownElemTypeI8) {
980 if (
Type *Ty = deduceElementTypeHelper(
I, UnknownElemTypeI8))
982 if (!UnknownElemTypeI8)
985 return IntegerType::getInt8Ty(
I->getContext());
989 Value *PointerOperand) {
1003bool SPIRVEmitIntrinsics::deduceOperandElementTypeCalledFunction(
1005 Type *&KnownElemTy,
bool &Incomplete) {
1009 std::string DemangledName =
1011 if (DemangledName.length() > 0 &&
1013 const SPIRVSubtarget &
ST = TM->
getSubtarget<SPIRVSubtarget>(*CalledF);
1014 auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
1015 DemangledName,
ST.getPreferredInstructionSet());
1016 if (Opcode == SPIRV::OpGroupAsyncCopy) {
1017 for (
unsigned i = 0, PtrCnt = 0; i < CI->
arg_size() && PtrCnt < 2; ++i) {
1023 KnownElemTy = ElemTy;
1024 Ops.push_back(std::make_pair(
Op, i));
1026 }
else if (Grp == SPIRV::Atomic || Grp == SPIRV::AtomicFloating) {
1033 case SPIRV::OpAtomicFAddEXT:
1034 case SPIRV::OpAtomicFMinEXT:
1035 case SPIRV::OpAtomicFMaxEXT:
1036 case SPIRV::OpAtomicLoad:
1037 case SPIRV::OpAtomicCompareExchangeWeak:
1038 case SPIRV::OpAtomicCompareExchange:
1039 case SPIRV::OpAtomicExchange:
1040 case SPIRV::OpAtomicIAdd:
1041 case SPIRV::OpAtomicISub:
1042 case SPIRV::OpAtomicOr:
1043 case SPIRV::OpAtomicXor:
1044 case SPIRV::OpAtomicAnd:
1045 case SPIRV::OpAtomicUMin:
1046 case SPIRV::OpAtomicUMax:
1047 case SPIRV::OpAtomicSMin:
1048 case SPIRV::OpAtomicSMax: {
1053 Incomplete = isTodoType(
Op);
1054 Ops.push_back(std::make_pair(
Op, 0));
1056 case SPIRV::OpAtomicStore: {
1065 Incomplete = isTodoType(
Op);
1066 Ops.push_back(std::make_pair(
Op, 0));
1075void SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionPointer(
1077 Type *&KnownElemTy,
bool IsPostprocessing) {
1081 Ops.push_back(std::make_pair(
Op, std::numeric_limits<unsigned>::max()));
1082 FunctionType *FTy = SPIRV::getOriginalFunctionType(*CI);
1083 bool IsNewFTy =
false, IsIncomplete =
false;
1086 Type *ArgTy = Arg->getType();
1091 if (isTodoType(Arg))
1092 IsIncomplete =
true;
1094 IsIncomplete =
true;
1097 ArgTy = FTy->getFunctionParamType(ParmIdx);
1101 Type *RetTy = FTy->getReturnType();
1108 IsIncomplete =
true;
1110 IsIncomplete =
true;
1113 if (!IsPostprocessing && IsIncomplete)
1116 IsNewFTy ? FunctionType::get(RetTy, ArgTys, FTy->isVarArg()) : FTy;
1119bool SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionRet(
1120 Instruction *
I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
1121 const SmallPtrSet<Value *, 4> *AskOps,
bool IsPostprocessing,
1133 DenseSet<std::pair<Value *, Value *>> VisitedSubst{std::make_pair(
I,
Op)};
1134 for (User *U :
F->users()) {
1142 propagateElemType(CI, PrevElemTy, VisitedSubst);
1152 for (Instruction *IncompleteRetI : *IncompleteRets)
1153 deduceOperandElementType(IncompleteRetI,
nullptr, AskOps,
1155 }
else if (IncompleteRets) {
1158 TypeValidated.insert(
I);
1166void SPIRVEmitIntrinsics::deduceOperandElementType(
1167 Instruction *
I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
1168 const SmallPtrSet<Value *, 4> *AskOps,
bool IsPostprocessing) {
1170 Type *KnownElemTy =
nullptr;
1171 bool Incomplete =
false;
1177 Incomplete = isTodoType(
I);
1178 for (
unsigned i = 0; i <
Ref->getNumIncomingValues(); i++) {
1181 Ops.push_back(std::make_pair(
Op, i));
1187 Incomplete = isTodoType(
I);
1188 Ops.push_back(std::make_pair(
Ref->getPointerOperand(), 0));
1195 Incomplete = isTodoType(
I);
1196 Ops.push_back(std::make_pair(
Ref->getOperand(0), 0));
1200 KnownElemTy =
Ref->getSourceElementType();
1201 Ops.push_back(std::make_pair(
Ref->getPointerOperand(),
1204 KnownElemTy =
I->getType();
1210 Ops.push_back(std::make_pair(
Ref->getPointerOperand(),
1214 reconstructType(
Ref->getValueOperand(),
false, IsPostprocessing)))
1219 Ops.push_back(std::make_pair(
Ref->getPointerOperand(),
1227 Incomplete = isTodoType(
Ref->getPointerOperand());
1228 Ops.push_back(std::make_pair(
Ref->getPointerOperand(),
1236 Incomplete = isTodoType(
Ref->getPointerOperand());
1237 Ops.push_back(std::make_pair(
Ref->getPointerOperand(),
1243 Incomplete = isTodoType(
I);
1244 for (
unsigned i = 0; i <
Ref->getNumOperands(); i++) {
1247 Ops.push_back(std::make_pair(
Op, i));
1255 if (deduceOperandElementTypeFunctionRet(
I, IncompleteRets, AskOps,
1256 IsPostprocessing, KnownElemTy,
Op,
1259 Incomplete = isTodoType(CurrF);
1260 Ops.push_back(std::make_pair(
Op, 0));
1266 bool Incomplete0 = isTodoType(Op0);
1267 bool Incomplete1 = isTodoType(Op1);
1269 Type *ElemTy0 = (Incomplete0 && !Incomplete1 && ElemTy1)
1271 : GR->findDeducedElementType(Op0);
1273 KnownElemTy = ElemTy0;
1274 Incomplete = Incomplete0;
1275 Ops.push_back(std::make_pair(Op1, 1));
1276 }
else if (ElemTy1) {
1277 KnownElemTy = ElemTy1;
1278 Incomplete = Incomplete1;
1279 Ops.push_back(std::make_pair(Op0, 0));
1283 deduceOperandElementTypeCalledFunction(CI,
Ops, KnownElemTy, Incomplete);
1284 else if (HaveFunPtrs)
1285 deduceOperandElementTypeFunctionPointer(CI,
Ops, KnownElemTy,
1290 if (!KnownElemTy ||
Ops.size() == 0)
1295 for (
auto &OpIt :
Ops) {
1299 Type *AskTy =
nullptr;
1300 CallInst *AskCI =
nullptr;
1301 if (IsPostprocessing && AskOps) {
1307 if (Ty == KnownElemTy)
1310 Type *OpTy =
Op->getType();
1311 if (
Op->hasUseList() &&
1318 else if (!IsPostprocessing)
1322 if (AssignCI ==
nullptr) {
1331 DenseSet<std::pair<Value *, Value *>> VisitedSubst{
1332 std::make_pair(
I,
Op)};
1333 propagateElemTypeRec(
Op, KnownElemTy, PrevElemTy, VisitedSubst);
1337 CallInst *PtrCastI =
1338 buildSpvPtrcast(
I->getParent()->getParent(),
Op, KnownElemTy);
1339 if (OpIt.second == std::numeric_limits<unsigned>::max())
1342 I->setOperand(OpIt.second, PtrCastI);
1345 TypeValidated.insert(
I);
1348void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
1353 if (isAssignTypeInstr(U)) {
1354 B.SetInsertPoint(U);
1355 SmallVector<Value *, 2>
Args = {
New,
U->getOperand(1)};
1356 CallInst *AssignCI =
1357 B.CreateIntrinsic(Intrinsic::spv_assign_type, {
New->getType()},
Args);
1359 U->eraseFromParent();
1362 U->replaceUsesOfWith(Old, New);
1367 New->copyMetadata(*Old);
1371void SPIRVEmitIntrinsics::preprocessUndefs(
IRBuilder<> &
B) {
1372 std::queue<Instruction *> Worklist;
1376 while (!Worklist.empty()) {
1378 bool BPrepared =
false;
1381 for (
auto &
Op :
I->operands()) {
1383 if (!AggrUndef || !
Op->getType()->isAggregateType())
1390 auto *IntrUndef =
B.CreateIntrinsic(Intrinsic::spv_undef, {});
1391 Worklist.push(IntrUndef);
1392 I->replaceUsesOfWith(
Op, IntrUndef);
1393 AggrConsts[IntrUndef] = AggrUndef;
1394 AggrConstTypes[IntrUndef] = AggrUndef->getType();
1399void SPIRVEmitIntrinsics::preprocessCompositeConstants(
IRBuilder<> &
B) {
1400 std::queue<Instruction *> Worklist;
1404 while (!Worklist.empty()) {
1405 auto *
I = Worklist.front();
1408 bool KeepInst =
false;
1409 for (
const auto &
Op :
I->operands()) {
1411 Type *ResTy =
nullptr;
1414 ResTy = COp->getType();
1426 ResTy =
Op->getType()->isVectorTy() ? COp->getType() :
B.getInt32Ty();
1431 for (
unsigned i = 0; i < COp->getNumElements(); ++i)
1432 Args.push_back(COp->getElementAsConstant(i));
1436 IsPhi ?
B.SetInsertPointPastAllocas(
I->getParent()->getParent())
1437 :
B.SetInsertPoint(
I);
1441 B.CreateIntrinsic(Intrinsic::spv_const_composite, {ResTy}, {
Args});
1445 AggrConsts[CI] = AggrConst;
1446 AggrConstTypes[CI] = deduceNestedTypeHelper(AggrConst,
false);
1458 B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {
I->getType()},
1463 unsigned RoundingModeDeco,
1470 ConstantInt::get(
Int32Ty, SPIRV::Decoration::FPRoundingMode)),
1479 MDNode *SaturatedConversionNode =
1481 Int32Ty, SPIRV::Decoration::SaturatedConversion))});
1488 if (Fu->isIntrinsic()) {
1489 unsigned const int IntrinsicId = Fu->getIntrinsicID();
1490 switch (IntrinsicId) {
1491 case Intrinsic::fptosi_sat:
1492 case Intrinsic::fptoui_sat:
1511 MDString *ConstraintString =
MDString::get(Ctx,
IA->getConstraintString());
1519 B.SetInsertPoint(&
Call);
1520 B.CreateIntrinsic(Intrinsic::spv_inline_asm, {
Args});
1525void SPIRVEmitIntrinsics::useRoundingMode(ConstrainedFPIntrinsic *FPI,
1528 if (!
RM.has_value())
1530 unsigned RoundingModeDeco = std::numeric_limits<unsigned>::max();
1531 switch (
RM.value()) {
1535 case RoundingMode::NearestTiesToEven:
1536 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTE;
1538 case RoundingMode::TowardNegative:
1539 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTN;
1541 case RoundingMode::TowardPositive:
1542 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTP;
1544 case RoundingMode::TowardZero:
1545 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTZ;
1547 case RoundingMode::Dynamic:
1548 case RoundingMode::NearestTiesToAway:
1552 if (RoundingModeDeco == std::numeric_limits<unsigned>::max())
1558Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &
I) {
1561 B.SetInsertPoint(&
I);
1564 for (
auto &
Op :
I.operands()) {
1565 if (
Op.get()->getType()->isSized()) {
1574 CallInst *NewI =
B.CreateIntrinsic(Intrinsic::spv_switch,
1575 {
I.getOperand(0)->getType()}, {
Args});
1579 I.eraseFromParent();
1582 B.SetInsertPoint(ParentBB);
1583 IndirectBrInst *BrI =
B.CreateIndirectBr(
1586 for (BasicBlock *BBCase : BBCases)
1592 if (
GEP->getNumIndices() == 0)
1595 return CI->getZExtValue() == 0;
1600Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &
I) {
1602 B.SetInsertPoint(&
I);
1610 if (
I.getSourceElementType() ==
1611 IntegerType::getInt8Ty(CurrF->
getContext())) {
1612 return buildLogicalAccessChainFromGEP(
I);
1617 Value *PtrOp =
I.getPointerOperand();
1618 Type *SrcElemTy =
I.getSourceElementType();
1619 Type *DeducedPointeeTy = deduceElementType(PtrOp,
true);
1622 if (ArrTy->getElementType() == SrcElemTy) {
1624 Type *FirstIdxType =
I.getOperand(1)->getType();
1625 NewIndices.
push_back(ConstantInt::get(FirstIdxType, 0));
1626 for (
Value *Idx :
I.indices())
1631 Args.push_back(
B.getInt1(
I.isInBounds()));
1632 Args.push_back(
I.getPointerOperand());
1635 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_gep, {
Types}, {
Args});
1636 replaceAllUsesWithAndErase(
B, &
I, NewI);
1644 Args.push_back(
B.getInt1(
I.isInBounds()));
1646 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_gep, {
Types}, {
Args});
1647 replaceAllUsesWithAndErase(
B, &
I, NewI);
1651Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &
I) {
1653 B.SetInsertPoint(&
I);
1662 I.eraseFromParent();
1668 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_bitcast, {
Types}, {
Args});
1669 replaceAllUsesWithAndErase(
B, &
I, NewI);
1673void SPIRVEmitIntrinsics::insertAssignPtrTypeTargetExt(
1675 Type *VTy =
V->getType();
1680 if (ElemTy != AssignedType)
1693 if (CurrentType == AssignedType)
1700 " for value " +
V->getName(),
1708void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
1709 Instruction *
I,
Value *Pointer,
Type *ExpectedElementType,
1711 TypeValidated.insert(
I);
1714 Type *PointerElemTy = deduceElementTypeHelper(Pointer,
false);
1715 if (PointerElemTy == ExpectedElementType ||
1721 MetadataAsValue *VMD =
buildMD(ExpectedElementVal);
1723 bool FirstPtrCastOrAssignPtrType =
true;
1729 for (
auto User :
Pointer->users()) {
1732 (
II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
1733 II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
1734 II->getOperand(0) != Pointer)
1739 FirstPtrCastOrAssignPtrType =
false;
1740 if (
II->getOperand(1) != VMD ||
1747 if (
II->getIntrinsicID() != Intrinsic::spv_ptrcast)
1752 if (
II->getParent() !=
I->getParent())
1755 I->setOperand(OperandToReplace,
II);
1761 if (FirstPtrCastOrAssignPtrType) {
1766 }
else if (isTodoType(Pointer)) {
1767 eraseTodoType(Pointer);
1774 DenseSet<std::pair<Value *, Value *>> VisitedSubst{
1775 std::make_pair(
I, Pointer)};
1777 propagateElemType(Pointer, PrevElemTy, VisitedSubst);
1789 auto *PtrCastI =
B.CreateIntrinsic(Intrinsic::spv_ptrcast, {
Types},
Args);
1795void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(Instruction *
I,
1800 replacePointerOperandWithPtrCast(
1801 I,
SI->getValueOperand(), IntegerType::getInt8Ty(CurrF->
getContext()),
1807 Type *OpTy =
Op->getType();
1810 if (OpTy ==
Op->getType())
1811 OpTy = deduceElementTypeByValueDeep(OpTy,
Op,
false);
1812 replacePointerOperandWithPtrCast(
I, Pointer, OpTy, 1,
B);
1817 Type *OpTy = LI->getType();
1822 Type *NewOpTy = OpTy;
1823 OpTy = deduceElementTypeByValueDeep(OpTy, LI,
false);
1824 if (OpTy == NewOpTy)
1825 insertTodoType(Pointer);
1828 replacePointerOperandWithPtrCast(
I, Pointer, OpTy, 0,
B);
1833 Type *OpTy =
nullptr;
1845 OpTy = GEPI->getSourceElementType();
1847 replacePointerOperandWithPtrCast(
I, Pointer, OpTy, 0,
B);
1849 insertTodoType(Pointer);
1861 std::string DemangledName =
1865 bool HaveTypes =
false;
1883 for (User *U : CalledArg->
users()) {
1885 if ((ElemTy = deduceElementTypeHelper(Inst,
false)) !=
nullptr)
1891 HaveTypes |= ElemTy !=
nullptr;
1896 if (DemangledName.empty() && !HaveTypes)
1914 Type *ExpectedType =
1916 if (!ExpectedType && !DemangledName.empty())
1917 ExpectedType = SPIRV::parseBuiltinCallArgumentBaseType(
1918 DemangledName,
OpIdx,
I->getContext());
1919 if (!ExpectedType || ExpectedType->
isVoidTy())
1927 replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType,
OpIdx,
B);
1931Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &
I) {
1938 I.getOperand(1)->getType(),
1939 I.getOperand(2)->getType()};
1941 B.SetInsertPoint(&
I);
1943 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_insertelt, {
Types}, {
Args});
1944 replaceAllUsesWithAndErase(
B, &
I, NewI);
1949SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &
I) {
1956 B.SetInsertPoint(&
I);
1958 I.getIndexOperand()->getType()};
1959 SmallVector<Value *, 2>
Args = {
I.getVectorOperand(),
I.getIndexOperand()};
1960 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_extractelt, {
Types}, {
Args});
1961 replaceAllUsesWithAndErase(
B, &
I, NewI);
1965Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &
I) {
1967 B.SetInsertPoint(&
I);
1970 Value *AggregateOp =
I.getAggregateOperand();
1974 Args.push_back(AggregateOp);
1975 Args.push_back(
I.getInsertedValueOperand());
1976 for (
auto &
Op :
I.indices())
1977 Args.push_back(
B.getInt32(
Op));
1979 B.CreateIntrinsic(Intrinsic::spv_insertv, {
Types}, {
Args});
1980 replaceMemInstrUses(&
I, NewI,
B);
1984Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &
I) {
1985 if (
I.getAggregateOperand()->getType()->isAggregateType())
1988 B.SetInsertPoint(&
I);
1990 for (
auto &
Op :
I.indices())
1991 Args.push_back(
B.getInt32(
Op));
1993 B.CreateIntrinsic(Intrinsic::spv_extractv, {
I.getType()}, {
Args});
1994 replaceAllUsesWithAndErase(
B, &
I, NewI);
1998Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &
I) {
1999 if (!
I.getType()->isAggregateType())
2002 B.SetInsertPoint(&
I);
2003 TrackConstants =
false;
2008 B.CreateIntrinsic(Intrinsic::spv_load, {
I.getOperand(0)->getType()},
2009 {
I.getPointerOperand(),
B.getInt16(Flags),
2010 B.getInt8(
I.getAlign().value())});
2011 replaceMemInstrUses(&
I, NewI,
B);
2015Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &
I) {
2019 B.SetInsertPoint(&
I);
2020 TrackConstants =
false;
2024 auto *PtrOp =
I.getPointerOperand();
2025 auto *NewI =
B.CreateIntrinsic(
2026 Intrinsic::spv_store, {
I.getValueOperand()->getType(), PtrOp->
getType()},
2027 {
I.getValueOperand(), PtrOp,
B.getInt16(Flags),
2028 B.getInt8(
I.getAlign().value())});
2030 I.eraseFromParent();
2034Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &
I) {
2035 Value *ArraySize =
nullptr;
2036 if (
I.isArrayAllocation()) {
2039 SPIRV::Extension::SPV_INTEL_variable_length_array))
2041 "array allocation: this instruction requires the following "
2042 "SPIR-V extension: SPV_INTEL_variable_length_array",
2044 ArraySize =
I.getArraySize();
2047 B.SetInsertPoint(&
I);
2048 TrackConstants =
false;
2049 Type *PtrTy =
I.getType();
2052 ?
B.CreateIntrinsic(Intrinsic::spv_alloca_array,
2053 {PtrTy, ArraySize->
getType()},
2054 {ArraySize,
B.getInt8(
I.getAlign().value())})
2055 :
B.CreateIntrinsic(
Intrinsic::spv_alloca, {PtrTy},
2056 {
B.getInt8(
I.getAlign().value())});
2057 replaceAllUsesWithAndErase(
B, &
I, NewI);
2061Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &
I) {
2062 assert(
I.getType()->isAggregateType() &&
"Aggregate result is expected");
2064 B.SetInsertPoint(&
I);
2066 Args.push_back(
B.getInt32(
2067 static_cast<uint32_t
>(
getMemScope(
I.getContext(),
I.getSyncScopeID()))));
2068 Args.push_back(
B.getInt32(
2070 Args.push_back(
B.getInt32(
2072 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
2073 {
I.getPointerOperand()->getType()}, {
Args});
2074 replaceMemInstrUses(&
I, NewI,
B);
2078Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &
I) {
2080 B.SetInsertPoint(&
I);
2081 B.CreateIntrinsic(Intrinsic::spv_unreachable, {});
2085void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV,
2088 static const StringSet<> ArtificialGlobals{
"llvm.global.annotations",
2089 "llvm.compiler.used"};
2099 deduceElementTypeHelper(&GV,
false);
2103 auto *InitInst =
B.CreateIntrinsic(Intrinsic::spv_init_global,
2105 InitInst->setArgOperand(1, Init);
2108 B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.
getType(), &GV);
2114bool SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *
I,
2116 bool UnknownElemTypeI8) {
2122 if (
Type *ElemTy = deduceElementType(
I, UnknownElemTypeI8)) {
2129void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *
I,
2132 static StringMap<unsigned> ResTypeWellKnown = {
2133 {
"async_work_group_copy", WellKnownTypes::Event},
2134 {
"async_work_group_strided_copy", WellKnownTypes::Event},
2135 {
"__spirv_GroupAsyncCopy", WellKnownTypes::Event}};
2139 bool IsKnown =
false;
2144 std::string DemangledName =
2147 if (DemangledName.length() > 0)
2149 SPIRV::lookupBuiltinNameHelper(DemangledName, &DecorationId);
2150 auto ResIt = ResTypeWellKnown.
find(DemangledName);
2151 if (ResIt != ResTypeWellKnown.
end()) {
2154 switch (ResIt->second) {
2155 case WellKnownTypes::Event:
2162 switch (DecorationId) {
2165 case FPDecorationId::SAT:
2168 case FPDecorationId::RTE:
2170 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTE,
B);
2172 case FPDecorationId::RTZ:
2174 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTZ,
B);
2176 case FPDecorationId::RTP:
2178 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTP,
B);
2180 case FPDecorationId::RTN:
2182 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTN,
B);
2188 Type *Ty =
I->getType();
2191 Type *TypeToAssign = Ty;
2193 if (
II->getIntrinsicID() == Intrinsic::spv_const_composite ||
2194 II->getIntrinsicID() == Intrinsic::spv_undef) {
2195 auto It = AggrConstTypes.find(
II);
2196 if (It == AggrConstTypes.end())
2198 TypeToAssign = It->second;
2204 for (
const auto &
Op :
I->operands()) {
2211 Type *OpTy =
Op->getType();
2213 CallInst *AssignCI =
2218 Type *OpTy =
Op->getType();
2233 CallInst *AssignCI =
2243bool SPIRVEmitIntrinsics::shouldTryToAddMemAliasingDecoration(
2244 Instruction *Inst) {
2246 if (!STI->
canUseExtension(SPIRV::Extension::SPV_INTEL_memory_access_aliasing))
2257 case Intrinsic::spv_load:
2258 case Intrinsic::spv_store:
2265 const std::string
Prefix =
"__spirv_Atomic";
2266 const bool IsAtomic =
Name.find(Prefix) == 0;
2274void SPIRVEmitIntrinsics::insertSpirvDecorations(Instruction *
I,
2276 if (MDNode *MD =
I->getMetadata(
"spirv.Decorations")) {
2278 B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {
I->getType()},
2283 auto processMemAliasingDecoration = [&](
unsigned Kind) {
2284 if (MDNode *AliasListMD =
I->getMetadata(Kind)) {
2285 if (shouldTryToAddMemAliasingDecoration(
I)) {
2286 uint32_t Dec =
Kind == LLVMContext::MD_alias_scope
2287 ? SPIRV::Decoration::AliasScopeINTEL
2288 : SPIRV::Decoration::NoAliasINTEL;
2290 I, ConstantInt::get(
B.getInt32Ty(), Dec),
2293 B.CreateIntrinsic(Intrinsic::spv_assign_aliasing_decoration,
2294 {
I->getType()}, {
Args});
2298 processMemAliasingDecoration(LLVMContext::MD_alias_scope);
2299 processMemAliasingDecoration(LLVMContext::MD_noalias);
2302 if (MDNode *MD =
I->getMetadata(LLVMContext::MD_fpmath)) {
2304 bool AllowFPMaxError =
2306 if (!AllowFPMaxError)
2310 B.CreateIntrinsic(Intrinsic::spv_assign_fpmaxerror_decoration,
2319 &FPFastMathDefaultInfoMap,
2321 auto it = FPFastMathDefaultInfoMap.
find(
F);
2322 if (it != FPFastMathDefaultInfoMap.
end())
2330 SPIRV::FPFastMathMode::None);
2332 SPIRV::FPFastMathMode::None);
2334 SPIRV::FPFastMathMode::None);
2335 return FPFastMathDefaultInfoMap[
F] = std::move(FPFastMathDefaultInfoVec);
2341 size_t BitWidth = Ty->getScalarSizeInBits();
2345 assert(Index >= 0 && Index < 3 &&
2346 "Expected FPFastMathDefaultInfo for half, float, or double");
2347 assert(FPFastMathDefaultInfoVec.
size() == 3 &&
2348 "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
2349 return FPFastMathDefaultInfoVec[Index];
2352void SPIRVEmitIntrinsics::insertConstantsForFPFastMathDefault(
Module &M) {
2354 if (!
ST->canUseExtension(SPIRV::Extension::SPV_KHR_float_controls2))
2363 auto Node =
M.getNamedMetadata(
"spirv.ExecutionMode");
2365 if (!
M.getNamedMetadata(
"opencl.enable.FP_CONTRACT")) {
2373 ConstantInt::get(Type::getInt32Ty(
M.getContext()), 0);
2376 [[maybe_unused]] GlobalVariable *GV =
2377 new GlobalVariable(M,
2378 Type::getInt32Ty(
M.getContext()),
2392 DenseMap<Function *, SPIRV::FPFastMathDefaultInfoVector>
2393 FPFastMathDefaultInfoMap;
2395 for (
unsigned i = 0; i <
Node->getNumOperands(); i++) {
2404 if (EM == SPIRV::ExecutionMode::FPFastMathDefault) {
2406 "Expected 4 operands for FPFastMathDefault");
2412 SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
2414 SPIRV::FPFastMathDefaultInfo &
Info =
2417 Info.FPFastMathDefault =
true;
2418 }
else if (EM == SPIRV::ExecutionMode::ContractionOff) {
2420 "Expected no operands for ContractionOff");
2424 SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
2426 for (SPIRV::FPFastMathDefaultInfo &
Info : FPFastMathDefaultInfoVec) {
2427 Info.ContractionOff =
true;
2429 }
else if (EM == SPIRV::ExecutionMode::SignedZeroInfNanPreserve) {
2431 "Expected 1 operand for SignedZeroInfNanPreserve");
2432 unsigned TargetWidth =
2437 SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
2441 assert(Index >= 0 && Index < 3 &&
2442 "Expected FPFastMathDefaultInfo for half, float, or double");
2443 assert(FPFastMathDefaultInfoVec.
size() == 3 &&
2444 "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
2445 FPFastMathDefaultInfoVec[
Index].SignedZeroInfNanPreserve =
true;
2449 std::unordered_map<unsigned, GlobalVariable *> GlobalVars;
2450 for (
auto &[Func, FPFastMathDefaultInfoVec] : FPFastMathDefaultInfoMap) {
2451 if (FPFastMathDefaultInfoVec.
empty())
2454 for (
const SPIRV::FPFastMathDefaultInfo &
Info : FPFastMathDefaultInfoVec) {
2455 assert(
Info.Ty &&
"Expected target type for FPFastMathDefaultInfo");
2458 if (Flags == SPIRV::FPFastMathMode::None && !
Info.ContractionOff &&
2459 !
Info.SignedZeroInfNanPreserve && !
Info.FPFastMathDefault)
2463 if (
Info.ContractionOff && (Flags & SPIRV::FPFastMathMode::AllowContract))
2465 "and AllowContract");
2467 if (
Info.SignedZeroInfNanPreserve &&
2469 (SPIRV::FPFastMathMode::NotNaN | SPIRV::FPFastMathMode::NotInf |
2470 SPIRV::FPFastMathMode::NSZ))) {
2471 if (
Info.FPFastMathDefault)
2473 "SignedZeroInfNanPreserve but at least one of "
2474 "NotNaN/NotInf/NSZ is enabled.");
2477 if ((Flags & SPIRV::FPFastMathMode::AllowTransform) &&
2478 !((Flags & SPIRV::FPFastMathMode::AllowReassoc) &&
2479 (Flags & SPIRV::FPFastMathMode::AllowContract))) {
2481 "AllowTransform requires AllowReassoc and "
2482 "AllowContract to be set.");
2485 auto it = GlobalVars.find(Flags);
2486 GlobalVariable *GV =
nullptr;
2487 if (it != GlobalVars.end()) {
2493 ConstantInt::get(Type::getInt32Ty(
M.getContext()), Flags);
2496 GV =
new GlobalVariable(M,
2497 Type::getInt32Ty(
M.getContext()),
2502 GlobalVars[
Flags] = GV;
2508void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *
I,
2511 bool IsConstComposite =
2512 II &&
II->getIntrinsicID() == Intrinsic::spv_const_composite;
2513 if (IsConstComposite && TrackConstants) {
2515 auto t = AggrConsts.find(
I);
2516 assert(t != AggrConsts.end());
2519 {
II->getType(),
II->getType()}, t->second,
I, {},
B);
2521 NewOp->setArgOperand(0,
I);
2524 for (
const auto &
Op :
I->operands()) {
2528 unsigned OpNo =
Op.getOperandNo();
2529 if (
II && ((
II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
2530 (
II->paramHasAttr(OpNo, Attribute::ImmArg))))
2534 IsPhi ?
B.SetInsertPointPastAllocas(
I->getParent()->getParent())
2535 :
B.SetInsertPoint(
I);
2538 Type *OpTy =
Op->getType();
2546 {OpTy, OpTyVal->
getType()},
Op, OpTyVal, {},
B);
2548 if (!IsConstComposite &&
isPointerTy(OpTy) && OpElemTy !=
nullptr &&
2549 OpElemTy != IntegerType::getInt8Ty(
I->getContext())) {
2551 SmallVector<Value *, 2>
Args = {
2554 CallInst *PtrCasted =
2555 B.CreateIntrinsic(Intrinsic::spv_ptrcast, {
Types},
Args);
2560 I->setOperand(OpNo, NewOp);
2562 if (Named.insert(
I).second)
2566Type *SPIRVEmitIntrinsics::deduceFunParamElementType(Function *
F,
2568 std::unordered_set<Function *> FVisited;
2569 return deduceFunParamElementType(
F,
OpIdx, FVisited);
2572Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
2573 Function *
F,
unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
2575 if (!FVisited.insert(
F).second)
2578 std::unordered_set<Value *> Visited;
2581 for (User *U :
F->users()) {
2593 if (
Type *Ty = deduceElementTypeHelper(OpArg, Visited,
false))
2596 for (User *OpU : OpArg->
users()) {
2598 if (!Inst || Inst == CI)
2601 if (
Type *Ty = deduceElementTypeHelper(Inst, Visited,
false))
2608 if (FVisited.find(OuterF) != FVisited.end())
2610 for (
unsigned i = 0; i < OuterF->
arg_size(); ++i) {
2611 if (OuterF->
getArg(i) == OpArg) {
2612 Lookup.push_back(std::make_pair(OuterF, i));
2619 for (
auto &Pair :
Lookup) {
2620 if (
Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
2627void SPIRVEmitIntrinsics::processParamTypesByFunHeader(Function *
F,
2629 B.SetInsertPointPastAllocas(
F);
2643 for (User *U :
F->users()) {
2659 for (User *U : Arg->
users()) {
2663 CI->
getParent()->getParent() == CurrF) {
2665 deduceOperandElementTypeFunctionPointer(CI,
Ops, ElemTy,
false);
2676void SPIRVEmitIntrinsics::processParamTypes(Function *
F,
IRBuilder<> &
B) {
2677 B.SetInsertPointPastAllocas(
F);
2683 if (!ElemTy && (ElemTy = deduceFunParamElementType(
F,
OpIdx)) !=
nullptr) {
2685 DenseSet<std::pair<Value *, Value *>> VisitedSubst;
2687 propagateElemType(Arg, IntegerType::getInt8Ty(
F->getContext()),
2699 bool IsNewFTy =
false;
2715bool SPIRVEmitIntrinsics::processFunctionPointers(
Module &M) {
2718 if (
F.isIntrinsic())
2720 if (
F.isDeclaration()) {
2721 for (User *U :
F.users()) {
2734 for (User *U :
F.users()) {
2736 if (!
II ||
II->arg_size() != 3 ||
II->getOperand(0) != &
F)
2738 if (
II->getIntrinsicID() == Intrinsic::spv_assign_ptr_type ||
2739 II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
2746 if (Worklist.
empty())
2752 "cannot allocate a name for the internal service function");
2753 LLVMContext &Ctx =
M.getContext();
2761 for (Function *
F : Worklist) {
2763 for (
const auto &Arg :
F->args())
2765 IRB.CreateCall(
F, Args);
2767 IRB.CreateRetVoid();
2773void SPIRVEmitIntrinsics::applyDemangledPtrArgTypes(
IRBuilder<> &
B) {
2774 DenseMap<Function *, CallInst *> Ptrcasts;
2775 for (
auto It : FDeclPtrTys) {
2777 for (
auto *U :
F->users()) {
2782 for (
auto [Idx, ElemTy] : It.second) {
2790 B.SetInsertPointPastAllocas(Arg->
getParent());
2795 replaceUsesOfWithSpvPtrcast(Param,
normalizeType(ElemTy), CI,
2804 .getFirstNonPHIOrDbgOrAlloca());
2825SPIRVEmitIntrinsics::simplifyZeroLengthArrayGepInst(GetElementPtrInst *
GEP) {
2832 Type *SrcTy =
GEP->getSourceElementType();
2833 SmallVector<Value *, 8> Indices(
GEP->indices());
2835 if (ArrTy && ArrTy->getNumElements() == 0 &&
2837 Indices.erase(Indices.begin());
2838 SrcTy = ArrTy->getElementType();
2840 GEP->getNoWrapFlags(),
"",
2841 GEP->getIterator());
2846bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
2847 if (
Func.isDeclaration())
2851 GR =
ST.getSPIRVGlobalRegistry();
2855 ST.canUseExtension(SPIRV::Extension::SPV_INTEL_function_pointers);
2860 AggrConstTypes.clear();
2865 SmallPtrSet<Instruction *, 4> DeadInsts;
2868 if (!
Ref || GR->findDeducedElementType(
Ref))
2871 GetElementPtrInst *NewGEP = simplifyZeroLengthArrayGepInst(
Ref);
2873 Ref->replaceAllUsesWith(NewGEP);
2877 if (
Type *GepTy = getGEPType(
Ref))
2881 for (
auto *
I : DeadInsts) {
2882 assert(
I->use_empty() &&
"Dead instruction should not have any uses left");
2883 I->eraseFromParent();
2886 processParamTypesByFunHeader(CurrF,
B);
2895 Type *ElTy =
SI->getValueOperand()->getType();
2900 B.SetInsertPoint(&
Func.getEntryBlock(),
Func.getEntryBlock().begin());
2901 for (
auto &GV :
Func.getParent()->globals())
2902 processGlobalValue(GV,
B);
2904 preprocessUndefs(
B);
2905 preprocessCompositeConstants(
B);
2909 applyDemangledPtrArgTypes(
B);
2912 for (
auto &
I : Worklist) {
2914 if (isConvergenceIntrinsic(
I))
2917 bool Postpone = insertAssignPtrTypeIntrs(
I,
B,
false);
2919 insertAssignTypeIntrs(
I,
B);
2920 insertPtrCastOrAssignTypeInstr(
I,
B);
2924 if (Postpone && !GR->findAssignPtrTypeInstr(
I))
2925 insertAssignPtrTypeIntrs(
I,
B,
true);
2928 useRoundingMode(FPI,
B);
2933 SmallPtrSet<Instruction *, 4> IncompleteRets;
2935 deduceOperandElementType(&
I, &IncompleteRets);
2939 for (BasicBlock &BB : Func)
2940 for (PHINode &Phi : BB.
phis())
2942 deduceOperandElementType(&Phi,
nullptr);
2944 for (
auto *
I : Worklist) {
2945 TrackConstants =
true;
2955 if (isConvergenceIntrinsic(
I))
2959 processInstrAfterVisit(
I,
B);
2966bool SPIRVEmitIntrinsics::postprocessTypes(
Module &M) {
2967 if (!GR || TodoTypeSz == 0)
2970 unsigned SzTodo = TodoTypeSz;
2971 DenseMap<Value *, SmallPtrSet<Value *, 4>> ToProcess;
2976 CallInst *AssignCI = GR->findAssignPtrTypeInstr(
Op);
2977 Type *KnownTy = GR->findDeducedElementType(
Op);
2978 if (!KnownTy || !AssignCI)
2984 std::unordered_set<Value *> Visited;
2985 if (
Type *ElemTy = deduceElementTypeHelper(
Op, Visited,
false,
true)) {
2986 if (ElemTy != KnownTy) {
2987 DenseSet<std::pair<Value *, Value *>> VisitedSubst;
2988 propagateElemType(CI, ElemTy, VisitedSubst);
2995 if (
Op->hasUseList()) {
2996 for (User *U :
Op->users()) {
3003 if (TodoTypeSz == 0)
3008 SmallPtrSet<Instruction *, 4> IncompleteRets;
3010 auto It = ToProcess.
find(&
I);
3011 if (It == ToProcess.
end())
3013 It->second.remove_if([
this](
Value *V) {
return !isTodoType(V); });
3014 if (It->second.size() == 0)
3016 deduceOperandElementType(&
I, &IncompleteRets, &It->second,
true);
3017 if (TodoTypeSz == 0)
3022 return SzTodo > TodoTypeSz;
3026void SPIRVEmitIntrinsics::parseFunDeclarations(
Module &M) {
3028 if (!
F.isDeclaration() ||
F.isIntrinsic())
3032 if (DemangledName.empty())
3036 auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
3037 DemangledName,
ST.getPreferredInstructionSet());
3038 if (Opcode != SPIRV::OpGroupAsyncCopy)
3041 SmallVector<unsigned> Idxs;
3050 LLVMContext &Ctx =
F.getContext();
3052 SPIRV::parseBuiltinTypeStr(TypeStrs, DemangledName, Ctx);
3053 if (!TypeStrs.
size())
3056 for (
unsigned Idx : Idxs) {
3057 if (Idx >= TypeStrs.
size())
3060 SPIRV::parseBuiltinCallArgumentType(TypeStrs[Idx].trim(), Ctx))
3063 FDeclPtrTys[&
F].push_back(std::make_pair(Idx, ElemTy));
3068bool SPIRVEmitIntrinsics::runOnModule(
Module &M) {
3071 parseFunDeclarations(M);
3072 insertConstantsForFPFastMathDefault(M);
3082 if (!
F.isDeclaration() && !
F.isIntrinsic()) {
3084 processParamTypes(&
F,
B);
3088 CanTodoType =
false;
3089 Changed |= postprocessTypes(M);
3092 Changed |= processFunctionPointers(M);
3098 return new SPIRVEmitIntrinsics(TM);
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
Expand Atomic instructions
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
Analysis containing CSE Info
static void replaceAllUsesWith(Value *Old, Value *New, SmallPtrSet< BasicBlock *, 32 > &FreshBBs, bool IsHuge)
Replace all old uses with new ones, and push the updated BBs into FreshBBs.
This file defines the DenseSet and SmallDenseSet classes.
static bool runOnFunction(Function &F, bool PostInlining)
iv Induction Variable Users
const AbstractManglingParser< Derived, Alloc >::OperatorInfo AbstractManglingParser< Derived, Alloc >::Ops[]
Machine Check Debug Module
MachineInstr unsigned OpIdx
uint64_t IntrinsicInst * II
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
static unsigned getNumElements(Type *Ty)
static bool isMemInstrToReplace(Instruction *I)
static bool isAggrConstForceInt32(const Value *V)
static SPIRV::FPFastMathDefaultInfoVector & getOrCreateFPFastMathDefaultInfoVec(const Module &M, DenseMap< Function *, SPIRV::FPFastMathDefaultInfoVector > &FPFastMathDefaultInfoMap, Function *F)
static Type * getAtomicElemTy(SPIRVGlobalRegistry *GR, Instruction *I, Value *PointerOperand)
static void reportFatalOnTokenType(const Instruction *I)
static void setInsertPointAfterDef(IRBuilder<> &B, Instruction *I)
static void emitAssignName(Instruction *I, IRBuilder<> &B)
static Type * getPointeeTypeByCallInst(StringRef DemangledName, Function *CalledF, unsigned OpIdx)
static void createRoundingModeDecoration(Instruction *I, unsigned RoundingModeDeco, IRBuilder<> &B)
static void createDecorationIntrinsic(Instruction *I, MDNode *Node, IRBuilder<> &B)
static SPIRV::FPFastMathDefaultInfo & getFPFastMathDefaultInfo(SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec, const Type *Ty)
static bool IsKernelArgInt8(Function *F, StoreInst *SI)
static void addSaturatedDecorationToIntrinsic(Instruction *I, IRBuilder<> &B)
static bool isFirstIndexZero(const GetElementPtrInst *GEP)
static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I)
static FunctionType * getFunctionPointerElemType(Function *F, SPIRVGlobalRegistry *GR)
static void createSaturatedConversionDecoration(Instruction *I, IRBuilder<> &B)
static Type * restoreMutatedType(SPIRVGlobalRegistry *GR, Instruction *I, Type *Ty)
static bool requireAssignType(Instruction *I)
void visit(MachineFunction &MF, MachineBasicBlock &Start, std::function< void(MachineBasicBlock *)> op)
static void insertSpirvDecorations(MachineFunction &MF, SPIRVGlobalRegistry *GR, MachineIRBuilder MIB)
#define SPIRV_BACKEND_SERVICE_FUN_NAME
StringSet - A set-like wrapper for the StringMap.
static SymbolRef::Type getType(const Symbol *Sym)
static std::optional< unsigned > getOpcode(ArrayRef< VPValue * > Values)
Returns the opcode of Values or ~0 if they do not all agree.
static int Lookup(ArrayRef< TableEntry > Table, unsigned Opcode)
This class represents an incoming formal argument to a Function.
const Function * getParent() const
static unsigned getPointerOperandIndex()
static unsigned getPointerOperandIndex()
iterator_range< const_phi_iterator > phis() const
Returns a range that iterates over the phis in the basic block.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
LLVM_ABI LLVMContext & getContext() const
Get the context in which this basic block lives.
static LLVM_ABI BlockAddress * get(Function *F, BasicBlock *BB)
Return a BlockAddress for the specified function and basic block.
bool isInlineAsm() const
Check if this call is an inline asm statement.
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
LLVM_ABI bool isIndirectCall() const
Return true if the callsite is an indirect call.
Value * getCalledOperand() const
Value * getArgOperand(unsigned i) const
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned arg_size() const
This class represents a function call, abstracting a target machine's calling convention.
uint64_t getZExtValue() const
Return the constant as a 64-bit unsigned integer value after it has been zero extended as appropriate...
static LLVM_ABI Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
LLVM_ABI std::optional< RoundingMode > getRoundingMode() const
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
static LLVM_ABI FunctionType * get(Type *Result, ArrayRef< Type * > Params, bool isVarArg)
This static method is the primary way of constructing a FunctionType.
void addFnAttr(Attribute::AttrKind Kind)
Add function attributes to this function.
static Function * Create(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
const DataLayout & getDataLayout() const
Get the data layout of the module this function belongs to.
Intrinsic::ID getIntrinsicID() const LLVM_READONLY
getIntrinsicID - This method returns the ID number of the specified function, or Intrinsic::not_intri...
bool isIntrinsic() const
isIntrinsic - Returns true if the function's name starts with "llvm.".
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
Type * getReturnType() const
Returns the type of the ret val.
Argument * getArg(unsigned i) const
an instruction for type-safe pointer arithmetic to access elements of arrays and structs
static LLVM_ABI Type * getTypeAtIndex(Type *Ty, Value *Idx)
Return the type of the element at the given index of an indexable type.
static GetElementPtrInst * Create(Type *PointeeType, Value *Ptr, ArrayRef< Value * > IdxList, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
static unsigned getPointerOperandIndex()
PointerType * getType() const
Global values are always pointers.
@ PrivateLinkage
Like Internal, but omit from symbol table.
@ InternalLinkage
Rename collisions when linking (static functions).
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
LLVM_ABI void addDestination(BasicBlock *Dest)
Add a destination.
Base class for instruction visitors.
LLVM_ABI InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
Instruction * user_back()
Specialize the methods defined in Value, as we know that an instruction can only be used by other ins...
LLVM_ABI const Function * getFunction() const
Return the function this instruction belongs to.
LLVM_ABI void copyMetadata(const Instruction &SrcInst, ArrayRef< unsigned > WL=ArrayRef< unsigned >())
Copy metadata from SrcInst to this instruction.
This is an important class for using LLVM in a threaded context.
static unsigned getPointerOperandIndex()
const MDOperand & getOperand(unsigned I) const
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
unsigned getNumOperands() const
Return number of MDNode operands.
static LLVM_ABI MDString * get(LLVMContext &Context, StringRef Str)
Flags
Flags values. These may be or'd together.
ModulePass class - This class is used to implement unstructured interprocedural optimizations and ana...
A Module instance is used to store all the information related to an LLVM module.
void addAssignPtrTypeInstr(Value *Val, CallInst *AssignPtrTyCI)
void buildAssignPtr(IRBuilder<> &B, Type *ElemTy, Value *Arg)
Type * findDeducedCompositeType(const Value *Val)
void replaceAllUsesWith(Value *Old, Value *New, bool DeleteOld=true)
void addDeducedElementType(Value *Val, Type *Ty)
void addReturnType(const Function *ArgF, TypedPointerType *DerivedTy)
Type * findMutated(const Value *Val)
void addDeducedCompositeType(Value *Val, Type *Ty)
void buildAssignType(IRBuilder<> &B, Type *Ty, Value *Arg)
Type * findDeducedElementType(const Value *Val)
void updateAssignType(CallInst *AssignCI, Value *Arg, Value *OfType)
CallInst * findAssignPtrTypeInstr(const Value *Val)
const SPIRVTargetLowering * getTargetLowering() const override
bool isLogicalSPIRV() const
bool canUseExtension(SPIRV::Extension::Extension E) const
const SPIRVSubtarget * getSubtargetImpl() const
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
bool contains(ConstPtrType Ptr) const
reference emplace_back(ArgTypes &&... Args)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
An instruction for storing to memory.
static unsigned getPointerOperandIndex()
iterator find(StringRef Key)
StringRef - Represent a constant reference to a string, i.e.
bool starts_with(StringRef Prefix) const
Check if this string starts with the given Prefix.
bool contains(StringRef key) const
Check if the set contains the given key.
static LLVM_ABI StructType * create(LLVMContext &Context, StringRef Name)
This creates an identified struct.
static LLVM_ABI TargetExtType * get(LLVMContext &Context, StringRef Name, ArrayRef< Type * > Types={}, ArrayRef< unsigned > Ints={})
Return a target extension type having the specified name and optional type and integer parameters.
const STC & getSubtarget(const Function &F) const
This method returns a pointer to the specified type of TargetSubtargetInfo.
The instances of the Type class are immutable: once they are created, they are never changed.
bool isVectorTy() const
True if this is an instance of VectorType.
bool isArrayTy() const
True if this is an instance of ArrayType.
static LLVM_ABI IntegerType * getInt32Ty(LLVMContext &C)
bool isPointerTy() const
True if this is an instance of PointerType.
Type * getArrayElementType() const
LLVM_ABI StringRef getTargetExtName() const
static LLVM_ABI IntegerType * getInt8Ty(LLVMContext &C)
bool isStructTy() const
True if this is an instance of StructType.
bool isTargetExtTy() const
Return true if this is a target extension type.
bool isAggregateType() const
Return true if the type is an aggregate type.
static LLVM_ABI Type * getDoubleTy(LLVMContext &C)
static LLVM_ABI Type * getFloatTy(LLVMContext &C)
static LLVM_ABI Type * getHalfTy(LLVMContext &C)
bool isVoidTy() const
Return true if this is 'void'.
static LLVM_ABI bool isValidElementType(Type *ElemTy)
Return true if the specified type is valid as a element type.
static LLVM_ABI TypedPointerType * get(Type *ElementType, unsigned AddressSpace)
This constructs a pointer to an object of the specified type in a numbered address space.
static LLVM_ABI UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
void setOperand(unsigned i, Value *Val)
LLVM_ABI bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
Value * getOperand(unsigned i) const
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
LLVM_ABI void setName(const Twine &Name)
Change the name of the value.
iterator_range< user_iterator > users()
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
bool contains(const_arg_type_t< ValueT > V) const
Check if the set contains the given element.
const ParentTy * getParent() const
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr char Args[]
Key for Kernel::Metadata::mArgs.
@ SPIR_KERNEL
Used for SPIR kernel functions.
@ BasicBlock
Various leaf nodes.
bool match(Val *V, const Pattern &P)
is_zero m_Zero()
Match any null constant or a vector with all elements equal to 0.
DenseSetImpl< ValueT, DenseMap< ValueT, DenseSetEmpty, ValueInfoT, DenseSetPair< ValueT > >, ValueInfoT > DenseSet
ElementType
The element type of an SRV or UAV resource.
@ User
could "use" a pointer
NodeAddr< PhiNode * > Phi
NodeAddr< NodeBase * > Node
NodeAddr< FuncNode * > Func
friend class Instruction
Iterator for Instructions in a `BasicBlock.
This is an optimization pass for GlobalISel generic memory operations.
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
bool getVacantFunctionName(Module &M, std::string &Name)
FunctionAddr VTableAddr Value
bool isTypedPointerWrapper(const TargetExtType *ExtTy)
auto enumerate(FirstRange &&First, RestRanges &&...Rest)
Given two or more input ranges, returns a new range whose values are tuples (A, B,...
ModulePass * createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM)
unsigned getPointerAddressSpace(const Type *T)
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
FunctionAddr VTableAddr uintptr_t uintptr_t Int32Ty
CallInst * buildIntrWithMD(Intrinsic::ID IntrID, ArrayRef< Type * > Types, Value *Arg, Value *Arg2, ArrayRef< Constant * > Imms, IRBuilder<> &B)
void append_range(Container &C, Range &&R)
Wrapper function to append range R to container C.
bool isNestedPointer(const Type *Ty)
MetadataAsValue * buildMD(Value *Arg)
std::string getOclOrSpirvBuiltinDemangledName(StringRef Name)
auto reverse(ContainerTy &&C)
Type * getTypedPointerWrapper(Type *ElemTy, unsigned AS)
bool isPointerTy(const Type *T)
LLVM_ABI void report_fatal_error(Error Err, bool gen_crash_diag=true)
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
SPIRV::Scope::Scope getMemScope(LLVMContext &Ctx, SyncScope::ID Id)
@ Ref
The access may reference the value stored in memory.
IRBuilder(LLVMContext &, FolderTy, InserterTy, MDNode *, ArrayRef< OperandBundleDef >) -> IRBuilder< FolderTy, InserterTy >
DWARFExpression::Operation Op
Type * getPointeeTypeByAttr(Argument *Arg)
bool hasPointeeTypeAttr(Argument *Arg)
constexpr unsigned BitWidth
bool isEquivalentTypes(Type *Ty1, Type *Ty2)
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
iterator_range< pointer_iterator< WrappedIteratorT > > make_pointer_range(RangeT &&Range)
bool hasInitializer(const GlobalVariable *GV)
Type * normalizeType(Type *Ty)
@ Enabled
Convert any .debug_str_offsets tables to DWARF64 if needed.
bool isSpvIntrinsic(const MachineInstr &MI, Intrinsic::ID IntrinsicID)
Type * getPointeeType(const Type *Ty)
PoisonValue * getNormalizedPoisonValue(Type *Ty)
bool isUntypedPointerTy(const Type *T)
Type * reconstitutePeeledArrayType(Type *Ty)
SPIRV::MemorySemantics::MemorySemantics getMemSemantics(AtomicOrdering Ord)
static size_t computeFPFastMathDefaultInfoVecIndex(size_t BitWidth)