24#include "llvm/IR/IntrinsicsSPIRV.h"
31#include <unordered_set>
54#define GET_BuiltinGroup_DECL
55#include "SPIRVGenTables.inc"
60class SPIRVEmitIntrinsics
62 public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
63 SPIRVTargetMachine *TM =
nullptr;
64 SPIRVGlobalRegistry *GR =
nullptr;
66 bool TrackConstants =
true;
67 bool HaveFunPtrs =
false;
68 DenseMap<Instruction *, Constant *> AggrConsts;
69 DenseMap<Instruction *, Type *> AggrConstTypes;
70 DenseSet<Instruction *> AggrStores;
71 std::unordered_set<Value *> Named;
74 DenseMap<Function *, SmallVector<std::pair<unsigned, Type *>>> FDeclPtrTys;
77 bool CanTodoType =
true;
78 unsigned TodoTypeSz = 0;
79 DenseMap<Value *, bool> TodoType;
80 void insertTodoType(
Value *
Op) {
83 auto It = TodoType.try_emplace(
Op,
true);
89 auto It = TodoType.find(
Op);
90 if (It != TodoType.end() && It->second) {
98 auto It = TodoType.find(
Op);
99 return It != TodoType.end() && It->second;
103 std::unordered_set<Instruction *> TypeValidated;
106 enum WellKnownTypes { Event };
109 Type *deduceElementType(
Value *
I,
bool UnknownElemTypeI8);
110 Type *deduceElementTypeHelper(
Value *
I,
bool UnknownElemTypeI8);
111 Type *deduceElementTypeHelper(
Value *
I, std::unordered_set<Value *> &Visited,
112 bool UnknownElemTypeI8,
113 bool IgnoreKnownType =
false);
114 Type *deduceElementTypeByValueDeep(
Type *ValueTy,
Value *Operand,
115 bool UnknownElemTypeI8);
116 Type *deduceElementTypeByValueDeep(
Type *ValueTy,
Value *Operand,
117 std::unordered_set<Value *> &Visited,
118 bool UnknownElemTypeI8);
120 std::unordered_set<Value *> &Visited,
121 bool UnknownElemTypeI8);
123 bool UnknownElemTypeI8);
126 Type *deduceNestedTypeHelper(
User *
U,
bool UnknownElemTypeI8);
128 std::unordered_set<Value *> &Visited,
129 bool UnknownElemTypeI8);
133 SmallPtrSet<Instruction *, 4> *IncompleteRets,
134 const SmallPtrSet<Value *, 4> *AskOps =
nullptr,
135 bool IsPostprocessing =
false);
140 Type *reconstructType(
Value *
Op,
bool UnknownElemTypeI8,
141 bool IsPostprocessing);
146 bool UnknownElemTypeI8);
148 void insertAssignPtrTypeTargetExt(TargetExtType *AssignedType,
Value *
V,
151 Type *ExpectedElementType,
152 unsigned OperandToReplace,
155 bool shouldTryToAddMemAliasingDecoration(
Instruction *Inst);
157 void insertConstantsForFPFastMathDefault(
Module &
M);
158 void processGlobalValue(GlobalVariable &GV,
IRBuilder<> &
B);
163 std::unordered_set<Function *> &FVisited);
165 bool deduceOperandElementTypeCalledFunction(
167 Type *&KnownElemTy,
bool &Incomplete);
168 void deduceOperandElementTypeFunctionPointer(
170 Type *&KnownElemTy,
bool IsPostprocessing);
171 bool deduceOperandElementTypeFunctionRet(
172 Instruction *
I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
173 const SmallPtrSet<Value *, 4> *AskOps,
bool IsPostprocessing,
178 DenseMap<Function *, CallInst *> Ptrcasts);
180 DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
183 DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
184 void propagateElemTypeRec(
Value *
Op,
Type *PtrElemTy,
Type *CastElemTy,
185 DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
186 std::unordered_set<Value *> &Visited,
187 DenseMap<Function *, CallInst *> Ptrcasts);
195 GetElementPtrInst *simplifyZeroLengthArrayGepInst(GetElementPtrInst *
GEP);
198 bool postprocessTypes(
Module &
M);
199 bool processFunctionPointers(
Module &
M);
200 void parseFunDeclarations(
Module &
M);
202 void useRoundingMode(ConstrainedFPIntrinsic *FPI,
IRBuilder<> &
B);
218 bool walkLogicalAccessChain(
219 GetElementPtrInst &
GEP,
220 const std::function<
void(
Type *PointedType, uint64_t
Index)>
229 Type *getGEPType(GetElementPtrInst *
GEP);
236 Type *getGEPTypeLogical(GetElementPtrInst *
GEP);
238 Instruction *buildLogicalAccessChainFromGEP(GetElementPtrInst &
GEP);
// Constructor: stores the target machine pointer (defaults to nullptr) and
// registers with the legacy pass manager via ModulePass(ID).
242 SPIRVEmitIntrinsics(SPIRVTargetMachine *TM =
nullptr)
243 : ModulePass(ID), TM(TM) {}
246 Instruction *visitGetElementPtrInst(GetElementPtrInst &
I);
248 Instruction *visitInsertElementInst(InsertElementInst &
I);
249 Instruction *visitExtractElementInst(ExtractElementInst &
I);
251 Instruction *visitExtractValueInst(ExtractValueInst &
I);
255 Instruction *visitAtomicCmpXchgInst(AtomicCmpXchgInst &
I);
// Human-readable name of this pass, as reported by the pass manager.
259 StringRef getPassName()
const override {
return "SPIRV emit intrinsics"; }
261 bool runOnModule(
Module &
M)
override;
263 void getAnalysisUsage(AnalysisUsage &AU)
const override {
264 ModulePass::getAnalysisUsage(AU);
273 return II->getIntrinsicID() == Intrinsic::experimental_convergence_entry ||
274 II->getIntrinsicID() == Intrinsic::experimental_convergence_loop ||
275 II->getIntrinsicID() == Intrinsic::experimental_convergence_anchor;
278bool expectIgnoredInIRTranslation(
const Instruction *
I) {
282 switch (
II->getIntrinsicID()) {
283 case Intrinsic::invariant_start:
284 case Intrinsic::spv_resource_handlefrombinding:
285 case Intrinsic::spv_resource_getpointer:
295 if (
II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
296 Value *V =
II->getArgOperand(0);
297 return getPointerRoot(V);
305char SPIRVEmitIntrinsics::ID = 0;
328 B.SetInsertPoint(
I->getParent()->getFirstNonPHIOrDbgOrAlloca());
334 B.SetCurrentDebugLocation(
I->getDebugLoc());
335 if (
I->getType()->isVoidTy())
336 B.SetInsertPoint(
I->getNextNode());
338 B.SetInsertPoint(*
I->getInsertionPointAfterDef());
343 switch (Intr->getIntrinsicID()) {
344 case Intrinsic::invariant_start:
345 case Intrinsic::invariant_end:
353 if (
I->getType()->isTokenTy())
355 "does not support token type",
360 if (!
I->hasName() ||
I->getType()->isAggregateType() ||
361 expectIgnoredInIRTranslation(
I))
366 std::vector<Value *> Args = {
369 B.CreateIntrinsic(Intrinsic::spv_assign_name, {
I->getType()}, Args);
372void SPIRVEmitIntrinsics::replaceAllUsesWith(
Value *Src,
Value *Dest,
376 if (isTodoType(Src)) {
379 insertTodoType(Dest);
383void SPIRVEmitIntrinsics::replaceAllUsesWithAndErase(
IRBuilder<> &
B,
388 std::string
Name = Src->hasName() ? Src->getName().str() :
"";
389 Src->eraseFromParent();
392 if (Named.insert(Dest).second)
417Type *SPIRVEmitIntrinsics::reconstructType(
Value *
Op,
bool UnknownElemTypeI8,
418 bool IsPostprocessing) {
433 if (UnknownElemTypeI8) {
434 if (!IsPostprocessing)
442CallInst *SPIRVEmitIntrinsics::buildSpvPtrcast(Function *
F,
Value *
Op,
450 B.SetInsertPointPastAllocas(OpA->getParent());
453 B.SetInsertPoint(
F->getEntryBlock().getFirstNonPHIOrDbgOrAlloca());
455 Type *OpTy =
Op->getType();
459 CallInst *PtrCasted =
460 B.CreateIntrinsic(Intrinsic::spv_ptrcast, {
Types},
Args);
465void SPIRVEmitIntrinsics::replaceUsesOfWithSpvPtrcast(
467 DenseMap<Function *, CallInst *> Ptrcasts) {
469 CallInst *PtrCastedI =
nullptr;
470 auto It = Ptrcasts.
find(
F);
471 if (It == Ptrcasts.
end()) {
472 PtrCastedI = buildSpvPtrcast(
F,
Op, ElemTy);
473 Ptrcasts[
F] = PtrCastedI;
475 PtrCastedI = It->second;
477 I->replaceUsesOfWith(
Op, PtrCastedI);
480void SPIRVEmitIntrinsics::propagateElemType(
482 DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
483 DenseMap<Function *, CallInst *> Ptrcasts;
485 for (
auto *U :
Users) {
488 if (!VisitedSubst.insert(std::make_pair(U,
Op)).second)
494 TypeValidated.find(UI) != TypeValidated.end())
495 replaceUsesOfWithSpvPtrcast(
Op, ElemTy, UI, Ptrcasts);
499void SPIRVEmitIntrinsics::propagateElemTypeRec(
501 DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
502 std::unordered_set<Value *> Visited;
503 DenseMap<Function *, CallInst *> Ptrcasts;
504 propagateElemTypeRec(
Op, PtrElemTy, CastElemTy, VisitedSubst, Visited,
505 std::move(Ptrcasts));
508void SPIRVEmitIntrinsics::propagateElemTypeRec(
510 DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
511 std::unordered_set<Value *> &Visited,
512 DenseMap<Function *, CallInst *> Ptrcasts) {
513 if (!Visited.insert(
Op).second)
516 for (
auto *U :
Users) {
519 if (!VisitedSubst.insert(std::make_pair(U,
Op)).second)
525 TypeValidated.find(UI) != TypeValidated.end())
526 replaceUsesOfWithSpvPtrcast(
Op, CastElemTy, UI, Ptrcasts);
534SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
Type *ValueTy,
Value *Operand,
535 bool UnknownElemTypeI8) {
536 std::unordered_set<Value *> Visited;
537 return deduceElementTypeByValueDeep(ValueTy, Operand, Visited,
541Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
542 Type *ValueTy,
Value *Operand, std::unordered_set<Value *> &Visited,
543 bool UnknownElemTypeI8) {
548 deduceElementTypeHelper(Operand, Visited, UnknownElemTypeI8))
559Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
560 Value *
Op, std::unordered_set<Value *> &Visited,
bool UnknownElemTypeI8) {
572 for (User *OpU :
Op->users()) {
574 if (
Type *Ty = deduceElementTypeHelper(Inst, Visited, UnknownElemTypeI8))
587 if ((DemangledName.
starts_with(
"__spirv_ocl_printf(") ||
596Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
Value *
I,
597 bool UnknownElemTypeI8) {
598 std::unordered_set<Value *> Visited;
599 return deduceElementTypeHelper(
I, Visited, UnknownElemTypeI8);
602void SPIRVEmitIntrinsics::maybeAssignPtrType(
Type *&Ty,
Value *
Op,
Type *RefTy,
603 bool UnknownElemTypeI8) {
605 if (!UnknownElemTypeI8)
612bool SPIRVEmitIntrinsics::walkLogicalAccessChain(
613 GetElementPtrInst &
GEP,
614 const std::function<
void(
Type *, uint64_t)> &OnLiteralIndexing,
615 const std::function<
void(
Type *,
Value *)> &OnDynamicIndexing) {
623 Value *Src = getPointerRoot(
GEP.getPointerOperand());
624 Type *CurType = deduceElementType(Src,
true);
633 OnDynamicIndexing(AT->getElementType(), Operand);
634 return AT ==
nullptr;
642 uint32_t EltTypeSize =
DL.getTypeSizeInBits(AT->getElementType()) / 8;
646 CurType = AT->getElementType();
647 OnLiteralIndexing(CurType, Index);
649 uint32_t StructSize =
DL.getTypeSizeInBits(ST) / 8;
652 const auto &STL =
DL.getStructLayout(ST);
653 unsigned Element = STL->getElementContainingOffset(
Offset);
654 Offset -= STL->getElementOffset(Element);
655 CurType =
ST->getElementType(Element);
656 OnLiteralIndexing(CurType, Element);
668SPIRVEmitIntrinsics::buildLogicalAccessChainFromGEP(GetElementPtrInst &
GEP) {
671 B.SetInsertPoint(&
GEP);
673 std::vector<Value *> Indices;
674 Indices.push_back(ConstantInt::get(
675 IntegerType::getInt32Ty(CurrF->
getContext()), 0,
false));
676 walkLogicalAccessChain(
678 [&Indices, &
B](
Type *EltType, uint64_t Index) {
680 ConstantInt::get(
B.getInt64Ty(), Index,
false));
683 uint32_t EltTypeSize =
DL.getTypeSizeInBits(EltType) / 8;
685 Offset, ConstantInt::get(
Offset->getType(), EltTypeSize,
687 Indices.push_back(Index);
692 Args.push_back(
B.getInt1(
GEP.isInBounds()));
693 Args.push_back(
GEP.getOperand(0));
695 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_gep, {
Types}, {
Args});
696 replaceAllUsesWithAndErase(
B, &
GEP, NewI);
700Type *SPIRVEmitIntrinsics::getGEPTypeLogical(GetElementPtrInst *
GEP) {
702 Type *CurType =
GEP->getResultElementType();
704 bool Interrupted = walkLogicalAccessChain(
705 *
GEP, [&CurType](
Type *EltType, uint64_t Index) { CurType = EltType; },
708 return Interrupted ?
GEP->getResultElementType() : CurType;
711Type *SPIRVEmitIntrinsics::getGEPType(GetElementPtrInst *
Ref) {
712 if (
Ref->getSourceElementType() ==
713 IntegerType::getInt8Ty(CurrF->
getContext()) &&
715 return getGEPTypeLogical(
Ref);
722 Ty =
Ref->getSourceElementType();
726 Ty =
Ref->getResultElementType();
731Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
732 Value *
I, std::unordered_set<Value *> &Visited,
bool UnknownElemTypeI8,
733 bool IgnoreKnownType) {
739 if (!IgnoreKnownType)
744 if (!Visited.insert(
I).second)
751 maybeAssignPtrType(Ty,
I,
Ref->getAllocatedType(), UnknownElemTypeI8);
753 Ty = getGEPType(
Ref);
758 KnownTy =
Op->getType();
760 maybeAssignPtrType(Ty,
I, ElemTy, UnknownElemTypeI8);
762 Ty = deduceElementTypeByValueDeep(
764 Ref->getNumOperands() > 0 ?
Ref->getOperand(0) :
nullptr, Visited,
767 Type *RefTy = deduceElementTypeHelper(
Ref->getPointerOperand(), Visited,
769 maybeAssignPtrType(Ty,
I, RefTy, UnknownElemTypeI8);
771 maybeAssignPtrType(Ty,
I,
Ref->getDestTy(), UnknownElemTypeI8);
773 if (
Type *Src =
Ref->getSrcTy(), *Dest =
Ref->getDestTy();
775 Ty = deduceElementTypeHelper(
Ref->getOperand(0), Visited,
780 Ty = deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8);
784 Ty = deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8);
786 Type *BestTy =
nullptr;
788 DenseMap<Type *, unsigned> PhiTys;
789 for (
int i =
Ref->getNumIncomingValues() - 1; i >= 0; --i) {
790 Ty = deduceElementTypeByUsersDeep(
Ref->getIncomingValue(i), Visited,
797 if (It.first->second > MaxN) {
798 MaxN = It.first->second;
806 for (
Value *
Op : {
Ref->getTrueValue(),
Ref->getFalseValue()}) {
807 Ty = deduceElementTypeByUsersDeep(
Op, Visited, UnknownElemTypeI8);
812 static StringMap<unsigned> ResTypeByArg = {
816 {
"__spirv_GenericCastToPtr_ToGlobal", 0},
817 {
"__spirv_GenericCastToPtr_ToLocal", 0},
818 {
"__spirv_GenericCastToPtr_ToPrivate", 0},
819 {
"__spirv_GenericCastToPtrExplicit_ToGlobal", 0},
820 {
"__spirv_GenericCastToPtrExplicit_ToLocal", 0},
821 {
"__spirv_GenericCastToPtrExplicit_ToPrivate", 0}};
825 if (
II &&
II->getIntrinsicID() == Intrinsic::spv_resource_getpointer) {
827 if (HandleType->getTargetExtName() ==
"spirv.Image" ||
828 HandleType->getTargetExtName() ==
"spirv.SignedImage") {
829 for (User *U :
II->users()) {
834 }
else if (HandleType->getTargetExtName() ==
"spirv.VulkanBuffer") {
836 Ty = HandleType->getTypeParameter(0);
848 }
else if (
II &&
II->getIntrinsicID() ==
849 Intrinsic::spv_generic_cast_to_ptr_explicit) {
850 Ty = deduceElementTypeHelper(CI->getArgOperand(0), Visited,
852 }
else if (Function *CalledF = CI->getCalledFunction()) {
853 std::string DemangledName =
855 if (DemangledName.length() > 0)
856 DemangledName = SPIRV::lookupBuiltinNameHelper(DemangledName);
857 auto AsArgIt = ResTypeByArg.
find(DemangledName);
858 if (AsArgIt != ResTypeByArg.
end())
859 Ty = deduceElementTypeHelper(CI->getArgOperand(AsArgIt->second),
860 Visited, UnknownElemTypeI8);
867 if (Ty && !IgnoreKnownType) {
878Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(User *U,
879 bool UnknownElemTypeI8) {
880 std::unordered_set<Value *> Visited;
881 return deduceNestedTypeHelper(U,
U->getType(), Visited, UnknownElemTypeI8);
884Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
885 User *U,
Type *OrigTy, std::unordered_set<Value *> &Visited,
886 bool UnknownElemTypeI8) {
895 if (!Visited.insert(U).second)
901 for (
unsigned i = 0; i <
U->getNumOperands(); ++i) {
903 assert(
Op &&
"Operands should not be null.");
904 Type *OpTy =
Op->getType();
908 deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8))
915 Change |= Ty != OpTy;
923 if (
Value *
Op =
U->getNumOperands() > 0 ?
U->getOperand(0) :
nullptr) {
924 Type *OpTy = ArrTy->getElementType();
928 deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8))
935 Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
941 if (
Value *
Op =
U->getNumOperands() > 0 ?
U->getOperand(0) :
nullptr) {
942 Type *OpTy = VecTy->getElementType();
946 deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8))
953 Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
963Type *SPIRVEmitIntrinsics::deduceElementType(
Value *
I,
bool UnknownElemTypeI8) {
964 if (
Type *Ty = deduceElementTypeHelper(
I, UnknownElemTypeI8))
966 if (!UnknownElemTypeI8)
969 return IntegerType::getInt8Ty(
I->getContext());
973 Value *PointerOperand) {
987bool SPIRVEmitIntrinsics::deduceOperandElementTypeCalledFunction(
989 Type *&KnownElemTy,
bool &Incomplete) {
993 std::string DemangledName =
995 if (DemangledName.length() > 0 &&
997 const SPIRVSubtarget &
ST = TM->
getSubtarget<SPIRVSubtarget>(*CalledF);
998 auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
999 DemangledName,
ST.getPreferredInstructionSet());
1000 if (Opcode == SPIRV::OpGroupAsyncCopy) {
1001 for (
unsigned i = 0, PtrCnt = 0; i < CI->
arg_size() && PtrCnt < 2; ++i) {
1007 KnownElemTy = ElemTy;
1008 Ops.push_back(std::make_pair(
Op, i));
1010 }
else if (Grp == SPIRV::Atomic || Grp == SPIRV::AtomicFloating) {
1017 case SPIRV::OpAtomicFAddEXT:
1018 case SPIRV::OpAtomicFMinEXT:
1019 case SPIRV::OpAtomicFMaxEXT:
1020 case SPIRV::OpAtomicLoad:
1021 case SPIRV::OpAtomicCompareExchangeWeak:
1022 case SPIRV::OpAtomicCompareExchange:
1023 case SPIRV::OpAtomicExchange:
1024 case SPIRV::OpAtomicIAdd:
1025 case SPIRV::OpAtomicISub:
1026 case SPIRV::OpAtomicOr:
1027 case SPIRV::OpAtomicXor:
1028 case SPIRV::OpAtomicAnd:
1029 case SPIRV::OpAtomicUMin:
1030 case SPIRV::OpAtomicUMax:
1031 case SPIRV::OpAtomicSMin:
1032 case SPIRV::OpAtomicSMax: {
1037 Incomplete = isTodoType(
Op);
1038 Ops.push_back(std::make_pair(
Op, 0));
1040 case SPIRV::OpAtomicStore: {
1049 Incomplete = isTodoType(
Op);
1050 Ops.push_back(std::make_pair(
Op, 0));
1059void SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionPointer(
1061 Type *&KnownElemTy,
bool IsPostprocessing) {
1065 Ops.push_back(std::make_pair(
Op, std::numeric_limits<unsigned>::max()));
1067 bool IsNewFTy =
false, IsIncomplete =
false;
1070 Type *ArgTy = Arg->getType();
1075 if (isTodoType(Arg))
1076 IsIncomplete =
true;
1078 IsIncomplete =
true;
1083 Type *RetTy = FTy->getReturnType();
1090 IsIncomplete =
true;
1092 IsIncomplete =
true;
1095 if (!IsPostprocessing && IsIncomplete)
1098 IsNewFTy ? FunctionType::get(RetTy, ArgTys, FTy->isVarArg()) : FTy;
1101bool SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionRet(
1102 Instruction *
I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
1103 const SmallPtrSet<Value *, 4> *AskOps,
bool IsPostprocessing,
1115 DenseSet<std::pair<Value *, Value *>> VisitedSubst{std::make_pair(
I,
Op)};
1116 for (User *U :
F->users()) {
1124 propagateElemType(CI, PrevElemTy, VisitedSubst);
1134 for (Instruction *IncompleteRetI : *IncompleteRets)
1135 deduceOperandElementType(IncompleteRetI,
nullptr, AskOps,
1137 }
else if (IncompleteRets) {
1140 TypeValidated.insert(
I);
1148void SPIRVEmitIntrinsics::deduceOperandElementType(
1149 Instruction *
I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
1150 const SmallPtrSet<Value *, 4> *AskOps,
bool IsPostprocessing) {
1152 Type *KnownElemTy =
nullptr;
1153 bool Incomplete =
false;
1159 Incomplete = isTodoType(
I);
1160 for (
unsigned i = 0; i <
Ref->getNumIncomingValues(); i++) {
1163 Ops.push_back(std::make_pair(
Op, i));
1169 Incomplete = isTodoType(
I);
1170 Ops.push_back(std::make_pair(
Ref->getPointerOperand(), 0));
1177 Incomplete = isTodoType(
I);
1178 Ops.push_back(std::make_pair(
Ref->getOperand(0), 0));
1182 KnownElemTy =
Ref->getSourceElementType();
1183 Ops.push_back(std::make_pair(
Ref->getPointerOperand(),
1186 KnownElemTy =
I->getType();
1192 Ops.push_back(std::make_pair(
Ref->getPointerOperand(),
1196 reconstructType(
Ref->getValueOperand(),
false, IsPostprocessing)))
1201 Ops.push_back(std::make_pair(
Ref->getPointerOperand(),
1209 Incomplete = isTodoType(
Ref->getPointerOperand());
1210 Ops.push_back(std::make_pair(
Ref->getPointerOperand(),
1218 Incomplete = isTodoType(
Ref->getPointerOperand());
1219 Ops.push_back(std::make_pair(
Ref->getPointerOperand(),
1225 Incomplete = isTodoType(
I);
1226 for (
unsigned i = 0; i <
Ref->getNumOperands(); i++) {
1229 Ops.push_back(std::make_pair(
Op, i));
1237 if (deduceOperandElementTypeFunctionRet(
I, IncompleteRets, AskOps,
1238 IsPostprocessing, KnownElemTy,
Op,
1241 Incomplete = isTodoType(CurrF);
1242 Ops.push_back(std::make_pair(
Op, 0));
1248 bool Incomplete0 = isTodoType(Op0);
1249 bool Incomplete1 = isTodoType(Op1);
1251 Type *ElemTy0 = (Incomplete0 && !Incomplete1 && ElemTy1)
1253 : GR->findDeducedElementType(Op0);
1255 KnownElemTy = ElemTy0;
1256 Incomplete = Incomplete0;
1257 Ops.push_back(std::make_pair(Op1, 1));
1258 }
else if (ElemTy1) {
1259 KnownElemTy = ElemTy1;
1260 Incomplete = Incomplete1;
1261 Ops.push_back(std::make_pair(Op0, 0));
1265 deduceOperandElementTypeCalledFunction(CI,
Ops, KnownElemTy, Incomplete);
1266 else if (HaveFunPtrs)
1267 deduceOperandElementTypeFunctionPointer(CI,
Ops, KnownElemTy,
1272 if (!KnownElemTy ||
Ops.size() == 0)
1277 for (
auto &OpIt :
Ops) {
1281 Type *AskTy =
nullptr;
1282 CallInst *AskCI =
nullptr;
1283 if (IsPostprocessing && AskOps) {
1289 if (Ty == KnownElemTy)
1292 Type *OpTy =
Op->getType();
1293 if (
Op->hasUseList() &&
1300 else if (!IsPostprocessing)
1304 if (AssignCI ==
nullptr) {
1313 DenseSet<std::pair<Value *, Value *>> VisitedSubst{
1314 std::make_pair(
I,
Op)};
1315 propagateElemTypeRec(
Op, KnownElemTy, PrevElemTy, VisitedSubst);
1319 CallInst *PtrCastI =
1320 buildSpvPtrcast(
I->getParent()->getParent(),
Op, KnownElemTy);
1321 if (OpIt.second == std::numeric_limits<unsigned>::max())
1324 I->setOperand(OpIt.second, PtrCastI);
1327 TypeValidated.insert(
I);
1330void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
1335 if (isAssignTypeInstr(U)) {
1336 B.SetInsertPoint(U);
1337 SmallVector<Value *, 2>
Args = {
New,
U->getOperand(1)};
1338 CallInst *AssignCI =
1339 B.CreateIntrinsic(Intrinsic::spv_assign_type, {
New->getType()},
Args);
1341 U->eraseFromParent();
1344 U->replaceUsesOfWith(Old, New);
1349 New->copyMetadata(*Old);
1353void SPIRVEmitIntrinsics::preprocessUndefs(
IRBuilder<> &
B) {
1354 std::queue<Instruction *> Worklist;
1358 while (!Worklist.empty()) {
1360 bool BPrepared =
false;
1363 for (
auto &
Op :
I->operands()) {
1365 if (!AggrUndef || !
Op->getType()->isAggregateType())
1372 auto *IntrUndef =
B.CreateIntrinsic(Intrinsic::spv_undef, {});
1373 Worklist.push(IntrUndef);
1374 I->replaceUsesOfWith(
Op, IntrUndef);
1375 AggrConsts[IntrUndef] = AggrUndef;
1376 AggrConstTypes[IntrUndef] = AggrUndef->getType();
1381void SPIRVEmitIntrinsics::preprocessCompositeConstants(
IRBuilder<> &
B) {
1382 std::queue<Instruction *> Worklist;
1386 while (!Worklist.empty()) {
1387 auto *
I = Worklist.front();
1390 bool KeepInst =
false;
1391 for (
const auto &
Op :
I->operands()) {
1393 Type *ResTy =
nullptr;
1396 ResTy = COp->getType();
1408 ResTy =
Op->getType()->isVectorTy() ? COp->getType() :
B.getInt32Ty();
1413 for (
unsigned i = 0; i < COp->getNumElements(); ++i)
1414 Args.push_back(COp->getElementAsConstant(i));
1418 IsPhi ?
B.SetInsertPointPastAllocas(
I->getParent()->getParent())
1419 :
B.SetInsertPoint(
I);
1423 B.CreateIntrinsic(Intrinsic::spv_const_composite, {ResTy}, {
Args});
1427 AggrConsts[CI] = AggrConst;
1428 AggrConstTypes[CI] = deduceNestedTypeHelper(AggrConst,
false);
1440 B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {
I->getType()},
1445 unsigned RoundingModeDeco,
1452 ConstantInt::get(
Int32Ty, SPIRV::Decoration::FPRoundingMode)),
1461 MDNode *SaturatedConversionNode =
1463 Int32Ty, SPIRV::Decoration::SaturatedConversion))});
1470 if (Fu->isIntrinsic()) {
1471 unsigned const int IntrinsicId = Fu->getIntrinsicID();
1472 switch (IntrinsicId) {
1473 case Intrinsic::fptosi_sat:
1474 case Intrinsic::fptoui_sat:
1493 MDString *ConstraintString =
MDString::get(Ctx,
IA->getConstraintString());
1501 B.SetInsertPoint(&
Call);
1502 B.CreateIntrinsic(Intrinsic::spv_inline_asm, {
Args});
1507void SPIRVEmitIntrinsics::useRoundingMode(ConstrainedFPIntrinsic *FPI,
1510 if (!
RM.has_value())
1512 unsigned RoundingModeDeco = std::numeric_limits<unsigned>::max();
1513 switch (
RM.value()) {
1517 case RoundingMode::NearestTiesToEven:
1518 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTE;
1520 case RoundingMode::TowardNegative:
1521 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTN;
1523 case RoundingMode::TowardPositive:
1524 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTP;
1526 case RoundingMode::TowardZero:
1527 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTZ;
1529 case RoundingMode::Dynamic:
1530 case RoundingMode::NearestTiesToAway:
1534 if (RoundingModeDeco == std::numeric_limits<unsigned>::max())
1540Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &
I) {
1543 B.SetInsertPoint(&
I);
1546 for (
auto &
Op :
I.operands()) {
1547 if (
Op.get()->getType()->isSized()) {
1556 CallInst *NewI =
B.CreateIntrinsic(Intrinsic::spv_switch,
1557 {
I.getOperand(0)->getType()}, {
Args});
1561 I.eraseFromParent();
1564 B.SetInsertPoint(ParentBB);
1565 IndirectBrInst *BrI =
B.CreateIndirectBr(
1568 for (BasicBlock *BBCase : BBCases)
1574 if (
GEP->getNumIndices() == 0)
1577 return CI->getZExtValue() == 0;
1582Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &
I) {
1584 B.SetInsertPoint(&
I);
1592 if (
I.getSourceElementType() ==
1593 IntegerType::getInt8Ty(CurrF->
getContext())) {
1594 return buildLogicalAccessChainFromGEP(
I);
1599 Value *PtrOp =
I.getPointerOperand();
1600 Type *SrcElemTy =
I.getSourceElementType();
1601 Type *DeducedPointeeTy = deduceElementType(PtrOp,
true);
1604 if (ArrTy->getElementType() == SrcElemTy) {
1606 Type *FirstIdxType =
I.getOperand(1)->getType();
1607 NewIndices.
push_back(ConstantInt::get(FirstIdxType, 0));
1608 for (
Value *Idx :
I.indices())
1613 Args.push_back(
B.getInt1(
I.isInBounds()));
1614 Args.push_back(
I.getPointerOperand());
1617 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_gep, {
Types}, {
Args});
1618 replaceAllUsesWithAndErase(
B, &
I, NewI);
1626 Args.push_back(
B.getInt1(
I.isInBounds()));
1628 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_gep, {
Types}, {
Args});
1629 replaceAllUsesWithAndErase(
B, &
I, NewI);
1633Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &
I) {
1635 B.SetInsertPoint(&
I);
1644 I.eraseFromParent();
1650 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_bitcast, {
Types}, {
Args});
1651 replaceAllUsesWithAndErase(
B, &
I, NewI);
1655void SPIRVEmitIntrinsics::insertAssignPtrTypeTargetExt(
1657 Type *VTy =
V->getType();
1662 if (ElemTy != AssignedType)
1675 if (CurrentType == AssignedType)
1682 " for value " +
V->getName(),
1690void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
1691 Instruction *
I,
Value *Pointer,
Type *ExpectedElementType,
1693 TypeValidated.insert(
I);
1696 Type *PointerElemTy = deduceElementTypeHelper(Pointer,
false);
1697 if (PointerElemTy == ExpectedElementType ||
1703 MetadataAsValue *VMD =
buildMD(ExpectedElementVal);
1705 bool FirstPtrCastOrAssignPtrType =
true;
1711 for (
auto User :
Pointer->users()) {
1714 (
II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
1715 II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
1716 II->getOperand(0) != Pointer)
1721 FirstPtrCastOrAssignPtrType =
false;
1722 if (
II->getOperand(1) != VMD ||
1729 if (
II->getIntrinsicID() != Intrinsic::spv_ptrcast)
1734 if (
II->getParent() !=
I->getParent())
1737 I->setOperand(OperandToReplace,
II);
1743 if (FirstPtrCastOrAssignPtrType) {
1748 }
else if (isTodoType(Pointer)) {
1749 eraseTodoType(Pointer);
1756 DenseSet<std::pair<Value *, Value *>> VisitedSubst{
1757 std::make_pair(
I, Pointer)};
1759 propagateElemType(Pointer, PrevElemTy, VisitedSubst);
1771 auto *PtrCastI =
B.CreateIntrinsic(Intrinsic::spv_ptrcast, {
Types},
Args);
1777void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(Instruction *
I,
1782 replacePointerOperandWithPtrCast(
1783 I,
SI->getValueOperand(), IntegerType::getInt8Ty(CurrF->
getContext()),
1789 Type *OpTy =
Op->getType();
1792 if (OpTy ==
Op->getType())
1793 OpTy = deduceElementTypeByValueDeep(OpTy,
Op,
false);
1794 replacePointerOperandWithPtrCast(
I, Pointer, OpTy, 1,
B);
1799 Type *OpTy = LI->getType();
1804 Type *NewOpTy = OpTy;
1805 OpTy = deduceElementTypeByValueDeep(OpTy, LI,
false);
1806 if (OpTy == NewOpTy)
1807 insertTodoType(Pointer);
1810 replacePointerOperandWithPtrCast(
I, Pointer, OpTy, 0,
B);
1815 Type *OpTy =
nullptr;
1827 OpTy = GEPI->getSourceElementType();
1829 replacePointerOperandWithPtrCast(
I, Pointer, OpTy, 0,
B);
1831 insertTodoType(Pointer);
1843 std::string DemangledName =
1847 bool HaveTypes =
false;
1865 for (User *U : CalledArg->
users()) {
1867 if ((ElemTy = deduceElementTypeHelper(Inst,
false)) !=
nullptr)
1873 HaveTypes |= ElemTy !=
nullptr;
1878 if (DemangledName.empty() && !HaveTypes)
1896 Type *ExpectedType =
1898 if (!ExpectedType && !DemangledName.empty())
1899 ExpectedType = SPIRV::parseBuiltinCallArgumentBaseType(
1900 DemangledName,
OpIdx,
I->getContext());
1901 if (!ExpectedType || ExpectedType->
isVoidTy())
1909 replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType,
OpIdx,
B);
1913Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &
I) {
1920 I.getOperand(1)->getType(),
1921 I.getOperand(2)->getType()};
1923 B.SetInsertPoint(&
I);
1925 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_insertelt, {
Types}, {
Args});
1926 replaceAllUsesWithAndErase(
B, &
I, NewI);
1931SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &
I) {
1938 B.SetInsertPoint(&
I);
1940 I.getIndexOperand()->getType()};
1941 SmallVector<Value *, 2>
Args = {
I.getVectorOperand(),
I.getIndexOperand()};
1942 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_extractelt, {
Types}, {
Args});
1943 replaceAllUsesWithAndErase(
B, &
I, NewI);
1947Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &
I) {
1949 B.SetInsertPoint(&
I);
1952 Value *AggregateOp =
I.getAggregateOperand();
1956 Args.push_back(AggregateOp);
1957 Args.push_back(
I.getInsertedValueOperand());
1958 for (
auto &
Op :
I.indices())
1959 Args.push_back(
B.getInt32(
Op));
1961 B.CreateIntrinsic(Intrinsic::spv_insertv, {
Types}, {
Args});
1962 replaceMemInstrUses(&
I, NewI,
B);
1966Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &
I) {
1967 if (
I.getAggregateOperand()->getType()->isAggregateType())
1970 B.SetInsertPoint(&
I);
1972 for (
auto &
Op :
I.indices())
1973 Args.push_back(
B.getInt32(
Op));
1975 B.CreateIntrinsic(Intrinsic::spv_extractv, {
I.getType()}, {
Args});
1976 replaceAllUsesWithAndErase(
B, &
I, NewI);
1980Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &
I) {
1981 if (!
I.getType()->isAggregateType())
1984 B.SetInsertPoint(&
I);
1985 TrackConstants =
false;
1990 B.CreateIntrinsic(Intrinsic::spv_load, {
I.getOperand(0)->getType()},
1991 {
I.getPointerOperand(),
B.getInt16(Flags),
1992 B.getInt8(
I.getAlign().value())});
1993 replaceMemInstrUses(&
I, NewI,
B);
1997Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &
I) {
2001 B.SetInsertPoint(&
I);
2002 TrackConstants =
false;
2006 auto *PtrOp =
I.getPointerOperand();
2007 auto *NewI =
B.CreateIntrinsic(
2008 Intrinsic::spv_store, {
I.getValueOperand()->getType(), PtrOp->
getType()},
2009 {
I.getValueOperand(), PtrOp,
B.getInt16(Flags),
2010 B.getInt8(
I.getAlign().value())});
2012 I.eraseFromParent();
2016Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &
I) {
2017 Value *ArraySize =
nullptr;
2018 if (
I.isArrayAllocation()) {
2021 SPIRV::Extension::SPV_INTEL_variable_length_array))
2023 "array allocation: this instruction requires the following "
2024 "SPIR-V extension: SPV_INTEL_variable_length_array",
2026 ArraySize =
I.getArraySize();
2029 B.SetInsertPoint(&
I);
2030 TrackConstants =
false;
2031 Type *PtrTy =
I.getType();
2034 ?
B.CreateIntrinsic(Intrinsic::spv_alloca_array,
2035 {PtrTy, ArraySize->
getType()},
2036 {ArraySize,
B.getInt8(
I.getAlign().value())})
2037 :
B.CreateIntrinsic(
Intrinsic::spv_alloca, {PtrTy},
2038 {
B.getInt8(
I.getAlign().value())});
2039 replaceAllUsesWithAndErase(
B, &
I, NewI);
2043Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &
I) {
2044 assert(
I.getType()->isAggregateType() &&
"Aggregate result is expected");
2046 B.SetInsertPoint(&
I);
2048 Args.push_back(
B.getInt32(
2049 static_cast<uint32_t
>(
getMemScope(
I.getContext(),
I.getSyncScopeID()))));
2050 Args.push_back(
B.getInt32(
2052 Args.push_back(
B.getInt32(
2054 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
2055 {
I.getPointerOperand()->getType()}, {
Args});
2056 replaceMemInstrUses(&
I, NewI,
B);
2060Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &
I) {
2062 B.SetInsertPoint(&
I);
2063 B.CreateIntrinsic(Intrinsic::spv_unreachable, {});
2067void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV,
2070 static const StringSet<> ArtificialGlobals{
"llvm.global.annotations",
2071 "llvm.compiler.used"};
2081 deduceElementTypeHelper(&GV,
false);
2085 auto *InitInst =
B.CreateIntrinsic(Intrinsic::spv_init_global,
2087 InitInst->setArgOperand(1, Init);
2090 B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.
getType(), &GV);
2096bool SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *
I,
2098 bool UnknownElemTypeI8) {
2104 if (
Type *ElemTy = deduceElementType(
I, UnknownElemTypeI8)) {
2111void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *
I,
2114 static StringMap<unsigned> ResTypeWellKnown = {
2115 {
"async_work_group_copy", WellKnownTypes::Event},
2116 {
"async_work_group_strided_copy", WellKnownTypes::Event},
2117 {
"__spirv_GroupAsyncCopy", WellKnownTypes::Event}};
2121 bool IsKnown =
false;
2126 std::string DemangledName =
2129 if (DemangledName.length() > 0)
2131 SPIRV::lookupBuiltinNameHelper(DemangledName, &DecorationId);
2132 auto ResIt = ResTypeWellKnown.
find(DemangledName);
2133 if (ResIt != ResTypeWellKnown.
end()) {
2136 switch (ResIt->second) {
2137 case WellKnownTypes::Event:
2144 switch (DecorationId) {
2147 case FPDecorationId::SAT:
2150 case FPDecorationId::RTE:
2152 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTE,
B);
2154 case FPDecorationId::RTZ:
2156 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTZ,
B);
2158 case FPDecorationId::RTP:
2160 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTP,
B);
2162 case FPDecorationId::RTN:
2164 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTN,
B);
2170 Type *Ty =
I->getType();
2173 Type *TypeToAssign = Ty;
2175 if (
II->getIntrinsicID() == Intrinsic::spv_const_composite ||
2176 II->getIntrinsicID() == Intrinsic::spv_undef) {
2177 auto It = AggrConstTypes.find(
II);
2178 if (It == AggrConstTypes.end())
2180 TypeToAssign = It->second;
2186 for (
const auto &
Op :
I->operands()) {
2193 Type *OpTy =
Op->getType();
2195 CallInst *AssignCI =
2200 Type *OpTy =
Op->getType();
2215 CallInst *AssignCI =
2225bool SPIRVEmitIntrinsics::shouldTryToAddMemAliasingDecoration(
2226 Instruction *Inst) {
2228 if (!STI->
canUseExtension(SPIRV::Extension::SPV_INTEL_memory_access_aliasing))
2239 case Intrinsic::spv_load:
2240 case Intrinsic::spv_store:
2247 const std::string
Prefix =
"__spirv_Atomic";
2248 const bool IsAtomic =
Name.find(Prefix) == 0;
2256void SPIRVEmitIntrinsics::insertSpirvDecorations(Instruction *
I,
2258 if (MDNode *MD =
I->getMetadata(
"spirv.Decorations")) {
2260 B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {
I->getType()},
2265 auto processMemAliasingDecoration = [&](
unsigned Kind) {
2266 if (MDNode *AliasListMD =
I->getMetadata(Kind)) {
2267 if (shouldTryToAddMemAliasingDecoration(
I)) {
2268 uint32_t Dec =
Kind == LLVMContext::MD_alias_scope
2269 ? SPIRV::Decoration::AliasScopeINTEL
2270 : SPIRV::Decoration::NoAliasINTEL;
2272 I, ConstantInt::get(
B.getInt32Ty(), Dec),
2275 B.CreateIntrinsic(Intrinsic::spv_assign_aliasing_decoration,
2276 {
I->getType()}, {
Args});
2280 processMemAliasingDecoration(LLVMContext::MD_alias_scope);
2281 processMemAliasingDecoration(LLVMContext::MD_noalias);
2284 if (MDNode *MD =
I->getMetadata(LLVMContext::MD_fpmath)) {
2286 bool AllowFPMaxError =
2288 if (!AllowFPMaxError)
2292 B.CreateIntrinsic(Intrinsic::spv_assign_fpmaxerror_decoration,
2301 &FPFastMathDefaultInfoMap,
2303 auto it = FPFastMathDefaultInfoMap.
find(
F);
2304 if (it != FPFastMathDefaultInfoMap.
end())
2312 SPIRV::FPFastMathMode::None);
2314 SPIRV::FPFastMathMode::None);
2316 SPIRV::FPFastMathMode::None);
2317 return FPFastMathDefaultInfoMap[
F] = std::move(FPFastMathDefaultInfoVec);
2323 size_t BitWidth = Ty->getScalarSizeInBits();
2327 assert(Index >= 0 && Index < 3 &&
2328 "Expected FPFastMathDefaultInfo for half, float, or double");
2329 assert(FPFastMathDefaultInfoVec.
size() == 3 &&
2330 "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
2331 return FPFastMathDefaultInfoVec[Index];
2334void SPIRVEmitIntrinsics::insertConstantsForFPFastMathDefault(
Module &M) {
2336 if (!
ST->canUseExtension(SPIRV::Extension::SPV_KHR_float_controls2))
2345 auto Node =
M.getNamedMetadata(
"spirv.ExecutionMode");
2347 if (!
M.getNamedMetadata(
"opencl.enable.FP_CONTRACT")) {
2355 ConstantInt::get(Type::getInt32Ty(
M.getContext()), 0);
2358 [[maybe_unused]] GlobalVariable *GV =
2359 new GlobalVariable(M,
2360 Type::getInt32Ty(
M.getContext()),
2374 DenseMap<Function *, SPIRV::FPFastMathDefaultInfoVector>
2375 FPFastMathDefaultInfoMap;
2377 for (
unsigned i = 0; i <
Node->getNumOperands(); i++) {
2386 if (EM == SPIRV::ExecutionMode::FPFastMathDefault) {
2388 "Expected 4 operands for FPFastMathDefault");
2394 SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
2396 SPIRV::FPFastMathDefaultInfo &
Info =
2399 Info.FPFastMathDefault =
true;
2400 }
else if (EM == SPIRV::ExecutionMode::ContractionOff) {
2402 "Expected no operands for ContractionOff");
2406 SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
2408 for (SPIRV::FPFastMathDefaultInfo &
Info : FPFastMathDefaultInfoVec) {
2409 Info.ContractionOff =
true;
2411 }
else if (EM == SPIRV::ExecutionMode::SignedZeroInfNanPreserve) {
2413 "Expected 1 operand for SignedZeroInfNanPreserve");
2414 unsigned TargetWidth =
2419 SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
2423 assert(Index >= 0 && Index < 3 &&
2424 "Expected FPFastMathDefaultInfo for half, float, or double");
2425 assert(FPFastMathDefaultInfoVec.
size() == 3 &&
2426 "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
2427 FPFastMathDefaultInfoVec[
Index].SignedZeroInfNanPreserve =
true;
2431 std::unordered_map<unsigned, GlobalVariable *> GlobalVars;
2432 for (
auto &[Func, FPFastMathDefaultInfoVec] : FPFastMathDefaultInfoMap) {
2433 if (FPFastMathDefaultInfoVec.
empty())
2436 for (
const SPIRV::FPFastMathDefaultInfo &
Info : FPFastMathDefaultInfoVec) {
2437 assert(
Info.Ty &&
"Expected target type for FPFastMathDefaultInfo");
2440 if (Flags == SPIRV::FPFastMathMode::None && !
Info.ContractionOff &&
2441 !
Info.SignedZeroInfNanPreserve && !
Info.FPFastMathDefault)
2445 if (
Info.ContractionOff && (Flags & SPIRV::FPFastMathMode::AllowContract))
2447 "and AllowContract");
2449 if (
Info.SignedZeroInfNanPreserve &&
2451 (SPIRV::FPFastMathMode::NotNaN | SPIRV::FPFastMathMode::NotInf |
2452 SPIRV::FPFastMathMode::NSZ))) {
2453 if (
Info.FPFastMathDefault)
2455 "SignedZeroInfNanPreserve but at least one of "
2456 "NotNaN/NotInf/NSZ is enabled.");
2459 if ((Flags & SPIRV::FPFastMathMode::AllowTransform) &&
2460 !((Flags & SPIRV::FPFastMathMode::AllowReassoc) &&
2461 (Flags & SPIRV::FPFastMathMode::AllowContract))) {
2463 "AllowTransform requires AllowReassoc and "
2464 "AllowContract to be set.");
2467 auto it = GlobalVars.find(Flags);
2468 GlobalVariable *GV =
nullptr;
2469 if (it != GlobalVars.end()) {
2475 ConstantInt::get(Type::getInt32Ty(
M.getContext()), Flags);
2478 GV =
new GlobalVariable(M,
2479 Type::getInt32Ty(
M.getContext()),
2484 GlobalVars[
Flags] = GV;
2490void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *
I,
2493 bool IsConstComposite =
2494 II &&
II->getIntrinsicID() == Intrinsic::spv_const_composite;
2495 if (IsConstComposite && TrackConstants) {
2497 auto t = AggrConsts.find(
I);
2498 assert(t != AggrConsts.end());
2501 {
II->getType(),
II->getType()}, t->second,
I, {},
B);
2503 NewOp->setArgOperand(0,
I);
2506 for (
const auto &
Op :
I->operands()) {
2510 unsigned OpNo =
Op.getOperandNo();
2511 if (
II && ((
II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
2512 (
II->paramHasAttr(OpNo, Attribute::ImmArg))))
2516 IsPhi ?
B.SetInsertPointPastAllocas(
I->getParent()->getParent())
2517 :
B.SetInsertPoint(
I);
2520 Type *OpTy =
Op->getType();
2528 {OpTy, OpTyVal->
getType()},
Op, OpTyVal, {},
B);
2530 if (!IsConstComposite &&
isPointerTy(OpTy) && OpElemTy !=
nullptr &&
2531 OpElemTy != IntegerType::getInt8Ty(
I->getContext())) {
2533 SmallVector<Value *, 2>
Args = {
2536 CallInst *PtrCasted =
2537 B.CreateIntrinsic(Intrinsic::spv_ptrcast, {
Types},
Args);
2542 I->setOperand(OpNo, NewOp);
2544 if (Named.insert(
I).second)
2548Type *SPIRVEmitIntrinsics::deduceFunParamElementType(Function *
F,
2550 std::unordered_set<Function *> FVisited;
2551 return deduceFunParamElementType(
F,
OpIdx, FVisited);
2554Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
2555 Function *
F,
unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
2557 if (!FVisited.insert(
F).second)
2560 std::unordered_set<Value *> Visited;
2563 for (User *U :
F->users()) {
2575 if (
Type *Ty = deduceElementTypeHelper(OpArg, Visited,
false))
2578 for (User *OpU : OpArg->
users()) {
2580 if (!Inst || Inst == CI)
2583 if (
Type *Ty = deduceElementTypeHelper(Inst, Visited,
false))
2590 if (FVisited.find(OuterF) != FVisited.end())
2592 for (
unsigned i = 0; i < OuterF->
arg_size(); ++i) {
2593 if (OuterF->
getArg(i) == OpArg) {
2594 Lookup.push_back(std::make_pair(OuterF, i));
2601 for (
auto &Pair :
Lookup) {
2602 if (
Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
2609void SPIRVEmitIntrinsics::processParamTypesByFunHeader(Function *
F,
2611 B.SetInsertPointPastAllocas(
F);
2625 for (User *U :
F->users()) {
2641 for (User *U : Arg->
users()) {
2645 CI->
getParent()->getParent() == CurrF) {
2647 deduceOperandElementTypeFunctionPointer(CI,
Ops, ElemTy,
false);
2658void SPIRVEmitIntrinsics::processParamTypes(Function *
F,
IRBuilder<> &
B) {
2659 B.SetInsertPointPastAllocas(
F);
2665 if (!ElemTy && (ElemTy = deduceFunParamElementType(
F,
OpIdx)) !=
nullptr) {
2667 DenseSet<std::pair<Value *, Value *>> VisitedSubst;
2669 propagateElemType(Arg, IntegerType::getInt8Ty(
F->getContext()),
2681 bool IsNewFTy =
false;
2697bool SPIRVEmitIntrinsics::processFunctionPointers(
Module &M) {
2700 if (
F.isIntrinsic())
2702 if (
F.isDeclaration()) {
2703 for (User *U :
F.users()) {
2716 for (User *U :
F.users()) {
2718 if (!
II ||
II->arg_size() != 3 ||
II->getOperand(0) != &
F)
2720 if (
II->getIntrinsicID() == Intrinsic::spv_assign_ptr_type ||
2721 II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
2728 if (Worklist.
empty())
2734 "cannot allocate a name for the internal service function");
2735 LLVMContext &Ctx =
M.getContext();
2743 for (Function *
F : Worklist) {
2745 for (
const auto &Arg :
F->args())
2747 IRB.CreateCall(
F, Args);
2749 IRB.CreateRetVoid();
2755void SPIRVEmitIntrinsics::applyDemangledPtrArgTypes(
IRBuilder<> &
B) {
2756 DenseMap<Function *, CallInst *> Ptrcasts;
2757 for (
auto It : FDeclPtrTys) {
2759 for (
auto *U :
F->users()) {
2764 for (
auto [Idx, ElemTy] : It.second) {
2772 B.SetInsertPointPastAllocas(Arg->
getParent());
2777 replaceUsesOfWithSpvPtrcast(Param,
normalizeType(ElemTy), CI,
2786 .getFirstNonPHIOrDbgOrAlloca());
2807SPIRVEmitIntrinsics::simplifyZeroLengthArrayGepInst(GetElementPtrInst *
GEP) {
2814 Type *SrcTy =
GEP->getSourceElementType();
2815 SmallVector<Value *, 8> Indices(
GEP->indices());
2817 if (ArrTy && ArrTy->getNumElements() == 0 &&
2820 Indices.erase(Indices.begin());
2821 SrcTy = ArrTy->getElementType();
2822 Value *NewGEP = Builder.CreateGEP(SrcTy,
GEP->getPointerOperand(), Indices,
2823 "",
GEP->getNoWrapFlags());
2830bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
2831 if (
Func.isDeclaration())
2835 GR =
ST.getSPIRVGlobalRegistry();
2839 ST.canUseExtension(SPIRV::Extension::SPV_INTEL_function_pointers);
2844 AggrConstTypes.clear();
2849 SmallPtrSet<Instruction *, 4> DeadInsts;
2852 if (!
Ref || GR->findDeducedElementType(
Ref))
2855 GetElementPtrInst *NewGEP = simplifyZeroLengthArrayGepInst(
Ref);
2857 Ref->replaceAllUsesWith(NewGEP);
2861 if (
Type *GepTy = getGEPType(
Ref))
2865 for (
auto *
I : DeadInsts) {
2866 assert(
I->use_empty() &&
"Dead instruction should not have any uses left");
2867 I->eraseFromParent();
2870 processParamTypesByFunHeader(CurrF,
B);
2879 Type *ElTy =
SI->getValueOperand()->getType();
2884 B.SetInsertPoint(&
Func.getEntryBlock(),
Func.getEntryBlock().begin());
2885 for (
auto &GV :
Func.getParent()->globals())
2886 processGlobalValue(GV,
B);
2888 preprocessUndefs(
B);
2889 preprocessCompositeConstants(
B);
2893 applyDemangledPtrArgTypes(
B);
2896 for (
auto &
I : Worklist) {
2898 if (isConvergenceIntrinsic(
I))
2901 bool Postpone = insertAssignPtrTypeIntrs(
I,
B,
false);
2903 insertAssignTypeIntrs(
I,
B);
2904 insertPtrCastOrAssignTypeInstr(
I,
B);
2908 if (Postpone && !GR->findAssignPtrTypeInstr(
I))
2909 insertAssignPtrTypeIntrs(
I,
B,
true);
2912 useRoundingMode(FPI,
B);
2917 SmallPtrSet<Instruction *, 4> IncompleteRets;
2919 deduceOperandElementType(&
I, &IncompleteRets);
2923 for (BasicBlock &BB : Func)
2924 for (PHINode &Phi : BB.
phis())
2926 deduceOperandElementType(&Phi,
nullptr);
2928 for (
auto *
I : Worklist) {
2929 TrackConstants =
true;
2939 if (isConvergenceIntrinsic(
I))
2943 processInstrAfterVisit(
I,
B);
2950bool SPIRVEmitIntrinsics::postprocessTypes(
Module &M) {
2951 if (!GR || TodoTypeSz == 0)
2954 unsigned SzTodo = TodoTypeSz;
2955 DenseMap<Value *, SmallPtrSet<Value *, 4>> ToProcess;
2960 CallInst *AssignCI = GR->findAssignPtrTypeInstr(
Op);
2961 Type *KnownTy = GR->findDeducedElementType(
Op);
2962 if (!KnownTy || !AssignCI)
2968 std::unordered_set<Value *> Visited;
2969 if (
Type *ElemTy = deduceElementTypeHelper(
Op, Visited,
false,
true)) {
2970 if (ElemTy != KnownTy) {
2971 DenseSet<std::pair<Value *, Value *>> VisitedSubst;
2972 propagateElemType(CI, ElemTy, VisitedSubst);
2979 if (
Op->hasUseList()) {
2980 for (User *U :
Op->users()) {
2987 if (TodoTypeSz == 0)
2992 SmallPtrSet<Instruction *, 4> IncompleteRets;
2994 auto It = ToProcess.
find(&
I);
2995 if (It == ToProcess.
end())
2997 It->second.remove_if([
this](
Value *V) {
return !isTodoType(V); });
2998 if (It->second.size() == 0)
3000 deduceOperandElementType(&
I, &IncompleteRets, &It->second,
true);
3001 if (TodoTypeSz == 0)
3006 return SzTodo > TodoTypeSz;
3010void SPIRVEmitIntrinsics::parseFunDeclarations(
Module &M) {
3012 if (!
F.isDeclaration() ||
F.isIntrinsic())
3016 if (DemangledName.empty())
3020 auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
3021 DemangledName,
ST.getPreferredInstructionSet());
3022 if (Opcode != SPIRV::OpGroupAsyncCopy)
3025 SmallVector<unsigned> Idxs;
3034 LLVMContext &Ctx =
F.getContext();
3036 SPIRV::parseBuiltinTypeStr(TypeStrs, DemangledName, Ctx);
3037 if (!TypeStrs.
size())
3040 for (
unsigned Idx : Idxs) {
3041 if (Idx >= TypeStrs.
size())
3044 SPIRV::parseBuiltinCallArgumentType(TypeStrs[Idx].trim(), Ctx))
3047 FDeclPtrTys[&
F].push_back(std::make_pair(Idx, ElemTy));
3052bool SPIRVEmitIntrinsics::runOnModule(
Module &M) {
3055 parseFunDeclarations(M);
3056 insertConstantsForFPFastMathDefault(M);
3066 if (!
F.isDeclaration() && !
F.isIntrinsic()) {
3068 processParamTypes(&
F,
B);
3072 CanTodoType =
false;
3073 Changed |= postprocessTypes(M);
3076 Changed |= processFunctionPointers(M);
3082 return new SPIRVEmitIntrinsics(TM);
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
Expand Atomic instructions
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
Analysis containing CSE Info
static void replaceAllUsesWith(Value *Old, Value *New, SmallPtrSet< BasicBlock *, 32 > &FreshBBs, bool IsHuge)
Replace all old uses with new ones, and push the updated BBs into FreshBBs.
This file defines the DenseSet and SmallDenseSet classes.
static bool runOnFunction(Function &F, bool PostInlining)
iv Induction Variable Users
const AbstractManglingParser< Derived, Alloc >::OperatorInfo AbstractManglingParser< Derived, Alloc >::Ops[]
Machine Check Debug Module
MachineInstr unsigned OpIdx
uint64_t IntrinsicInst * II
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
static unsigned getNumElements(Type *Ty)
static bool isMemInstrToReplace(Instruction *I)
static bool isAggrConstForceInt32(const Value *V)
static SPIRV::FPFastMathDefaultInfoVector & getOrCreateFPFastMathDefaultInfoVec(const Module &M, DenseMap< Function *, SPIRV::FPFastMathDefaultInfoVector > &FPFastMathDefaultInfoMap, Function *F)
static Type * getAtomicElemTy(SPIRVGlobalRegistry *GR, Instruction *I, Value *PointerOperand)
static void reportFatalOnTokenType(const Instruction *I)
static void setInsertPointAfterDef(IRBuilder<> &B, Instruction *I)
static void emitAssignName(Instruction *I, IRBuilder<> &B)
static Type * getPointeeTypeByCallInst(StringRef DemangledName, Function *CalledF, unsigned OpIdx)
static void createRoundingModeDecoration(Instruction *I, unsigned RoundingModeDeco, IRBuilder<> &B)
static void createDecorationIntrinsic(Instruction *I, MDNode *Node, IRBuilder<> &B)
static SPIRV::FPFastMathDefaultInfo & getFPFastMathDefaultInfo(SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec, const Type *Ty)
static bool IsKernelArgInt8(Function *F, StoreInst *SI)
static void addSaturatedDecorationToIntrinsic(Instruction *I, IRBuilder<> &B)
static bool isFirstIndexZero(const GetElementPtrInst *GEP)
static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I)
static FunctionType * getFunctionPointerElemType(Function *F, SPIRVGlobalRegistry *GR)
static void createSaturatedConversionDecoration(Instruction *I, IRBuilder<> &B)
static Type * restoreMutatedType(SPIRVGlobalRegistry *GR, Instruction *I, Type *Ty)
static bool requireAssignType(Instruction *I)
void visit(MachineFunction &MF, MachineBasicBlock &Start, std::function< void(MachineBasicBlock *)> op)
static void insertSpirvDecorations(MachineFunction &MF, SPIRVGlobalRegistry *GR, MachineIRBuilder MIB)
#define SPIRV_BACKEND_SERVICE_FUN_NAME
StringSet - A set-like wrapper for the StringMap.
static SymbolRef::Type getType(const Symbol *Sym)
static std::optional< unsigned > getOpcode(ArrayRef< VPValue * > Values)
Returns the opcode of Values or ~0 if they do not all agree.
static int Lookup(ArrayRef< TableEntry > Table, unsigned Opcode)
This class represents an incoming formal argument to a Function.
const Function * getParent() const
static unsigned getPointerOperandIndex()
static unsigned getPointerOperandIndex()
iterator_range< const_phi_iterator > phis() const
Returns a range that iterates over the phis in the basic block.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
LLVM_ABI LLVMContext & getContext() const
Get the context in which this basic block lives.
static LLVM_ABI BlockAddress * get(Function *F, BasicBlock *BB)
Return a BlockAddress for the specified function and basic block.
bool isInlineAsm() const
Check if this call is an inline asm statement.
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
LLVM_ABI bool isIndirectCall() const
Return true if the callsite is an indirect call.
Value * getCalledOperand() const
Value * getArgOperand(unsigned i) const
FunctionType * getFunctionType() const
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned arg_size() const
This class represents a function call, abstracting a target machine's calling convention.
uint64_t getZExtValue() const
Return the constant as a 64-bit unsigned integer value after it has been zero extended as appropriate...
static LLVM_ABI Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
LLVM_ABI std::optional< RoundingMode > getRoundingMode() const
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
static LLVM_ABI FunctionType * get(Type *Result, ArrayRef< Type * > Params, bool isVarArg)
This static method is the primary way of constructing a FunctionType.
void addFnAttr(Attribute::AttrKind Kind)
Add function attributes to this function.
static Function * Create(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
const DataLayout & getDataLayout() const
Get the data layout of the module this function belongs to.
Intrinsic::ID getIntrinsicID() const LLVM_READONLY
getIntrinsicID - This method returns the ID number of the specified function, or Intrinsic::not_intri...
bool isIntrinsic() const
isIntrinsic - Returns true if the function's name starts with "llvm.".
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
Type * getReturnType() const
Returns the type of the ret val.
Argument * getArg(unsigned i) const
an instruction for type-safe pointer arithmetic to access elements of arrays and structs
static LLVM_ABI Type * getTypeAtIndex(Type *Ty, Value *Idx)
Return the type of the element at the given index of an indexable type.
static unsigned getPointerOperandIndex()
PointerType * getType() const
Global values are always pointers.
@ PrivateLinkage
Like Internal, but omit from symbol table.
@ InternalLinkage
Rename collisions when linking (static functions).
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
LLVM_ABI void addDestination(BasicBlock *Dest)
Add a destination.
Base class for instruction visitors.
LLVM_ABI InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
Instruction * user_back()
Specialize the methods defined in Value, as we know that an instruction can only be used by other ins...
LLVM_ABI const Function * getFunction() const
Return the function this instruction belongs to.
LLVM_ABI void copyMetadata(const Instruction &SrcInst, ArrayRef< unsigned > WL=ArrayRef< unsigned >())
Copy metadata from SrcInst to this instruction.
This is an important class for using LLVM in a threaded context.
static unsigned getPointerOperandIndex()
const MDOperand & getOperand(unsigned I) const
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
unsigned getNumOperands() const
Return number of MDNode operands.
static LLVM_ABI MDString * get(LLVMContext &Context, StringRef Str)
Flags
Flags values. These may be or'd together.
ModulePass class - This class is used to implement unstructured interprocedural optimizations and ana...
A Module instance is used to store all the information related to an LLVM module.
void addAssignPtrTypeInstr(Value *Val, CallInst *AssignPtrTyCI)
void buildAssignPtr(IRBuilder<> &B, Type *ElemTy, Value *Arg)
Type * findDeducedCompositeType(const Value *Val)
void replaceAllUsesWith(Value *Old, Value *New, bool DeleteOld=true)
void addDeducedElementType(Value *Val, Type *Ty)
void addReturnType(const Function *ArgF, TypedPointerType *DerivedTy)
Type * findMutated(const Value *Val)
void addDeducedCompositeType(Value *Val, Type *Ty)
void buildAssignType(IRBuilder<> &B, Type *Ty, Value *Arg)
Type * findDeducedElementType(const Value *Val)
void updateAssignType(CallInst *AssignCI, Value *Arg, Value *OfType)
CallInst * findAssignPtrTypeInstr(const Value *Val)
const SPIRVTargetLowering * getTargetLowering() const override
bool isLogicalSPIRV() const
bool canUseExtension(SPIRV::Extension::Extension E) const
const SPIRVSubtarget * getSubtargetImpl() const
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
bool contains(ConstPtrType Ptr) const
reference emplace_back(ArgTypes &&... Args)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
An instruction for storing to memory.
static unsigned getPointerOperandIndex()
iterator find(StringRef Key)
StringRef - Represent a constant reference to a string, i.e.
bool starts_with(StringRef Prefix) const
Check if this string starts with the given Prefix.
bool contains(StringRef key) const
Check if the set contains the given key.
static LLVM_ABI StructType * create(LLVMContext &Context, StringRef Name)
This creates an identified struct.
static LLVM_ABI TargetExtType * get(LLVMContext &Context, StringRef Name, ArrayRef< Type * > Types={}, ArrayRef< unsigned > Ints={})
Return a target extension type having the specified name and optional type and integer parameters.
const STC & getSubtarget(const Function &F) const
This method returns a pointer to the specified type of TargetSubtargetInfo.
The instances of the Type class are immutable: once they are created, they are never changed.
bool isVectorTy() const
True if this is an instance of VectorType.
bool isArrayTy() const
True if this is an instance of ArrayType.
static LLVM_ABI IntegerType * getInt32Ty(LLVMContext &C)
bool isPointerTy() const
True if this is an instance of PointerType.
Type * getArrayElementType() const
LLVM_ABI StringRef getTargetExtName() const
static LLVM_ABI IntegerType * getInt8Ty(LLVMContext &C)
bool isStructTy() const
True if this is an instance of StructType.
bool isTargetExtTy() const
Return true if this is a target extension type.
bool isAggregateType() const
Return true if the type is an aggregate type.
static LLVM_ABI Type * getDoubleTy(LLVMContext &C)
static LLVM_ABI Type * getFloatTy(LLVMContext &C)
static LLVM_ABI Type * getHalfTy(LLVMContext &C)
bool isVoidTy() const
Return true if this is 'void'.
static LLVM_ABI bool isValidElementType(Type *ElemTy)
Return true if the specified type is valid as a element type.
static LLVM_ABI TypedPointerType * get(Type *ElementType, unsigned AddressSpace)
This constructs a pointer to an object of the specified type in a numbered address space.
static LLVM_ABI UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
void setOperand(unsigned i, Value *Val)
LLVM_ABI bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
Value * getOperand(unsigned i) const
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
LLVM_ABI void setName(const Twine &Name)
Change the name of the value.
iterator_range< user_iterator > users()
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
bool contains(const_arg_type_t< ValueT > V) const
Check if the set contains the given element.
const ParentTy * getParent() const
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr char Args[]
Key for Kernel::Metadata::mArgs.
@ SPIR_KERNEL
Used for SPIR kernel functions.
@ BasicBlock
Various leaf nodes.
bool match(Val *V, const Pattern &P)
is_zero m_Zero()
Match any null constant or a vector with all elements equal to 0.
DenseSetImpl< ValueT, DenseMap< ValueT, DenseSetEmpty, ValueInfoT, DenseSetPair< ValueT > >, ValueInfoT > DenseSet
ElementType
The element type of an SRV or UAV resource.
@ User
could "use" a pointer
NodeAddr< PhiNode * > Phi
NodeAddr< NodeBase * > Node
NodeAddr< FuncNode * > Func
friend class Instruction
Iterator for Instructions in a `BasicBlock.
This is an optimization pass for GlobalISel generic memory operations.
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
bool getVacantFunctionName(Module &M, std::string &Name)
FunctionAddr VTableAddr Value
bool isTypedPointerWrapper(const TargetExtType *ExtTy)
ModulePass * createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM)
unsigned getPointerAddressSpace(const Type *T)
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
FunctionAddr VTableAddr uintptr_t uintptr_t Int32Ty
CallInst * buildIntrWithMD(Intrinsic::ID IntrID, ArrayRef< Type * > Types, Value *Arg, Value *Arg2, ArrayRef< Constant * > Imms, IRBuilder<> &B)
void append_range(Container &C, Range &&R)
Wrapper function to append range R to container C.
bool isNestedPointer(const Type *Ty)
MetadataAsValue * buildMD(Value *Arg)
std::string getOclOrSpirvBuiltinDemangledName(StringRef Name)
auto reverse(ContainerTy &&C)
Type * getTypedPointerWrapper(Type *ElemTy, unsigned AS)
bool isPointerTy(const Type *T)
LLVM_ABI void report_fatal_error(Error Err, bool gen_crash_diag=true)
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
SPIRV::Scope::Scope getMemScope(LLVMContext &Ctx, SyncScope::ID Id)
@ Ref
The access may reference the value stored in memory.
IRBuilder(LLVMContext &, FolderTy, InserterTy, MDNode *, ArrayRef< OperandBundleDef >) -> IRBuilder< FolderTy, InserterTy >
DWARFExpression::Operation Op
Type * getPointeeTypeByAttr(Argument *Arg)
bool hasPointeeTypeAttr(Argument *Arg)
constexpr unsigned BitWidth
bool isEquivalentTypes(Type *Ty1, Type *Ty2)
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
iterator_range< pointer_iterator< WrappedIteratorT > > make_pointer_range(RangeT &&Range)
bool hasInitializer(const GlobalVariable *GV)
Type * normalizeType(Type *Ty)
@ Enabled
Convert any .debug_str_offsets tables to DWARF64 if needed.
bool isSpvIntrinsic(const MachineInstr &MI, Intrinsic::ID IntrinsicID)
Type * getPointeeType(const Type *Ty)
PoisonValue * getNormalizedPoisonValue(Type *Ty)
bool isUntypedPointerTy(const Type *T)
Type * reconstitutePeeledArrayType(Type *Ty)
SPIRV::MemorySemantics::MemorySemantics getMemSemantics(AtomicOrdering Ord)
static size_t computeFPFastMathDefaultInfoVecIndex(size_t BitWidth)