#include "llvm/IR/IntrinsicsSPIRV.h"

#include <unordered_set>

#define GET_BuiltinGroup_DECL
#include "SPIRVGenTables.inc"
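// Overview (as evidenced by the visitors below): the pass replaces ordinary
// LLVM IR constructs with llvm.spv.* intrinsic calls (spv_assign_type,
// spv_assign_ptr_type, spv_ptrcast, spv_gep, spv_load, spv_store, ...) so
// that pointee- and aggregate-type information survives until IR translation.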
class SPIRVEmitIntrinsics
    : public ModulePass,
      public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
  SPIRVTargetMachine *TM = nullptr;
  SPIRVGlobalRegistry *GR = nullptr;
  Function *CurrF = nullptr;
  bool TrackConstants = true;
  bool HaveFunPtrs = false;
  DenseMap<Instruction *, Constant *> AggrConsts;
  DenseMap<Instruction *, Type *> AggrConstTypes;
  DenseSet<Instruction *> AggrStores;
  std::unordered_set<Value *> Named;

  DenseMap<Function *, SmallVector<std::pair<unsigned, Type *>>> FDeclPtrTys;

  // Bookkeeping for pointers whose element type is not finally deduced yet.
  bool CanTodoType = true;
  unsigned TodoTypeSz = 0;
  DenseMap<Value *, bool> TodoType;
  void insertTodoType(Value *Op) {
    if (CanTodoType) {
      auto It = TodoType.try_emplace(Op, true);
      if (It.second)
        ++TodoTypeSz;
    }
  }
  void eraseTodoType(Value *Op) {
    auto It = TodoType.find(Op);
    if (It != TodoType.end() && It->second) {
      It->second = false;
      --TodoTypeSz;
    }
  }
  bool isTodoType(Value *Op) {
    auto It = TodoType.find(Op);
    return It != TodoType.end() && It->second;
  }
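  // Note (a reading of the three helpers above): TodoType is a tombstone map
  // rather than a plain set. eraseTodoType() flips the flag to false instead
  // of erasing the entry, and TodoTypeSz counts only live entries, so
  // postprocessTypes() can cheaply test `TodoTypeSz == 0` for an early exit
  // while iteration over TodoType still sees retired keys.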
  // A registry of instructions whose operand types were already validated.
  std::unordered_set<Instruction *> TypeValidated;

  enum WellKnownTypes { Event };

  // Deduce element types of untyped pointers.
  Type *deduceElementType(Value *I, bool UnknownElemTypeI8);
  Type *deduceElementTypeHelper(Value *I, bool UnknownElemTypeI8);
  Type *deduceElementTypeHelper(Value *I, std::unordered_set<Value *> &Visited,
                                bool UnknownElemTypeI8,
                                bool IgnoreKnownType = false);
  Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                     bool UnknownElemTypeI8);
  Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                     std::unordered_set<Value *> &Visited,
                                     bool UnknownElemTypeI8);
  Type *deduceElementTypeByUsersDeep(Value *Op,
                                     std::unordered_set<Value *> &Visited,
                                     bool UnknownElemTypeI8);
  void maybeAssignPtrType(Type *&Ty, Value *Op, Type *RefTy,
                          bool UnknownElemTypeI8);

  // Deduce nested types of composites.
  Type *deduceNestedTypeHelper(User *U, bool UnknownElemTypeI8);
  Type *deduceNestedTypeHelper(User *U, Type *OrigTy,
                               std::unordered_set<Value *> &Visited,
                               bool UnknownElemTypeI8);

  // Deduce element types of operands.
  void deduceOperandElementType(Instruction *I,
                                SmallPtrSet<Instruction *, 4> *IncompleteRets,
                                const SmallPtrSet<Value *, 4> *AskOps = nullptr,
                                bool IsPostprocessing = false);

  Type *reconstructType(Value *Op, bool UnknownElemTypeI8,
                        bool IsPostprocessing);
  bool insertAssignPtrTypeIntrs(Instruction *I, IRBuilder<> &B,
                                bool UnknownElemTypeI8);
  void insertAssignPtrTypeTargetExt(TargetExtType *AssignedType, Value *V,
                                    IRBuilder<> &B);
  void replacePointerOperandWithPtrCast(Instruction *I, Value *Pointer,
                                        Type *ExpectedElementType,
                                        unsigned OperandToReplace,
                                        IRBuilder<> &B);
  bool shouldTryToAddMemAliasingDecoration(Instruction *Inst);
  void insertConstantsForFPFastMathDefault(Module &M);
  void processGlobalValue(GlobalVariable &GV, IRBuilder<> &B);
  Type *deduceFunParamElementType(Function *F, unsigned OpIdx);
  Type *deduceFunParamElementType(Function *F, unsigned OpIdx,
                                  std::unordered_set<Function *> &FVisited);

  bool deduceOperandElementTypeCalledFunction(
      CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
      Type *&KnownElemTy, bool &Incomplete);
  void deduceOperandElementTypeFunctionPointer(
      CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
      Type *&KnownElemTy, bool IsPostprocessing);
  bool deduceOperandElementTypeFunctionRet(
      Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
      const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing,
      Type *&KnownElemTy, Value *Op, Function *F);

  CallInst *buildSpvPtrcast(Function *F, Value *Op, Type *ElemTy);
  void replaceUsesOfWithSpvPtrcast(Value *Op, Type *ElemTy, Instruction *I,
                                   DenseMap<Function *, CallInst *> Ptrcasts);
  void propagateElemType(Value *Op, Type *ElemTy,
                         DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
  void
  propagateElemTypeRec(Value *Op, Type *PtrElemTy, Type *CastElemTy,
                       DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
  void propagateElemTypeRec(Value *Op, Type *PtrElemTy, Type *CastElemTy,
                            DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
                            std::unordered_set<Value *> &Visited,
                            DenseMap<Function *, CallInst *> Ptrcasts);

  GetElementPtrInst *simplifyZeroLengthArrayGepInst(GetElementPtrInst *GEP);

  bool runOnFunction(Function &F);
  bool postprocessTypes(Module &M);
  bool processFunctionPointers(Module &M);
  void parseFunDeclarations(Module &M);

  void useRoundingMode(ConstrainedFPIntrinsic *FPI, IRBuilder<> &B);

  // Walks an i8-typed GEP as a byte-offset access chain over the deduced
  // pointee type, invoking the first callback for each literal index and the
  // second one for a dynamic (non-constant) index.
  bool walkLogicalAccessChain(
      GetElementPtrInst &GEP,
      const std::function<void(Type *PointedType, uint64_t Index)>
          &OnLiteralIndexing,
      const std::function<void(Type *PointedType, Value *Index)>
          &OnDynamicIndexing);

  // Returns the GEP result type, either directly or by walking the logical
  // access chain.
  Type *getGEPType(GetElementPtrInst *GEP);
  Type *getGEPTypeLogical(GetElementPtrInst *GEP);

  Instruction *buildLogicalAccessChainFromGEP(GetElementPtrInst &GEP);

public:
  static char ID;
  SPIRVEmitIntrinsics(SPIRVTargetMachine *TM = nullptr)
      : ModulePass(ID), TM(TM) {}
  Instruction *visitGetElementPtrInst(GetElementPtrInst &I);
  Instruction *visitBitCastInst(BitCastInst &I);
  Instruction *visitInsertElementInst(InsertElementInst &I);
  Instruction *visitExtractElementInst(ExtractElementInst &I);
  Instruction *visitInsertValueInst(InsertValueInst &I);
  Instruction *visitExtractValueInst(ExtractValueInst &I);
  Instruction *visitLoadInst(LoadInst &I);
  Instruction *visitStoreInst(StoreInst &I);
  Instruction *visitAllocaInst(AllocaInst &I);
  Instruction *visitAtomicCmpXchgInst(AtomicCmpXchgInst &I);
  Instruction *visitUnreachableInst(UnreachableInst &I);
  Instruction *visitCallInst(CallInst &I);
  Instruction *visitSwitchInst(SwitchInst &I);

  StringRef getPassName() const override { return "SPIRV emit intrinsics"; }

  bool runOnModule(Module &M) override;

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    ModulePass::getAnalysisUsage(AU);
  }
};
static bool isConvergenceIntrinsic(const Instruction *I) {
  const auto *II = dyn_cast<IntrinsicInst>(I);
  if (!II)
    return false;
  return II->getIntrinsicID() == Intrinsic::experimental_convergence_entry ||
         II->getIntrinsicID() == Intrinsic::experimental_convergence_loop ||
         II->getIntrinsicID() == Intrinsic::experimental_convergence_anchor;
}
bool expectIgnoredInIRTranslation(const Instruction *I) {
  const auto *II = dyn_cast<IntrinsicInst>(I);
  if (!II)
    return false;
  switch (II->getIntrinsicID()) {
  case Intrinsic::invariant_start:
  case Intrinsic::spv_resource_handlefrombinding:
  case Intrinsic::spv_resource_getpointer:
    return true;
  default:
    return false;
  }
}

// Returns the source pointer of a chain of spv_ptrcast intrinsics.
static Value *getPointerRoot(Value *V) {
  if (auto *II = dyn_cast<IntrinsicInst>(V)) {
    if (II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
      Value *V = II->getArgOperand(0);
      return getPointerRoot(V);
    }
  }
  return V;
}

char SPIRVEmitIntrinsics::ID = 0;
static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I) {
  if (isa<PHINode>(I))
    B.SetInsertPoint(I->getParent()->getFirstNonPHIOrDbgOrAlloca());
  else
    B.SetInsertPoint(I);
}

static void setInsertPointAfterDef(IRBuilder<> &B, Instruction *I) {
  B.SetCurrentDebugLocation(I->getDebugLoc());
  if (I->getType()->isVoidTy())
    B.SetInsertPoint(I->getNextNode());
  else
    B.SetInsertPoint(*I->getInsertionPointAfterDef());
}

static bool requireAssignType(Instruction *I) {
  if (const auto *Intr = dyn_cast<IntrinsicInst>(I)) {
    switch (Intr->getIntrinsicID()) {
    case Intrinsic::invariant_start:
    case Intrinsic::invariant_end:
      return false;
    }
  }
  return true;
}

static void reportFatalOnTokenType(const Instruction *I) {
  if (I->getType()->isTokenTy())
    report_fatal_error("A token is encountered but SPIR-V without extensions "
                       "does not support token type",
                       false);
}
static void emitAssignName(Instruction *I, IRBuilder<> &B) {
  if (!I->hasName() || I->getType()->isAggregateType() ||
      expectIgnoredInIRTranslation(I))
    return;
  reportFatalOnTokenType(I);
  setInsertPointAfterDef(B, I);
  StringRef Name = I->getName();
  if (Name.starts_with("spv.mutated_callsite"))
    return;
  if (Name.starts_with("spv.named_mutated_callsite"))
    I->setName(Name.substr(Name.rfind('.') + 1));
  std::vector<Value *> Args = {I};
  addStringImm(I->getName(), B, Args);
  B.CreateIntrinsic(Intrinsic::spv_assign_name, {I->getType()}, Args);
}
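// Illustrative effect (sketch; the exact string encoding is handled by
// addStringImm): given
//   %sum = add i32 %a, %b
// the pass appends
//   call void @llvm.spv.assign.name.i32(i32 %sum, <"sum" packed as i32s>)
// so the value's name can be re-emitted as an OpName during translation.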
void SPIRVEmitIntrinsics::replaceAllUsesWith(Value *Src, Value *Dest,
                                             bool DeleteOld) {
  GR->replaceAllUsesWith(Src, Dest, DeleteOld);
  // Update uncomplete type records if any.
  if (isTodoType(Src)) {
    if (DeleteOld)
      eraseTodoType(Src);
    insertTodoType(Dest);
  }
}

void SPIRVEmitIntrinsics::replaceAllUsesWithAndErase(IRBuilder<> &B,
                                                     Instruction *Src,
                                                     Instruction *Dest) {
  replaceAllUsesWith(Src, Dest, false);
  std::string Name = Src->hasName() ? Src->getName().str() : "";
  Src->eraseFromParent();
  if (!Name.empty()) {
    Dest->setName(Name);
    if (Named.insert(Dest).second)
      emitAssignName(Dest, B);
  }
}
Type *SPIRVEmitIntrinsics::reconstructType(Value *Op, bool UnknownElemTypeI8,
                                           bool IsPostprocessing) {
  Type *Ty = Op->getType();
  if (!isUntypedPointerTy(Ty))
    return Ty;
  // try to find the pointee type deduced earlier
  if (Type *ElemTy = GR->findDeducedElementType(Op))
    return getTypedPointerWrapper(ElemTy, getPointerAddressSpace(Ty));
  if (UnknownElemTypeI8) {
    if (!IsPostprocessing)
      insertTodoType(Op);
    return getTypedPointerWrapper(IntegerType::getInt8Ty(Op->getContext()),
                                  getPointerAddressSpace(Ty));
  }
  return nullptr;
}
CallInst *SPIRVEmitIntrinsics::buildSpvPtrcast(Function *F, Value *Op,
                                               Type *ElemTy) {
  IRBuilder<> B(Op->getContext());
  if (auto *OpI = dyn_cast<Instruction>(Op)) {
    // spv_ptrcast's argument Op denotes an instruction with a result, so we
    // may insert the cast right after its definition.
    setInsertPointAfterDef(B, OpI);
  } else if (auto *OpA = dyn_cast<Argument>(Op)) {
    B.SetInsertPointPastAllocas(OpA->getParent());
    B.SetCurrentDebugLocation(DebugLoc());
  } else {
    B.SetInsertPoint(F->getEntryBlock().getFirstNonPHIOrDbgOrAlloca());
  }
  Type *OpTy = Op->getType();
  SmallVector<Type *, 2> Types = {OpTy, OpTy};
  SmallVector<Value *, 2> Args = {Op, buildMD(getNormalizedPoisonValue(ElemTy)),
                                  B.getInt32(getPointerAddressSpace(OpTy))};
  CallInst *PtrCasted =
      B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
  GR->buildAssignPtr(B, ElemTy, PtrCasted);
  return PtrCasted;
}
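// Illustrative IR (sketch): casting %p to pointee type i32 in address space 0
// produces
//   %pc = call ptr @llvm.spv.ptrcast.p0.p0(ptr %p, metadata i32 poison, i32 0)
// followed by an spv_assign_ptr_type record built by GR->buildAssignPtr(), so
// every consumer of %pc sees a pointer with a known element type.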
void SPIRVEmitIntrinsics::replaceUsesOfWithSpvPtrcast(
    Value *Op, Type *ElemTy, Instruction *I,
    DenseMap<Function *, CallInst *> Ptrcasts) {
  Function *F = I->getParent()->getParent();
  CallInst *PtrCastedI = nullptr;
  auto It = Ptrcasts.find(F);
  if (It == Ptrcasts.end()) {
    PtrCastedI = buildSpvPtrcast(F, Op, ElemTy);
    Ptrcasts[F] = PtrCastedI;
  } else {
    PtrCastedI = It->second;
  }
  I->replaceUsesOfWith(Op, PtrCastedI);
}
void SPIRVEmitIntrinsics::propagateElemType(
    Value *Op, Type *ElemTy,
    DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
  DenseMap<Function *, CallInst *> Ptrcasts;
  SmallVector<User *> Users(Op->users());
  for (auto *U : Users) {
    Instruction *UI = dyn_cast<Instruction>(U);
    if (!UI)
      continue;
    if (!VisitedSubst.insert(std::make_pair(U, Op)).second)
      continue;
    // If the instruction was validated already, keep it valid by casting.
    if (isa<GetElementPtrInst>(UI) ||
        TypeValidated.find(UI) != TypeValidated.end())
      replaceUsesOfWithSpvPtrcast(Op, ElemTy, UI, Ptrcasts);
  }
}
void SPIRVEmitIntrinsics::propagateElemTypeRec(
    Value *Op, Type *PtrElemTy, Type *CastElemTy,
    DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
  std::unordered_set<Value *> Visited;
  DenseMap<Function *, CallInst *> Ptrcasts;
  propagateElemTypeRec(Op, PtrElemTy, CastElemTy, VisitedSubst, Visited,
                       std::move(Ptrcasts));
}

void SPIRVEmitIntrinsics::propagateElemTypeRec(
    Value *Op, Type *PtrElemTy, Type *CastElemTy,
    DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
    std::unordered_set<Value *> &Visited,
    DenseMap<Function *, CallInst *> Ptrcasts) {
  if (!Visited.insert(Op).second)
    return;
  SmallVector<User *> Users(Op->users());
  for (auto *U : Users) {
    Instruction *UI = dyn_cast<Instruction>(U);
    if (!UI)
      continue;
    if (!VisitedSubst.insert(std::make_pair(U, Op)).second)
      continue;
    // If the instruction was validated already, keep it valid by casting.
    if (isa<GetElementPtrInst>(UI) ||
        TypeValidated.find(UI) != TypeValidated.end())
      replaceUsesOfWithSpvPtrcast(Op, CastElemTy, UI, Ptrcasts);
  }
}
Type *
SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                                  bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceElementTypeByValueDeep(ValueTy, Operand, Visited,
                                      UnknownElemTypeI8);
}

Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
    Type *ValueTy, Value *Operand, std::unordered_set<Value *> &Visited,
    bool UnknownElemTypeI8) {
  Type *Ty = ValueTy;
  if (Operand) {
    if (auto *PtrTy = dyn_cast<PointerType>(Ty)) {
      if (Type *NestedTy =
              deduceElementTypeHelper(Operand, Visited, UnknownElemTypeI8))
        Ty = getTypedPointerWrapper(NestedTy, PtrTy->getAddressSpace());
    } else {
      Ty = deduceNestedTypeHelper(dyn_cast<User>(Operand), Ty, Visited,
                                  UnknownElemTypeI8);
    }
  }
  return Ty;
}
Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
    Value *Op, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8) {
  if (!Op || !isPointerTy(Op->getType()) || isa<ConstantPointerNull>(Op) ||
      isa<UndefValue>(Op))
    return nullptr;

  if (auto ElemTy = getPointeeType(Op->getType()))
    return ElemTy;

  // maybe we already know the operand's element type
  if (Type *KnownTy = GR->findDeducedElementType(Op))
    return KnownTy;

  for (User *OpU : Op->users()) {
    if (Instruction *Inst = dyn_cast<Instruction>(OpU)) {
      if (Type *Ty = deduceElementTypeHelper(Inst, Visited, UnknownElemTypeI8))
        return Ty;
    }
  }
  return nullptr;
}

// Implements what we know in advance about pointer arguments of builtin calls.
static Type *getPointeeTypeByCallInst(StringRef DemangledName,
                                      Function *CalledF, unsigned OpIdx) {
  if ((DemangledName.starts_with("__spirv_ocl_printf(") ||
       DemangledName.starts_with("printf(")) &&
      OpIdx == 0)
    return IntegerType::getInt8Ty(CalledF->getContext());
  return nullptr;
}
Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(Value *I,
                                                   bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceElementTypeHelper(I, Visited, UnknownElemTypeI8);
}

void SPIRVEmitIntrinsics::maybeAssignPtrType(Type *&Ty, Value *Op, Type *RefTy,
                                             bool UnknownElemTypeI8) {
  if (isUntypedPointerTy(RefTy)) {
    if (!UnknownElemTypeI8)
      return;
    insertTodoType(Op);
  }
  Ty = RefTy;
}
bool SPIRVEmitIntrinsics::walkLogicalAccessChain(
    GetElementPtrInst &GEP,
    const std::function<void(Type *, uint64_t)> &OnLiteralIndexing,
    const std::function<void(Type *, Value *)> &OnDynamicIndexing) {
  // We only rewrite i8* GEPs; a valid i8* GEP has a single index.
  assert(GEP.getSourceElementType() ==
         IntegerType::getInt8Ty(CurrF->getContext()));
  assert(GEP.getNumIndices() == 1);

  auto &DL = CurrF->getDataLayout();
  Value *Src = getPointerRoot(GEP.getPointerOperand());
  Type *CurType = deduceElementType(Src, true);

  Value *Operand = *GEP.idx_begin();
  ConstantInt *CI = dyn_cast<ConstantInt>(Operand);
  if (!CI) {
    ArrayType *AT = dyn_cast<ArrayType>(CurType);
    // The operand is not constant: either we have an array and accept it, or
    // we give up.
    if (AT)
      OnDynamicIndexing(AT->getElementType(), Operand);
    return AT == nullptr;
  }

  uint64_t Offset = CI->getZExtValue();
  do {
    if (ArrayType *AT = dyn_cast<ArrayType>(CurType)) {
      uint32_t EltTypeSize = DL.getTypeSizeInBits(AT->getElementType()) / 8;
      assert(Offset < AT->getNumElements() * EltTypeSize);
      uint64_t Index = Offset / EltTypeSize;
      Offset = Offset - (Index * EltTypeSize);
      CurType = AT->getElementType();
      OnLiteralIndexing(CurType, Index);
    } else if (StructType *ST = dyn_cast<StructType>(CurType)) {
      uint32_t StructSize = DL.getTypeSizeInBits(ST) / 8;
      assert(Offset < StructSize);
      (void)StructSize;
      const auto &STL = DL.getStructLayout(ST);
      unsigned Element = STL->getElementContainingOffset(Offset);
      Offset -= STL->getElementOffset(Element);
      CurType = ST->getElementType(Element);
      OnLiteralIndexing(CurType, Element);
    } else {
      // Unhandled type: interrupt the walk.
      return true;
    }
  } while (Offset > 0);

  return false;
}
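// Worked example (illustrative): with a deduced source type of
// { i32, [4 x float] } and a constant byte offset of 12, the first iteration
// resolves struct element 1 (the array lives at byte offset 4, leaving an
// offset of 8), and the second resolves array index 8 / sizeof(float) = 2;
// the callbacks thus see OnLiteralIndexing(<array>, 1) followed by
// OnLiteralIndexing(float, 2), recovering the typed path to the float at
// byte 12.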
Instruction *
SPIRVEmitIntrinsics::buildLogicalAccessChainFromGEP(GetElementPtrInst &GEP) {
  auto &DL = CurrF->getDataLayout();
  IRBuilder<> B(GEP.getParent());
  B.SetInsertPoint(&GEP);

  std::vector<Value *> Indices;
  Indices.push_back(ConstantInt::get(
      IntegerType::getInt32Ty(CurrF->getContext()), 0, /*Signed=*/false));
  walkLogicalAccessChain(
      GEP,
      [&Indices, &B](Type *EltType, uint64_t Index) {
        Indices.push_back(
            ConstantInt::get(B.getInt64Ty(), Index, /*Signed=*/false));
      },
      [&Indices, &B, &DL](Type *EltType, Value *Offset) {
        uint32_t EltTypeSize = DL.getTypeSizeInBits(EltType) / 8;
        Value *Index = B.CreateUDiv(
            Offset, ConstantInt::get(Offset->getType(), EltTypeSize,
                                     /*Signed=*/false));
        Indices.push_back(Index);
      });

  SmallVector<Type *, 2> Types = {GEP.getType(), GEP.getOperand(0)->getType()};
  SmallVector<Value *, 4> Args;
  Args.push_back(B.getInt1(GEP.isInBounds()));
  Args.push_back(GEP.getOperand(0));
  llvm::append_range(Args, Indices);
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &GEP, NewI);
  return NewI;
}
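// Illustrative rewrite (sketch; operand order follows the Args built above):
//   %q = getelementptr i8, ptr %p, i64 12
// over %p with deduced pointee { i32, [4 x float] } becomes
//   %q = call ptr @llvm.spv.gep(i1 false, ptr %p, i32 0, i64 1, i64 2)
// i.e. a typed access chain instead of raw byte arithmetic.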
Type *SPIRVEmitIntrinsics::getGEPTypeLogical(GetElementPtrInst *GEP) {
  Type *CurType = GEP->getResultElementType();

  bool Interrupted = walkLogicalAccessChain(
      *GEP, [&CurType](Type *EltType, uint64_t Index) { CurType = EltType; },
      [&CurType](Type *EltType, Value *Index) { CurType = EltType; });

  return Interrupted ? GEP->getResultElementType() : CurType;
}

Type *SPIRVEmitIntrinsics::getGEPType(GetElementPtrInst *Ref) {
  if (Ref->getSourceElementType() ==
          IntegerType::getInt8Ty(CurrF->getContext()) &&
      TM->getSubtargetImpl()->isLogicalSPIRV())
    return getGEPTypeLogical(Ref);

  Type *Ty = nullptr;
  if (isNestedPointer(Ref->getSourceElementType())) {
    Ty = Ref->getSourceElementType();
    for (Use &U : drop_begin(Ref->operands()))
      Ty = GetElementPtrInst::getTypeAtIndex(Ty, U.get());
  } else {
    Ty = Ref->getResultElementType();
  }
  return Ty;
}
Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
    Value *I, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8,
    bool IgnoreKnownType) {
  // allow to pass nullptr as an argument avoiding multiple checks from callers
  if (!I)
    return nullptr;

  // maybe already known
  if (!IgnoreKnownType)
    if (Type *KnownTy = GR->findDeducedElementType(I))
      return KnownTy;

  // maybe a cycle
  if (!Visited.insert(I).second)
    return nullptr;

  // fallback value in case when we fail to deduce a type
  Type *Ty = nullptr;
  // look for known basic patterns of type inference
  if (auto *Ref = dyn_cast<AllocaInst>(I)) {
    maybeAssignPtrType(Ty, I, Ref->getAllocatedType(), UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
    Ty = getGEPType(Ref);
  } else if (auto *Ref = dyn_cast<LoadInst>(I)) {
    Value *Op = Ref->getPointerOperand();
    Type *KnownTy = GR->findDeducedElementType(Op);
    if (!KnownTy)
      KnownTy = Op->getType();
    if (Type *ElemTy = getPointeeType(KnownTy))
      maybeAssignPtrType(Ty, I, ElemTy, UnknownElemTypeI8);
  } else if (auto *Fn = dyn_cast<Function>(I)) {
    Ty = SPIRV::getOriginalFunctionType(*Fn);
  } else if (auto *Ref = dyn_cast<GlobalValue>(I)) {
    Ty = deduceElementTypeByValueDeep(
        Ref->getValueType(),
        Ref->getNumOperands() > 0 ? Ref->getOperand(0) : nullptr, Visited,
        UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
    Type *RefTy = deduceElementTypeHelper(Ref->getPointerOperand(), Visited,
                                          UnknownElemTypeI8);
    maybeAssignPtrType(Ty, I, RefTy, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
    if (Type *Src = Ref->getSrcTy(), *Dest = Ref->getDestTy();
        isPointerTy(Src) && isPointerTy(Dest))
      Ty = deduceElementTypeHelper(Ref->getOperand(0), Visited,
                                   UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<CastInst>(I)) {
    maybeAssignPtrType(Ty, I, Ref->getDestTy(), UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (Value *Op = Ref->getNewValOperand(); isPointerTy(Op->getType()))
      Ty = deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {
    if (Value *Op = Ref->getValOperand(); isPointerTy(Op->getType()))
      Ty = deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<PHINode>(I)) {
    // use the element type most frequently deduced from the incoming values
    Type *BestTy = nullptr;
    unsigned MaxN = 1;
    DenseMap<Type *, unsigned> PhiTys;
    for (int i = Ref->getNumIncomingValues() - 1; i >= 0; --i) {
      Ty = deduceElementTypeByUsersDeep(Ref->getIncomingValue(i), Visited,
                                        UnknownElemTypeI8);
      if (!Ty)
        continue;
      auto It = PhiTys.try_emplace(Ty, 1);
      if (!It.second) {
        ++It.first->second;
        if (It.first->second > MaxN) {
          MaxN = It.first->second;
          BestTy = Ty;
        }
      }
    }
    if (BestTy)
      Ty = BestTy;
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    for (Value *Op : {Ref->getTrueValue(), Ref->getFalseValue()}) {
      Ty = deduceElementTypeByUsersDeep(Op, Visited, UnknownElemTypeI8);
      if (Ty)
        break;
    }
  } else if (auto *CI = dyn_cast<CallInst>(I)) {
    static StringMap<unsigned> ResTypeByArg = {
        {"to_global", 0},
        {"to_local", 0},
        {"to_private", 0},
        {"__spirv_GenericCastToPtr_ToGlobal", 0},
        {"__spirv_GenericCastToPtr_ToLocal", 0},
        {"__spirv_GenericCastToPtr_ToPrivate", 0},
        {"__spirv_GenericCastToPtrExplicit_ToGlobal", 0},
        {"__spirv_GenericCastToPtrExplicit_ToLocal", 0},
        {"__spirv_GenericCastToPtrExplicit_ToPrivate", 0}};

    auto *II = dyn_cast<IntrinsicInst>(I);
    if (II && II->getIntrinsicID() == Intrinsic::spv_resource_getpointer) {
      auto *HandleType = cast<TargetExtType>(II->getOperand(0)->getType());
      if (HandleType->getTargetExtName() == "spirv.Image" ||
          HandleType->getTargetExtName() == "spirv.SignedImage") {
        for (User *U : II->users()) {
          Ty = cast<Instruction>(U)->getAccessType();
          if (Ty)
            break;
        }
      } else if (HandleType->getTargetExtName() == "spirv.VulkanBuffer") {
        // This call is supposed to index into an array.
        Ty = HandleType->getTypeParameter(0);
        assert(Ty->isArrayTy() &&
               "spv_resource_getpointer indexes into an array, so the type of "
               "the buffer should be an array.");
        Ty = Ty->getArrayElementType();
      } else {
        llvm_unreachable("Unknown handle type for spv_resource_getpointer.");
      }
    } else if (II && II->getIntrinsicID() ==
                         Intrinsic::spv_generic_cast_to_ptr_explicit) {
      Ty = deduceElementTypeHelper(CI->getArgOperand(0), Visited,
                                   UnknownElemTypeI8);
    } else if (Function *CalledF = CI->getCalledFunction()) {
      std::string DemangledName =
          getOclOrSpirvBuiltinDemangledName(CalledF->getName());
      if (DemangledName.length() > 0)
        DemangledName = SPIRV::lookupBuiltinNameHelper(DemangledName);
      auto AsArgIt = ResTypeByArg.find(DemangledName);
      if (AsArgIt != ResTypeByArg.end())
        Ty = deduceElementTypeHelper(CI->getArgOperand(AsArgIt->second),
                                     Visited, UnknownElemTypeI8);
    }
  }

  // remember the found relationship
  if (Ty && !IgnoreKnownType) {
    GR->addDeducedElementType(I, normalizeType(Ty));
  }

  return Ty;
}
Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(User *U,
                                                  bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceNestedTypeHelper(U, U->getType(), Visited, UnknownElemTypeI8);
}

Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
    User *U, Type *OrigTy, std::unordered_set<Value *> &Visited,
    bool UnknownElemTypeI8) {
  if (!U)
    return OrigTy;

  // maybe already known
  if (Type *KnownTy = GR->findDeducedCompositeType(U))
    return KnownTy;

  // maybe a cycle
  if (!Visited.insert(U).second)
    return OrigTy;

  if (isa<StructType>(OrigTy)) {
    SmallVector<Type *> Tys;
    bool Change = false;
    for (unsigned i = 0; i < U->getNumOperands(); ++i) {
      Value *Op = U->getOperand(i);
      assert(Op && "Operands should not be null.");
      Type *OpTy = Op->getType();
      Type *Ty = OpTy;
      if (isPointerTy(OpTy)) {
        if (Type *NestedTy =
                deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
          Ty = getTypedPointerWrapper(NestedTy, getPointerAddressSpace(OpTy));
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited,
                                    UnknownElemTypeI8);
      }
      Tys.push_back(Ty);
      Change |= Ty != OpTy;
    }
    if (Change) {
      Type *NewTy = StructType::create(Tys);
      GR->addDeducedCompositeType(U, NewTy);
      return NewTy;
    }
  } else if (auto *ArrTy = dyn_cast<ArrayType>(OrigTy)) {
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = ArrTy->getElementType();
      Type *Ty = OpTy;
      if (isPointerTy(OpTy)) {
        if (Type *NestedTy =
                deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
          Ty = getTypedPointerWrapper(NestedTy, getPointerAddressSpace(OpTy));
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited,
                                    UnknownElemTypeI8);
      }
      if (Ty != OpTy) {
        Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
        GR->addDeducedCompositeType(U, NewTy);
        return NewTy;
      }
    }
  } else if (auto *VecTy = dyn_cast<VectorType>(OrigTy)) {
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = VecTy->getElementType();
      Type *Ty = OpTy;
      if (isPointerTy(OpTy)) {
        if (Type *NestedTy =
                deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
          Ty = getTypedPointerWrapper(NestedTy, getPointerAddressSpace(OpTy));
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited,
                                    UnknownElemTypeI8);
      }
      if (Ty != OpTy) {
        Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
        GR->addDeducedCompositeType(U, NewTy);
        return NewTy;
      }
    }
  }

  return OrigTy;
}
Type *SPIRVEmitIntrinsics::deduceElementType(Value *I, bool UnknownElemTypeI8) {
  if (Type *Ty = deduceElementTypeHelper(I, UnknownElemTypeI8))
    return Ty;
  if (!UnknownElemTypeI8)
    return nullptr;
  insertTodoType(I);
  return IntegerType::getInt8Ty(I->getContext());
}
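// Note: the i8 fallback above is what makes deduction total. When nothing
// better is known and UnknownElemTypeI8 is set, the pointer is recorded in
// TodoType and provisionally typed as i8; postprocessTypes() later revisits
// these entries once more context is available.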
static Type *getAtomicElemTy(SPIRVGlobalRegistry *GR, Instruction *I,
                             Value *PointerOperand) {
  // ... (deduces the element type of an atomic operation from the deduced
  // pointee type of PointerOperand; body elided in this extraction)
}
bool SPIRVEmitIntrinsics::deduceOperandElementTypeCalledFunction(
    CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
    Type *&KnownElemTy, bool &Incomplete) {
  Function *CalledF = CI->getCalledFunction();
  if (!CalledF)
    return false;
  std::string DemangledName =
      getOclOrSpirvBuiltinDemangledName(CalledF->getName());
  if (DemangledName.length() > 0 &&
      !StringRef(DemangledName).starts_with("llvm.")) {
    const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(*CalledF);
    auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
        DemangledName, ST.getPreferredInstructionSet());
    if (Opcode == SPIRV::OpGroupAsyncCopy) {
      for (unsigned i = 0, PtrCnt = 0; i < CI->arg_size() && PtrCnt < 2; ++i) {
        Value *Op = CI->getArgOperand(i);
        if (!isPointerTy(Op->getType()))
          continue;
        ++PtrCnt;
        if (Type *ElemTy = GR->findDeducedElementType(Op))
          KnownElemTy = ElemTy; // src will rewrite dst if both are defined
        Ops.push_back(std::make_pair(Op, i));
      }
    } else if (Grp == SPIRV::Atomic || Grp == SPIRV::AtomicFloating) {
      if (CI->arg_size() == 0)
        return true;
      Value *Op = CI->getArgOperand(0);
      if (!isPointerTy(Op->getType()))
        return true;
      switch (Opcode) {
      case SPIRV::OpAtomicFAddEXT:
      case SPIRV::OpAtomicFMinEXT:
      case SPIRV::OpAtomicFMaxEXT:
      case SPIRV::OpAtomicLoad:
      case SPIRV::OpAtomicCompareExchangeWeak:
      case SPIRV::OpAtomicCompareExchange:
      case SPIRV::OpAtomicExchange:
      case SPIRV::OpAtomicIAdd:
      case SPIRV::OpAtomicISub:
      case SPIRV::OpAtomicOr:
      case SPIRV::OpAtomicXor:
      case SPIRV::OpAtomicAnd:
      case SPIRV::OpAtomicUMin:
      case SPIRV::OpAtomicUMax:
      case SPIRV::OpAtomicSMin:
      case SPIRV::OpAtomicSMax: {
        KnownElemTy = isPointerTy(CI->getType()) ? getAtomicElemTy(GR, CI, Op)
                                                 : CI->getType();
        if (!KnownElemTy)
          return true;
        Incomplete = isTodoType(Op);
        Ops.push_back(std::make_pair(Op, 0));
      } break;
      case SPIRV::OpAtomicStore: {
        if (CI->arg_size() < 4)
          return true;
        Value *ValOp = CI->getArgOperand(3);
        KnownElemTy = isPointerTy(ValOp->getType())
                          ? getAtomicElemTy(GR, CI, Op)
                          : ValOp->getType();
        if (!KnownElemTy)
          return true;
        Incomplete = isTodoType(Op);
        Ops.push_back(std::make_pair(Op, 0));
      } break;
      }
    }
  }
  return true;
}
void SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionPointer(
    CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
    Type *&KnownElemTy, bool IsPostprocessing) {
  Value *Op = CI->getCalledOperand();
  if (!Op || !isPointerTy(Op->getType()))
    return;
  Ops.push_back(std::make_pair(Op, std::numeric_limits<unsigned>::max()));
  FunctionType *FTy = SPIRV::getOriginalFunctionType(*CI);
  bool IsNewFTy = false, IsIncomplete = false;
  SmallVector<Type *, 4> ArgTys;
  unsigned ParmIdx = 0;
  for (Value *Arg : CI->args()) {
    Type *ArgTy = Arg->getType();
    if (ArgTy->isPointerTy()) {
      if (Type *ElemTy = GR->findDeducedElementType(Arg)) {
        IsNewFTy = true;
        ArgTy = getTypedPointerWrapper(ElemTy, getPointerAddressSpace(ArgTy));
        if (isTodoType(Arg))
          IsIncomplete = true;
      } else {
        IsIncomplete = true;
        ArgTy = FTy->getFunctionParamType(ParmIdx);
      }
    }
    ArgTys.push_back(ArgTy);
    ++ParmIdx;
  }
  Type *RetTy = FTy->getReturnType();
  if (CI->getType()->isPointerTy()) {
    if (Type *ElemTy = GR->findDeducedElementType(CI)) {
      IsNewFTy = true;
      RetTy =
          getTypedPointerWrapper(ElemTy, getPointerAddressSpace(CI->getType()));
      if (isTodoType(CI))
        IsIncomplete = true;
    } else {
      IsIncomplete = true;
    }
  }
  if (!IsPostprocessing && IsIncomplete)
    insertTodoType(Op);
  KnownElemTy =
      IsNewFTy ? FunctionType::get(RetTy, ArgTys, FTy->isVarArg()) : FTy;
}
bool SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionRet(
    Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
    const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing,
    Type *&KnownElemTy, Value *Op, Function *F) {
  KnownElemTy = GR->findDeducedElementType(F);
  if (KnownElemTy)
    return false;
  if (Type *OpElemTy = GR->findDeducedElementType(Op)) {
    GR->addDeducedElementType(F, normalizeType(OpElemTy));
    // non-recursive update of types in function uses
    DenseSet<std::pair<Value *, Value *>> VisitedSubst{std::make_pair(I, Op)};
    for (User *U : F->users()) {
      CallInst *CI = dyn_cast<CallInst>(U);
      if (!CI || CI->getCalledFunction() != F)
        continue;
      if (CallInst *AssignCI = GR->findAssignPtrTypeInstr(CI)) {
        Type *PrevElemTy = GR->findDeducedElementType(CI);
        GR->updateAssignType(AssignCI, CI, getNormalizedPoisonValue(OpElemTy));
        propagateElemType(CI, PrevElemTy, VisitedSubst);
      }
    }
    // Non-recursive update of types in the function's uncomplete returns.
    if (IncompleteRets)
      for (Instruction *IncompleteRetI : *IncompleteRets)
        deduceOperandElementType(IncompleteRetI, nullptr, AskOps,
                                 IsPostprocessing);
  } else if (IncompleteRets) {
    IncompleteRets->insert(I);
  }
  TypeValidated.insert(I);
  return true;
}
void SPIRVEmitIntrinsics::deduceOperandElementType(
    Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
    const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing) {
  SmallVector<std::pair<Value *, unsigned>> Ops;
  Type *KnownElemTy = nullptr;
  bool Incomplete = false;
  // look for known basic patterns of type inference
  if (auto *Ref = dyn_cast<PHINode>(I)) {
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    Incomplete = isTodoType(I);
    for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
      Value *Op = Ref->getIncomingValue(i);
      if (isPointerTy(Op->getType()))
        Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
    KnownElemTy = GR->findDeducedElementType(I);
    if (!KnownElemTy)
      return;
    Incomplete = isTodoType(I);
    Ops.push_back(std::make_pair(Ref->getPointerOperand(), 0));
  } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
    if (!isPointerTy(I->getType()))
      return;
    KnownElemTy = GR->findDeducedElementType(I);
    if (!KnownElemTy)
      return;
    Incomplete = isTodoType(I);
    Ops.push_back(std::make_pair(Ref->getOperand(0), 0));
  } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
    if (GR->findDeducedElementType(Ref->getPointerOperand()))
      return;
    KnownElemTy = Ref->getSourceElementType();
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 GetElementPtrInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<LoadInst>(I)) {
    KnownElemTy = I->getType();
    if (isUntypedPointerTy(KnownElemTy))
      return;
    Type *PointeeTy = GR->findDeducedElementType(Ref->getPointerOperand());
    if (PointeeTy && !isUntypedPointerTy(PointeeTy))
      return;
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 LoadInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<StoreInst>(I)) {
    if (!(KnownElemTy =
              reconstructType(Ref->getValueOperand(), false, IsPostprocessing)))
      return;
    Type *PointeeTy = GR->findDeducedElementType(Ref->getPointerOperand());
    if (PointeeTy && !isUntypedPointerTy(PointeeTy))
      return;
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 StoreInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {
    KnownElemTy = isPointerTy(I->getType())
                      ? getAtomicElemTy(GR, I, Ref->getPointerOperand())
                      : I->getType();
    if (!KnownElemTy)
      return;
    Incomplete = isTodoType(Ref->getPointerOperand());
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 AtomicCmpXchgInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {
    KnownElemTy = isPointerTy(I->getType())
                      ? getAtomicElemTy(GR, I, Ref->getPointerOperand())
                      : I->getType();
    if (!KnownElemTy)
      return;
    Incomplete = isTodoType(Ref->getPointerOperand());
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 AtomicRMWInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    Incomplete = isTodoType(I);
    for (unsigned i = 0; i < Ref->getNumOperands(); i++) {
      Value *Op = Ref->getOperand(i);
      if (isPointerTy(Op->getType()))
        Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<ReturnInst>(I)) {
    if (!isPointerTy(CurrF->getReturnType()))
      return;
    Value *Op = Ref->getReturnValue();
    if (!Op)
      return;
    if (deduceOperandElementTypeFunctionRet(I, IncompleteRets, AskOps,
                                            IsPostprocessing, KnownElemTy, Op,
                                            CurrF))
      return;
    Incomplete = isTodoType(CurrF);
    Ops.push_back(std::make_pair(Op, 0));
  } else if (auto *Ref = dyn_cast<ICmpInst>(I)) {
    if (!isPointerTy(Ref->getOperand(0)->getType()))
      return;
    Value *Op0 = Ref->getOperand(0);
    Value *Op1 = Ref->getOperand(1);
    bool Incomplete0 = isTodoType(Op0);
    bool Incomplete1 = isTodoType(Op1);
    Type *ElemTy1 = GR->findDeducedElementType(Op1);
    Type *ElemTy0 = (Incomplete0 && !Incomplete1 && ElemTy1)
                        ? nullptr
                        : GR->findDeducedElementType(Op0);
    if (ElemTy0) {
      KnownElemTy = ElemTy0;
      Incomplete = Incomplete0;
      Ops.push_back(std::make_pair(Op1, 1));
    } else if (ElemTy1) {
      KnownElemTy = ElemTy1;
      Incomplete = Incomplete1;
      Ops.push_back(std::make_pair(Op0, 0));
    }
  } else if (CallInst *CI = dyn_cast<CallInst>(I)) {
    if (!CI->isIndirectCall())
      deduceOperandElementTypeCalledFunction(CI, Ops, KnownElemTy, Incomplete);
    else if (HaveFunPtrs)
      deduceOperandElementTypeFunctionPointer(CI, Ops, KnownElemTy,
                                              IsPostprocessing);
  }

  // there is no enough info to deduce types or all is valid
  if (!KnownElemTy || Ops.size() == 0)
    return;

  LLVMContext &Ctx = CurrF->getContext();
  IRBuilder<> B(Ctx);
  for (auto &OpIt : Ops) {
    Value *Op = OpIt.first;
    if (Op->use_empty())
      continue;
    if (AskOps && !AskOps->contains(Op))
      continue;
    Type *AskTy = nullptr;
    CallInst *AskCI = nullptr;
    if (IsPostprocessing && AskOps) {
      AskTy = GR->findDeducedElementType(Op);
      AskCI = GR->findAssignPtrTypeInstr(Op);
      assert(AskTy && AskCI);
    }
    Type *Ty = AskTy ? AskTy : GR->findDeducedElementType(Op);
    if (Ty == KnownElemTy)
      continue;
    Value *OpTyVal = getNormalizedPoisonValue(KnownElemTy);
    Type *OpTy = Op->getType();
    if (Op->hasUseList() &&
        (!Ty || AskTy || isUntypedPointerTy(Ty) || isTodoType(Op))) {
      Type *PrevElemTy = GR->findDeducedElementType(Op);
      GR->addDeducedElementType(Op, normalizeType(KnownElemTy));
      // check if KnownElemTy is complete
      if (!Incomplete)
        eraseTodoType(Op);
      else if (!IsPostprocessing)
        insertTodoType(Op);
      // check if there is an existing spv_assign_ptr_type instruction
      CallInst *AssignCI = AskCI ? AskCI : GR->findAssignPtrTypeInstr(Op);
      if (AssignCI == nullptr) {
        Instruction *User = dyn_cast<Instruction>(Op->use_begin()->get());
        setInsertPointSkippingPhis(B, User ? User->getNextNode() : I);
        CallInst *CI =
            buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {OpTy}, OpTyVal,
                            Op, {B.getInt32(getPointerAddressSpace(OpTy))}, B);
        GR->addAssignPtrTypeInstr(Op, CI);
      } else {
        GR->updateAssignType(AssignCI, Op, OpTyVal);
        DenseSet<std::pair<Value *, Value *>> VisitedSubst{
            std::make_pair(I, Op)};
        propagateElemTypeRec(Op, KnownElemTy, PrevElemTy, VisitedSubst);
      }
    } else {
      eraseTodoType(Op);
      CallInst *PtrCastI =
          buildSpvPtrcast(I->getParent()->getParent(), Op, KnownElemTy);
      if (OpIt.second == std::numeric_limits<unsigned>::max())
        dyn_cast<CallInst>(I)->setCalledOperand(PtrCastI);
      else
        I->setOperand(OpIt.second, PtrCastI);
    }
  }
  TypeValidated.insert(I);
}
void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
                                              Instruction *New,
                                              IRBuilder<> &B) {
  while (!Old->user_empty()) {
    auto *U = Old->user_back();
    if (isAssignTypeInstr(U)) {
      B.SetInsertPoint(U);
      SmallVector<Value *, 2> Args = {New, U->getOperand(1)};
      CallInst *AssignCI =
          B.CreateIntrinsic(Intrinsic::spv_assign_type, {New->getType()}, Args);
      GR->addAssignPtrTypeInstr(New, AssignCI);
      U->eraseFromParent();
    } else if (isMemInstrToReplace(U) || isa<ReturnInst>(U) ||
               isa<CallInst>(U)) {
      U->replaceUsesOfWith(Old, New);
    } else {
      llvm_unreachable("illegal aggregate intrinsic user");
    }
  }
  New->copyMetadata(*Old);
  Old->eraseFromParent();
}
void SPIRVEmitIntrinsics::preprocessUndefs(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(CurrF))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    Instruction *I = Worklist.front();
    bool BPrepared = false;
    Worklist.pop();

    for (auto &Op : I->operands()) {
      auto *AggrUndef = dyn_cast<UndefValue>(Op);
      if (!AggrUndef || !Op->getType()->isAggregateType())
        continue;

      if (!BPrepared) {
        setInsertPointSkippingPhis(B, I);
        BPrepared = true;
      }
      auto *IntrUndef = B.CreateIntrinsic(Intrinsic::spv_undef, {});
      Worklist.push(IntrUndef);
      I->replaceUsesOfWith(Op, IntrUndef);
      AggrConsts[IntrUndef] = AggrUndef;
      AggrConstTypes[IntrUndef] = AggrUndef->getType();
    }
  }
}
void SPIRVEmitIntrinsics::preprocessCompositeConstants(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(CurrF))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    auto *I = Worklist.front();
    bool IsPhi = isa<PHINode>(I), BPrepared = false;
    assert(I);
    bool KeepInst = false;
    for (const auto &Op : I->operands()) {
      Constant *AggrConst = nullptr;
      Type *ResTy = nullptr;
      if (auto *COp = dyn_cast<ConstantVector>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = COp->getType();
      } else if (auto *COp = dyn_cast<ConstantArray>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = B.getInt32Ty();
      } else if (auto *COp = dyn_cast<ConstantStruct>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = B.getInt32Ty();
      } else if (auto *COp = dyn_cast<ConstantDataArray>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = B.getInt32Ty();
      } else if (auto *COp = dyn_cast<ConstantDataVector>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = Op->getType()->isVectorTy() ? COp->getType() : B.getInt32Ty();
      }
      if (AggrConst) {
        SmallVector<Value *> Args;
        if (auto *COp = dyn_cast<ConstantDataSequential>(Op))
          for (unsigned i = 0; i < COp->getNumElements(); ++i)
            Args.push_back(COp->getElementAsConstant(i));
        else
          for (auto &COp : AggrConst->operands())
            Args.push_back(COp);
        if (!BPrepared) {
          IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
                : B.SetInsertPoint(I);
          BPrepared = true;
        }
        auto *CI =
            B.CreateIntrinsic(Intrinsic::spv_const_composite, {ResTy}, {Args});
        Worklist.push(CI);
        I->replaceUsesOfWith(Op, CI);
        KeepInst = true;
        AggrConsts[CI] = AggrConst;
        AggrConstTypes[CI] = deduceNestedTypeHelper(AggrConst, false);
      }
    }
    if (!KeepInst)
      Worklist.pop();
  }
}
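// Illustrative rewrite (sketch): a literal aggregate operand such as
//   store [2 x i32] [i32 1, i32 2], ptr %p
// is replaced by
//   %cc = call i32 @llvm.spv.const.composite(i32 1, i32 2)
//   store [2 x i32] %cc, ptr %p
// (intentionally not type-correct LLVM IR; this is fixed up during IR
// translation) while AggrConsts/AggrConstTypes keep the original constant and
// its deduced nested type for later spv_assign_type emission.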
static void createDecorationIntrinsic(Instruction *I, MDNode *Node,
                                      IRBuilder<> &B) {
  LLVMContext &Ctx = I->getContext();
  setInsertPointAfterDef(B, I);
  B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
                    {I, MetadataAsValue::get(Ctx, MDNode::get(Ctx, {Node}))});
}

static void createRoundingModeDecoration(Instruction *I,
                                         unsigned RoundingModeDeco,
                                         IRBuilder<> &B) {
  LLVMContext &Ctx = I->getContext();
  Type *Int32Ty = Type::getInt32Ty(Ctx);
  MDNode *RoundingModeNode = MDNode::get(
      Ctx,
      {ConstantAsMetadata::get(
           ConstantInt::get(Int32Ty, SPIRV::Decoration::FPRoundingMode)),
       ConstantAsMetadata::get(ConstantInt::get(Int32Ty, RoundingModeDeco))});
  createDecorationIntrinsic(I, RoundingModeNode, B);
}

static void createSaturatedConversionDecoration(Instruction *I,
                                                IRBuilder<> &B) {
  LLVMContext &Ctx = I->getContext();
  Type *Int32Ty = Type::getInt32Ty(Ctx);
  MDNode *SaturatedConversionNode =
      MDNode::get(Ctx, {ConstantAsMetadata::get(ConstantInt::get(
                           Int32Ty, SPIRV::Decoration::SaturatedConversion))});
  createDecorationIntrinsic(I, SaturatedConversionNode, B);
}

static void addSaturatedDecorationToIntrinsic(Instruction *I, IRBuilder<> &B) {
  auto *CI = dyn_cast<CallInst>(I);
  if (!CI)
    return;
  Function *Fu = CI->getCalledFunction();
  if (!Fu)
    return;
  if (Fu->isIntrinsic()) {
    unsigned const int IntrinsicId = Fu->getIntrinsicID();
    switch (IntrinsicId) {
    case Intrinsic::fptosi_sat:
    case Intrinsic::fptoui_sat:
      createSaturatedConversionDecoration(CI, B);
      break;
    default:
      break;
    }
  }
}

Instruction *SPIRVEmitIntrinsics::visitCallInst(CallInst &Call) {
  if (!Call.isInlineAsm())
    return &Call;

  const InlineAsm *IA = cast<InlineAsm>(Call.getCalledOperand());
  LLVMContext &Ctx = CurrF->getContext();

  Constant *TyC = UndefValue::get(IA->getFunctionType());
  MDString *ConstraintString = MDString::get(Ctx, IA->getConstraintString());
  SmallVector<Value *> Args = {
      buildMD(TyC),
      MetadataAsValue::get(Ctx, MDNode::get(Ctx, ConstraintString))};
  for (unsigned OpIdx = 0; OpIdx < Call.arg_size(); OpIdx++)
    Args.push_back(Call.getArgOperand(OpIdx));

  IRBuilder<> B(Call.getParent());
  B.SetInsertPoint(&Call);
  B.CreateIntrinsic(Intrinsic::spv_inline_asm, {Args});
  return &Call;
}
void SPIRVEmitIntrinsics::useRoundingMode(ConstrainedFPIntrinsic *FPI,
                                          IRBuilder<> &B) {
  std::optional<RoundingMode> RM = FPI->getRoundingMode();
  if (!RM.has_value())
    return;
  unsigned RoundingModeDeco = std::numeric_limits<unsigned>::max();
  switch (RM.value()) {
  default:
    // ignore unknown rounding modes
    break;
  case RoundingMode::NearestTiesToEven:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTE;
    break;
  case RoundingMode::TowardNegative:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTN;
    break;
  case RoundingMode::TowardPositive:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTP;
    break;
  case RoundingMode::TowardZero:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTZ;
    break;
  case RoundingMode::Dynamic:
  case RoundingMode::NearestTiesToAway:
    // TODO: check if supported
    break;
  }
  if (RoundingModeDeco == std::numeric_limits<unsigned>::max())
    return;
  // Convert the tracked rounding mode into a decoration record.
  createRoundingModeDecoration(FPI, RoundingModeDeco, B);
}
Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &I) {
  BasicBlock *ParentBB = I.getParent();
  IRBuilder<> B(ParentBB);
  B.SetInsertPoint(&I);
  SmallVector<Value *, 4> Args;
  SmallVector<BasicBlock *> BBCases;
  Args.push_back(I.getCondition());
  for (auto &Case : I.cases()) {
    Args.push_back(Case.getCaseValue());
    BBCases.push_back(Case.getCaseSuccessor());
  }
  CallInst *NewI = B.CreateIntrinsic(Intrinsic::spv_switch,
                                     {I.getOperand(0)->getType()}, {Args});
  // remove the switch to avoid its unneeded and undesirable unwrap into
  // branches and conditions
  replaceAllUsesWith(&I, NewI);
  I.eraseFromParent();
  // insert an artificial and temporary instruction to preserve a valid CFG;
  // it will be removed after IR translation
  B.SetInsertPoint(ParentBB);
  IndirectBrInst *BrI = B.CreateIndirectBr(
      Constant::getNullValue(PointerType::getUnqual(ParentBB->getContext())),
      BBCases.size());
  for (BasicBlock *BBCase : BBCases)
    BrI->addDestination(BBCase);
  return BrI;
}

static bool isFirstIndexZero(const GetElementPtrInst *GEP) {
  if (GEP->getNumIndices() == 0)
    return false;
  if (const auto *CI = dyn_cast<ConstantInt>(GEP->getOperand(1)))
    return CI->getZExtValue() == 0;
  return false;
}
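// Illustrative lowering (sketch): a two-case switch
//   switch i32 %c, label %def [ i32 0, label %a  i32 1, label %b ]
// becomes
//   call void @llvm.spv.switch.i32(i32 %c, i32 0, i32 1)
//   indirectbr ptr null, [label %a, label %b]
// where the indirectbr is a temporary placeholder that keeps the CFG valid
// until IR translation consumes the spv_switch.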
Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);

  if (TM->getSubtargetImpl()->isLogicalSPIRV()) {
    // An i8 GEP encodes a raw byte offset: rebuild it as a typed access chain.
    if (I.getSourceElementType() ==
        IntegerType::getInt8Ty(CurrF->getContext())) {
      return buildLogicalAccessChainFromGEP(I);
    }
    // If the GEP indexes into what is really an array of the source element
    // type, prepend a zero index so the access chain starts at the array.
    Value *PtrOp = I.getPointerOperand();
    Type *SrcElemTy = I.getSourceElementType();
    Type *DeducedPointeeTy = deduceElementType(PtrOp, true);
    if (auto *ArrTy = dyn_cast<ArrayType>(DeducedPointeeTy)) {
      if (ArrTy->getElementType() == SrcElemTy) {
        SmallVector<Value *> NewIndices;
        Type *FirstIdxType = I.getOperand(1)->getType();
        NewIndices.push_back(ConstantInt::get(FirstIdxType, 0));
        for (Value *Idx : I.indices())
          NewIndices.push_back(Idx);
        SmallVector<Type *, 2> Types = {I.getType(),
                                        I.getOperand(0)->getType()};
        SmallVector<Value *, 4> Args;
        Args.push_back(B.getInt1(I.isInBounds()));
        Args.push_back(I.getPointerOperand());
        llvm::append_range(Args, NewIndices);
        auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
        replaceAllUsesWithAndErase(B, &I, NewI);
        return NewI;
      }
    }
  }

  SmallVector<Type *, 2> Types = {I.getType(), I.getOperand(0)->getType()};
  SmallVector<Value *, 4> Args;
  Args.push_back(B.getInt1(I.isInBounds()));
  Args.push_back(I.getPointerOperand());
  llvm::append_range(Args, I.indices());
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  Value *Source = I.getOperand(0);

  // SPIR-V, contrary to LLVM 17+ IR, supports bitcasts between pointers of
  // varying element types. Pointer-to-pointer bitcasts carry no information
  // here and are skipped; they are handled in insertPtrCastOrAssignTypeInstr.
  if (isPointerTy(I.getType())) {
    replaceAllUsesWith(&I, Source);
    I.eraseFromParent();
    return nullptr;
  }

  SmallVector<Type *, 2> Types = {I.getType(), Source->getType()};
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_bitcast, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
void SPIRVEmitIntrinsics::insertAssignPtrTypeTargetExt(
    TargetExtType *AssignedType, Value *V, IRBuilder<> &B) {
  Type *VTy = V->getType();

  // A couple of sanity checks.
  assert(isPointerTy(VTy) && "Expect a pointer type!");
  if (Type *ElemTy = getPointeeType(VTy))
    if (ElemTy != AssignedType)
      report_fatal_error("Unexpected pointer element type!");

  CallInst *AssignCI = GR->findAssignPtrTypeInstr(V);
  if (!AssignCI) {
    GR->buildAssignType(B, AssignedType, V);
    return;
  }

  Type *CurrentType =
      dyn_cast<ConstantAsMetadata>(
          cast<MetadataAsValue>(AssignCI->getOperand(1))->getMetadata())
          ->getType();
  if (CurrentType == AssignedType)
    return;

  // Builtin types cannot be redeclared or casted.
  if (CurrentType->isTargetExtTy())
    report_fatal_error(Twine("Type mismatch ") +
                           CurrentType->getTargetExtName() + "/" +
                           AssignedType->getTargetExtName() +
                           " for value " + V->getName(),
                       false);

  // The previous guess about the type was wrong: update the inferred type
  // according to the new, more precise information.
  GR->updateAssignType(AssignCI, V, getNormalizedPoisonValue(AssignedType));
}
void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
    Instruction *I, Value *Pointer, Type *ExpectedElementType,
    unsigned OperandToReplace, IRBuilder<> &B) {
  TypeValidated.insert(I);

  // If Pointer is the result of a no-op bitcast (ptr -> ptr), use the source
  // pointer instead; the bitcast is removed when visited.
  while (BitCastInst *BC = dyn_cast<BitCastInst>(Pointer))
    Pointer = BC->getOperand(0);

  // Do not emit spv_ptrcast if the pointer's element type already matches.
  Type *PointerElemTy = deduceElementTypeHelper(Pointer, false);
  if (PointerElemTy == ExpectedElementType ||
      isEquivalentTypes(PointerElemTy, ExpectedElementType))
    return;

  setInsertPointSkippingPhis(B, I);
  Value *ExpectedElementVal = getNormalizedPoisonValue(ExpectedElementType);
  MetadataAsValue *VMD = buildMD(ExpectedElementVal);
  unsigned AddressSpace = getPointerAddressSpace(Pointer->getType());
  bool FirstPtrCastOrAssignPtrType = true;

  // Do not emit a new spv_ptrcast if an equivalent one already exists or an
  // spv_assign_ptr_type already targets this pointer with the same type.
  if (Pointer->hasUseList()) {
    for (auto User : Pointer->users()) {
      auto *II = dyn_cast<IntrinsicInst>(User);
      if (!II ||
          (II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
           II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
          II->getOperand(0) != Pointer)
        continue;

      // There is some spv_ptrcast/spv_assign_ptr_type already targeting this
      // pointer.
      FirstPtrCastOrAssignPtrType = false;
      if (II->getOperand(1) != VMD ||
          dyn_cast<ConstantInt>(II->getOperand(2))->getSExtValue() !=
              AddressSpace)
        continue;

      // The spv_ptrcast/spv_assign_ptr_type is of the same type.
      if (II->getIntrinsicID() != Intrinsic::spv_ptrcast)
        continue;

      // Reuse a previous spv_ptrcast only from the same basic block.
      if (II->getParent() != I->getParent())
        continue;

      I->setOperand(OperandToReplace, II);
      return;
    }
  }

  if (FirstPtrCastOrAssignPtrType) {
    // If this would be the first spv_ptrcast, emit spv_assign_ptr_type
    // instead.
    GR->buildAssignPtr(B, ExpectedElementType, Pointer);
    return;
  } else if (isTodoType(Pointer)) {
    eraseTodoType(Pointer);
    if (!isa<CallInst>(Pointer) && !isa<GetElementPtrInst>(Pointer)) {
      // The existing type record is uncomplete: update the
      // spv_assign_ptr_type arguments instead of emitting a cast.
      if (CallInst *AssignCI = GR->findAssignPtrTypeInstr(Pointer)) {
        Type *PrevElemTy = GR->findDeducedElementType(Pointer);
        assert(PrevElemTy);
        DenseSet<std::pair<Value *, Value *>> VisitedSubst{
            std::make_pair(I, Pointer)};
        GR->updateAssignType(AssignCI, Pointer, ExpectedElementVal);
        propagateElemType(Pointer, PrevElemTy, VisitedSubst);
      } else {
        GR->buildAssignPtr(B, ExpectedElementType, Pointer);
      }
      return;
    }
  }

  // Emit spv_ptrcast and set up the pointee type of the new cast.
  SmallVector<Type *, 2> Types = {Pointer->getType(), Pointer->getType()};
  SmallVector<Value *, 2> Args = {Pointer, VMD, B.getInt32(AddressSpace)};
  auto *PtrCastI = B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
  I->setOperand(OperandToReplace, PtrCastI);
  GR->buildAssignPtr(B, ExpectedElementType, PtrCastI);
}
void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(Instruction *I,
                                                         IRBuilder<> &B) {
  // Handle basic instructions:
  StoreInst *SI = dyn_cast<StoreInst>(I);
  if (IsKernelArgInt8(CurrF, SI)) {
    replacePointerOperandWithPtrCast(
        I, SI->getValueOperand(), IntegerType::getInt8Ty(CurrF->getContext()),
        0, B);
    return;
  }
  if (SI) {
    Value *Op = SI->getValueOperand();
    Value *Pointer = SI->getPointerOperand();
    Type *OpTy = Op->getType();
    if (auto *OpI = dyn_cast<Instruction>(Op))
      OpTy = restoreMutatedType(GR, OpI, OpTy);
    if (OpTy == Op->getType())
      OpTy = deduceElementTypeByValueDeep(OpTy, Op, false);
    replacePointerOperandWithPtrCast(I, Pointer, OpTy, 1, B);
    return;
  }
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    Value *Pointer = LI->getPointerOperand();
    Type *OpTy = LI->getType();
    if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
      if (Type *ElemTy = GR->findDeducedElementType(LI)) {
        OpTy = getTypedPointerWrapper(ElemTy, PtrTy->getAddressSpace());
      } else {
        Type *NewOpTy = OpTy;
        OpTy = deduceElementTypeByValueDeep(OpTy, LI, false);
        if (OpTy == NewOpTy)
          insertTodoType(Pointer);
      }
    }
    replacePointerOperandWithPtrCast(I, Pointer, OpTy, 0, B);
    return;
  }
  if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(I)) {
    Value *Pointer = GEPI->getPointerOperand();
    Type *OpTy = nullptr;
    // For an i8 GEP under logical SPIR-V, prefer the deduced type of the
    // pointer root over the synthetic i8 source element type.
    if (TM->getSubtargetImpl()->isLogicalSPIRV() &&
        GEPI->getSourceElementType() ==
            IntegerType::getInt8Ty(I->getContext())) {
      Value *Src = getPointerRoot(Pointer);
      OpTy = GR->findDeducedElementType(Src);
    }
    if (!OpTy)
      OpTy = GEPI->getSourceElementType();
    replacePointerOperandWithPtrCast(I, Pointer, OpTy, 0, B);
    if (isNestedPointer(OpTy))
      insertTodoType(Pointer);
    return;
  }

  // Handle calls to builtins (non-intrinsics):
  CallInst *CI = dyn_cast<CallInst>(I);
  if (!CI || CI->isIndirectCall() || CI->isInlineAsm() ||
      !CI->getCalledFunction() || CI->getCalledFunction()->isIntrinsic())
    return;

  // collect information about formal parameter types
  std::string DemangledName =
      getOclOrSpirvBuiltinDemangledName(CI->getCalledFunction()->getName());
  Function *CalledF = CI->getCalledFunction();
  SmallVector<Type *, 4> CalledArgTys;
  bool HaveTypes = false;
  for (unsigned OpIdx = 0; OpIdx < CalledF->arg_size(); ++OpIdx) {
    Argument *CalledArg = CalledF->getArg(OpIdx);
    Type *ArgType = CalledArg->getType();
    if (!isPointerTy(ArgType)) {
      CalledArgTys.push_back(nullptr);
    } else if (Type *ArgTypeElem = getPointeeType(ArgType)) {
      CalledArgTys.push_back(ArgTypeElem);
      HaveTypes = true;
    } else {
      Type *ElemTy = GR->findDeducedElementType(CalledArg);
      if (!ElemTy && hasPointeeTypeAttr(CalledArg))
        ElemTy = getPointeeTypeByAttr(CalledArg);
      if (!ElemTy) {
        ElemTy = getPointeeTypeByCallInst(DemangledName, CalledF, OpIdx);
        if (ElemTy) {
          GR->addDeducedElementType(CalledArg, normalizeType(ElemTy));
        } else {
          for (User *U : CalledArg->users()) {
            if (Instruction *Inst = dyn_cast<Instruction>(U)) {
              if ((ElemTy = deduceElementTypeHelper(Inst, false)) != nullptr)
                break;
            }
          }
        }
      }
      HaveTypes |= ElemTy != nullptr;
      CalledArgTys.push_back(ElemTy);
    }
  }

  if (DemangledName.empty() && !HaveTypes)
    return;

  for (unsigned OpIdx = 0; OpIdx < CI->arg_size(); OpIdx++) {
    Value *ArgOperand = CI->getArgOperand(OpIdx);
    if (!isPointerTy(ArgOperand->getType()))
      continue;

    // constants (nulls/undefs) are handled in insertAssignPtrTypeIntrs()
    if (!isa<Instruction>(ArgOperand) && !isa<Argument>(ArgOperand)) {
      Argument *CalledArg = CalledF->getArg(OpIdx);
      if (!GR->findDeducedElementType(CalledArg))
        continue;
    }

    Type *ExpectedType =
        OpIdx < CalledArgTys.size() ? CalledArgTys[OpIdx] : nullptr;
    if (!ExpectedType && !DemangledName.empty())
      ExpectedType = SPIRV::parseBuiltinCallArgumentBaseType(
          DemangledName, OpIdx, I->getContext());
    if (!ExpectedType || ExpectedType->isVoidTy())
      continue;

    if (ExpectedType->isTargetExtTy() &&
        !isTypedPointerWrapper(cast<TargetExtType>(ExpectedType)))
      insertAssignPtrTypeTargetExt(cast<TargetExtType>(ExpectedType),
                                   ArgOperand, B);
    else
      replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType, OpIdx, B);
  }
}
Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &I) {
  SmallVector<Type *, 4> Types = {I.getType(), I.getOperand(0)->getType(),
                                  I.getOperand(1)->getType(),
                                  I.getOperand(2)->getType()};
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_insertelt, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}

Instruction *
SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 3> Types = {I.getType(), I.getVectorOperandType(),
                                  I.getIndexOperand()->getType()};
  SmallVector<Value *, 2> Args = {I.getVectorOperand(), I.getIndexOperand()};
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_extractelt, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 1> Types = {I.getInsertedValueOperand()->getType()};
  SmallVector<Value *> Args;
  Value *AggregateOp = I.getAggregateOperand();
  if (isa<UndefValue>(AggregateOp))
    Args.push_back(UndefValue::get(B.getInt32Ty()));
  else
    Args.push_back(AggregateOp);
  Args.push_back(I.getInsertedValueOperand());
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  Instruction *NewI =
      B.CreateIntrinsic(Intrinsic::spv_insertv, {Types}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &I) {
  if (I.getAggregateOperand()->getType()->isAggregateType())
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args;
  Args.push_back(I.getAggregateOperand());
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_extractv, {I.getType()}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &I) {
  if (!I.getType()->isAggregateType())
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
  MachineMemOperand::Flags Flags =
      TLI->getLoadMemOperandFlags(I, CurrF->getDataLayout());
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_load, {I.getOperand(0)->getType()},
                        {I.getPointerOperand(), B.getInt16(Flags),
                         B.getInt8(I.getAlign().value())});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &I) {
  if (!AggrStores.contains(&I))
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
  MachineMemOperand::Flags Flags =
      TLI->getStoreMemOperandFlags(I, CurrF->getDataLayout());
  auto *PtrOp = I.getPointerOperand();
  auto *NewI = B.CreateIntrinsic(
      Intrinsic::spv_store, {I.getValueOperand()->getType(), PtrOp->getType()},
      {I.getValueOperand(), PtrOp, B.getInt16(Flags),
       B.getInt8(I.getAlign().value())});
  I.eraseFromParent();
  return NewI;
}
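// Illustrative lowering (sketch): an aggregate load/store pair
//   %v = load {i32, float}, ptr %p
//   store {i32, float} %v, ptr %q
// becomes
//   %v = call {i32, float} @llvm.spv.load(ptr %p, i16 <flags>, i8 <align>)
//   call void @llvm.spv.store(... %v, ptr %q, i16 <flags>, i8 <align>)
// with memory-operand flags taken from the target lowering and the alignment
// encoded as an i8 immediate.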
Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &I) {
  Value *ArraySize = nullptr;
  if (I.isArrayAllocation()) {
    const SPIRVSubtarget *STI = TM->getSubtargetImpl(*I.getFunction());
    if (!STI->canUseExtension(
            SPIRV::Extension::SPV_INTEL_variable_length_array))
      report_fatal_error(
          "array allocation: this instruction requires the following "
          "SPIR-V extension: SPV_INTEL_variable_length_array",
          false);
    ArraySize = I.getArraySize();
  }
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  Type *PtrTy = I.getType();
  auto *NewI =
      ArraySize
          ? B.CreateIntrinsic(Intrinsic::spv_alloca_array,
                              {PtrTy, ArraySize->getType()},
                              {ArraySize, B.getInt8(I.getAlign().value())})
          : B.CreateIntrinsic(Intrinsic::spv_alloca, {PtrTy},
                              {B.getInt8(I.getAlign().value())});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &I) {
  assert(I.getType()->isAggregateType() && "Aggregate result is expected");
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemScope(I.getContext(), I.getSyncScopeID()))));
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getSuccessOrdering()))));
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getFailureOrdering()))));
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
                                 {I.getPointerOperand()->getType()}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  B.CreateIntrinsic(Intrinsic::spv_unreachable, {});
  return &I;
}
void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV,
                                             IRBuilder<> &B) {
  // Skip special artificial variables.
  static const StringSet<> ArtificialGlobals{"llvm.global.annotations",
                                             "llvm.compiler.used"};
  if (ArtificialGlobals.contains(GV.getName()))
    return;
  deduceElementTypeHelper(&GV, false);
  Constant *Init = nullptr;
  if (hasInitializer(&GV)) {
    // Deduce element type and store the constant with the initializer.
    Init = GV.getInitializer();
    Type *Ty = isAggrConstForceInt32(Init) ? B.getInt32Ty() : Init->getType();
    Constant *Const = isAggrConstForceInt32(Init) ? B.getInt32(1) : Init;
    auto *InitInst = B.CreateIntrinsic(Intrinsic::spv_init_global,
                                       {GV.getType(), Ty}, {&GV, Const});
    InitInst->setArgOperand(1, Init);
  }
  if (!Init && GV.use_empty())
    B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.getType(), &GV);
}
bool SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *I,
                                                   IRBuilder<> &B,
                                                   bool UnknownElemTypeI8) {
  reportFatalOnTokenType(I);
  if (!isPointerTy(I->getType()) || !requireAssignType(I))
    return false;

  setInsertPointAfterDef(B, I);
  if (Type *ElemTy = deduceElementType(I, UnknownElemTypeI8)) {
    GR->buildAssignPtr(B, ElemTy, I);
    return false;
  }
  return true;
}
void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *I,
                                                IRBuilder<> &B) {
  // TODO: extend the list of functions with known result types
  static StringMap<unsigned> ResTypeWellKnown = {
      {"async_work_group_copy", WellKnownTypes::Event},
      {"async_work_group_strided_copy", WellKnownTypes::Event},
      {"__spirv_GroupAsyncCopy", WellKnownTypes::Event}};

  reportFatalOnTokenType(I);

  bool IsKnown = false;
  if (auto *CI = dyn_cast<CallInst>(I)) {
    if (!CI->isIndirectCall() && !CI->isInlineAsm() &&
        CI->getCalledFunction() && !CI->getCalledFunction()->isIntrinsic()) {
      Function *CalledF = CI->getCalledFunction();
      std::string DemangledName =
          getOclOrSpirvBuiltinDemangledName(CalledF->getName());
      FPDecorationId DecorationId = FPDecorationId::NONE;
      if (DemangledName.length() > 0)
        DemangledName =
            SPIRV::lookupBuiltinNameHelper(DemangledName, &DecorationId);
      auto ResIt = ResTypeWellKnown.find(DemangledName);
      if (ResIt != ResTypeWellKnown.end()) {
        IsKnown = true;
        setInsertPointAfterDef(B, I);
        switch (ResIt->second) {
        case WellKnownTypes::Event:
          GR->buildAssignType(
              B, TargetExtType::get(I->getContext(), "spirv.Event"), I);
          break;
        }
      }
      // check if a floating-point rounding mode or saturation is present
      switch (DecorationId) {
      default:
        break;
      case FPDecorationId::SAT:
        createSaturatedConversionDecoration(CI, B);
        break;
      case FPDecorationId::RTE:
        createRoundingModeDecoration(
            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTE, B);
        break;
      case FPDecorationId::RTZ:
        createRoundingModeDecoration(
            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTZ, B);
        break;
      case FPDecorationId::RTP:
        createRoundingModeDecoration(
            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTP, B);
        break;
      case FPDecorationId::RTN:
        createRoundingModeDecoration(
            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTN, B);
        break;
      }
    }
  }

  Type *Ty = I->getType();
  if (!IsKnown && !Ty->isVoidTy() && !isPointerTy(Ty) && requireAssignType(I)) {
    setInsertPointAfterDef(B, I);
    Type *TypeToAssign = Ty;
    if (auto *II = dyn_cast<IntrinsicInst>(I)) {
      if (II->getIntrinsicID() == Intrinsic::spv_const_composite ||
          II->getIntrinsicID() == Intrinsic::spv_undef) {
        auto It = AggrConstTypes.find(II);
        if (It == AggrConstTypes.end())
          report_fatal_error("Unknown composite intrinsic type");
        TypeToAssign = It->second;
      }
    }
    TypeToAssign = restoreMutatedType(GR, I, TypeToAssign);
    GR->buildAssignType(B, TypeToAssign, I);
  }
  for (const auto &Op : I->operands()) {
    if (isa<ConstantPointerNull>(Op) || isa<UndefValue>(Op) ||
        // Check GetElementPtrConstantExpr case.
        (isa<ConstantExpr>(Op) && isa<GEPOperator>(Op))) {
      setInsertPointSkippingPhis(B, I);
      Type *OpTy = Op->getType();
      if (isa<UndefValue>(Op) && OpTy->isAggregateType()) {
        CallInst *AssignCI =
            buildIntrWithMD(Intrinsic::spv_assign_type, {B.getInt32Ty()}, Op,
                            UndefValue::get(B.getInt32Ty()), {}, B);
        GR->addAssignPtrTypeInstr(Op, AssignCI);
      } else if (!isa<Instruction>(Op)) {
        Type *OpTy = Op->getType();
        Type *OpTyElem = getPointeeType(OpTy);
        if (OpTyElem) {
          GR->buildAssignPtr(B, OpTyElem, Op);
        } else if (isPointerTy(OpTy)) {
          Type *ElemTy = GR->findDeducedElementType(Op);
          GR->buildAssignPtr(B, ElemTy ? ElemTy : deduceElementType(Op, true),
                             Op);
        } else {
          CallInst *AssignCI =
              buildIntrWithMD(Intrinsic::spv_assign_type, {OpTy}, Op,
                              getNormalizedPoisonValue(OpTy), {}, B);
          GR->addAssignPtrTypeInstr(Op, AssignCI);
        }
      }
    }
  }
}
bool SPIRVEmitIntrinsics::shouldTryToAddMemAliasingDecoration(
    Instruction *Inst) {
  const SPIRVSubtarget *STI = TM->getSubtargetImpl();
  if (!STI->canUseExtension(
          SPIRV::Extension::SPV_INTEL_memory_access_aliasing))
    return false;
  // Add aliasing decorations to internal load/store intrinsics and atomic
  // instructions, skipping atomic store as it has no result ID to decorate.
  CallInst *CI = dyn_cast<CallInst>(Inst);
  if (!CI)
    return false;
  if (Function *Fun = CI->getCalledFunction()) {
    if (Fun->isIntrinsic()) {
      switch (Fun->getIntrinsicID()) {
      case Intrinsic::spv_load:
      case Intrinsic::spv_store:
        return true;
      default:
        return false;
      }
    }
    std::string Name = getOclOrSpirvBuiltinDemangledName(Fun->getName());
    const std::string Prefix = "__spirv_Atomic";
    const bool IsAtomic = Name.find(Prefix) == 0;

    if (!Fun->getReturnType()->isVoidTy() && IsAtomic)
      return true;
  }
  return false;
}
void SPIRVEmitIntrinsics::insertSpirvDecorations(Instruction *I,
                                                 IRBuilder<> &B) {
  if (MDNode *MD = I->getMetadata("spirv.Decorations")) {
    setInsertPointAfterDef(B, I);
    B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
                      {I, MetadataAsValue::get(I->getContext(), MD)});
  }
  // Lower alias.scope/noalias metadata.
  auto processMemAliasingDecoration = [&](unsigned Kind) {
    if (MDNode *AliasListMD = I->getMetadata(Kind)) {
      if (shouldTryToAddMemAliasingDecoration(I)) {
        uint32_t Dec = Kind == LLVMContext::MD_alias_scope
                           ? SPIRV::Decoration::AliasScopeINTEL
                           : SPIRV::Decoration::NoAliasINTEL;
        SmallVector<Value *, 3> Args = {
            I, ConstantInt::get(B.getInt32Ty(), Dec),
            MetadataAsValue::get(I->getContext(), AliasListMD)};
        setInsertPointAfterDef(B, I);
        B.CreateIntrinsic(Intrinsic::spv_assign_aliasing_decoration,
                          {I->getType()}, {Args});
      }
    }
  };
  processMemAliasingDecoration(LLVMContext::MD_alias_scope);
  processMemAliasingDecoration(LLVMContext::MD_noalias);
  // MD_fpmath
  if (MDNode *MD = I->getMetadata(LLVMContext::MD_fpmath)) {
    const SPIRVSubtarget *STI = TM->getSubtargetImpl();
    bool AllowFPMaxError =
        STI->canUseExtension(SPIRV::Extension::SPV_INTEL_fp_max_error);
    if (!AllowFPMaxError)
      return;
    setInsertPointAfterDef(B, I);
    B.CreateIntrinsic(Intrinsic::spv_assign_fpmaxerror_decoration,
                      {I->getType()},
                      {I, MetadataAsValue::get(I->getContext(), MD)});
  }
}
static SPIRV::FPFastMathDefaultInfoVector &getOrCreateFPFastMathDefaultInfoVec(
    const Module &M,
    DenseMap<Function *, SPIRV::FPFastMathDefaultInfoVector>
        &FPFastMathDefaultInfoMap,
    Function *F) {
  auto it = FPFastMathDefaultInfoMap.find(F);
  if (it != FPFastMathDefaultInfoMap.end())
    return it->second;

  // Create a new entry containing all 3 elements sorted by the bit width of
  // the target type: half, float, double.
  SPIRV::FPFastMathDefaultInfoVector FPFastMathDefaultInfoVec;
  FPFastMathDefaultInfoVec.emplace_back(Type::getHalfTy(M.getContext()),
                                        SPIRV::FPFastMathMode::None);
  FPFastMathDefaultInfoVec.emplace_back(Type::getFloatTy(M.getContext()),
                                        SPIRV::FPFastMathMode::None);
  FPFastMathDefaultInfoVec.emplace_back(Type::getDoubleTy(M.getContext()),
                                        SPIRV::FPFastMathMode::None);
  return FPFastMathDefaultInfoMap[F] = std::move(FPFastMathDefaultInfoVec);
}

static SPIRV::FPFastMathDefaultInfo &getFPFastMathDefaultInfo(
    SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec,
    const Type *Ty) {
  size_t BitWidth = Ty->getScalarSizeInBits();
  int Index =
      SPIRV::FPFastMathDefaultInfoVector::computeFPFastMathDefaultInfoVecIndex(
          BitWidth);
  assert(Index >= 0 && Index < 3 &&
         "Expected FPFastMathDefaultInfo for half, float, or double");
  assert(FPFastMathDefaultInfoVec.size() == 3 &&
         "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
  return FPFastMathDefaultInfoVec[Index];
}
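// Worked mapping (implied by the asserts above): the info vector always has
// exactly three entries ordered by scalar bit width, so
// computeFPFastMathDefaultInfoVecIndex(16) -> 0 (half),
// computeFPFastMathDefaultInfoVecIndex(32) -> 1 (float),
// computeFPFastMathDefaultInfoVecIndex(64) -> 2 (double).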
void SPIRVEmitIntrinsics::insertConstantsForFPFastMathDefault(Module &M) {
  const SPIRVSubtarget *ST = TM->getSubtargetImpl();
  if (!ST->canUseExtension(SPIRV::Extension::SPV_KHR_float_controls2))
    return;

  // The FP fast-math execution modes live in module-level metadata.
  auto Node = M.getNamedMetadata("spirv.ExecutionMode");
  if (!Node) {
    if (!M.getNamedMetadata("opencl.enable.FP_CONTRACT")) {
      // This requires emitting ContractionOff; because ContractionOff is
      // deprecated, it is replaced with FPFastMathDefault with no flags.
      Constant *InitValue =
          ConstantInt::get(Type::getInt32Ty(M.getContext()), 0);
      [[maybe_unused]] GlobalVariable *GV =
          new GlobalVariable(M, Type::getInt32Ty(M.getContext()),
                             /*isConstant=*/true, GlobalValue::InternalLinkage,
                             InitValue);
    }
    return;
  }

  // Collect the execution modes per function.
  DenseMap<Function *, SPIRV::FPFastMathDefaultInfoVector>
      FPFastMathDefaultInfoMap;

  for (unsigned i = 0; i < Node->getNumOperands(); i++) {
    MDNode *MDN = cast<MDNode>(Node->getOperand(i));
    assert(MDN->getNumOperands() >= 2 && "Expected at least 2 operands");
    Function *F = cast<Function>(
        cast<ConstantAsMetadata>(MDN->getOperand(0))->getValue());
    const auto EM =
        cast<ConstantInt>(
            cast<ConstantAsMetadata>(MDN->getOperand(1))->getValue())
            ->getZExtValue();
    if (EM == SPIRV::ExecutionMode::FPFastMathDefault) {
      assert(MDN->getNumOperands() == 4 &&
             "Expected 4 operands for FPFastMathDefault");
      const Type *T = cast<ValueAsMetadata>(MDN->getOperand(2))->getType();
      unsigned Flags =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(MDN->getOperand(3))->getValue())
              ->getZExtValue();
      SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
          getOrCreateFPFastMathDefaultInfoVec(M, FPFastMathDefaultInfoMap, F);
      SPIRV::FPFastMathDefaultInfo &Info =
          getFPFastMathDefaultInfo(FPFastMathDefaultInfoVec, T);
      Info.FastMathFlags = Flags;
      Info.FPFastMathDefault = true;
    } else if (EM == SPIRV::ExecutionMode::ContractionOff) {
      assert(MDN->getNumOperands() == 2 &&
             "Expected no operands for ContractionOff");
      SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
          getOrCreateFPFastMathDefaultInfoVec(M, FPFastMathDefaultInfoMap, F);
      for (SPIRV::FPFastMathDefaultInfo &Info : FPFastMathDefaultInfoVec) {
        Info.ContractionOff = true;
      }
    } else if (EM == SPIRV::ExecutionMode::SignedZeroInfNanPreserve) {
      assert(MDN->getNumOperands() == 3 &&
             "Expected 1 operand for SignedZeroInfNanPreserve");
      unsigned TargetWidth =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(MDN->getOperand(2))->getValue())
              ->getZExtValue();
      SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
          getOrCreateFPFastMathDefaultInfoVec(M, FPFastMathDefaultInfoMap, F);
      int Index = SPIRV::FPFastMathDefaultInfoVector::
          computeFPFastMathDefaultInfoVecIndex(TargetWidth);
      assert(Index >= 0 && Index < 3 &&
             "Expected FPFastMathDefaultInfo for half, float, or double");
      assert(FPFastMathDefaultInfoVec.size() == 3 &&
             "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
      FPFastMathDefaultInfoVec[Index].SignedZeroInfNanPreserve = true;
    }
  }

  std::unordered_map<unsigned, GlobalVariable *> GlobalVars;
  for (auto &[Func, FPFastMathDefaultInfoVec] : FPFastMathDefaultInfoMap) {
    if (FPFastMathDefaultInfoVec.empty())
      continue;

    for (const SPIRV::FPFastMathDefaultInfo &Info : FPFastMathDefaultInfoVec) {
      assert(Info.Ty && "Expected target type for FPFastMathDefaultInfo");
      unsigned Flags = Info.FastMathFlags;
      if (Flags == SPIRV::FPFastMathMode::None && !Info.ContractionOff &&
          !Info.SignedZeroInfNanPreserve && !Info.FPFastMathDefault)
        continue;

      // Check for conflicts between execution modes.
      if (Info.ContractionOff &&
          (Flags & SPIRV::FPFastMathMode::AllowContract))
        report_fatal_error("Conflicting FPFastMathFlags: ContractionOff "
                           "and AllowContract");
      if (Info.SignedZeroInfNanPreserve &&
          (Flags &
           (SPIRV::FPFastMathMode::NotNaN | SPIRV::FPFastMathMode::NotInf |
            SPIRV::FPFastMathMode::NSZ))) {
        if (Info.FPFastMathDefault)
          report_fatal_error("Conflicting FPFastMathFlags: "
                             "SignedZeroInfNanPreserve but at least one of "
                             "NotNaN/NotInf/NSZ is enabled.");
      }
      if ((Flags & SPIRV::FPFastMathMode::AllowTransform) &&
          !((Flags & SPIRV::FPFastMathMode::AllowReassoc) &&
            (Flags & SPIRV::FPFastMathMode::AllowContract))) {
        report_fatal_error("Conflicting FPFastMathFlags: "
                           "AllowTransform requires AllowReassoc and "
                           "AllowContract to be set.");
      }

      auto it = GlobalVars.find(Flags);
      GlobalVariable *GV = nullptr;
      if (it != GlobalVars.end()) {
        GV = it->second;
      } else {
        Constant *InitValue =
            ConstantInt::get(Type::getInt32Ty(M.getContext()), Flags);
        GV = new GlobalVariable(M, Type::getInt32Ty(M.getContext()),
                                /*isConstant=*/true,
                                GlobalValue::InternalLinkage, InitValue);
        GlobalVars[Flags] = GV;
      }
    }
  }
}
void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *I,
                                                 IRBuilder<> &B) {
  auto *II = dyn_cast<IntrinsicInst>(I);
  bool IsConstComposite =
      II && II->getIntrinsicID() == Intrinsic::spv_const_composite;
  if (IsConstComposite && TrackConstants) {
    setInsertPointAfterDef(B, I);
    auto t = AggrConsts.find(I);
    assert(t != AggrConsts.end());
    auto *NewOp =
        buildIntrWithMD(Intrinsic::spv_track_constant,
                        {II->getType(), II->getType()}, t->second, I, {}, B);
    replaceAllUsesWith(I, NewOp, false);
    NewOp->setArgOperand(0, I);
  }
  bool IsPhi = isa<PHINode>(I), BPrepared = false;
  for (const auto &Op : I->operands()) {
    if (isa<PHINode>(I) || isa<SwitchInst>(I) ||
        !(isa<ConstantData>(Op) || isa<ConstantExpr>(Op)))
      continue;
    unsigned OpNo = Op.getOperandNo();
    if (II && ((II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
               (II->paramHasAttr(OpNo, Attribute::ImmArg))))
      continue;

    if (!BPrepared) {
      IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
            : B.SetInsertPoint(I);
      BPrepared = true;
    }
    Type *OpTy = Op->getType();
    Value *OpTyVal = Op;
    if (OpTy->isTargetExtTy())
      OpTyVal = getNormalizedPoisonValue(OpTy);
    CallInst *NewOp =
        buildIntrWithMD(Intrinsic::spv_track_constant,
                        {OpTy, OpTyVal->getType()}, Op, OpTyVal, {}, B);
    Type *OpElemTy = nullptr;
    if (!IsConstComposite && isPointerTy(OpTy) &&
        (OpElemTy = GR->findDeducedElementType(Op)) != nullptr &&
        OpElemTy != IntegerType::getInt8Ty(I->getContext())) {
      GR->buildAssignPtr(B, IntegerType::getInt8Ty(I->getContext()), NewOp);
      SmallVector<Type *, 2> Types = {OpTy, OpTy};
      SmallVector<Value *, 2> Args = {
          NewOp, buildMD(getNormalizedPoisonValue(OpElemTy)),
          B.getInt32(getPointerAddressSpace(OpTy))};
      CallInst *PtrCasted =
          B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
      GR->buildAssignPtr(B, OpElemTy, PtrCasted);
      NewOp = PtrCasted;
    }
    I->setOperand(OpNo, NewOp);
  }
  if (Named.insert(I).second)
    emitAssignName(I, B);
}
Type *SPIRVEmitIntrinsics::deduceFunParamElementType(Function *F,
                                                     unsigned OpIdx) {
  std::unordered_set<Function *> FVisited;
  return deduceFunParamElementType(F, OpIdx, FVisited);
}

Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
    Function *F, unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
  // maybe a cycle
  if (!FVisited.insert(F).second)
    return nullptr;

  std::unordered_set<Value *> Visited;
  SmallVector<std::pair<Function *, unsigned>> Lookup;
  // search in function's call sites
  for (User *U : F->users()) {
    CallInst *CI = dyn_cast<CallInst>(U);
    if (!CI || OpIdx >= CI->arg_size())
      continue;
    Value *OpArg = CI->getArgOperand(OpIdx);
    if (!isPointerTy(OpArg->getType()))
      continue;
    // maybe we already know the operand's element type
    if (Type *KnownTy = GR->findDeducedElementType(OpArg))
      return KnownTy;
    // try to deduce from the operand itself
    Visited.clear();
    if (Type *Ty = deduceElementTypeHelper(OpArg, Visited, false))
      return Ty;
    // search in the actual parameter's users
    for (User *OpU : OpArg->users()) {
      Instruction *Inst = dyn_cast<Instruction>(OpU);
      if (!Inst || Inst == CI)
        continue;
      Visited.clear();
      if (Type *Ty = deduceElementTypeHelper(Inst, Visited, false))
        return Ty;
    }
    // check if it's a formal parameter of the outer function
    if (!CI->getParent() || !CI->getParent()->getParent())
      continue;
    Function *OuterF = CI->getParent()->getParent();
    if (FVisited.find(OuterF) != FVisited.end())
      continue;
    for (unsigned i = 0; i < OuterF->arg_size(); ++i) {
      if (OuterF->getArg(i) == OpArg) {
        Lookup.push_back(std::make_pair(OuterF, i));
        break;
      }
    }
  }

  // search in function parameters
  for (auto &Pair : Lookup) {
    if (Type *Ty =
            deduceFunParamElementType(Pair.first, Pair.second, FVisited))
      return Ty;
  }

  return nullptr;
}
void SPIRVEmitIntrinsics::processParamTypesByFunHeader(Function *F,
                                                       IRBuilder<> &B) {
  B.SetInsertPointPastAllocas(F);
  for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {
    Argument *Arg = F->getArg(OpIdx);
    if (!isUntypedPointerTy(Arg->getType()))
      continue;
    Type *ElemTy = GR->findDeducedElementType(Arg);
    if (ElemTy)
      continue;
    if (hasPointeeTypeAttr(Arg) &&
        (ElemTy = getPointeeTypeByAttr(Arg)) != nullptr) {
      GR->buildAssignPtr(B, ElemTy, Arg);
      continue;
    }
    // try to deduce the element type from the function's call sites
    for (User *U : F->users()) {
      CallInst *CI = dyn_cast<CallInst>(U);
      if (!CI || OpIdx >= CI->arg_size())
        continue;
      if ((ElemTy = deduceElementTypeHelper(CI->getArgOperand(OpIdx), false)))
        break;
    }
    if (ElemTy) {
      GR->buildAssignPtr(B, ElemTy, Arg);
      continue;
    }
    // a function-pointer argument may reveal its type via an indirect call
    if (HaveFunPtrs) {
      for (User *U : Arg->users()) {
        CallInst *CI = dyn_cast<CallInst>(U);
        if (CI && !isa<IntrinsicInst>(CI) && CI->isIndirectCall() &&
            CI->getCalledOperand() == Arg &&
            CI->getParent()->getParent() == CurrF) {
          SmallVector<std::pair<Value *, unsigned>> Ops;
          deduceOperandElementTypeFunctionPointer(CI, Ops, ElemTy, false);
          if (ElemTy) {
            GR->buildAssignPtr(B, ElemTy, Arg);
            break;
          }
        }
      }
    }
  }
}

void SPIRVEmitIntrinsics::processParamTypes(Function *F, IRBuilder<> &B) {
  B.SetInsertPointPastAllocas(F);
  for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {
    Argument *Arg = F->getArg(OpIdx);
    if (!isUntypedPointerTy(Arg->getType()))
      continue;
    Type *ElemTy = GR->findDeducedElementType(Arg);
    if (!ElemTy && (ElemTy = deduceFunParamElementType(F, OpIdx)) != nullptr) {
      if (CallInst *AssignCI = GR->findAssignPtrTypeInstr(Arg)) {
        DenseSet<std::pair<Value *, Value *>> VisitedSubst;
        GR->updateAssignType(AssignCI, Arg, getNormalizedPoisonValue(ElemTy));
        propagateElemType(Arg, IntegerType::getInt8Ty(F->getContext()),
                          VisitedSubst);
      } else {
        GR->buildAssignPtr(B, ElemTy, Arg);
      }
    }
  }
}

static FunctionType *getFunctionPointerElemType(Function *F,
                                                SPIRVGlobalRegistry *GR) {
  FunctionType *FTy = F->getFunctionType();
  bool IsNewFTy = false;
  SmallVector<Type *, 4> ArgTys;
  for (Argument &Arg : F->args()) {
    Type *ArgTy = Arg.getType();
    if (ArgTy->isPointerTy())
      if (Type *ElemTy = GR->findDeducedElementType(&Arg)) {
        IsNewFTy = true;
        ArgTy = getTypedPointerWrapper(ElemTy, getPointerAddressSpace(ArgTy));
      }
    ArgTys.push_back(ArgTy);
  }
  return IsNewFTy
             ? FunctionType::get(F->getReturnType(), ArgTys, FTy->isVarArg())
             : FTy;
}
bool SPIRVEmitIntrinsics::processFunctionPointers(Module &M) {
  SmallVector<Function *> Worklist;
  for (auto &F : M) {
    if (F.isIntrinsic())
      continue;
    if (F.isDeclaration()) {
      for (User *U : F.users()) {
        CallInst *CI = dyn_cast<CallInst>(U);
        if (!CI || CI->getCalledFunction() != &F) {
          Worklist.push_back(&F);
          break;
        }
      }
    } else {
      if (F.user_empty())
        continue;
      Type *FPElemTy = GR->findDeducedElementType(&F);
      if (!FPElemTy)
        FPElemTy = getFunctionPointerElemType(&F, GR);
      for (User *U : F.users()) {
        IntrinsicInst *II = dyn_cast<IntrinsicInst>(U);
        if (!II || II->arg_size() != 3 || II->getOperand(0) != &F)
          continue;
        if (II->getIntrinsicID() == Intrinsic::spv_assign_ptr_type ||
            II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
          GR->updateAssignType(II, &F, getNormalizedPoisonValue(FPElemTy));
          break;
        }
      }
    }
  }
  if (Worklist.empty())
    return false;

  std::string ServiceFunName = SPIRV_BACKEND_SERVICE_FUN_NAME;
  if (!getVacantFunctionName(M, ServiceFunName))
    report_fatal_error(
        "cannot allocate a name for the internal service function");
  LLVMContext &Ctx = M.getContext();
  Function *SF =
      Function::Create(FunctionType::get(Type::getVoidTy(Ctx), {}, false),
                       GlobalValue::PrivateLinkage, ServiceFunName, M);
  SF->addFnAttr(SPIRV_BACKEND_SERVICE_FUN_NAME, "");
  BasicBlock *BB = BasicBlock::Create(Ctx, "entry", SF);
  IRBuilder<> IRB(BB);

  for (Function *F : Worklist) {
    SmallVector<Value *> Args;
    for (const auto &Arg : F->args())
      Args.push_back(getNormalizedPoisonValue(Arg.getType()));
    IRB.CreateCall(F, Args);
  }
  IRB.CreateRetVoid();

  return true;
}
void SPIRVEmitIntrinsics::applyDemangledPtrArgTypes(IRBuilder<> &B) {
  DenseMap<Function *, CallInst *> Ptrcasts;
  for (auto It : FDeclPtrTys) {
    Function *F = It.first;
    for (auto *U : F->users()) {
      CallInst *CI = dyn_cast<CallInst>(U);
      if (!CI || CI->getCalledFunction() != F)
        continue;
      unsigned Sz = CI->arg_size();
      for (auto [Idx, ElemTy] : It.second) {
        if (Idx >= Sz)
          continue;
        Value *Param = CI->getArgOperand(Idx);
        if (GR->findDeducedElementType(Param) || isa<GlobalValue>(Param))
          continue;
        if (Argument *Arg = dyn_cast<Argument>(Param)) {
          if (!hasPointeeTypeAttr(Arg)) {
            B.SetInsertPointPastAllocas(Arg->getParent());
            B.SetCurrentDebugLocation(DebugLoc());
            GR->buildAssignPtr(B, ElemTy, Arg);
          }
        } else if (isa<GetElementPtrInst>(Param)) {
          replaceUsesOfWithSpvPtrcast(Param, normalizeType(ElemTy), CI,
                                      Ptrcasts);
        } else if (isa<Instruction>(Param)) {
          GR->addDeducedElementType(Param, normalizeType(ElemTy));
          // insertAssignTypeIntrs() will complete buildAssignPtr()
        } else {
          B.SetInsertPoint(&CI->getParent()
                                ->getParent()
                                ->getEntryBlock()
                                .getFirstNonPHIOrDbgOrAlloca());
          GR->buildAssignPtr(B, ElemTy, Param);
        }
      }
    }
  }
}
GetElementPtrInst *
SPIRVEmitIntrinsics::simplifyZeroLengthArrayGepInst(GetElementPtrInst *GEP) {
  // A GEP into a zero-length array with a leading zero index is equivalent to
  // a GEP over the array's element type with the remaining indices.
  assert(GEP);
  Type *SrcTy = GEP->getSourceElementType();
  SmallVector<Value *, 8> Indices(GEP->indices());
  auto *ArrTy = dyn_cast<ArrayType>(SrcTy);
  if (ArrTy && ArrTy->getNumElements() == 0 && isFirstIndexZero(GEP)) {
    Indices.erase(Indices.begin());
    SrcTy = ArrTy->getElementType();
    return GetElementPtrInst::Create(SrcTy, GEP->getPointerOperand(), Indices,
                                     GEP->getNoWrapFlags(), "",
                                     GEP->getIterator());
  }
  return nullptr;
}
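// Illustrative simplification: because the first index is zero and the array
// has no elements, the wrapper type can be dropped:
//   %r = getelementptr [0 x i32], ptr %p, i64 0, i64 %i
// becomes
//   %r = getelementptr i32, ptr %p, i64 %i
// preserving the no-wrap flags of the original GEP.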
bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
  if (Func.isDeclaration())
    return false;

  const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(Func);
  GR = ST.getSPIRVGlobalRegistry();
  CurrF = &Func;

  HaveFunPtrs =
      ST.canUseExtension(SPIRV::Extension::SPV_INTEL_function_pointers);

  IRBuilder<> B(Func.getContext());
  AggrConsts.clear();
  AggrConstTypes.clear();
  AggrStores.clear();

  // Fix GEP result types ahead of inference and simplify GEPs into
  // zero-length arrays.
  SmallPtrSet<Instruction *, 4> DeadInsts;
  for (auto &I : instructions(Func)) {
    auto *Ref = dyn_cast<GetElementPtrInst>(&I);
    if (!Ref || GR->findDeducedElementType(Ref))
      continue;
    if (GetElementPtrInst *NewGEP = simplifyZeroLengthArrayGepInst(Ref)) {
      Ref->replaceAllUsesWith(NewGEP);
      DeadInsts.insert(Ref);
      Ref = NewGEP;
    }
    if (Type *GepTy = getGEPType(Ref))
      GR->addDeducedElementType(Ref, normalizeType(GepTy));
  }
  for (auto *I : DeadInsts) {
    assert(I->use_empty() && "Dead instruction should not have any uses left");
    I->eraseFromParent();
  }

  processParamTypesByFunHeader(CurrF, B);

  // StoreInst's operand type can be changed during the next transformations,
  // so we need to store the relevant instructions in the set in advance.
  for (auto &I : instructions(Func)) {
    StoreInst *SI = dyn_cast<StoreInst>(&I);
    if (!SI)
      continue;
    Type *ElTy = SI->getValueOperand()->getType();
    if (ElTy->isAggregateType() || ElTy->isVectorTy())
      AggrStores.insert(&I);
  }

  B.SetInsertPoint(&Func.getEntryBlock(), Func.getEntryBlock().begin());
  for (auto &GV : Func.getParent()->globals())
    processGlobalValue(GV, B);

  preprocessUndefs(B);
  preprocessCompositeConstants(B);
  SmallVector<Instruction *> Worklist(
      llvm::make_pointer_range(instructions(Func)));

  applyDemangledPtrArgTypes(B);

  // Pass forward: use operands to deduce the instructions' result types.
  for (auto &I : Worklist) {
    // Don't emit intrinsics for convergence intrinsics.
    if (isConvergenceIntrinsic(I))
      continue;

    bool Postpone = insertAssignPtrTypeIntrs(I, B, false);
    // if Postpone is true, we can't decide on the pointee type yet
    insertAssignTypeIntrs(I, B);
    insertPtrCastOrAssignTypeInstr(I, B);
    insertSpirvDecorations(I, B);
    // if the instruction requires a pointee type, force it to i8 when it is
    // still unknown
    if (Postpone && !GR->findAssignPtrTypeInstr(I))
      insertAssignPtrTypeIntrs(I, B, true);

    if (auto *FPI = dyn_cast<ConstrainedFPIntrinsic>(I))
      useRoundingMode(FPI, B);
  }

  // Pass backward: use instruction results to specify/update/cast operands
  // where needed.
  SmallPtrSet<Instruction *, 4> IncompleteRets;
  for (BasicBlock &BB : reverse(Func))
    for (Instruction &I : reverse(BB))
      deduceOperandElementType(&I, &IncompleteRets);

  // Pass forward for PHIs only: their operands do not precede the
  // instruction in the sense of instructions(F).
  for (BasicBlock &BB : Func)
    for (PHINode &Phi : BB.phis())
      if (isPointerTy(Phi.getType()))
        deduceOperandElementType(&Phi, nullptr);

  for (auto *I : Worklist) {
    TrackConstants = true;
    if (!I->getType()->isVoidTy() || isa<StoreInst>(I))
      setInsertPointAfterDef(B, I);
    // Visitors return either the original/newly created instruction for
    // further processing, or nullptr.
    I = visit(*I);
    if (!I)
      continue;

    // Don't emit intrinsics for convergence operations.
    if (isConvergenceIntrinsic(I))
      continue;

    processInstrAfterVisit(I, B);
  }

  return true;
}
bool SPIRVEmitIntrinsics::postprocessTypes(Module &M) {
  if (!GR || TodoTypeSz == 0)
    return false;

  unsigned SzTodo = TodoTypeSz;
  DenseMap<Value *, SmallPtrSet<Value *, 4>> ToProcess;
  for (auto [Op, Enabled] : TodoType) {
    if (!Enabled || isa<GetElementPtrInst>(Op))
      continue;
    CallInst *AssignCI = GR->findAssignPtrTypeInstr(Op);
    Type *KnownTy = GR->findDeducedElementType(Op);
    if (!KnownTy || !AssignCI)
      continue;
    // Try to improve the deduced type after all functions are processed.
    if (auto *CI = dyn_cast<Instruction>(Op)) {
      CurrF = CI->getParent()->getParent();
      std::unordered_set<Value *> Visited;
      if (Type *ElemTy = deduceElementTypeHelper(Op, Visited, false, true)) {
        if (ElemTy != KnownTy) {
          DenseSet<std::pair<Value *, Value *>> VisitedSubst;
          propagateElemType(CI, ElemTy, VisitedSubst);
          eraseTodoType(Op);
          continue;
        }
      }
    }
    if (Op->hasUseList()) {
      for (User *U : Op->users()) {
        Instruction *Inst = dyn_cast<Instruction>(U);
        if (Inst && !isa<IntrinsicInst>(Inst))
          ToProcess[Inst].insert(Op);
      }
    }
  }
  if (TodoTypeSz == 0)
    return true;

  for (auto &F : M) {
    CurrF = &F;
    SmallPtrSet<Instruction *, 4> IncompleteRets;
    for (auto &I : llvm::reverse(instructions(F))) {
      auto It = ToProcess.find(&I);
      if (It == ToProcess.end())
        continue;
      It->second.remove_if([this](Value *V) { return !isTodoType(V); });
      if (It->second.size() == 0)
        continue;
      deduceOperandElementType(&I, &IncompleteRets, &It->second, true);
      if (TodoTypeSz == 0)
        return true;
    }
  }

  return SzTodo > TodoTypeSz;
}
void SPIRVEmitIntrinsics::parseFunDeclarations(Module &M) {
  for (auto &F : M) {
    if (!F.isDeclaration() || F.isIntrinsic())
      continue;
    // get the demangled name
    std::string DemangledName = getOclOrSpirvBuiltinDemangledName(F.getName());
    if (DemangledName.empty())
      continue;
    // allow only the OpGroupAsyncCopy use case at the moment
    const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(F);
    auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
        DemangledName, ST.getPreferredInstructionSet());
    if (Opcode != SPIRV::OpGroupAsyncCopy)
      continue;
    // find the pointer arguments
    SmallVector<unsigned> Idxs;
    for (unsigned OpIdx = 0; OpIdx < F.arg_size(); ++OpIdx) {
      Argument *Arg = F.getArg(OpIdx);
      if (isPointerTy(Arg->getType()) && !hasPointeeTypeAttr(Arg))
        Idxs.push_back(OpIdx);
    }
    if (!Idxs.size())
      continue;
    // parse the function's argument type descriptors
    LLVMContext &Ctx = F.getContext();
    SmallVector<StringRef, 10> TypeStrs;
    SPIRV::parseBuiltinTypeStr(TypeStrs, DemangledName, Ctx);
    if (!TypeStrs.size())
      continue;
    // update the information about the function's parameter types
    for (unsigned Idx : Idxs) {
      if (Idx >= TypeStrs.size())
        continue;
      Type *ElemTy =
          SPIRV::parseBuiltinCallArgumentType(TypeStrs[Idx].trim(), Ctx);
      if (!ElemTy || !TypedPointerType::isValidElementType(ElemTy))
        continue;
      FDeclPtrTys[&F].push_back(std::make_pair(Idx, ElemTy));
    }
  }
}
bool SPIRVEmitIntrinsics::runOnModule(Module &M) {
  bool Changed = false;

  parseFunDeclarations(M);
  insertConstantsForFPFastMathDefault(M);

  TodoType.clear();
  for (auto &F : M)
    Changed |= runOnFunction(F);

  // Specify function parameters after all functions were processed.
  for (auto &F : M) {
    if (!F.isDeclaration() && !F.isIntrinsic()) {
      CurrF = &F;
      IRBuilder<> B(F.getContext());
      processParamTypes(&F, B);
    }
  }

  CanTodoType = false;
  Changed |= postprocessTypes(M);

  if (HaveFunPtrs)
    Changed |= processFunctionPointers(M);

  return Changed;
}

ModulePass *llvm::createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM) {
  return new SPIRVEmitIntrinsics(TM);
}
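// Typical wiring (sketch, assuming the usual legacy pass-manager setup in
// SPIRVTargetMachine::addIRPasses):
//   addPass(createSPIRVEmitIntrinsicsPass(&TM));
// The pass must run before IR translation so that the llvm.spv.* intrinsics
// and type annotations it emits are available to the instruction selector.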