23#include "llvm/IR/IntrinsicsSPIRV.h"
53class SPIRVEmitIntrinsics
55 public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
59 bool TrackConstants =
true;
70 Type *deduceElementTypeHelper(
Value *
I, std::unordered_set<Value *> &Visited);
71 Type *deduceElementTypeByValueDeep(
Type *ValueTy,
Value *Operand,
72 std::unordered_set<Value *> &Visited);
74 std::unordered_set<Value *> &Visited);
77 Type *deduceNestedTypeHelper(
User *U);
79 std::unordered_set<Value *> &Visited);
96 for (
auto *Imm : Imms)
98 return B.CreateIntrinsic(IntrID, {
Types},
Args);
105 void insertAssignTypeInstrForTargetExtTypes(
TargetExtType *AssignedType,
108 Type *ExpectedElementType,
109 unsigned OperandToReplace,
114 Type *deduceFunParamElementType(
Function *F,
unsigned OpIdx);
115 Type *deduceFunParamElementType(
Function *F,
unsigned OpIdx,
116 std::unordered_set<Function *> &FVisited);
151char SPIRVEmitIntrinsics::ID = 0;
157 return isa<IntrinsicInst>(
I) &&
158 cast<IntrinsicInst>(
I)->getIntrinsicID() == Intrinsic::spv_assign_type;
162 return isa<StoreInst>(
I) || isa<LoadInst>(
I) || isa<InsertValueInst>(
I) ||
163 isa<ExtractValueInst>(
I) || isa<AtomicCmpXchgInst>(
I);
167 return isa<ConstantAggregate>(V) || isa<ConstantDataArray>(V) ||
168 (isa<ConstantAggregateZero>(V) && !V->getType()->isVectorTy());
173 B.SetInsertPoint(
I->getParent(),
I->getParent()->getFirstInsertionPt());
181 switch (
Intr->getIntrinsicID()) {
182 case Intrinsic::invariant_start:
183 case Intrinsic::invariant_end:
191 if (
I->getType()->isTokenTy())
193 "does not support token type",
199Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
200 Type *ValueTy,
Value *Operand, std::unordered_set<Value *> &Visited) {
203 if (
auto *PtrTy = dyn_cast<PointerType>(Ty)) {
204 if (
Type *NestedTy = deduceElementTypeHelper(Operand, Visited))
207 Ty = deduceNestedTypeHelper(dyn_cast<User>(Operand), Ty, Visited);
214Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
215 Value *
Op, std::unordered_set<Value *> &Visited) {
219 if (
auto PType = dyn_cast<TypedPointerType>(
Op->getType()))
220 return PType->getElementType();
226 for (
User *OpU :
Op->users()) {
227 if (
Instruction *Inst = dyn_cast<Instruction>(OpU)) {
228 if (
Type *Ty = deduceElementTypeHelper(Inst, Visited))
237Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
Value *
I) {
238 std::unordered_set<Value *> Visited;
239 return deduceElementTypeHelper(
I, Visited);
242Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
243 Value *
I, std::unordered_set<Value *> &Visited) {
253 if (Visited.find(
I) != Visited.end())
260 if (
auto *
Ref = dyn_cast<AllocaInst>(
I)) {
261 Ty =
Ref->getAllocatedType();
262 }
else if (
auto *
Ref = dyn_cast<GetElementPtrInst>(
I)) {
263 Ty =
Ref->getResultElementType();
264 }
else if (
auto *
Ref = dyn_cast<GlobalValue>(
I)) {
265 Ty = deduceElementTypeByValueDeep(
267 Ref->getNumOperands() > 0 ?
Ref->getOperand(0) :
nullptr, Visited);
268 }
else if (
auto *
Ref = dyn_cast<AddrSpaceCastInst>(
I)) {
269 Ty = deduceElementTypeHelper(
Ref->getPointerOperand(), Visited);
270 }
else if (
auto *
Ref = dyn_cast<BitCastInst>(
I)) {
271 if (
Type *Src =
Ref->getSrcTy(), *Dest =
Ref->getDestTy();
273 Ty = deduceElementTypeHelper(
Ref->getOperand(0), Visited);
274 }
else if (
auto *
Ref = dyn_cast<AtomicCmpXchgInst>(
I)) {
276 Ty = deduceElementTypeByValueDeep(
Op->getType(),
Op, Visited);
277 }
else if (
auto *
Ref = dyn_cast<AtomicRMWInst>(
I)) {
279 Ty = deduceElementTypeByValueDeep(
Op->getType(),
Op, Visited);
280 }
else if (
auto *
Ref = dyn_cast<PHINode>(
I)) {
281 for (
unsigned i = 0; i <
Ref->getNumIncomingValues(); i++) {
282 Ty = deduceElementTypeByUsersDeep(
Ref->getIncomingValue(i), Visited);
286 }
else if (
auto *
Ref = dyn_cast<SelectInst>(
I)) {
287 for (
Value *
Op : {
Ref->getTrueValue(),
Ref->getFalseValue()}) {
288 Ty = deduceElementTypeByUsersDeep(
Op, Visited);
306Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
User *U) {
307 std::unordered_set<Value *> Visited;
308 return deduceNestedTypeHelper(U,
U->getType(), Visited);
311Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
312 User *U,
Type *OrigTy, std::unordered_set<Value *> &Visited) {
321 if (Visited.find(U) != Visited.end())
325 if (dyn_cast<StructType>(OrigTy)) {
328 for (
unsigned i = 0; i <
U->getNumOperands(); ++i) {
330 Type *OpTy =
Op->getType();
333 if (
auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
334 if (
Type *NestedTy = deduceElementTypeHelper(
Op, Visited))
337 Ty = deduceNestedTypeHelper(dyn_cast<User>(
Op), OpTy, Visited);
341 Change |= Ty != OpTy;
348 }
else if (
auto *ArrTy = dyn_cast<ArrayType>(OrigTy)) {
349 if (
Value *
Op =
U->getNumOperands() > 0 ?
U->getOperand(0) :
nullptr) {
350 Type *OpTy = ArrTy->getElementType();
352 if (
auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
353 if (
Type *NestedTy = deduceElementTypeHelper(
Op, Visited))
356 Ty = deduceNestedTypeHelper(dyn_cast<User>(
Op), OpTy, Visited);
359 Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
364 }
else if (
auto *VecTy = dyn_cast<VectorType>(OrigTy)) {
365 if (
Value *
Op =
U->getNumOperands() > 0 ?
U->getOperand(0) :
nullptr) {
366 Type *OpTy = VecTy->getElementType();
368 if (
auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
369 if (
Type *NestedTy = deduceElementTypeHelper(
Op, Visited))
372 Ty = deduceNestedTypeHelper(dyn_cast<User>(
Op), OpTy, Visited);
375 Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
385Type *SPIRVEmitIntrinsics::deduceElementType(
Value *
I) {
386 if (
Type *Ty = deduceElementTypeHelper(
I))
388 return IntegerType::getInt8Ty(
I->getContext());
395void SPIRVEmitIntrinsics::deduceOperandElementType(
Instruction *
I) {
397 Type *KnownElemTy =
nullptr;
399 if (
auto *
Ref = dyn_cast<PHINode>(
I)) {
403 for (
unsigned i = 0; i <
Ref->getNumIncomingValues(); i++) {
408 }
else if (
auto *
Ref = dyn_cast<SelectInst>(
I)) {
412 for (
unsigned i = 0; i <
Ref->getNumOperands(); i++) {
417 }
else if (
auto *
Ref = dyn_cast<ReturnInst>(
I)) {
434 }
else if (
auto *
Ref = dyn_cast<ICmpInst>(
I)) {
442 KnownElemTy = ElemTy0;
444 }
else if (ElemTy1) {
445 KnownElemTy = ElemTy1;
451 if (!KnownElemTy || Ops.
size() == 0)
456 for (
auto &OpIt : Ops) {
461 if (Ty == KnownElemTy)
468 Type *OpTy =
Op->getType();
472 auto It = AssignPtrTypeInstr.
find(
Op);
473 if (It == AssignPtrTypeInstr.
end()) {
475 buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {OpTy}, OpTyVal,
Op,
477 AssignPtrTypeInstr[
Op] = CI;
491 B.CreateIntrinsic(Intrinsic::spv_ptrcast, {
Types},
Args);
492 I->setOperand(OpIt.second, PtrCastI);
497void SPIRVEmitIntrinsics::replaceMemInstrUses(
Instruction *Old,
502 if (isAssignTypeInstr(U)) {
505 B.CreateIntrinsic(Intrinsic::spv_assign_type, {
New->getType()},
Args);
506 U->eraseFromParent();
509 U->replaceUsesOfWith(Old, New);
517void SPIRVEmitIntrinsics::preprocessUndefs(
IRBuilder<> &
B) {
518 std::queue<Instruction *> Worklist;
522 while (!Worklist.empty()) {
526 for (
auto &
Op :
I->operands()) {
527 auto *AggrUndef = dyn_cast<UndefValue>(
Op);
528 if (!AggrUndef || !
Op->getType()->isAggregateType())
532 auto *IntrUndef =
B.CreateIntrinsic(Intrinsic::spv_undef, {}, {});
533 Worklist.push(IntrUndef);
534 I->replaceUsesOfWith(
Op, IntrUndef);
535 AggrConsts[IntrUndef] = AggrUndef;
536 AggrConstTypes[IntrUndef] = AggrUndef->getType();
541void SPIRVEmitIntrinsics::preprocessCompositeConstants(
IRBuilder<> &
B) {
542 std::queue<Instruction *> Worklist;
546 while (!Worklist.empty()) {
547 auto *
I = Worklist.front();
549 bool KeepInst =
false;
550 for (
const auto &
Op :
I->operands()) {
551 auto BuildCompositeIntrinsic =
554 bool &KeepInst, SPIRVEmitIntrinsics &SEI) {
557 B.CreateIntrinsic(Intrinsic::spv_const_composite, {}, {
Args});
559 I->replaceUsesOfWith(
Op, CCI);
561 SEI.AggrConsts[CCI] = AggrC;
562 SEI.AggrConstTypes[CCI] = SEI.deduceNestedTypeHelper(AggrC);
565 if (
auto *AggrC = dyn_cast<ConstantAggregate>(
Op)) {
567 BuildCompositeIntrinsic(AggrC, Args,
Op,
I,
B, Worklist, KeepInst,
569 }
else if (
auto *AggrC = dyn_cast<ConstantDataArray>(
Op)) {
571 for (
unsigned i = 0; i < AggrC->getNumElements(); ++i)
572 Args.push_back(AggrC->getElementAsConstant(i));
573 BuildCompositeIntrinsic(AggrC, Args,
Op,
I,
B, Worklist, KeepInst,
575 }
else if (isa<ConstantAggregateZero>(
Op) &&
576 !
Op->getType()->isVectorTy()) {
577 auto *AggrC = cast<ConstantAggregateZero>(
Op);
579 BuildCompositeIntrinsic(AggrC, Args,
Op,
I,
B, Worklist, KeepInst,
591 B.SetInsertPoint(&
I);
594 for (
auto &
Op :
I.operands()) {
595 if (
Op.get()->getType()->isSized()) {
597 }
else if (
BasicBlock *BB = dyn_cast<BasicBlock>(
Op.get())) {
604 CallInst *NewI =
B.CreateIntrinsic(Intrinsic::spv_switch,
608 I.replaceAllUsesWith(NewI);
612 B.SetInsertPoint(ParentBB);
623 B.SetInsertPoint(&
I);
626 Args.push_back(
B.getInt1(
I.isInBounds()));
627 for (
auto &
Op :
I.operands())
629 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_gep, {
Types}, {
Args});
630 I.replaceAllUsesWith(NewI);
637 B.SetInsertPoint(&
I);
645 I.replaceAllUsesWith(Source);
652 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_bitcast, {
Types}, {
Args});
653 std::string InstName =
I.hasName() ?
I.getName().str() :
"";
654 I.replaceAllUsesWith(NewI);
660void SPIRVEmitIntrinsics::insertAssignTypeInstrForTargetExtTypes(
663 if (
V->getType() == AssignedType)
669 for (
auto User :
V->users()) {
670 auto *II = dyn_cast<IntrinsicInst>(
User);
671 if (!II || II->getIntrinsicID() != Intrinsic::spv_assign_type)
676 dyn_cast<ConstantAsMetadata>(VMD->
getMetadata())->getType();
677 if (BuiltinType != AssignedType)
680 " for value " +
V->getName(),
686 buildIntrWithMD(Intrinsic::spv_assign_type, {
V->getType()},
Const,
V, {},
B);
689void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
694 while (
BitCastInst *BC = dyn_cast<BitCastInst>(Pointer))
698 Type *PointerElemTy = deduceElementTypeHelper(Pointer);
699 if (PointerElemTy == ExpectedElementType)
703 Constant *ExpectedElementTypeConst =
710 bool FirstPtrCastOrAssignPtrType =
true;
716 auto *II = dyn_cast<IntrinsicInst>(
User);
718 (II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
719 II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
720 II->getOperand(0) != Pointer)
725 FirstPtrCastOrAssignPtrType =
false;
726 if (II->getOperand(1) != VMD ||
727 dyn_cast<ConstantInt>(II->getOperand(2))->getSExtValue() !=
733 if (II->getIntrinsicID() != Intrinsic::spv_ptrcast)
738 if (II->getParent() !=
I->getParent())
741 I->setOperand(OperandToReplace, II);
752 if (FirstPtrCastOrAssignPtrType &&
753 (isa<Instruction>(Pointer) || isa<Argument>(Pointer))) {
755 Intrinsic::spv_assign_ptr_type, {
Pointer->getType()},
759 AssignPtrTypeInstr[
Pointer] = CI;
766 auto *PtrCastI =
B.CreateIntrinsic(Intrinsic::spv_ptrcast, {
Types},
Args);
767 I->setOperand(OperandToReplace, PtrCastI);
770void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(
Instruction *
I,
776 isa<Argument>(
SI->getValueOperand())) {
777 return replacePointerOperandWithPtrCast(
778 I,
SI->getValueOperand(), IntegerType::getInt8Ty(
F->getContext()), 0,
781 return replacePointerOperandWithPtrCast(
782 I,
SI->getPointerOperand(),
SI->getValueOperand()->getType(), 1,
B);
783 }
else if (
LoadInst *LI = dyn_cast<LoadInst>(
I)) {
784 return replacePointerOperandWithPtrCast(
I, LI->getPointerOperand(),
785 LI->getType(), 0,
B);
787 return replacePointerOperandWithPtrCast(
I, GEPI->getPointerOperand(),
788 GEPI->getSourceElementType(), 0,
B);
800 bool HaveTypes =
false;
801 for (
unsigned OpIdx = 0; OpIdx < CalledF->
arg_size(); ++OpIdx) {
807 CalledArgTys.
push_back(cast<TypedPointerType>(ArgType)->getElementType());
815 if (
Instruction *Inst = dyn_cast<Instruction>(U)) {
816 if ((ElemTy = deduceElementTypeHelper(Inst)) !=
nullptr)
821 HaveTypes |= ElemTy !=
nullptr;
826 std::string DemangledName =
828 if (DemangledName.empty() && !HaveTypes)
831 for (
unsigned OpIdx = 0; OpIdx < CI->
arg_size(); OpIdx++) {
833 if (!isa<PointerType>(ArgOperand->
getType()) &&
834 !isa<TypedPointerType>(ArgOperand->
getType()))
838 if (!isa<Instruction>(ArgOperand) && !isa<Argument>(ArgOperand))
842 OpIdx < CalledArgTys.
size() ? CalledArgTys[OpIdx] :
nullptr;
843 if (!ExpectedType && !DemangledName.empty())
845 DemangledName, OpIdx,
I->getContext());
850 insertAssignTypeInstrForTargetExtTypes(cast<TargetExtType>(ExpectedType),
853 replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType, OpIdx,
B);
859 I.getOperand(1)->getType(),
860 I.getOperand(2)->getType()};
862 B.SetInsertPoint(&
I);
864 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_insertelt, {
Types}, {
Args});
865 std::string InstName =
I.hasName() ?
I.getName().str() :
"";
866 I.replaceAllUsesWith(NewI);
875 B.SetInsertPoint(&
I);
877 I.getIndexOperand()->getType()};
879 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_extractelt, {
Types}, {
Args});
880 std::string InstName =
I.hasName() ?
I.getName().str() :
"";
881 I.replaceAllUsesWith(NewI);
889 B.SetInsertPoint(&
I);
892 for (
auto &
Op :
I.operands())
893 if (isa<UndefValue>(
Op))
897 for (
auto &
Op :
I.indices())
898 Args.push_back(
B.getInt32(
Op));
900 B.CreateIntrinsic(Intrinsic::spv_insertv, {
Types}, {
Args});
901 replaceMemInstrUses(&
I, NewI,
B);
907 B.SetInsertPoint(&
I);
909 for (
auto &
Op :
I.operands())
911 for (
auto &
Op :
I.indices())
912 Args.push_back(
B.getInt32(
Op));
914 B.CreateIntrinsic(Intrinsic::spv_extractv, {
I.getType()}, {
Args});
915 I.replaceAllUsesWith(NewI);
921 if (!
I.getType()->isAggregateType())
924 B.SetInsertPoint(&
I);
925 TrackConstants =
false;
926 const auto *TLI =
TM->getSubtargetImpl()->getTargetLowering();
928 TLI->getLoadMemOperandFlags(
I,
F->getParent()->getDataLayout());
930 B.CreateIntrinsic(Intrinsic::spv_load, {
I.getOperand(0)->
getType()},
931 {
I.getPointerOperand(),
B.getInt16(Flags),
932 B.getInt8(
I.getAlign().value())});
933 replaceMemInstrUses(&
I, NewI,
B);
941 B.SetInsertPoint(&
I);
942 TrackConstants =
false;
943 const auto *TLI =
TM->getSubtargetImpl()->getTargetLowering();
945 TLI->getStoreMemOperandFlags(
I,
F->getParent()->getDataLayout());
946 auto *PtrOp =
I.getPointerOperand();
947 auto *NewI =
B.CreateIntrinsic(
948 Intrinsic::spv_store, {
I.getValueOperand()->
getType(), PtrOp->getType()},
949 {
I.getValueOperand(), PtrOp,
B.getInt16(Flags),
950 B.getInt8(
I.getAlign().value())});
956 Value *ArraySize =
nullptr;
957 if (
I.isArrayAllocation()) {
960 SPIRV::Extension::SPV_INTEL_variable_length_array))
962 "array allocation: this instruction requires the following "
963 "SPIR-V extension: SPV_INTEL_variable_length_array",
965 ArraySize =
I.getArraySize();
968 B.SetInsertPoint(&
I);
969 TrackConstants =
false;
970 Type *PtrTy =
I.getType();
972 ArraySize ?
B.CreateIntrinsic(Intrinsic::spv_alloca_array,
973 {PtrTy, ArraySize->
getType()}, {ArraySize})
974 :
B.CreateIntrinsic(Intrinsic::spv_alloca, {PtrTy}, {});
975 std::string InstName =
I.hasName() ?
I.getName().str() :
"";
976 I.replaceAllUsesWith(NewI);
983 assert(
I.getType()->isAggregateType() &&
"Aggregate result is expected");
985 B.SetInsertPoint(&
I);
987 for (
auto &
Op :
I.operands())
989 Args.push_back(
B.getInt32(
I.getSyncScopeID()));
990 Args.push_back(
B.getInt32(
992 Args.push_back(
B.getInt32(
994 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
996 replaceMemInstrUses(&
I, NewI,
B);
1002 B.SetInsertPoint(&
I);
1003 B.CreateIntrinsic(Intrinsic::spv_unreachable, {}, {});
1010 if (GV.
getName() ==
"llvm.global.annotations")
1016 deduceElementTypeHelper(&GV);
1020 auto *InitInst =
B.CreateIntrinsic(Intrinsic::spv_init_global,
1022 InitInst->setArgOperand(1,
Init);
1026 B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.
getType(), &GV);
1029void SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(
Instruction *
I,
1033 isa<BitCastInst>(
I))
1038 Type *ElemTy = deduceElementType(
I);
1041 CallInst *CI = buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {
I->getType()},
1044 AssignPtrTypeInstr[
I] = CI;
1047void SPIRVEmitIntrinsics::insertAssignTypeIntrs(
Instruction *
I,
1050 Type *Ty =
I->getType();
1053 Type *TypeToAssign = Ty;
1054 if (
auto *II = dyn_cast<IntrinsicInst>(
I)) {
1055 if (II->getIntrinsicID() == Intrinsic::spv_const_composite ||
1056 II->getIntrinsicID() == Intrinsic::spv_undef) {
1057 auto It = AggrConstTypes.
find(II);
1058 if (It == AggrConstTypes.
end())
1060 TypeToAssign = It->second;
1064 buildIntrWithMD(Intrinsic::spv_assign_type, {Ty},
Const,
I, {},
B);
1066 for (
const auto &
Op :
I->operands()) {
1067 if (isa<ConstantPointerNull>(
Op) || isa<UndefValue>(
Op) ||
1069 (isa<ConstantExpr>(
Op) && isa<GEPOperator>(
Op))) {
1071 if (isa<UndefValue>(
Op) &&
Op->getType()->isAggregateType())
1072 buildIntrWithMD(Intrinsic::spv_assign_type, {
B.getInt32Ty()},
Op,
1074 else if (!isa<Instruction>(
Op))
1075 buildIntrWithMD(Intrinsic::spv_assign_type, {
Op->getType()},
Op,
Op, {},
1081void SPIRVEmitIntrinsics::processInstrAfterVisit(
Instruction *
I,
1083 auto *II = dyn_cast<IntrinsicInst>(
I);
1084 if (II && II->getIntrinsicID() == Intrinsic::spv_const_composite &&
1086 B.SetInsertPoint(
I->getNextNode());
1087 Type *Ty =
B.getInt32Ty();
1088 auto t = AggrConsts.
find(
I);
1090 auto *NewOp = buildIntrWithMD(Intrinsic::spv_track_constant, {Ty, Ty},
1091 t->second,
I, {},
B);
1092 I->replaceAllUsesWith(NewOp);
1093 NewOp->setArgOperand(0,
I);
1095 for (
const auto &
Op :
I->operands()) {
1096 if ((isa<ConstantAggregateZero>(
Op) &&
Op->getType()->isVectorTy()) ||
1097 isa<PHINode>(
I) || isa<SwitchInst>(
I))
1098 TrackConstants =
false;
1099 if ((isa<ConstantData>(
Op) || isa<ConstantExpr>(
Op)) && TrackConstants) {
1100 unsigned OpNo =
Op.getOperandNo();
1101 if (II && ((II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
1102 (II->paramHasAttr(OpNo, Attribute::ImmArg))))
1104 B.SetInsertPoint(
I);
1106 buildIntrWithMD(Intrinsic::spv_track_constant,
1107 {
Op->getType(),
Op->getType()},
Op,
Op, {},
B);
1108 I->setOperand(OpNo, NewOp);
1114 std::vector<Value *>
Args = {
I};
1116 B.CreateIntrinsic(Intrinsic::spv_assign_name, {
I->getType()},
Args);
1120Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
Function *
F,
1122 std::unordered_set<Function *> FVisited;
1123 return deduceFunParamElementType(
F, OpIdx, FVisited);
1126Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
1127 Function *
F,
unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
1129 if (FVisited.find(
F) != FVisited.end())
1133 std::unordered_set<Value *> Visited;
1136 for (
User *U :
F->users()) {
1137 CallInst *CI = dyn_cast<CallInst>(U);
1138 if (!CI || OpIdx >= CI->
arg_size())
1148 if (
Type *Ty = deduceElementTypeHelper(OpArg, Visited))
1153 if (!Inst || Inst == CI)
1156 if (
Type *Ty = deduceElementTypeHelper(Inst, Visited))
1163 if (FVisited.find(OuterF) != FVisited.end())
1165 for (
unsigned i = 0; i < OuterF->
arg_size(); ++i) {
1166 if (OuterF->
getArg(i) == OpArg) {
1167 Lookup.push_back(std::make_pair(OuterF, i));
1174 for (
auto &Pair :
Lookup) {
1175 if (
Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
1183 B.SetInsertPointPastAllocas(
F);
1184 for (
unsigned OpIdx = 0; OpIdx <
F->arg_size(); ++OpIdx) {
1194 }
else if ((ElemTy = deduceFunParamElementType(
F, OpIdx)) !=
nullptr) {
1195 CallInst *AssignPtrTyCI = buildIntrWithMD(
1196 Intrinsic::spv_assign_ptr_type, {Arg->
getType()},
1201 AssignPtrTypeInstr[Arg] = AssignPtrTyCI;
1207bool SPIRVEmitIntrinsics::runOnFunction(
Function &Func) {
1208 if (
Func.isDeclaration())
1212 GR =
ST.getSPIRVGlobalRegistry();
1217 AggrConstTypes.
clear();
1226 Type *ElTy =
SI->getValueOperand()->getType();
1231 B.SetInsertPoint(&
Func.getEntryBlock(),
Func.getEntryBlock().begin());
1232 for (
auto &GV :
Func.getParent()->globals())
1233 processGlobalValue(GV,
B);
1235 preprocessUndefs(
B);
1236 preprocessCompositeConstants(
B);
1241 for (
auto &
I : Worklist) {
1242 insertAssignPtrTypeIntrs(
I,
B);
1243 insertAssignTypeIntrs(
I,
B);
1244 insertPtrCastOrAssignTypeInstr(
I,
B);
1248 deduceOperandElementType(&
I);
1250 for (
auto *
I : Worklist) {
1251 TrackConstants =
true;
1252 if (!
I->getType()->isVoidTy() || isa<StoreInst>(
I))
1253 B.SetInsertPoint(
I->getNextNode());
1259 processInstrAfterVisit(
I,
B);
1265bool SPIRVEmitIntrinsics::runOnModule(
Module &M) {
1266 bool Changed =
false;
1274 if (!
F.isDeclaration() && !
F.isIntrinsic()) {
1276 GR =
ST.getSPIRVGlobalRegistry();
1278 processParamTypes(&
F,
B);
1286 return new SPIRVEmitIntrinsics(
TM);
Expand Atomic instructions
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static bool runOnFunction(Function &F, bool PostInlining)
const char LLVMTargetMachineRef TM
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
static bool isMemInstrToReplace(Instruction *I)
static bool isAggrToReplace(const Value *V)
static void reportFatalOnTokenType(const Instruction *I)
static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I)
static bool requireAssignType(Instruction *I)
static SymbolRef::Type getType(const Symbol *Sym)
static int Lookup(ArrayRef< TableEntry > Table, unsigned Opcode)
an instruction to allocate memory on the stack
Represent the analysis usage information of a pass.
This class represents an incoming formal argument to a Function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
An instruction that atomically checks whether a specified value is in a memory location,...
LLVM Basic Block Representation.
const Function * getParent() const
Return the enclosing method, or null if none.
LLVMContext & getContext() const
Get the context in which this basic block lives.
This class represents a no-op cast from one type to another.
static BlockAddress * get(Function *F, BasicBlock *BB)
Return a BlockAddress for the specified function and basic block.
bool isInlineAsm() const
Check if this call is an inline asm statement.
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
bool isIndirectCall() const
Return true if the callsite is an indirect call.
Value * getArgOperand(unsigned i) const
void setArgOperand(unsigned i, Value *v)
unsigned arg_size() const
This class represents a function call, abstracting a target machine's calling convention.
This is an important base class in LLVM.
static Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
This class represents an Operation in the Expression.
iterator find(const_arg_type_t< KeyT > Val)
Implements a dense probed hash-table based set.
bool isIntrinsic() const
isIntrinsic - Returns true if the function's name starts with "llvm.".
Argument * getArg(unsigned i) const
an instruction for type-safe pointer arithmetic to access elements of arrays and structs
PointerType * getType() const
Global values are always pointers.
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
bool hasInitializer() const
Definitions have initializers, declarations don't.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
Indirect Branch Instruction.
void addDestination(BasicBlock *Dest)
Add a destination.
This instruction inserts a single (scalar) element into a VectorType value.
This instruction inserts a struct field of array element value into an aggregate value.
Base class for instruction visitors.
RetTy visitExtractElementInst(ExtractElementInst &I)
RetTy visitInsertValueInst(InsertValueInst &I)
RetTy visitUnreachableInst(UnreachableInst &I)
RetTy visitAtomicCmpXchgInst(AtomicCmpXchgInst &I)
RetTy visitBitCastInst(BitCastInst &I)
RetTy visitSwitchInst(SwitchInst &I)
RetTy visitExtractValueInst(ExtractValueInst &I)
RetTy visitStoreInst(StoreInst &I)
RetTy visitInsertElementInst(InsertElementInst &I)
RetTy visitAllocaInst(AllocaInst &I)
RetTy visitGetElementPtrInst(GetElementPtrInst &I)
void visitInstruction(Instruction &I)
RetTy visitLoadInst(LoadInst &I)
const BasicBlock * getParent() const
InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
Instruction * user_back()
Specialize the methods defined in Value, as we know that an instruction can only be used by other ins...
A wrapper class for inspecting calls to intrinsic functions.
This is an important class for using LLVM in a threaded context.
An instruction for reading from memory.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
Flags
Flags values. These may be or'd together.
ModulePass class - This class is used to implement unstructured interprocedural optimizations and ana...
virtual bool runOnModule(Module &M)=0
runOnModule - Virtual method overridden by subclasses to process the module being operated on.
A Module instance is used to store all the information related to an LLVM module.
PassRegistry - This class manages the registration and initialization of the pass subsystem as appli...
static PassRegistry * getPassRegistry()
getPassRegistry - Access the global registry object, which is automatically initialized at applicatio...
virtual void getAnalysisUsage(AnalysisUsage &) const
getAnalysisUsage - This function should be overridden by passes that need analysis information to do t...
virtual StringRef getPassName() const
getPassName - Return a nice clean name for a pass.
Type * findDeducedCompositeType(const Value *Val)
void addDeducedElementType(Value *Val, Type *Ty)
void addReturnType(const Function *ArgF, TypedPointerType *DerivedTy)
void addDeducedCompositeType(Value *Val, Type *Ty)
Type * findDeducedElementType(const Value *Val)
bool canUseExtension(SPIRV::Extension::Extension E) const
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
static StructType * create(LLVMContext &Context, StringRef Name)
This creates an identified struct.
Class to represent target extensions types, which are generally unintrospectable from target-independ...
The instances of the Type class are immutable: once they are created, they are never changed.
bool isVectorTy() const
True if this is an instance of VectorType.
StringRef getTargetExtName() const
bool isTargetExtTy() const
Return true if this is a target extension type.
bool isAggregateType() const
Return true if the type is an aggregate type.
bool isVoidTy() const
Return true if this is 'void'.
A few GPU targets, such as DXIL and SPIR-V, have typed pointers.
static TypedPointerType * get(Type *ElementType, unsigned AddressSpace)
This constructs a pointer to an object of the specified type in a numbered address space.
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
This function has undefined behavior.
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
void setName(const Twine &Name)
Change the name of the value.
iterator_range< user_iterator > users()
unsigned getNumUses() const
This method computes the number of uses of this Value.
StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
bool contains(const_arg_type_t< ValueT > V) const
Check if the set contains the given element.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr char Args[]
Key for Kernel::Metadata::mArgs.
unsigned ID
LLVM IR allows to use arbitrary numbers as calling convention identifiers.
@ SPIR_KERNEL
Used for SPIR kernel functions.
Type * parseBuiltinCallArgumentBaseType(const StringRef DemangledCall, unsigned ArgIdx, LLVMContext &Ctx)
Parses the provided ArgIdx argument base type in the DemangledCall skeleton.
NodeAddr< FuncNode * > Func
This is an optimization pass for GlobalISel generic memory operations.
void initializeSPIRVEmitIntrinsicsPass(PassRegistry &)
ModulePass * createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM)
unsigned getPointerAddressSpace(const Type *T)
std::string getOclOrSpirvBuiltinDemangledName(StringRef Name)
bool isTypedPointerTy(const Type *T)
bool isPointerTy(const Type *T)
void report_fatal_error(Error Err, bool gen_crash_diag=true)
Report a serious error, calling any installed error handler.
@ Ref
The access may reference the value stored in memory.
DWARFExpression::Operation Op
Type * getPointeeTypeByAttr(Argument *Arg)
bool hasPointeeTypeAttr(Argument *Arg)
void addStringImm(const StringRef &Str, MCInst &Inst)
bool isUntypedPointerTy(const Type *T)
SPIRV::MemorySemantics::MemorySemantics getMemSemantics(AtomicOrdering Ord)