#include "llvm/IR/IntrinsicsSPIRV.h"

using namespace llvm;

namespace {

class SPIRVLegalizePointerCast : public FunctionPass {
  // Attaches the deduced type |Ty| to |Arg| via an spv_assign_type intrinsic
  // and records the assignment in the global registry.
  void buildAssignType(IRBuilder<> &B, Type *Ty, Value *Arg) {
    Value *OfType = PoisonValue::get(Ty);
    CallInst *AssignCI = buildIntrWithMD(Intrinsic::spv_assign_type,
                                         {Arg->getType()}, OfType, Arg, {}, B);
    GR->addAssignPtrTypeInstr(Arg, AssignCI);
  }

  // Loads a value of the vector type |SourceType| from |Source| and converts
  // it into a value of the vector type |TargetType|: an spv_bitcast is emitted
  // when the element types differ, and a shufflevector trims the result to the
  // target element count.
  Value *loadVectorFromVector(IRBuilder<> &B, FixedVectorType *SourceType,
                              FixedVectorType *TargetType, Value *Source) {
    LoadInst *NewLoad = B.CreateLoad(SourceType, Source);
    buildAssignType(B, SourceType, NewLoad);

    Value *AssignValue = NewLoad;
    if (TargetType->getElementType() != SourceType->getElementType()) {
      AssignValue = B.CreateIntrinsic(Intrinsic::spv_bitcast,
                                      {TargetType, SourceType}, {NewLoad});
      buildAssignType(B, TargetType, AssignValue);
    }

    // Keep only the first TargetType->getNumElements() lanes.
    SmallVector<int> Mask;
    for (unsigned I = 0; I < TargetType->getNumElements(); ++I)
      Mask.push_back(I);
    Value *Output = B.CreateShuffleVector(AssignValue, AssignValue, Mask);
    buildAssignType(B, TargetType, Output);
    return Output;
  }
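
  // Illustrative sketch (hypothetical source, not taken from the original
  // comments): a narrower vector read through a wider-typed pointer, e.g.
  //   float3 v = vec4.xyz;   // pointer deduced as <4 x float>, load wants <3 x float>
  // becomes a full <4 x float> load followed by a 3-lane shuffle; an
  // int4-viewed-as-float4 access additionally goes through spv_bitcast.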

  // Loads the first value of type |ElementType| from the aggregate pointed to
  // by |Source|, reusing the alignment of the illegal load |BadLoad| being
  // replaced.
  Value *loadFirstValueFromAggregate(IRBuilder<> &B, Type *ElementType,
                                     Value *Source, LoadInst *BadLoad) {
    SmallVector<Type *, 2> Types = {BadLoad->getPointerOperandType(),
                                    BadLoad->getPointerOperandType()};
    SmallVector<Value *> Args = {/* isInBounds= */ B.getInt1(true), Source,
                                 B.getInt32(0), B.getInt32(0)};
    auto *GEP = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
    GR->buildAssignPtr(B, ElementType, GEP);

    LoadInst *LI = B.CreateLoad(ElementType, GEP);
    LI->setAlignment(BadLoad->getAlign());
    buildAssignType(B, ElementType, LI);
    return LI;
  }
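
  // Illustrative sketch (hypothetical source): a scalar read through a pointer
  // whose deduced pointee type is an aggregate, e.g.
  //   struct S { float a; float b; };  float x = *(float *)&s;
  // is legalized into an spv_gep selecting the first member followed by a
  // scalar load, so the mistyped pointer is never dereferenced directly.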

  // Replaces the load |LI|, which reads through the ptrcast result
  // |CastedOperand|, with an equivalent access through the original pointer
  // |OriginalOperand|.
  void transformLoad(IRBuilder<> &B, LoadInst *LI, Value *CastedOperand,
                     Value *OriginalOperand) {
    Type *FromTy = GR->findDeducedElementType(OriginalOperand);
    Type *ToTy = GR->findDeducedElementType(CastedOperand);
    Value *Output = nullptr;

    auto *SAT = dyn_cast<ArrayType>(FromTy);
    auto *SVT = dyn_cast<FixedVectorType>(FromTy);
    auto *SST = dyn_cast<StructType>(FromTy);
    auto *DVT = dyn_cast<FixedVectorType>(ToTy);

    B.SetInsertPoint(LI);

    // Source is an array of the destination type -> load the first element.
    if (SAT && SAT->getElementType() == ToTy)
      Output = loadFirstValueFromAggregate(B, SAT->getElementType(),
                                           OriginalOperand, LI);
    // Source is a vector, destination is its scalar element type -> load the
    // first lane.
    else if (!DVT && SVT && SVT->getElementType() == ToTy) {
      Output = loadFirstValueFromAggregate(B, SVT->getElementType(),
                                           OriginalOperand, LI);
    }
    // Both are vectors -> load the source vector and reshape it into the
    // destination vector type.
    else if (SVT && DVT)
      Output = loadVectorFromVector(B, SVT, DVT, OriginalOperand);
    // Source is a struct whose first member has the destination type -> load
    // that first member.
    else if (SST && SST->getTypeAtIndex(0u) == ToTy)
      Output = loadFirstValueFromAggregate(B, ToTy, OriginalOperand, LI);
    else
      llvm_unreachable("Unsupported load through a ptrcast. Please fix.");

    GR->replaceAllUsesWith(LI, Output, /* DeleteOld= */ true);
    DeadInstructions.push_back(LI);
  }
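
  // Illustrative IR sketch (intrinsic mangling and names are approximate, not
  // copied from real output):
  //   %cast = call ptr @llvm.spv.ptrcast(ptr %orig, ...)
  //   %val  = load float, ptr %cast
  // After transformLoad(), users of %val read a value recomputed from %orig
  // (via spv_gep/load or load/shuffle), and the old load is queued for
  // deletion in DeadInstructions.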

  // Emits an spv_insertelt intrinsic (the SPIR-V backend's equivalent of
  // insertelement) writing |Element| into lane |Index| of |Vector|.
  Value *makeInsertElement(IRBuilder<> &B, Value *Vector, Value *Element,
                           unsigned Index) {
    SmallVector<Type *> Types = {Vector->getType(), Vector->getType(),
                                 Element->getType(), B.getInt32Ty()};
    SmallVector<Value *> Args = {Vector, Element, B.getInt32(Index)};
    Instruction *NewI =
        B.CreateIntrinsic(Intrinsic::spv_insertelt, {Types}, {Args});
    buildAssignType(B, Vector->getType(), NewI);
    return NewI;
  }

  // Emits an spv_extractelt intrinsic (the SPIR-V backend's equivalent of
  // extractelement) reading lane |Index| of |Vector| as |ElementType|.
  Value *makeExtractElement(IRBuilder<> &B, Type *ElementType, Value *Vector,
                            unsigned Index) {
    SmallVector<Type *> Types = {ElementType, Vector->getType(), B.getInt32Ty()};
    SmallVector<Value *> Args = {Vector, B.getInt32(Index)};
    Instruction *NewI =
        B.CreateIntrinsic(Intrinsic::spv_extractelt, {Types}, {Args});
    buildAssignType(B, ElementType, NewI);
    return NewI;
  }

  // Stores the vector |Src| into |Dst|, whose deduced pointee type is a vector
  // with a different element type or a larger element count.
  Value *storeVectorFromVector(IRBuilder<> &B, Value *Src, Value *Dst,
                               Align Alignment) {
    FixedVectorType *SrcType = cast<FixedVectorType>(Src->getType());
    FixedVectorType *DstType =
        cast<FixedVectorType>(GR->findDeducedElementType(Dst));

    // Different element types: only a same-width bitcast is supported, after
    // which the whole vector can be stored directly.
    if (DstType->getElementType() != SrcType->getElementType()) {
      [[maybe_unused]] auto dstBitWidth =
          DstType->getScalarSizeInBits() * DstType->getNumElements();
      [[maybe_unused]] auto srcBitWidth =
          SrcType->getScalarSizeInBits() * SrcType->getNumElements();
      assert(dstBitWidth == srcBitWidth &&
             "Unsupported bitcast between vectors of different sizes.");

      Src = B.CreateIntrinsic(Intrinsic::spv_bitcast, {DstType, SrcType}, {Src});
      buildAssignType(B, DstType, Src);

      StoreInst *SI = B.CreateStore(Src, Dst);
      SI->setAlignment(Alignment);
      return SI;
    }

    // Same element type but fewer source lanes: load the destination vector,
    // overwrite its first lanes with |Src|, and store the whole vector back.
    LoadInst *LI = B.CreateLoad(DstType, Dst);
    LI->setAlignment(Alignment);
    Value *OldValues = LI;
    buildAssignType(B, OldValues->getType(), OldValues);
    Value *NewValues = Src;

    for (unsigned I = 0; I < SrcType->getNumElements(); ++I) {
      Value *Element =
          makeExtractElement(B, SrcType->getElementType(), NewValues, I);
      OldValues = makeInsertElement(B, OldValues, Element, I);
    }

    StoreInst *SI = B.CreateStore(OldValues, Dst);
    SI->setAlignment(Alignment);
    return SI;
  }
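
  // Illustrative sketch (hypothetical source): writing a smaller vector
  // through a pointer deduced as a larger vector, e.g.
  //   vec4.xy = some_float2;
  // becomes: load the whole <4 x float>, rewrite lanes 0 and 1 with
  // spv_extractelt/spv_insertelt, store the whole vector back. A same-width
  // retype (e.g. int4 stored where float4 is deduced) takes the spv_bitcast
  // path instead.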

  void buildGEPIndexChain(IRBuilder<> &B, Type *Search, Type *Aggregate,
                          SmallVectorImpl<Value *> &Indices) {
    Indices.push_back(B.getInt32(0));

    if (Search == Aggregate)
      return;

    if (auto *ST = dyn_cast<StructType>(Aggregate))
      buildGEPIndexChain(B, Search, ST->getTypeAtIndex(0u), Indices);
    else if (auto *AT = dyn_cast<ArrayType>(Aggregate))
      buildGEPIndexChain(B, Search, AT->getElementType(), Indices);
    else if (auto *VT = dyn_cast<FixedVectorType>(Aggregate))
      buildGEPIndexChain(B, Search, VT->getElementType(), Indices);
    else
      llvm_unreachable("Unhandled aggregate type in GEP index chain.");
  }
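
  // Illustrative sketch (hypothetical types): with
  //   struct Inner { float f; };  struct Outer { Inner i; };
  // buildGEPIndexChain(B, float, Outer, Indices) appends three zero indices,
  // one for the pointer dereference and one per aggregate level crossed, i.e.
  // the chain for "&outer.i.f".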

  // Stores |Src| into the first element of matching type inside the aggregate
  // pointed to by |Dst| (whose deduced pointee type is |DstPointeeType|).
  Value *storeToFirstValueAggregate(IRBuilder<> &B, Value *Src, Value *Dst,
                                    Type *DstPointeeType, Align Alignment) {
    SmallVector<Type *, 2> Types = {Dst->getType(), Dst->getType()};
    SmallVector<Value *> Args = {/* isInBounds= */ B.getInt1(true), Dst};
    buildGEPIndexChain(B, Src->getType(), DstPointeeType, Args);
    auto *GEP = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
    GR->buildAssignPtr(B, Src->getType(), GEP);

    StoreInst *SI = B.CreateStore(Src, GEP);
    SI->setAlignment(Alignment);
    return SI;
  }

  // Returns true if |Search| is the type of the first (possibly nested)
  // element of the aggregate type |Aggregate|.
  bool isTypeFirstElementAggregate(Type *Search, Type *Aggregate) {
    if (Search == Aggregate)
      return true;
    if (auto *ST = dyn_cast<StructType>(Aggregate))
      return isTypeFirstElementAggregate(Search, ST->getTypeAtIndex(0u));
    if (auto *VT = dyn_cast<FixedVectorType>(Aggregate))
      return isTypeFirstElementAggregate(Search, VT->getElementType());
    if (auto *AT = dyn_cast<ArrayType>(Aggregate))
      return isTypeFirstElementAggregate(Search, AT->getElementType());
    return false;
  }

  // Rewrites the store |BadStore|, which writes |Src| through a ptrcast-ed
  // pointer, into a legal store through the original pointer |Dst|.
  void transformStore(IRBuilder<> &B, Instruction *BadStore, Value *Src,
                      Value *Dst, Align Alignment) {
    Type *ToTy = GR->findDeducedElementType(Dst);
    Type *FromTy = Src->getType();

    auto *S_VT = dyn_cast<FixedVectorType>(FromTy);
    auto *D_ST = dyn_cast<StructType>(ToTy);
    auto *D_VT = dyn_cast<FixedVectorType>(ToTy);

    B.SetInsertPoint(BadStore);
    if (D_ST && isTypeFirstElementAggregate(FromTy, D_ST))
      storeToFirstValueAggregate(B, Src, Dst, D_ST, Alignment);
    else if (D_VT && S_VT)
      storeVectorFromVector(B, Src, Dst, Alignment);
    else if (D_VT && !S_VT && FromTy == D_VT->getElementType())
      storeToFirstValueAggregate(B, Src, Dst, D_VT, Alignment);
    else
      llvm_unreachable("Unsupported ptrcast use in store. Please fix.");

    DeadInstructions.push_back(BadStore);
  }
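
  // Illustrative sketch (hypothetical source) of the three store shapes
  // accepted above:
  //   s.first = x;      // scalar into the first member of a struct
  //   vec4.xy = v2;     // vector into a larger (or same-width retyped) vector
  //   vec4.x  = x;      // scalar into the first lane of a vector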

  // Legalizes a single spv_ptrcast call |II|: every user of the cast is
  // rewritten to operate on the original pointer, and the cast itself is
  // queued for deletion.
  void legalizePointerCast(IntrinsicInst *II) {
    Value *CastedOperand = II;
    Value *OriginalOperand = II->getOperand(0);

    IRBuilder<> B(II->getContext());
    std::vector<Value *> Users;
    // Copy the users first: rewriting them invalidates the use iterator.
    for (Use &U : II->uses())
      Users.push_back(U.getUser());

    for (Value *User : Users) {
      if (LoadInst *LI = dyn_cast<LoadInst>(User)) {
        transformLoad(B, LI, CastedOperand, OriginalOperand);
        continue;
      }

      if (StoreInst *SI = dyn_cast<StoreInst>(User)) {
        transformStore(B, SI, SI->getValueOperand(), OriginalOperand,
                       SI->getAlign());
        continue;
      }

      if (IntrinsicInst *Intrin = dyn_cast<IntrinsicInst>(User)) {
        if (Intrin->getIntrinsicID() == Intrinsic::spv_assign_ptr_type) {
          DeadInstructions.push_back(Intrin);
          continue;
        }

        if (Intrin->getIntrinsicID() == Intrinsic::spv_gep) {
          GR->replaceAllUsesWith(CastedOperand, OriginalOperand,
                                 /* DeleteOld= */ false);
          continue;
        }

        if (Intrin->getIntrinsicID() == Intrinsic::spv_store) {
          Align Alignment;
          // Alignment immediate of spv_store (operand index assumed here).
          if (auto *C = dyn_cast<ConstantInt>(Intrin->getOperand(3)))
            Alignment = Align(C->getZExtValue());
          transformStore(B, Intrin, Intrin->getArgOperand(0), OriginalOperand,
                         Alignment);
          continue;
        }
      }

      llvm_unreachable("Unsupported ptrcast user. Please fix.");
    }

    DeadInstructions.push_back(II);
  }
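
  // Illustrative IR sketch of a cast and its users before legalization
  // (approximate, not real output):
  //   %cast = call ptr @llvm.spv.ptrcast(ptr %orig, ...)
  //   call void @llvm.spv.assign.ptr.type(ptr %cast, ...)
  //   store float %x, ptr %cast
  // The assign_ptr_type and the ptrcast end up in DeadInstructions; the store
  // is re-emitted against %orig by transformStore().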

public:
  SPIRVLegalizePointerCast(SPIRVTargetMachine *TM) : FunctionPass(ID), TM(TM) {}

  virtual bool runOnFunction(Function &F) override {
    const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(F);
    GR = ST.getSPIRVGlobalRegistry();
    DeadInstructions.clear();

    // Gather all spv_ptrcast calls first, then legalize them: legalization
    // creates and removes instructions as it goes.
    std::vector<IntrinsicInst *> WorkList;
    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        auto *II = dyn_cast<IntrinsicInst>(&I);
        if (II && II->getIntrinsicID() == Intrinsic::spv_ptrcast)
          WorkList.push_back(II);
      }
    }

    for (IntrinsicInst *II : WorkList)
      legalizePointerCast(II);

    for (Instruction *I : DeadInstructions)
      I->eraseFromParent();

    return DeadInstructions.size() != 0;
  }

private:
  SPIRVTargetMachine *TM = nullptr;
  SPIRVGlobalRegistry *GR = nullptr;
  std::vector<Instruction *> DeadInstructions;

public:
  static char ID;
};
} // anonymous namespace

char SPIRVLegalizePointerCast::ID = 0;

INITIALIZE_PASS(SPIRVLegalizePointerCast, "spirv-legalize-bitcast",
                "SPIRV legalize bitcast pass", false, false)

FunctionPass *llvm::createSPIRVLegalizePointerCastPass(SPIRVTargetMachine *TM) {
  return new SPIRVLegalizePointerCast(TM);
}
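
// Illustrative usage sketch (not part of this file): the pass is intended to
// be scheduled from the SPIR-V TargetPassConfig, along the lines of
//   void SPIRVPassConfig::addIRPasses() {
//     ...
//     addPass(createSPIRVLegalizePointerCastPass(&getTM<SPIRVTargetMachine>()));
//     ...
//   }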