#include "SPIRV.h"
#include "SPIRVGlobalRegistry.h"
#include "SPIRVSubtarget.h"
#include "SPIRVTargetMachine.h"
#include "SPIRVUtils.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/IntrinsicsSPIRV.h"
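// SPIRVLegalizePointerCast rewrites the `llvm.spv.ptrcast` intrinsics that are
// left behind when a pointer is reinterpreted with a different pointee type
// (for example, loading a scalar from an array, or a 3-element vector from a
// 4-element one). The logical addressing model targeted here has no general
// pointer bitcast, so every load and store going through such a cast is
// rewritten below to access the original pointee type directly, using
// spv_gep, spv_bitcast, and element-wise shuffles/insert/extract; the cast
// itself is then deleted.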
using namespace llvm;

namespace llvm {
void initializeSPIRVLegalizePointerCastPass(PassRegistry &);
} // namespace llvm

class SPIRVLegalizePointerCast : public FunctionPass {
  // Attaches an spv_assign_type intrinsic to |Arg| so the global registry
  // knows it carries type |Ty|.
  void buildAssignType(IRBuilder<> &B, Type *Ty, Value *Arg) {
    Value *OfType = PoisonValue::get(Ty);
    CallInst *AssignCI = buildIntrWithMD(Intrinsic::spv_assign_type,
                                         {Arg->getType()}, OfType, Arg, {}, B);
    GR->addAssignPtrTypeInstr(Arg, AssignCI);
  }
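  // Loads a vector of type |SourceType| from |Source| and turns it into a
  // value of vector type |TargetType|: if the element types differ, the
  // loaded value is bitcast first, and a shuffle then keeps only the lanes
  // the target type expects.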
  Value *loadVectorFromVector(IRBuilder<> &B, FixedVectorType *SourceType,
                              FixedVectorType *TargetType, Value *Source) {
    LoadInst *NewLoad = B.CreateLoad(SourceType, Source);
    buildAssignType(B, SourceType, NewLoad);

    Value *AssignValue = NewLoad;
    // If the element types differ, bitcast the loaded value to the target's
    // element type before adjusting the element count.
    if (TargetType->getElementType() != SourceType->getElementType()) {
      AssignValue = B.CreateIntrinsic(Intrinsic::spv_bitcast,
                                      {TargetType, SourceType}, {NewLoad});
      buildAssignType(B, TargetType, AssignValue);
    }

    // Keep only the first lanes the target vector type expects.
    SmallVector<int> Mask(TargetType->getNumElements());
    for (unsigned I = 0; I < TargetType->getNumElements(); ++I)
      Mask[I] = I;
    Value *Output = B.CreateShuffleVector(AssignValue, AssignValue, Mask);
    buildAssignType(B, TargetType, Output);
    return Output;
  }
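  // Replaces a load through the cast pointer with a load of the first element
  // of the original aggregate: emits an spv_gep indexing element 0 of |Source|
  // and loads a single |ElementType| value from it, reusing |BadLoad|'s
  // pointer type and alignment.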
  Value *loadFirstValueFromAggregate(IRBuilder<> &B, Type *ElementType,
                                     Value *Source, LoadInst *BadLoad) {
    SmallVector<Type *, 2> Types = {BadLoad->getPointerOperandType(),
                                    BadLoad->getPointerOperandType()};
    SmallVector<Value *, 4> Args{/* isInBounds= */ B.getInt1(true), Source,
                                 B.getInt32(0), B.getInt32(0)};
    auto *GEP = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
    GR->buildAssignPtr(B, ElementType, GEP);

    LoadInst *LI = B.CreateLoad(ElementType, GEP);
    LI->setAlignment(BadLoad->getAlign());
    buildAssignType(B, ElementType, LI);
    return LI;
  }
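  // Rewrites the load |LI|, which reads through the ptrcast result
  // |CastedOperand|, so it reads directly from |OriginalOperand| instead.
  // The deduced element types of both pointers decide which legalization
  // applies (first-element load, vector-to-scalar, or vector-to-vector).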
  void transformLoad(IRBuilder<> &B, LoadInst *LI, Value *CastedOperand,
                     Value *OriginalOperand) {
    Type *FromTy = GR->findDeducedElementType(OriginalOperand);
    Type *ToTy = GR->findDeducedElementType(CastedOperand);
    Value *Output = nullptr;

    auto *SAT = dyn_cast<ArrayType>(FromTy);
    auto *SVT = dyn_cast<FixedVectorType>(FromTy);
    auto *SST = dyn_cast<StructType>(FromTy);
    auto *DVT = dyn_cast<FixedVectorType>(ToTy);

    B.SetInsertPoint(LI);

    // Destination is the element type of an array source -> load the first
    // array element, e.g. `float a = array[0];`.
    if (SAT && SAT->getElementType() == ToTy)
      Output = loadFirstValueFromAggregate(B, SAT->getElementType(),
                                           OriginalOperand, LI);
    // Destination is the element type of a vector source -> vector-to-scalar,
    // e.g. `float a = vector.x;`.
    else if (!DVT && SVT && SVT->getElementType() == ToTy) {
      Output = loadFirstValueFromAggregate(B, SVT->getElementType(),
                                           OriginalOperand, LI);
    }
    // Both sides are vectors -> partial load and/or element-type bitcast.
    else if (SVT && DVT)
      Output = loadVectorFromVector(B, SVT, DVT, OriginalOperand);
    // Destination is the type stored first in a struct source -> load the
    // first member.
    else if (SST && SST->getTypeAtIndex(0u) == ToTy)
      Output = loadFirstValueFromAggregate(B, ToTy, OriginalOperand, LI);
    else
      llvm_unreachable("Unimplemented implicit down-cast from load.");

    GR->replaceAllUsesWith(LI, Output, /* DeleteOld= */ true);
    DeadInstructions.push_back(LI);
  }
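  // Emits an spv_insertelt intrinsic (the SPIR-V analogue of insertelement)
  // writing |Element| into lane |Index| of |Vector|, and assigns the result
  // its type.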
  Value *makeInsertElement(IRBuilder<> &B, Value *Vector, Value *Element,
                           unsigned Index) {
    Type *Int32Ty = Type::getInt32Ty(B.getContext());
    SmallVector<Type *, 4> Types = {Vector->getType(), Vector->getType(),
                                    Element->getType(), Int32Ty};
    SmallVector<Value *> Args = {Vector, Element, B.getInt32(Index)};
    Instruction *NewI =
        B.CreateIntrinsic(Intrinsic::spv_insertelt, {Types}, {Args});
    buildAssignType(B, Vector->getType(), NewI);
    return NewI;
  }
  // Emits an spv_extractelt intrinsic reading lane |Index| of |Vector| as a
  // value of |ElementType|.
  Value *makeExtractElement(IRBuilder<> &B, Type *ElementType, Value *Vector,
                            unsigned Index) {
    Type *Int32Ty = Type::getInt32Ty(B.getContext());
    SmallVector<Type *, 3> Types = {ElementType, Vector->getType(), Int32Ty};
    SmallVector<Value *> Args = {Vector, B.getInt32(Index)};
    Instruction *NewI =
        B.CreateIntrinsic(Intrinsic::spv_extractelt, {Types}, {Args});
    buildAssignType(B, ElementType, NewI);
    return NewI;
  }
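  // Stores the vector |Src| into |Dst|, whose deduced pointee is a (possibly
  // wider or differently typed) vector: an equal lane count is handled with an
  // spv_bitcast and a plain store, while a narrower source is merged into the
  // destination lane by lane via extract/insert before storing back.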
  void storeVectorFromVector(IRBuilder<> &B, Value *Src, Value *Dst,
                             Align Alignment) {
    FixedVectorType *SrcType = cast<FixedVectorType>(Src->getType());
    FixedVectorType *DstType =
        cast<FixedVectorType>(GR->findDeducedElementType(Dst));

    // Same lane count but different element types: a single OpBitcast is
    // enough, as long as the element bit widths match.
    if (SrcType->getNumElements() == DstType->getNumElements()) {
      [[maybe_unused]] auto dstBitWidth = DstType->getScalarSizeInBits();
      [[maybe_unused]] auto srcBitWidth = SrcType->getScalarSizeInBits();
      assert(dstBitWidth == srcBitWidth &&
             "Unsupported bitcast between vectors of different sizes.");

      Src =
          B.CreateIntrinsic(Intrinsic::spv_bitcast, {DstType, SrcType}, {Src});
      buildAssignType(B, DstType, Src);
      StoreInst *SI = B.CreateStore(Src, Dst);
      SI->setAlignment(Alignment);
      return;
    }

    // The destination vector is wider: load it, overwrite its first lanes
    // with the source elements, and store the merged value back.
    LoadInst *LI = B.CreateLoad(DstType, Dst);
    LI->setAlignment(Alignment);
    Value *OldValues = LI;
    buildAssignType(B, OldValues->getType(), OldValues);
    Value *NewValues = Src;

    for (unsigned I = 0; I < SrcType->getNumElements(); ++I) {
      Value *Element =
          makeExtractElement(B, SrcType->getElementType(), NewValues, I);
      OldValues = makeInsertElement(B, OldValues, Element, I);
    }
    StoreInst *SI = B.CreateStore(OldValues, Dst);
    SI->setAlignment(Alignment);
  }
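  // Appends to |Indices| the chain of zero indices a GEP needs to walk from
  // |Aggregate| down to its first leaf of type |Search| (following the first
  // member/element at every level).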
  void buildGEPIndexChain(IRBuilder<> &B, Type *Search, Type *Aggregate,
                          SmallVectorImpl<Value *> &Indices) {
    Indices.push_back(B.getInt32(0));

    if (Search == Aggregate)
      return;

    if (auto *ST = dyn_cast<StructType>(Aggregate))
      buildGEPIndexChain(B, Search, ST->getTypeAtIndex(0u), Indices);
    else if (auto *AT = dyn_cast<ArrayType>(Aggregate))
      buildGEPIndexChain(B, Search, AT->getElementType(), Indices);
    else if (auto *VT = dyn_cast<FixedVectorType>(Aggregate))
      buildGEPIndexChain(B, Search, VT->getElementType(), Indices);
    else
      llvm_unreachable("Bad access chain?");
  }
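  // Stores |Src| into the first leaf of the aggregate pointed to by |Dst|:
  // builds the index chain from |DstPointeeType| down to |Src|'s type, emits
  // the corresponding spv_gep, and stores through it with |Alignment|.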
  void storeToFirstValueAggregate(IRBuilder<> &B, Value *Src, Value *Dst,
                                  Type *DstPointeeType, Align Alignment) {
    SmallVector<Type *, 2> Types = {Dst->getType(), Dst->getType()};
    SmallVector<Value *, 3> Args{/* isInBounds= */ B.getInt1(true), Dst};
    buildGEPIndexChain(B, Src->getType(), DstPointeeType, Args);
    auto *GEP = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
    GR->buildAssignPtr(B, Src->getType(), GEP);
    StoreInst *SI = B.CreateStore(Src, GEP);
    SI->setAlignment(Alignment);
  }
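  // Returns true if |Search| is the type reached from |Aggregate| by only
  // taking the first struct member / array element / vector lane at each
  // level (including |Aggregate| itself).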
  bool isTypeFirstElementAggregate(Type *Search, Type *Aggregate) {
    if (Search == Aggregate)
      return true;
    if (auto *ST = dyn_cast<StructType>(Aggregate))
      return isTypeFirstElementAggregate(Search, ST->getTypeAtIndex(0u));
    if (auto *VT = dyn_cast<FixedVectorType>(Aggregate))
      return isTypeFirstElementAggregate(Search, VT->getElementType());
    if (auto *AT = dyn_cast<ArrayType>(Aggregate))
      return isTypeFirstElementAggregate(Search, AT->getElementType());
    return false;
  }
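  // Rewrites the store |BadStore|, which writes |Src| through the ptrcast
  // result, so it writes into |Dst| (the original pointer) instead, choosing
  // between the aggregate-first-element and vector legalizations above.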
  void transformStore(IRBuilder<> &B, Instruction *BadStore, Value *Src,
                      Value *Dst, Align Alignment) {
    Type *ToTy = GR->findDeducedElementType(Dst);
    Type *FromTy = Src->getType();

    auto *S_VT = dyn_cast<FixedVectorType>(FromTy);
    auto *D_ST = dyn_cast<StructType>(ToTy);
    auto *D_VT = dyn_cast<FixedVectorType>(ToTy);

    B.SetInsertPoint(BadStore);
    // The stored value becomes the first leaf of the destination aggregate.
    if (D_ST && isTypeFirstElementAggregate(FromTy, D_ST))
      storeToFirstValueAggregate(B, Src, Dst, D_ST, Alignment);
    // Vector stored into a vector destination: partial store and/or bitcast.
    else if (D_VT && S_VT)
      storeVectorFromVector(B, Src, Dst, Alignment);
    // Scalar stored into the first lane of the destination vector.
    else if (D_VT && !S_VT && FromTy == D_VT->getElementType())
      storeToFirstValueAggregate(B, Src, Dst, D_VT, Alignment);
    else
      llvm_unreachable("Unsupported ptrcast use in store. Please fix.");

    DeadInstructions.push_back(BadStore);
  }
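  // Legalizes one spv_ptrcast call: each user of the cast (loads, stores,
  // spv_gep, spv_store, spv_assign_ptr_type) is rewritten to use the original
  // pointer, and the cast plus any now-dead users are queued for deletion.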
  void legalizePointerCast(IntrinsicInst *II) {
    Value *CastedOperand = II;
    Value *OriginalOperand = II->getOperand(0);

    IRBuilder<> B(II->getContext());
    std::vector<Value *> Users;
    // Gather the users first: the rewrites below add and remove instructions,
    // which would invalidate a live use-iterator.
    for (Use &U : II->uses())
      Users.push_back(U.getUser());

    for (Value *User : Users) {
      if (LoadInst *LI = dyn_cast<LoadInst>(User)) {
        transformLoad(B, LI, CastedOperand, OriginalOperand);
        continue;
      }
      if (StoreInst *SI = dyn_cast<StoreInst>(User)) {
        transformStore(B, SI, SI->getValueOperand(), OriginalOperand,
                       SI->getAlign());
        continue;
      }
      if (IntrinsicInst *Intrin = dyn_cast<IntrinsicInst>(User)) {
        if (Intrin->getIntrinsicID() == Intrinsic::spv_assign_ptr_type) {
          DeadInstructions.push_back(Intrin);
          continue;
        }
        if (Intrin->getIntrinsicID() == Intrinsic::spv_gep) {
          GR->replaceAllUsesWith(CastedOperand, OriginalOperand,
                                 /* DeleteOld= */ false);
          continue;
        }
        if (Intrin->getIntrinsicID() == Intrinsic::spv_store) {
          Align Alignment;
          // The alignment is carried as a constant operand of the spv_store
          // call.
          if (ConstantInt *C = dyn_cast<ConstantInt>(Intrin->getOperand(3)))
            Alignment = Align(C->getZExtValue());
          transformStore(B, Intrin, Intrin->getArgOperand(0), OriginalOperand,
                         Alignment);
          continue;
        }
      }
      llvm_unreachable("Unsupported ptrcast user. Please fix.");
    }

    DeadInstructions.push_back(II);
  }
public:
  SPIRVLegalizePointerCast(SPIRVTargetMachine *TM) : FunctionPass(ID), TM(TM) {}
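  // Pass driver: collects every spv_ptrcast call in the function, legalizes
  // each one, then erases all instructions that were marked dead along the
  // way.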
  bool runOnFunction(Function &F) override {
    const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(F);
    GR = ST.getSPIRVGlobalRegistry();
    DeadInstructions.clear();

    std::vector<IntrinsicInst *> WorkList;
    for (auto &BB : F) {
      for (auto &I : BB) {
        auto *II = dyn_cast<IntrinsicInst>(&I);
        if (II && II->getIntrinsicID() == Intrinsic::spv_ptrcast)
          WorkList.push_back(II);
      }
    }

    for (IntrinsicInst *II : WorkList)
      legalizePointerCast(II);

    for (Instruction *I : DeadInstructions)
      I->eraseFromParent();

    return DeadInstructions.size() != 0;
  }
private:
  SPIRVTargetMachine *TM = nullptr;
  SPIRVGlobalRegistry *GR = nullptr;
  std::vector<Instruction *> DeadInstructions;

public:
  static char ID;
};
char SPIRVLegalizePointerCast::ID = 0;
385 "SPIRV legalize bitcast pass",
false,
false)
388 return new SPIRVLegalizePointerCast(TM);
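// Minimal usage sketch (an illustration under assumptions, not taken from
// this file): a legacy pass pipeline could schedule the pass through the
// factory above, e.g.
//   legacy::PassManager PM;
//   PM.add(createSPIRVLegalizePointerCastPass(&TM));
//   PM.run(M);
// with TM being the SPIRVTargetMachine and M the module that already contains
// the spv_ptrcast intrinsics to legalize.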