LLVM 19.0.0git
SPIRVEmitIntrinsics.cpp
Go to the documentation of this file.
1//===-- SPIRVEmitIntrinsics.cpp - emit SPIRV intrinsics ---------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// The pass emits SPIRV intrinsics keeping essential high-level information for
10// the translation of LLVM IR to SPIR-V.
11//
12//===----------------------------------------------------------------------===//
13
14#include "SPIRV.h"
15#include "SPIRVBuiltins.h"
16#include "SPIRVMetadata.h"
17#include "SPIRVSubtarget.h"
18#include "SPIRVTargetMachine.h"
19#include "SPIRVUtils.h"
20#include "llvm/IR/IRBuilder.h"
22#include "llvm/IR/InstVisitor.h"
23#include "llvm/IR/IntrinsicsSPIRV.h"
25
26#include <queue>
27
28// This pass performs the following transformation on LLVM IR level required
29// for the following translation to SPIR-V:
30// - replaces direct usages of aggregate constants with target-specific
31// intrinsics;
32// - replaces aggregates-related instructions (extract/insert, ld/st, etc)
33// with a target-specific intrinsics;
34// - emits intrinsics for the global variable initializers since IRTranslator
35// doesn't handle them and it's not very convenient to translate them
36// ourselves;
37// - emits intrinsics to keep track of the string names assigned to the values;
38// - emits intrinsics to keep track of constants (this is necessary to have an
39// LLVM IR constant after the IRTranslation is completed) for their further
40// deduplication;
41// - emits intrinsics to keep track of original LLVM types of the values
42// to be able to emit proper SPIR-V types eventually.
43//
44// TODO: consider removing spv.track.constant in favor of spv.assign.type.
45
46using namespace llvm;
47
48namespace llvm {
50} // namespace llvm
51
52namespace {
53
// Wraps a Value as metadata so it can be passed as an argument to an
// intrinsic call (intrinsics below carry types/constants this way).
// NOTE(review): extraction gap — the return statement (doxygen lines 56-57,
// presumably building a MetadataAsValue from ValueAsMetadata) is missing
// from this view.
inline MetadataAsValue *buildMD(Value *Arg) {
  LLVMContext &Ctx = Arg->getContext();
}
59
// The pass itself: walks each function of the module via InstVisitor and
// rewrites/annotates instructions with SPIR-V intrinsics so high-level type
// information survives IR translation.
//
// NOTE(review): this doxygen extraction is missing several member lines
// (doxygen 67-68, 94, 117, 128, 131, 133-146, 154) — presumably the
// AggrConsts/AggrConstTypes maps referenced elsewhere in this file, the
// local `Args` vector of buildIntrWithMD, the pass-initialization calls in
// the constructors, the visit* method declarations, and the body of
// getAnalysisUsage. The gaps below are extraction artifacts, not deletions.
class SPIRVEmitIntrinsics
    : public ModulePass,
      public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
  SPIRVTargetMachine *TM = nullptr;   // target machine; may stay null
  SPIRVGlobalRegistry *GR = nullptr;  // registry of deduced types per value
  Function *F = nullptr;              // function currently being processed
  bool TrackConstants = true;         // whether to emit constant-tracking intrinsics
  DenseSet<Instruction *> AggrStores; // aggregate-typed stores to rewrite

  // deduce element type of untyped pointers
  Type *deduceElementType(Value *I);
  Type *deduceElementTypeHelper(Value *I);
  Type *deduceElementTypeHelper(Value *I, std::unordered_set<Value *> &Visited);
  Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                     std::unordered_set<Value *> &Visited);
  Type *deduceElementTypeByUsersDeep(Value *Op,
                                     std::unordered_set<Value *> &Visited);

  // deduce nested types of composites
  Type *deduceNestedTypeHelper(User *U);
  Type *deduceNestedTypeHelper(User *U, Type *Ty,
                               std::unordered_set<Value *> &Visited);

  // deduce Types of operands of the Instruction if possible
  void deduceOperandElementType(Instruction *I);

  void preprocessCompositeConstants(IRBuilder<> &B);
  void preprocessUndefs(IRBuilder<> &B);

  // Creates a call to IntrID with Arg2 as the leading operand, Arg wrapped
  // as metadata, then any immediate constant arguments.
  CallInst *buildIntrWithMD(Intrinsic::ID IntrID, ArrayRef<Type *> Types,
                            Value *Arg, Value *Arg2, ArrayRef<Constant *> Imms,
                            IRBuilder<> &B) {
    // NOTE(review): extraction gap — doxygen line 94 (presumably
    // `SmallVector<Value *, 2> Args;`) is missing here.
    Args.push_back(Arg2);
    Args.push_back(buildMD(Arg));
    for (auto *Imm : Imms)
      Args.push_back(Imm);
    return B.CreateIntrinsic(IntrID, {Types}, Args);
  }

  void buildAssignType(IRBuilder<> &B, Type *ElemTy, Value *Arg);
  void buildAssignPtr(IRBuilder<> &B, Type *ElemTy, Value *Arg);
  void updateAssignType(CallInst *AssignCI, Value *Arg, Value *OfType);

  void replaceMemInstrUses(Instruction *Old, Instruction *New, IRBuilder<> &B);
  void processInstrAfterVisit(Instruction *I, IRBuilder<> &B);
  void insertAssignPtrTypeIntrs(Instruction *I, IRBuilder<> &B);
  void insertAssignTypeIntrs(Instruction *I, IRBuilder<> &B);
  void insertAssignPtrTypeTargetExt(TargetExtType *AssignedType, Value *V,
                                    IRBuilder<> &B);
  void replacePointerOperandWithPtrCast(Instruction *I, Value *Pointer,
                                        Type *ExpectedElementType,
                                        unsigned OperandToReplace,
                                        IRBuilder<> &B);
  void insertPtrCastOrAssignTypeInstr(Instruction *I, IRBuilder<> &B);
  void processGlobalValue(GlobalVariable &GV, IRBuilder<> &B);
  void processParamTypes(Function *F, IRBuilder<> &B);
  void processParamTypesByFunHeader(Function *F, IRBuilder<> &B);
  Type *deduceFunParamElementType(Function *F, unsigned OpIdx);
  Type *deduceFunParamElementType(Function *F, unsigned OpIdx,
                                  std::unordered_set<Function *> &FVisited);

public:
  static char ID;
  SPIRVEmitIntrinsics() : ModulePass(ID) {
  }
  SPIRVEmitIntrinsics(SPIRVTargetMachine *_TM) : ModulePass(ID), TM(_TM) {
  }

  StringRef getPassName() const override { return "SPIRV emit intrinsics"; }

  bool runOnModule(Module &M) override;
  bool runOnFunction(Function &F);

  void getAnalysisUsage(AnalysisUsage &AU) const override {
  }
};
157
158bool isConvergenceIntrinsic(const Instruction *I) {
159 const auto *II = dyn_cast<IntrinsicInst>(I);
160 if (!II)
161 return false;
162
163 return II->getIntrinsicID() == Intrinsic::experimental_convergence_entry ||
164 II->getIntrinsicID() == Intrinsic::experimental_convergence_loop ||
165 II->getIntrinsicID() == Intrinsic::experimental_convergence_anchor;
166}
167} // namespace
168
// Pass identification: LLVM uses the *address* of ID, not its value.
char SPIRVEmitIntrinsics::ID = 0;

// Registers the pass under the "emit-intrinsics" command-line name.
INITIALIZE_PASS(SPIRVEmitIntrinsics, "emit-intrinsics", "SPIRV emit intrinsics",
                false, false)
173
174static inline bool isAssignTypeInstr(const Instruction *I) {
175 return isa<IntrinsicInst>(I) &&
176 cast<IntrinsicInst>(I)->getIntrinsicID() == Intrinsic::spv_assign_type;
177}
178
// NOTE(review): the function header (doxygen line 179, presumably
// `static bool isMemInstrToReplace(Instruction *I) {`) is missing from this
// extraction; only the body is visible.
// True for the memory/aggregate instructions this pass rewrites into
// target-specific intrinsics (see replaceMemInstrUses).
  return isa<StoreInst>(I) || isa<LoadInst>(I) || isa<InsertValueInst>(I) ||
         isa<ExtractValueInst>(I) || isa<AtomicCmpXchgInst>(I);
}
183
184static bool isAggrConstForceInt32(const Value *V) {
185 return isa<ConstantArray>(V) || isa<ConstantStruct>(V) ||
186 isa<ConstantDataArray>(V) ||
187 (isa<ConstantAggregateZero>(V) && !V->getType()->isVectorTy());
188}
189
// NOTE(review): the function header (doxygen line 190) is missing from this
// extraction; judging by later call sites this is
// `static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I)`.
// Positions B at I, or past the PHI/dbg/alloca prologue when I is a PHI —
// nothing may be inserted between PHI nodes at a block's start.
  if (isa<PHINode>(I))
    B.SetInsertPoint(I->getParent()->getFirstNonPHIOrDbgOrAlloca());
  else
    B.SetInsertPoint(I);
}
196
// NOTE(review): the function header (doxygen line 197) is missing from this
// extraction; the body matches a helper that positions B immediately after
// the point where I's value becomes available.
  B.SetCurrentDebugLocation(I->getDebugLoc());
  // Void instructions define no value, so the next instruction is the
  // earliest valid insertion point; otherwise use the post-definition point.
  if (I->getType()->isVoidTy())
    B.SetInsertPoint(I->getNextNode());
  else
    B.SetInsertPoint(*I->getInsertionPointAfterDef());
}
204
// NOTE(review): the function header (doxygen line 205) is missing from this
// extraction; only the body is visible.
// Returns false for intrinsics that need no spv_assign_type annotation
// (invariant.start/end); true for everything else.
  IntrinsicInst *Intr = dyn_cast<IntrinsicInst>(I);
  if (Intr) {
    switch (Intr->getIntrinsicID()) {
    case Intrinsic::invariant_start:
    case Intrinsic::invariant_end:
      return false;
    }
  }
  return true;
}
216
217static inline void reportFatalOnTokenType(const Instruction *I) {
218 if (I->getType()->isTokenTy())
219 report_fatal_error("A token is encountered but SPIR-V without extensions "
220 "does not support token type",
221 false);
222}
223
224void SPIRVEmitIntrinsics::buildAssignType(IRBuilder<> &B, Type *Ty,
225 Value *Arg) {
226 Value *OfType = PoisonValue::get(Ty);
227 CallInst *AssignCI = buildIntrWithMD(Intrinsic::spv_assign_type,
228 {Arg->getType()}, OfType, Arg, {}, B);
229 GR->addAssignPtrTypeInstr(Arg, AssignCI);
230}
231
232void SPIRVEmitIntrinsics::buildAssignPtr(IRBuilder<> &B, Type *ElemTy,
233 Value *Arg) {
234 Value *OfType = PoisonValue::get(ElemTy);
235 CallInst *AssignPtrTyCI = GR->findAssignPtrTypeInstr(Arg);
236 if (AssignPtrTyCI == nullptr ||
237 AssignPtrTyCI->getParent()->getParent() != F) {
238 AssignPtrTyCI = buildIntrWithMD(
239 Intrinsic::spv_assign_ptr_type, {Arg->getType()}, OfType, Arg,
240 {B.getInt32(getPointerAddressSpace(Arg->getType()))}, B);
241 GR->addDeducedElementType(AssignPtrTyCI, ElemTy);
242 GR->addDeducedElementType(Arg, ElemTy);
243 GR->addAssignPtrTypeInstr(Arg, AssignPtrTyCI);
244 } else {
245 updateAssignType(AssignPtrTyCI, Arg, OfType);
246 }
247}
248
249void SPIRVEmitIntrinsics::updateAssignType(CallInst *AssignCI, Value *Arg,
250 Value *OfType) {
251 AssignCI->setArgOperand(1, buildMD(OfType));
252 if (cast<IntrinsicInst>(AssignCI)->getIntrinsicID() !=
253 Intrinsic::spv_assign_ptr_type)
254 return;
255
256 // update association with the pointee type
257 Type *ElemTy = OfType->getType();
258 GR->addDeducedElementType(AssignCI, ElemTy);
259 GR->addDeducedElementType(Arg, ElemTy);
260}
261
262// Set element pointer type to the given value of ValueTy and tries to
263// specify this type further (recursively) by Operand value, if needed.
264Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
265 Type *ValueTy, Value *Operand, std::unordered_set<Value *> &Visited) {
266 Type *Ty = ValueTy;
267 if (Operand) {
268 if (auto *PtrTy = dyn_cast<PointerType>(Ty)) {
269 if (Type *NestedTy = deduceElementTypeHelper(Operand, Visited))
270 Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
271 } else {
272 Ty = deduceNestedTypeHelper(dyn_cast<User>(Operand), Ty, Visited);
273 }
274 }
275 return Ty;
276}
277
278// Traverse User instructions to deduce an element pointer type of the operand.
279Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
280 Value *Op, std::unordered_set<Value *> &Visited) {
281 if (!Op || !isPointerTy(Op->getType()))
282 return nullptr;
283
284 if (auto PType = dyn_cast<TypedPointerType>(Op->getType()))
285 return PType->getElementType();
286
287 // maybe we already know operand's element type
288 if (Type *KnownTy = GR->findDeducedElementType(Op))
289 return KnownTy;
290
291 for (User *OpU : Op->users()) {
292 if (Instruction *Inst = dyn_cast<Instruction>(OpU)) {
293 if (Type *Ty = deduceElementTypeHelper(Inst, Visited))
294 return Ty;
295 }
296 }
297 return nullptr;
298}
299
// Implements what we know in advance about intrinsics and builtin calls
// TODO: consider feasibility of this particular case to be generalized by
// encoding knowledge about intrinsics and builtin calls by corresponding
// specification rules
// NOTE(review): the function header (doxygen line 304, presumably
// `static Type *getPointeeTypeByCallInst(const std::string &DemangledName,`)
// is missing from this extraction; only the parameter tail and body are
// visible. printf-style builtins take an i8* format string as operand 0.
    Function *CalledF, unsigned OpIdx) {
  if ((DemangledName.starts_with("__spirv_ocl_printf(") ||
       DemangledName.starts_with("printf(")) &&
      OpIdx == 0)
    return IntegerType::getInt8Ty(CalledF->getContext());
  return nullptr;
}
312
313// Deduce and return a successfully deduced Type of the Instruction,
314// or nullptr otherwise.
315Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(Value *I) {
316 std::unordered_set<Value *> Visited;
317 return deduceElementTypeHelper(I, Visited);
318}
319
320Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
321 Value *I, std::unordered_set<Value *> &Visited) {
322 // allow to pass nullptr as an argument
323 if (!I)
324 return nullptr;
325
326 // maybe already known
327 if (Type *KnownTy = GR->findDeducedElementType(I))
328 return KnownTy;
329
330 // maybe a cycle
331 if (Visited.find(I) != Visited.end())
332 return nullptr;
333 Visited.insert(I);
334
335 // fallback value in case when we fail to deduce a type
336 Type *Ty = nullptr;
337 // look for known basic patterns of type inference
338 if (auto *Ref = dyn_cast<AllocaInst>(I)) {
339 Ty = Ref->getAllocatedType();
340 } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
341 Ty = Ref->getResultElementType();
342 } else if (auto *Ref = dyn_cast<GlobalValue>(I)) {
343 Ty = deduceElementTypeByValueDeep(
344 Ref->getValueType(),
345 Ref->getNumOperands() > 0 ? Ref->getOperand(0) : nullptr, Visited);
346 } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
347 Ty = deduceElementTypeHelper(Ref->getPointerOperand(), Visited);
348 } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
349 if (Type *Src = Ref->getSrcTy(), *Dest = Ref->getDestTy();
350 isPointerTy(Src) && isPointerTy(Dest))
351 Ty = deduceElementTypeHelper(Ref->getOperand(0), Visited);
352 } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {
353 Value *Op = Ref->getNewValOperand();
354 Ty = deduceElementTypeByValueDeep(Op->getType(), Op, Visited);
355 } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {
356 Value *Op = Ref->getValOperand();
357 Ty = deduceElementTypeByValueDeep(Op->getType(), Op, Visited);
358 } else if (auto *Ref = dyn_cast<PHINode>(I)) {
359 for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
360 Ty = deduceElementTypeByUsersDeep(Ref->getIncomingValue(i), Visited);
361 if (Ty)
362 break;
363 }
364 } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
365 for (Value *Op : {Ref->getTrueValue(), Ref->getFalseValue()}) {
366 Ty = deduceElementTypeByUsersDeep(Op, Visited);
367 if (Ty)
368 break;
369 }
370 }
371
372 // remember the found relationship
373 if (Ty) {
374 // specify nested types if needed, otherwise return unchanged
375 GR->addDeducedElementType(I, Ty);
376 }
377
378 return Ty;
379}
380
381// Re-create a type of the value if it has untyped pointer fields, also nested.
382// Return the original value type if no corrections of untyped pointer
383// information is found or needed.
384Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(User *U) {
385 std::unordered_set<Value *> Visited;
386 return deduceNestedTypeHelper(U, U->getType(), Visited);
387}
388
// Recursively re-creates OrigTy, substituting TypedPointerType for any
// untyped pointer member whose element type is deducible from U's
// corresponding operand; handles structs, arrays and vectors. Returns
// OrigTy unchanged when nothing could be refined.
Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
    User *U, Type *OrigTy, std::unordered_set<Value *> &Visited) {
  if (!U)
    return OrigTy;

  // maybe already known
  if (Type *KnownTy = GR->findDeducedCompositeType(U))
    return KnownTy;

  // maybe a cycle
  if (Visited.find(U) != Visited.end())
    return OrigTy;
  Visited.insert(U);

  // NOTE(review): `dyn_cast` used purely as a predicate — `isa<StructType>`
  // would be the idiomatic spelling.
  if (dyn_cast<StructType>(OrigTy)) {
    // NOTE(review): extraction gap — doxygen line 404 (presumably
    // `SmallVector<Type *> Tys;`) is missing here.
    bool Change = false;
    for (unsigned i = 0; i < U->getNumOperands(); ++i) {
      Value *Op = U->getOperand(i);
      Type *OpTy = Op->getType();
      Type *Ty = OpTy;
      if (Op) {
        if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
          if (Type *NestedTy = deduceElementTypeHelper(Op, Visited))
            Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
        } else {
          Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited);
        }
      }
      Tys.push_back(Ty);
      Change |= Ty != OpTy;
    }
    if (Change) {
      Type *NewTy = StructType::create(Tys);
      GR->addDeducedCompositeType(U, NewTy);
      return NewTy;
    }
  } else if (auto *ArrTy = dyn_cast<ArrayType>(OrigTy)) {
    // Arrays share one element type, so only the first operand is inspected.
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = ArrTy->getElementType();
      Type *Ty = OpTy;
      if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
        if (Type *NestedTy = deduceElementTypeHelper(Op, Visited))
          Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited);
      }
      if (Ty != OpTy) {
        Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
        GR->addDeducedCompositeType(U, NewTy);
        return NewTy;
      }
    }
  } else if (auto *VecTy = dyn_cast<VectorType>(OrigTy)) {
    // Same single-element-type reasoning as the array case above.
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = VecTy->getElementType();
      Type *Ty = OpTy;
      if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
        if (Type *NestedTy = deduceElementTypeHelper(Op, Visited))
          Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited);
      }
      if (Ty != OpTy) {
        Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
        GR->addDeducedCompositeType(U, NewTy);
        return NewTy;
      }
    }
  }

  return OrigTy;
}
462
463Type *SPIRVEmitIntrinsics::deduceElementType(Value *I) {
464 if (Type *Ty = deduceElementTypeHelper(I))
465 return Ty;
466 return IntegerType::getInt8Ty(I->getContext());
467}
468
// If the Instruction has Pointer operands with unresolved types, this function
// tries to deduce them. If the Instruction has Pointer operands with known
// types which differ from expected, this function tries to insert a bitcast to
// resolve the issue.
void SPIRVEmitIntrinsics::deduceOperandElementType(Instruction *I) {
  // NOTE(review): extraction gap — doxygen line 474 (presumably the
  // declaration of the `Ops` vector of (operand, operand-index) pairs used
  // throughout this function) is missing here.
  Type *KnownElemTy = nullptr;
  // look for known basic patterns of type inference
  if (auto *Ref = dyn_cast<PHINode>(I)) {
    // A pointer-typed PHI with a known element type pushes that type down
    // to every pointer-typed incoming value.
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
      Value *Op = Ref->getIncomingValue(i);
      if (isPointerTy(Op->getType()))
        Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    // iterates all operands incl. the condition; only pointers are collected
    for (unsigned i = 0; i < Ref->getNumOperands(); i++) {
      Value *Op = Ref->getOperand(i);
      if (isPointerTy(Op->getType()))
        Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<ReturnInst>(I)) {
    Type *RetTy = F->getReturnType();
    if (!isPointerTy(RetTy))
      return;
    Value *Op = Ref->getReturnValue();
    if (!Op)
      return;
    if (!(KnownElemTy = GR->findDeducedElementType(F))) {
      // The function's element type is unknown: derive it from the returned
      // value instead of rewriting the operand.
      if (Type *OpElemTy = GR->findDeducedElementType(Op)) {
        GR->addDeducedElementType(F, OpElemTy);
        // NOTE(review): extraction gap — doxygen line 506 (presumably the
        // `TypedPointerType::get(OpElemTy, ...)` initializer) is missing.
        TypedPointerType *DerivedTy =
        GR->addReturnType(F, DerivedTy);
      }
      return;
    }
    Ops.push_back(std::make_pair(Op, 0));
  } else if (auto *Ref = dyn_cast<ICmpInst>(I)) {
    // Pointer comparison: copy the element type known for one side over to
    // the other side.
    if (!isPointerTy(Ref->getOperand(0)->getType()))
      return;
    Value *Op0 = Ref->getOperand(0);
    Value *Op1 = Ref->getOperand(1);
    Type *ElemTy0 = GR->findDeducedElementType(Op0);
    Type *ElemTy1 = GR->findDeducedElementType(Op1);
    if (ElemTy0) {
      KnownElemTy = ElemTy0;
      Ops.push_back(std::make_pair(Op1, 1));
    } else if (ElemTy1) {
      KnownElemTy = ElemTy1;
      Ops.push_back(std::make_pair(Op0, 0));
    }
  }

  // Not enough information to deduce types, or everything is already valid.
  if (!KnownElemTy || Ops.size() == 0)
    return;

  LLVMContext &Ctx = F->getContext();
  IRBuilder<> B(Ctx);
  for (auto &OpIt : Ops) {
    Value *Op = OpIt.first;
    if (Op->use_empty())
      continue;
    Type *Ty = GR->findDeducedElementType(Op);
    if (Ty == KnownElemTy)
      continue;
    Value *OpTyVal = Constant::getNullValue(KnownElemTy);
    Type *OpTy = Op->getType();
    if (!Ty) {
      // No type known for the operand yet: record KnownElemTy and create or
      // update the spv_assign_ptr_type annotation.
      GR->addDeducedElementType(Op, KnownElemTy);
      // check if there is existing Intrinsic::spv_assign_ptr_type instruction
      CallInst *AssignCI = GR->findAssignPtrTypeInstr(Op);
      if (AssignCI == nullptr) {
        Instruction *User = dyn_cast<Instruction>(Op->use_begin()->get());
        setInsertPointSkippingPhis(B, User ? User->getNextNode() : I);
        CallInst *CI =
            buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {OpTy}, OpTyVal, Op,
                            {B.getInt32(getPointerAddressSpace(OpTy))}, B);
        GR->addAssignPtrTypeInstr(Op, CI);
      } else {
        updateAssignType(AssignCI, Op, OpTyVal);
      }
    } else {
      // Conflicting known type: emit a spv_ptrcast after the operand's
      // definition and rewire only this instruction's operand to it.
      if (auto *OpI = dyn_cast<Instruction>(Op)) {
        // spv_ptrcast's argument Op denotes an instruction that generates
        // a value, and we may use getInsertionPointAfterDef()
        B.SetInsertPoint(*OpI->getInsertionPointAfterDef());
        B.SetCurrentDebugLocation(OpI->getDebugLoc());
      } else if (auto *OpA = dyn_cast<Argument>(Op)) {
        B.SetInsertPointPastAllocas(OpA->getParent());
        B.SetCurrentDebugLocation(DebugLoc());
      } else {
        B.SetInsertPoint(F->getEntryBlock().getFirstNonPHIOrDbgOrAlloca());
      }
      SmallVector<Type *, 2> Types = {OpTy, OpTy};
      SmallVector<Value *, 2> Args = {Op, buildMD(OpTyVal),
                                      B.getInt32(getPointerAddressSpace(OpTy))};
      CallInst *PtrCastI =
          B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
      I->setOperand(OpIt.second, PtrCastI);
    }
  }
}
578
579void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
580 Instruction *New,
581 IRBuilder<> &B) {
582 while (!Old->user_empty()) {
583 auto *U = Old->user_back();
584 if (isAssignTypeInstr(U)) {
585 B.SetInsertPoint(U);
586 SmallVector<Value *, 2> Args = {New, U->getOperand(1)};
587 CallInst *AssignCI =
588 B.CreateIntrinsic(Intrinsic::spv_assign_type, {New->getType()}, Args);
589 GR->addAssignPtrTypeInstr(New, AssignCI);
590 U->eraseFromParent();
591 } else if (isMemInstrToReplace(U) || isa<ReturnInst>(U) ||
592 isa<CallInst>(U)) {
593 U->replaceUsesOfWith(Old, New);
594 } else {
595 llvm_unreachable("illegal aggregate intrinsic user");
596 }
597 }
598 Old->eraseFromParent();
599}
600
// Replaces aggregate-typed undef operands with calls to the spv_undef
// intrinsic so the undef survives IR translation; new calls are re-queued
// so nested cases are processed too.
void SPIRVEmitIntrinsics::preprocessUndefs(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(F))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    Instruction *I = Worklist.front();
    bool BPrepared = false;
    Worklist.pop();

    for (auto &Op : I->operands()) {
      auto *AggrUndef = dyn_cast<UndefValue>(Op);
      if (!AggrUndef || !Op->getType()->isAggregateType())
        continue;

      // position the builder once per instruction
      if (!BPrepared) {
        // NOTE(review): extraction gap — doxygen line 617 (presumably the
        // builder-positioning call, e.g. setInsertPointSkippingPhis(B, I))
        // is missing here.
        BPrepared = true;
      }
      auto *IntrUndef = B.CreateIntrinsic(Intrinsic::spv_undef, {}, {});
      Worklist.push(IntrUndef);
      I->replaceUsesOfWith(Op, IntrUndef);
      // remember the original constant and its type for later lowering
      AggrConsts[IntrUndef] = AggrUndef;
      AggrConstTypes[IntrUndef] = AggrUndef->getType();
    }
  }
}
628
// Replaces aggregate-constant operands (vectors, arrays, structs and
// non-vector zero-initializers) with spv_const_composite calls so the
// constants survive IR translation; new calls are re-queued so nested
// composites are also rewritten.
void SPIRVEmitIntrinsics::preprocessCompositeConstants(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(F))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    auto *I = Worklist.front();
    bool IsPhi = isa<PHINode>(I), BPrepared = false;
    assert(I);
    bool KeepInst = false;
    for (const auto &Op : I->operands()) {
      Constant *AggrConst = nullptr;
      Type *ResTy = nullptr;
      if (auto *COp = dyn_cast<ConstantVector>(Op)) {
        // vectors keep their own type as the intrinsic result type
        AggrConst = cast<Constant>(COp);
        ResTy = COp->getType();
      } else if (auto *COp = dyn_cast<ConstantArray>(Op)) {
        // other aggregates use a forced i32 result (see isAggrConstForceInt32)
        AggrConst = cast<Constant>(COp);
        ResTy = B.getInt32Ty();
      } else if (auto *COp = dyn_cast<ConstantStruct>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = B.getInt32Ty();
      } else if (auto *COp = dyn_cast<ConstantDataArray>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = B.getInt32Ty();
      } else if (auto *COp = dyn_cast<ConstantAggregateZero>(Op)) {
        if (!Op->getType()->isVectorTy()) {
          AggrConst = cast<Constant>(COp);
          ResTy = B.getInt32Ty();
        }
      }
      if (AggrConst) {
        // NOTE(review): extraction gap — doxygen line 661 (presumably
        // `SmallVector<Value *> Args;`) is missing here.
        if (auto *COp = dyn_cast<ConstantDataSequential>(Op))
          for (unsigned i = 0; i < COp->getNumElements(); ++i)
            Args.push_back(COp->getElementAsConstant(i));
        else
          for (auto &COp : AggrConst->operands())
            Args.push_back(COp);
        if (!BPrepared) {
          // PHI operands must be materialized in the entry block, past allocas
          IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
                : B.SetInsertPoint(I);
          BPrepared = true;
        }
        auto *CI =
            B.CreateIntrinsic(Intrinsic::spv_const_composite, {ResTy}, {Args});
        Worklist.push(CI);
        I->replaceUsesOfWith(Op, CI);
        KeepInst = true;
        AggrConsts[CI] = AggrConst;
        AggrConstTypes[CI] = deduceNestedTypeHelper(AggrConst);
      }
    }
    // keep I queued while it still produced rewrites this round
    if (!KeepInst)
      Worklist.pop();
  }
}
686
// Inline-asm calls are mirrored into a spv_inline_asm intrinsic carrying
// the asm's function type and constraint string as metadata plus the
// original arguments; all other calls pass through untouched.
Instruction *SPIRVEmitIntrinsics::visitCallInst(CallInst &Call) {
  if (!Call.isInlineAsm())
    return &Call;

  const InlineAsm *IA = cast<InlineAsm>(Call.getCalledOperand());
  LLVMContext &Ctx = F->getContext();

  Constant *TyC = UndefValue::get(IA->getFunctionType());
  MDString *ConstraintString = MDString::get(Ctx, IA->getConstraintString());
  // NOTE(review): extraction gap — doxygen line 696 (presumably the head of
  // the `Args` initializer, `SmallVector<Value *> Args = {`) is missing;
  // the two lines below are its elements.
      buildMD(TyC),
      MetadataAsValue::get(Ctx, MDNode::get(Ctx, ConstraintString))};
  for (unsigned OpIdx = 0; OpIdx < Call.arg_size(); OpIdx++)
    Args.push_back(Call.getArgOperand(OpIdx));

  IRBuilder<> B(Call.getParent());
  B.SetInsertPoint(&Call);
  B.CreateIntrinsic(Intrinsic::spv_inline_asm, {}, {Args});
  return &Call;
}
707
// Rewrites a SwitchInst into a spv_switch intrinsic (successors encoded as
// block addresses) plus a placeholder indirectbr that keeps the successors
// reachable until IR translation removes it.
Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &I) {
  BasicBlock *ParentBB = I.getParent();
  IRBuilder<> B(ParentBB);
  B.SetInsertPoint(&I);
  // NOTE(review): extraction gap — doxygen lines 712-713 (presumably the
  // declarations of the `Args` and `BBCases` vectors used below) are
  // missing here.
  for (auto &Op : I.operands()) {
    if (Op.get()->getType()->isSized()) {
      // condition and case values pass through as-is
      Args.push_back(Op);
    } else if (BasicBlock *BB = dyn_cast<BasicBlock>(Op.get())) {
      // successor blocks are encoded as block addresses
      BBCases.push_back(BB);
      Args.push_back(BlockAddress::get(BB->getParent(), BB));
    } else {
      report_fatal_error("Unexpected switch operand");
    }
  }
  CallInst *NewI = B.CreateIntrinsic(Intrinsic::spv_switch,
                                     {I.getOperand(0)->getType()}, {Args});
  // remove switch to avoid its unneeded and undesirable unwrap into branches
  // and conditions
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  // insert artificial and temporary instruction to preserve valid CFG,
  // it will be removed after IR translation pass
  B.SetInsertPoint(ParentBB);
  IndirectBrInst *BrI = B.CreateIndirectBr(
      Constant::getNullValue(PointerType::getUnqual(ParentBB->getContext())),
      BBCases.size());
  for (BasicBlock *BBCase : BBCases)
    BrI->addDestination(BBCase);
  return BrI;
}
740
// Rewrites a GEP into a spv_gep intrinsic call that records the inbounds
// flag followed by all original operands.
Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 2> Types = {I.getType(), I.getOperand(0)->getType()};
  // NOTE(review): extraction gap — doxygen line 745 (presumably
  // `SmallVector<Value *, 4> Args;`) is missing here.
  Args.push_back(B.getInt1(I.isInBounds()));
  for (auto &Op : I.operands())
    Args.push_back(Op);
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  return NewI;
}
754
755Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &I) {
756 IRBuilder<> B(I.getParent());
757 B.SetInsertPoint(&I);
758 Value *Source = I.getOperand(0);
759
760 // SPIR-V, contrary to LLVM 17+ IR, supports bitcasts between pointers of
761 // varying element types. In case of IR coming from older versions of LLVM
762 // such bitcasts do not provide sufficient information, should be just skipped
763 // here, and handled in insertPtrCastOrAssignTypeInstr.
764 if (isPointerTy(I.getType())) {
765 I.replaceAllUsesWith(Source);
766 I.eraseFromParent();
767 return nullptr;
768 }
769
770 SmallVector<Type *, 2> Types = {I.getType(), Source->getType()};
771 SmallVector<Value *> Args(I.op_begin(), I.op_end());
772 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_bitcast, {Types}, {Args});
773 std::string InstName = I.hasName() ? I.getName().str() : "";
774 I.replaceAllUsesWith(NewI);
775 I.eraseFromParent();
776 NewI->setName(InstName);
777 return NewI;
778}
779
780void SPIRVEmitIntrinsics::insertAssignPtrTypeTargetExt(
781 TargetExtType *AssignedType, Value *V, IRBuilder<> &B) {
782 Type *VTy = V->getType();
783
784 // A couple of sanity checks.
785 assert(isPointerTy(VTy) && "Expect a pointer type!");
786 if (auto PType = dyn_cast<TypedPointerType>(VTy))
787 if (PType->getElementType() != AssignedType)
788 report_fatal_error("Unexpected pointer element type!");
789
790 CallInst *AssignCI = GR->findAssignPtrTypeInstr(V);
791 if (!AssignCI) {
792 buildAssignType(B, AssignedType, V);
793 return;
794 }
795
796 Type *CurrentType =
797 dyn_cast<ConstantAsMetadata>(
798 cast<MetadataAsValue>(AssignCI->getOperand(1))->getMetadata())
799 ->getType();
800 if (CurrentType == AssignedType)
801 return;
802
803 // Builtin types cannot be redeclared or casted.
804 if (CurrentType->isTargetExtTy())
805 report_fatal_error("Type mismatch " + CurrentType->getTargetExtName() +
806 "/" + AssignedType->getTargetExtName() +
807 " for value " + V->getName(),
808 false);
809
810 // Our previous guess about the type seems to be wrong, let's update
811 // inferred type according to a new, more precise type information.
812 updateAssignType(AssignCI, V, PoisonValue::get(AssignedType));
813}
814
// Rewires operand OperandToReplace of I through a spv_ptrcast to
// ExpectedElementType, unless the pointer already has that element type,
// an equivalent cast/annotation exists, or a first-time spv_assign_ptr_type
// suffices instead.
void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
    Instruction *I, Value *Pointer, Type *ExpectedElementType,
    unsigned OperandToReplace, IRBuilder<> &B) {
  // If Pointer is the result of nop BitCastInst (ptr -> ptr), use the source
  // pointer instead. The BitCastInst should be later removed when visited.
  while (BitCastInst *BC = dyn_cast<BitCastInst>(Pointer))
    Pointer = BC->getOperand(0);

  // Do not emit spv_ptrcast if Pointer's element type is ExpectedElementType
  Type *PointerElemTy = deduceElementTypeHelper(Pointer);
  if (PointerElemTy == ExpectedElementType)
    return;

  // NOTE(review): extraction gap — doxygen line 828 (presumably a builder
  // positioning call such as `setInsertPointSkippingPhis(B, I);`) is
  // missing here.
  MetadataAsValue *VMD = buildMD(PoisonValue::get(ExpectedElementType));
  unsigned AddressSpace = getPointerAddressSpace(Pointer->getType());
  bool FirstPtrCastOrAssignPtrType = true;

  // Do not emit new spv_ptrcast if equivalent one already exists or when
  // spv_assign_ptr_type already targets this pointer with the same element
  // type.
  for (auto User : Pointer->users()) {
    auto *II = dyn_cast<IntrinsicInst>(User);
    if (!II ||
        (II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
         II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
        II->getOperand(0) != Pointer)
      continue;

    // There is some spv_ptrcast/spv_assign_ptr_type already targeting this
    // pointer.
    FirstPtrCastOrAssignPtrType = false;
    // NOTE(review): extraction gap — doxygen line 849 (presumably the
    // `AddressSpace)` tail of this comparison) is missing here. Also note
    // the dyn_cast<ConstantInt> result is dereferenced without a null
    // check — cast<> would be the idiomatic spelling.
    if (II->getOperand(1) != VMD ||
        dyn_cast<ConstantInt>(II->getOperand(2))->getSExtValue() !=
      continue;

    // The spv_ptrcast/spv_assign_ptr_type targeting this pointer is of the same
    // element type and address space.
    if (II->getIntrinsicID() != Intrinsic::spv_ptrcast)
      return;

    // This must be a spv_ptrcast, do not emit new if this one has the same BB
    // as I. Otherwise, search for other spv_ptrcast/spv_assign_ptr_type.
    if (II->getParent() != I->getParent())
      continue;

    I->setOperand(OperandToReplace, II);
    return;
  }

  // // Do not emit spv_ptrcast if it would cast to the default pointer element
  // // type (i8) of the same address space.
  // if (ExpectedElementType->isIntegerTy(8))
  // return;

  // If this would be the first spv_ptrcast, do not emit spv_ptrcast and emit
  // spv_assign_ptr_type instead.
  if (FirstPtrCastOrAssignPtrType &&
      (isa<Instruction>(Pointer) || isa<Argument>(Pointer))) {
    buildAssignPtr(B, ExpectedElementType, Pointer);
    return;
  }

  // Emit spv_ptrcast
  SmallVector<Type *, 2> Types = {Pointer->getType(), Pointer->getType()};
  // NOTE(review): extraction gap — doxygen line 881 (presumably the `Args`
  // initializer, e.g. `{Pointer, VMD, B.getInt32(AddressSpace)}`) is
  // missing here.
  auto *PtrCastI = B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
  I->setOperand(OperandToReplace, PtrCastI);
}
885
// Inserts spv_ptrcast / assign-type annotations for the pointer operands of
// loads, stores, GEPs, and calls to (demangled) builtins whose formal
// parameter pointee types are known or deducible.
void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(Instruction *I,
                                                         IRBuilder<> &B) {
  // Handle basic instructions:
  StoreInst *SI = dyn_cast<StoreInst>(I);
  if (SI && F->getCallingConv() == CallingConv::SPIR_KERNEL &&
      isPointerTy(SI->getValueOperand()->getType()) &&
      isa<Argument>(SI->getValueOperand())) {
    // storing a kernel pointer argument: treat the stored value as i8*
    return replacePointerOperandWithPtrCast(
        I, SI->getValueOperand(), IntegerType::getInt8Ty(F->getContext()), 0,
        B);
  } else if (SI) {
    // stored value's type constrains the destination pointer (operand 1)
    return replacePointerOperandWithPtrCast(
        I, SI->getPointerOperand(), SI->getValueOperand()->getType(), 1, B);
  } else if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    return replacePointerOperandWithPtrCast(I, LI->getPointerOperand(),
                                            LI->getType(), 0, B);
  } else if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(I)) {
    return replacePointerOperandWithPtrCast(I, GEPI->getPointerOperand(),
                                            GEPI->getSourceElementType(), 0, B);
  }

  // Handle calls to builtins (non-intrinsics):
  CallInst *CI = dyn_cast<CallInst>(I);
  // NOTE(review): extraction gap — doxygen line 910 (presumably the rest of
  // this condition, e.g. checks that the callee exists and is not an
  // intrinsic) is missing here.
  if (!CI || CI->isIndirectCall() || CI->isInlineAsm() ||
    return;

  // collect information about formal parameter types
  // NOTE(review): extraction gap — doxygen line 915 (presumably the call
  // producing the demangled builtin name) is missing here.
  std::string DemangledName =
  Function *CalledF = CI->getCalledFunction();
  SmallVector<Type *, 4> CalledArgTys;
  bool HaveTypes = false;
  for (unsigned OpIdx = 0; OpIdx < CalledF->arg_size(); ++OpIdx) {
    Argument *CalledArg = CalledF->getArg(OpIdx);
    Type *ArgType = CalledArg->getType();
    if (!isPointerTy(ArgType)) {
      // non-pointer formals contribute no pointee type
      CalledArgTys.push_back(nullptr);
    } else if (isTypedPointerTy(ArgType)) {
      CalledArgTys.push_back(cast<TypedPointerType>(ArgType)->getElementType());
      HaveTypes = true;
    } else {
      // untyped pointer: try registry, attributes, builtin knowledge, then
      // deduction from the formal argument's users
      Type *ElemTy = GR->findDeducedElementType(CalledArg);
      if (!ElemTy && hasPointeeTypeAttr(CalledArg))
        ElemTy = getPointeeTypeByAttr(CalledArg);
      if (!ElemTy) {
        ElemTy = getPointeeTypeByCallInst(DemangledName, CalledF, OpIdx);
        if (ElemTy) {
          GR->addDeducedElementType(CalledArg, ElemTy);
        } else {
          for (User *U : CalledArg->users()) {
            if (Instruction *Inst = dyn_cast<Instruction>(U)) {
              if ((ElemTy = deduceElementTypeHelper(Inst)) != nullptr)
                break;
            }
          }
        }
      }
      HaveTypes |= ElemTy != nullptr;
      CalledArgTys.push_back(ElemTy);
    }
  }

  if (DemangledName.empty() && !HaveTypes)
    return;

  for (unsigned OpIdx = 0; OpIdx < CI->arg_size(); OpIdx++) {
    Value *ArgOperand = CI->getArgOperand(OpIdx);
    if (!isPointerTy(ArgOperand->getType()))
      continue;

    // Constants (nulls/undefs) are handled in insertAssignPtrTypeIntrs()
    if (!isa<Instruction>(ArgOperand) && !isa<Argument>(ArgOperand)) {
      // However, we may have assumptions about the formal argument's type and
      // may have a need to insert a ptr cast for the actual parameter of this
      // call.
      Argument *CalledArg = CalledF->getArg(OpIdx);
      if (!GR->findDeducedElementType(CalledArg))
        continue;
    }

    Type *ExpectedType =
        OpIdx < CalledArgTys.size() ? CalledArgTys[OpIdx] : nullptr;
    // NOTE(review): extraction gap — doxygen line 970 (presumably an
    // assignment of ExpectedType from a builtin-call argument base type
    // lookup) is missing here.
    if (!ExpectedType && !DemangledName.empty())
        DemangledName, OpIdx, I->getContext());
    if (!ExpectedType)
      continue;

    if (ExpectedType->isTargetExtTy())
      // builtin types must be declared via assign-type, not casted
      insertAssignPtrTypeTargetExt(cast<TargetExtType>(ExpectedType),
                                   ArgOperand, B);
    else
      replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType, OpIdx, B);
  }
}
982
983Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &I) {
984 SmallVector<Type *, 4> Types = {I.getType(), I.getOperand(0)->getType(),
985 I.getOperand(1)->getType(),
986 I.getOperand(2)->getType()};
987 IRBuilder<> B(I.getParent());
988 B.SetInsertPoint(&I);
989 SmallVector<Value *> Args(I.op_begin(), I.op_end());
990 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_insertelt, {Types}, {Args});
991 std::string InstName = I.hasName() ? I.getName().str() : "";
992 I.replaceAllUsesWith(NewI);
993 I.eraseFromParent();
994 NewI->setName(InstName);
995 return NewI;
996}
997
999SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &I) {
1000 IRBuilder<> B(I.getParent());
1001 B.SetInsertPoint(&I);
1002 SmallVector<Type *, 3> Types = {I.getType(), I.getVectorOperandType(),
1003 I.getIndexOperand()->getType()};
1004 SmallVector<Value *, 2> Args = {I.getVectorOperand(), I.getIndexOperand()};
1005 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_extractelt, {Types}, {Args});
1006 std::string InstName = I.hasName() ? I.getName().str() : "";
1007 I.replaceAllUsesWith(NewI);
1008 I.eraseFromParent();
1009 NewI->setName(InstName);
1010 return NewI;
1011}
1012
// Lowers insertvalue into the spv_insertv intrinsic: aggregate-typed undef
// operands are represented as i32 undef, and constant indices are appended
// as i32 immediates. NOTE(review): the declaration of `Args` (orig. line
// 1017) is truncated in this rendering.
1013Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &I) {
1014 IRBuilder<> B(I.getParent());
1015 B.SetInsertPoint(&I);
1016 SmallVector<Type *, 1> Types = {I.getInsertedValueOperand()->getType()};
1018 for (auto &Op : I.operands())
1019 if (isa<UndefValue>(Op))
1020 Args.push_back(UndefValue::get(B.getInt32Ty()));
1021 else
1022 Args.push_back(Op);
1023 for (auto &Op : I.indices())
1024 Args.push_back(B.getInt32(Op));
1025 Instruction *NewI =
1026 B.CreateIntrinsic(Intrinsic::spv_insertv, {Types}, {Args});
  // Aggregate-producing instruction: reroute users via replaceMemInstrUses.
1027 replaceMemInstrUses(&I, NewI, B);
1028 return NewI;
1029}
1030
// Lowers extractvalue into the spv_extractv intrinsic; constant indices are
// appended as i32 immediates. NOTE(review): the declaration of `Args` (orig.
// line 1034) is truncated in this rendering.
1031Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &I) {
1032 IRBuilder<> B(I.getParent());
1033 B.SetInsertPoint(&I);
1035 for (auto &Op : I.operands())
1036 Args.push_back(Op);
1037 for (auto &Op : I.indices())
1038 Args.push_back(B.getInt32(Op));
1039 auto *NewI =
1040 B.CreateIntrinsic(Intrinsic::spv_extractv, {I.getType()}, {Args});
1041 I.replaceAllUsesWith(NewI);
1042 I.eraseFromParent();
1043 return NewI;
1044}
1045
// Lowers aggregate-typed loads into the spv_load intrinsic; other loads are
// left untouched. NOTE(review): the line declaring `Flags` (orig. line 1053)
// is truncated in this rendering.
1046Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &I) {
  // Only aggregate loads need this lowering.
1047 if (!I.getType()->isAggregateType())
1048 return &I;
1049 IRBuilder<> B(I.getParent());
1050 B.SetInsertPoint(&I);
  // Suppress constant tracking for the operands of the replacement.
1051 TrackConstants = false;
1052 const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
1054 TLI->getLoadMemOperandFlags(I, F->getParent()->getDataLayout());
  // Memory-operand flags are encoded as an i16 and alignment as an i8.
1055 auto *NewI =
1056 B.CreateIntrinsic(Intrinsic::spv_load, {I.getOperand(0)->getType()},
1057 {I.getPointerOperand(), B.getInt16(Flags),
1058 B.getInt8(I.getAlign().value())});
1059 replaceMemInstrUses(&I, NewI, B);
1060 return NewI;
1061}
1062
// Lowers aggregate/vector stores (those recorded in AggrStores during
// runOnFunction) into the spv_store intrinsic. NOTE(review): the line
// declaring `Flags` (orig. line 1070) is truncated in this rendering.
1063Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &I) {
1064 if (!AggrStores.contains(&I))
1065 return &I;
1066 IRBuilder<> B(I.getParent());
1067 B.SetInsertPoint(&I);
  // Suppress constant tracking for the operands of the replacement.
1068 TrackConstants = false;
1069 const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
1071 TLI->getStoreMemOperandFlags(I, F->getParent()->getDataLayout());
1072 auto *PtrOp = I.getPointerOperand();
  // Memory-operand flags are encoded as an i16 and alignment as an i8.
1073 auto *NewI = B.CreateIntrinsic(
1074 Intrinsic::spv_store, {I.getValueOperand()->getType(), PtrOp->getType()},
1075 {I.getValueOperand(), PtrOp, B.getInt16(Flags),
1076 B.getInt8(I.getAlign().value())});
  // The original store produces no value, so it can simply be erased.
1077 I.eraseFromParent();
1078 return NewI;
1079}
1080
// Lowers alloca into spv_alloca (fixed size) or spv_alloca_array (VLA).
// NOTE(review): the `report_fatal_error(` call line (orig. line 1087) is
// truncated in this rendering.
1081Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &I) {
1082 Value *ArraySize = nullptr;
1083 if (I.isArrayAllocation()) {
  // Array (variable-length) allocations are only representable with the
  // SPV_INTEL_variable_length_array extension.
1084 const SPIRVSubtarget *STI = TM->getSubtargetImpl(*I.getFunction());
1085 if (!STI->canUseExtension(
1086 SPIRV::Extension::SPV_INTEL_variable_length_array))
1088 "array allocation: this instruction requires the following "
1089 "SPIR-V extension: SPV_INTEL_variable_length_array",
1090 false);
1091 ArraySize = I.getArraySize();
1092 }
1093 IRBuilder<> B(I.getParent());
1094 B.SetInsertPoint(&I);
1095 TrackConstants = false;
1096 Type *PtrTy = I.getType();
1097 auto *NewI =
1098 ArraySize ? B.CreateIntrinsic(Intrinsic::spv_alloca_array,
1099 {PtrTy, ArraySize->getType()}, {ArraySize})
1100 : B.CreateIntrinsic(Intrinsic::spv_alloca, {PtrTy}, {});
  // Preserve the original value name on the replacement.
1101 std::string InstName = I.hasName() ? I.getName().str() : "";
1102 I.replaceAllUsesWith(NewI);
1103 I.eraseFromParent();
1104 NewI->setName(InstName);
1105 return NewI;
1106}
1107
// Lowers cmpxchg (whose result is an aggregate {value, success} pair) into
// the spv_cmpxchg intrinsic, appending the sync scope and the success /
// failure memory semantics as i32 immediates. NOTE(review): the declaration
// of `Args` (orig. line 1112) is truncated in this rendering.
1108Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &I) {
1109 assert(I.getType()->isAggregateType() && "Aggregate result is expected");
1110 IRBuilder<> B(I.getParent());
1111 B.SetInsertPoint(&I);
1113 for (auto &Op : I.operands())
1114 Args.push_back(Op);
1115 Args.push_back(B.getInt32(I.getSyncScopeID()));
1116 Args.push_back(B.getInt32(
1117 static_cast<uint32_t>(getMemSemantics(I.getSuccessOrdering()))));
1118 Args.push_back(B.getInt32(
1119 static_cast<uint32_t>(getMemSemantics(I.getFailureOrdering()))));
1120 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
1121 {I.getPointerOperand()->getType()}, {Args});
1122 replaceMemInstrUses(&I, NewI, B);
1123 return NewI;
1124}
1125
1126Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &I) {
1127 IRBuilder<> B(I.getParent());
1128 B.SetInsertPoint(&I);
1129 B.CreateIntrinsic(Intrinsic::spv_unreachable, {}, {});
1130 return &I;
1131}
1132
// Emits spv_init_global for globals with a defined (non-undef) initializer,
// and spv_unref_global for unused globals without one, so the IRTranslator
// can reconstruct them. NOTE(review): the line declaring `Init` (orig. line
// 1143, presumably `Constant *Init = GV.getInitializer();`) is truncated in
// this rendering.
1133void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV,
1134 IRBuilder<> &B) {
1135 // Skip special artificial variable llvm.global.annotations.
1136 if (GV.getName() == "llvm.global.annotations")
1137 return;
1138 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
1139 // Deduce element type and store results in Global Registry.
1140 // Result is ignored, because TypedPointerType is not supported
1141 // by llvm IR general logic.
1142 deduceElementTypeHelper(&GV);
1144 Type *Ty = isAggrConstForceInt32(Init) ? B.getInt32Ty() : Init->getType();
1145 Constant *Const = isAggrConstForceInt32(Init) ? B.getInt32(1) : Init;
1146 auto *InitInst = B.CreateIntrinsic(Intrinsic::spv_init_global,
1147 {GV.getType(), Ty}, {&GV, Const});
  // Install the real initializer even when a placeholder operand was used
  // to pick the intrinsic's overload type above.
1148 InitInst->setArgOperand(1, Init);
1149 }
  // Globals with no uses and no defined initializer get an unref marker.
1150 if ((!GV.hasInitializer() || isa<UndefValue>(GV.getInitializer())) &&
1151 GV.getNumUses() == 0)
1152 B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.getType(), &GV);
1153}
1154
// Emits an assign-ptr-type intrinsic for pointer-typed instruction results
// (bitcasts and results not requiring a type assignment are skipped).
// NOTE(review): two lines (orig. 1157 and 1162) are truncated in this
// rendering — verify against the upstream source.
1155void SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *I,
1156 IRBuilder<> &B) {
1158 if (!isPointerTy(I->getType()) || !requireAssignType(I) ||
1159 isa<BitCastInst>(I))
1160 return;
1161
1163 Type *ElemTy = deduceElementType(I);
1164 buildAssignPtr(B, ElemTy, I);
1165}
1166
// Emits assign-type intrinsics for non-pointer instruction results and for
// constant operands (nulls/undefs/GEP constant expressions) that would
// otherwise lose their LLVM type during IRTranslation. NOTE(review): lines
// 1169, 1172 and 1189 of the original are truncated in this rendering.
1167void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *I,
1168 IRBuilder<> &B) {
1170 Type *Ty = I->getType();
1171 if (!Ty->isVoidTy() && !isPointerTy(Ty) && requireAssignType(I)) {
1173 Type *TypeToAssign = Ty;
1174 if (auto *II = dyn_cast<IntrinsicInst>(I)) {
  // For composite-constant / undef intrinsics the real aggregate type was
  // recorded in AggrConstTypes during preprocessing.
1175 if (II->getIntrinsicID() == Intrinsic::spv_const_composite ||
1176 II->getIntrinsicID() == Intrinsic::spv_undef) {
1177 auto It = AggrConstTypes.find(II);
1178 if (It == AggrConstTypes.end())
1179 report_fatal_error("Unknown composite intrinsic type");
1180 TypeToAssign = It->second;
1181 }
1182 }
1183 buildAssignType(B, TypeToAssign, I);
1184 }
1185 for (const auto &Op : I->operands()) {
1186 if (isa<ConstantPointerNull>(Op) || isa<UndefValue>(Op) ||
1187 // Check GetElementPtrConstantExpr case.
1188 (isa<ConstantExpr>(Op) && isa<GEPOperator>(Op))) {
1190 Type *OpTy = Op->getType();
1191 if (isa<UndefValue>(Op) && OpTy->isAggregateType()) {
  // Aggregate undefs are typed as i32 undef (placeholder convention).
1192 CallInst *AssignCI =
1193 buildIntrWithMD(Intrinsic::spv_assign_type, {B.getInt32Ty()}, Op,
1194 UndefValue::get(B.getInt32Ty()), {}, B);
1195 GR->addAssignPtrTypeInstr(Op, AssignCI);
1196 } else if (!isa<Instruction>(Op)) {
  // NOTE(review): this OpTy shadows the OpTy declared above.
1197 Type *OpTy = Op->getType();
1198 if (auto PType = dyn_cast<TypedPointerType>(OpTy)) {
1199 buildAssignPtr(B, PType->getElementType(), Op);
1200 } else if (isPointerTy(OpTy)) {
1201 Type *ElemTy = GR->findDeducedElementType(Op);
1202 buildAssignPtr(B, ElemTy ? ElemTy : deduceElementType(Op), Op);
1203 } else {
1204 CallInst *AssignCI = buildIntrWithMD(Intrinsic::spv_assign_type,
1205 {OpTy}, Op, Op, {}, B);
1206 GR->addAssignPtrTypeInstr(Op, AssignCI);
1207 }
1208 }
1209 }
1210 }
1211}
1212
// Propagates !spirv.Decorations metadata via the spv_assign_decoration
// intrinsic so it survives IRTranslation. NOTE(review): one line (orig.
// 1216) is truncated in this rendering.
1213void SPIRVEmitIntrinsics::insertSpirvDecorations(Instruction *I,
1214 IRBuilder<> &B) {
1215 if (MDNode *MD = I->getMetadata("spirv.Decorations")) {
1217 B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
1218 {I, MetadataAsValue::get(I->getContext(), MD)});
1219 }
1220}
1221
// Post-visit processing: wraps constants in spv_track_constant so they can
// be deduplicated after IRTranslation, and records the instruction's name
// via spv_assign_name. NOTE(review): lines 1227 and 1261-1262 of the
// original are truncated in this rendering.
1222void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *I,
1223 IRBuilder<> &B) {
1224 auto *II = dyn_cast<IntrinsicInst>(I);
1225 if (II && II->getIntrinsicID() == Intrinsic::spv_const_composite &&
1226 TrackConstants) {
  // Track the original aggregate constant recorded in AggrConsts.
1228 auto t = AggrConsts.find(I);
1229 assert(t != AggrConsts.end());
1230 auto *NewOp =
1231 buildIntrWithMD(Intrinsic::spv_track_constant,
1232 {II->getType(), II->getType()}, t->second, I, {}, B);
1233 I->replaceAllUsesWith(NewOp);
  // Re-point the tracker at I after RAUW rewired its own operand too.
1234 NewOp->setArgOperand(0, I);
1235 }
1236 bool IsPhi = isa<PHINode>(I), BPrepared = false;
1237 for (const auto &Op : I->operands()) {
1238 if ((isa<ConstantAggregateZero>(Op) && Op->getType()->isVectorTy()) ||
1239 isa<PHINode>(I) || isa<SwitchInst>(I))
1240 TrackConstants = false;
1241 if ((isa<ConstantData>(Op) || isa<ConstantExpr>(Op)) && TrackConstants) {
1242 unsigned OpNo = Op.getOperandNo();
  // Skip operands that must remain immediate (spv_gep base, ImmArg params).
1243 if (II && ((II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
1244 (II->paramHasAttr(OpNo, Attribute::ImmArg))))
1245 continue;
1246 if (!BPrepared) {
  // Phi operands cannot be materialized inside the phi block; insert past
  // the function's allocas instead.
1247 IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
1248 : B.SetInsertPoint(I);
1249 BPrepared = true;
1250 }
1251 Value *OpTyVal = Op;
1252 if (Op->getType()->isTargetExtTy())
1253 OpTyVal = PoisonValue::get(Op->getType());
1254 auto *NewOp = buildIntrWithMD(Intrinsic::spv_track_constant,
1255 {Op->getType(), OpTyVal->getType()}, Op,
1256 OpTyVal, {}, B);
1257 I->setOperand(OpNo, NewOp);
1258 }
1259 }
1260 if (I->hasName()) {
  // Record the value's string name so it can be reattached after ISel.
1263 std::vector<Value *> Args = {I};
1264 addStringImm(I->getName(), B, Args);
1265 B.CreateIntrinsic(Intrinsic::spv_assign_name, {I->getType()}, Args);
1266 }
1267}
1268
1269Type *SPIRVEmitIntrinsics::deduceFunParamElementType(Function *F,
1270 unsigned OpIdx) {
1271 std::unordered_set<Function *> FVisited;
1272 return deduceFunParamElementType(F, OpIdx, FVisited);
1273}
1274
// Recursive worker: deduces the pointee type of F's OpIdx-th parameter by
// inspecting actual arguments at F's call sites, their users, and — if the
// argument is itself a formal parameter of the caller — recursing into the
// caller. FVisited breaks call-graph cycles. NOTE(review): the declaration
// of `Lookup` (orig. line 1283) is truncated in this rendering.
1275Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
1276 Function *F, unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
1277 // maybe a cycle
1278 if (FVisited.find(F) != FVisited.end())
1279 return nullptr;
1280 FVisited.insert(F);
1281
1282 std::unordered_set<Value *> Visited;
1284 // search in function's call sites
1285 for (User *U : F->users()) {
1286 CallInst *CI = dyn_cast<CallInst>(U);
1287 if (!CI || OpIdx >= CI->arg_size())
1288 continue;
1289 Value *OpArg = CI->getArgOperand(OpIdx);
1290 if (!isPointerTy(OpArg->getType()))
1291 continue;
1292 // maybe we already know operand's element type
1293 if (Type *KnownTy = GR->findDeducedElementType(OpArg))
1294 return KnownTy;
1295 // try to deduce from the operand itself
1296 Visited.clear();
1297 if (Type *Ty = deduceElementTypeHelper(OpArg, Visited))
1298 return Ty;
1299 // search in actual parameter's users
1300 for (User *OpU : OpArg->users()) {
1301 Instruction *Inst = dyn_cast<Instruction>(OpU);
1302 if (!Inst || Inst == CI)
1303 continue;
1304 Visited.clear();
1305 if (Type *Ty = deduceElementTypeHelper(Inst, Visited))
1306 return Ty;
1307 }
1308 // check if it's a formal parameter of the outer function
1309 if (!CI->getParent() || !CI->getParent()->getParent())
1310 continue;
1311 Function *OuterF = CI->getParent()->getParent();
1312 if (FVisited.find(OuterF) != FVisited.end())
1313 continue;
1314 for (unsigned i = 0; i < OuterF->arg_size(); ++i) {
1315 if (OuterF->getArg(i) == OpArg) {
  // Defer recursion into the caller until all call sites were scanned.
1316 Lookup.push_back(std::make_pair(OuterF, i));
1317 break;
1318 }
1319 }
1320 }
1321
1322 // search in function parameters
1323 for (auto &Pair : Lookup) {
1324 if (Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
1325 return Ty;
1326 }
1327
1328 return nullptr;
1329}
1330
1331void SPIRVEmitIntrinsics::processParamTypesByFunHeader(Function *F,
1332 IRBuilder<> &B) {
1333 B.SetInsertPointPastAllocas(F);
1334 for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {
1335 Argument *Arg = F->getArg(OpIdx);
1336 if (!isUntypedPointerTy(Arg->getType()))
1337 continue;
1338 Type *ElemTy = GR->findDeducedElementType(Arg);
1339 if (!ElemTy && hasPointeeTypeAttr(Arg) &&
1340 (ElemTy = getPointeeTypeByAttr(Arg)) != nullptr)
1341 buildAssignPtr(B, ElemTy, Arg);
1342 }
1343}
1344
1345void SPIRVEmitIntrinsics::processParamTypes(Function *F, IRBuilder<> &B) {
1346 B.SetInsertPointPastAllocas(F);
1347 for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {
1348 Argument *Arg = F->getArg(OpIdx);
1349 if (!isUntypedPointerTy(Arg->getType()))
1350 continue;
1351 Type *ElemTy = GR->findDeducedElementType(Arg);
1352 if (!ElemTy && (ElemTy = deduceFunParamElementType(F, OpIdx)) != nullptr)
1353 buildAssignPtr(B, ElemTy, Arg);
1354 }
1355}
1356
// Per-function driver: records aggregate stores, emits global-variable and
// composite-constant intrinsics, then runs the type-assignment passes and
// the instruction visitors over a pre-collected worklist. NOTE(review):
// lines 1389 (Worklist declaration), 1401 and 1410 of the original are
// truncated in this rendering.
1357bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
1358 if (Func.isDeclaration())
1359 return false;
1360
1361 const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(Func);
1362 GR = ST.getSPIRVGlobalRegistry();
1363
1364 F = &Func;
1365 IRBuilder<> B(Func.getContext());
  // Reset per-function state from any previous run.
1366 AggrConsts.clear();
1367 AggrConstTypes.clear();
1368 AggrStores.clear();
1369
1370 processParamTypesByFunHeader(F, B);
1371
1372 // StoreInst's operand type can be changed during the next transformations,
1373 // so we need to store it in the set. Also store already transformed types.
1374 for (auto &I : instructions(Func)) {
1375 StoreInst *SI = dyn_cast<StoreInst>(&I);
1376 if (!SI)
1377 continue;
1378 Type *ElTy = SI->getValueOperand()->getType();
1379 if (ElTy->isAggregateType() || ElTy->isVectorTy())
1380 AggrStores.insert(&I);
1381 }
1382
  // Global-variable intrinsics are emitted at the top of the entry block.
1383 B.SetInsertPoint(&Func.getEntryBlock(), Func.getEntryBlock().begin());
1384 for (auto &GV : Func.getParent()->globals())
1385 processGlobalValue(GV, B);
1386
1387 preprocessUndefs(B);
1388 preprocessCompositeConstants(B);
  // Snapshot the instruction list: the passes below insert new instructions.
1390 for (auto &I : instructions(Func))
1391 Worklist.push_back(&I);
1392
1393 for (auto &I : Worklist) {
1394 // Don't emit intrinsics for convergence intrinsics.
1395 if (isConvergenceIntrinsic(I))
1396 continue;
1397
1398 insertAssignPtrTypeIntrs(I, B);
1399 insertAssignTypeIntrs(I, B);
1400 insertPtrCastOrAssignTypeInstr(I, B);
1402 }
1403
1404 for (auto &I : instructions(Func))
1405 deduceOperandElementType(&I);
1406
1407 for (auto *I : Worklist) {
1408 TrackConstants = true;
1409 if (!I->getType()->isVoidTy() || isa<StoreInst>(I))
1411 // Visitors return either the original/newly created instruction for further
1412 // processing, nullptr otherwise.
1413 I = visit(*I);
1414 if (!I)
1415 continue;
1416
1417 // Don't emit intrinsics for convergence operations.
1418 if (isConvergenceIntrinsic(I))
1419 continue;
1420
1421 processInstrAfterVisit(I, B);
1422 }
1423
1424 return true;
1425}
1426
1427bool SPIRVEmitIntrinsics::runOnModule(Module &M) {
1428 bool Changed = false;
1429
1430 for (auto &F : M) {
1431 Changed |= runOnFunction(F);
1432 }
1433
1434 for (auto &F : M) {
1435 // check if function parameter types are set
1436 if (!F.isDeclaration() && !F.isIntrinsic()) {
1437 const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(F);
1438 GR = ST.getSPIRVGlobalRegistry();
1439 IRBuilder<> B(F.getContext());
1440 processParamTypes(&F, B);
1441 }
1442 }
1443
1444 return Changed;
1445}
1446
1448 return new SPIRVEmitIntrinsics(TM);
1449}
static unsigned getIntrinsicID(const SDNode *N)
aarch64 promote const
unsigned Intr
always inline
Expand Atomic instructions
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
return RetTy
static bool runOnFunction(Function &F, bool PostInlining)
#define F(x, y, z)
Definition: MD5.cpp:55
#define I(x, y, z)
Definition: MD5.cpp:58
uint64_t IntrinsicInst * II
const char LLVMTargetMachineRef TM
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
Definition: PassSupport.h:38
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
static bool isMemInstrToReplace(Instruction *I)
static bool isAggrConstForceInt32(const Value *V)
static void reportFatalOnTokenType(const Instruction *I)
static void setInsertPointAfterDef(IRBuilder<> &B, Instruction *I)
static Type * getPointeeTypeByCallInst(StringRef DemangledName, Function *CalledF, unsigned OpIdx)
static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I)
static bool requireAssignType(Instruction *I)
static void insertSpirvDecorations(MachineFunction &MF, MachineIRBuilder MIB)
static SymbolRef::Type getType(const Symbol *Sym)
Definition: TapiFile.cpp:40
static int Lookup(ArrayRef< TableEntry > Table, unsigned Opcode)
an instruction to allocate memory on the stack
Definition: Instructions.h:60
Represent the analysis usage information of a pass.
This class represents an incoming formal argument to a Function.
Definition: Argument.h:31
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
Definition: ArrayRef.h:41
An instruction that atomically checks whether a specified value is in a memory location,...
Definition: Instructions.h:540
LLVM Basic Block Representation.
Definition: BasicBlock.h:60
const Function * getParent() const
Return the enclosing method, or null if none.
Definition: BasicBlock.h:206
LLVMContext & getContext() const
Get the context in which this basic block lives.
Definition: BasicBlock.cpp:168
This class represents a no-op cast from one type to another.
static BlockAddress * get(Function *F, BasicBlock *BB)
Return a BlockAddress for the specified function and basic block.
Definition: Constants.cpp:1833
bool isInlineAsm() const
Check if this call is an inline asm statement.
Definition: InstrTypes.h:1817
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
Definition: InstrTypes.h:1750
bool isIndirectCall() const
Return true if the callsite is an indirect call.
Value * getArgOperand(unsigned i) const
Definition: InstrTypes.h:1695
void setArgOperand(unsigned i, Value *v)
Definition: InstrTypes.h:1700
unsigned arg_size() const
Definition: InstrTypes.h:1693
This class represents a function call, abstracting a target machine's calling convention.
This is an important base class in LLVM.
Definition: Constant.h:41
static Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
Definition: Constants.cpp:370
This class represents an Operation in the Expression.
A debug info location.
Definition: DebugLoc.h:33
iterator find(const_arg_type_t< KeyT > Val)
Definition: DenseMap.h:155
iterator end()
Definition: DenseMap.h:84
Implements a dense probed hash-table based set.
Definition: DenseSet.h:271
This instruction extracts a single (scalar) element from a VectorType value.
This instruction extracts a struct member or array element value from an aggregate value.
bool isIntrinsic() const
isIntrinsic - Returns true if the function's name starts with "llvm.".
Definition: Function.h:237
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
Definition: Function.cpp:358
size_t arg_size() const
Definition: Function.h:854
Argument * getArg(unsigned i) const
Definition: Function.h:839
an instruction for type-safe pointer arithmetic to access elements of arrays and structs
Definition: Instructions.h:974
PointerType * getType() const
Global values are always pointers.
Definition: GlobalValue.h:293
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
bool hasInitializer() const
Definitions have initializers, declarations don't.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
Definition: IRBuilder.h:2666
Indirect Branch Instruction.
void addDestination(BasicBlock *Dest)
Add a destination.
This instruction inserts a single (scalar) element into a VectorType value.
This instruction inserts a struct field of array element value into an aggregate value.
Base class for instruction visitors.
Definition: InstVisitor.h:78
RetTy visitExtractElementInst(ExtractElementInst &I)
Definition: InstVisitor.h:191
RetTy visitInsertValueInst(InsertValueInst &I)
Definition: InstVisitor.h:195
RetTy visitUnreachableInst(UnreachableInst &I)
Definition: InstVisitor.h:241
RetTy visitAtomicCmpXchgInst(AtomicCmpXchgInst &I)
Definition: InstVisitor.h:171
RetTy visitBitCastInst(BitCastInst &I)
Definition: InstVisitor.h:187
RetTy visitSwitchInst(SwitchInst &I)
Definition: InstVisitor.h:232
RetTy visitExtractValueInst(ExtractValueInst &I)
Definition: InstVisitor.h:194
RetTy visitStoreInst(StoreInst &I)
Definition: InstVisitor.h:170
RetTy visitInsertElementInst(InsertElementInst &I)
Definition: InstVisitor.h:192
RetTy visitAllocaInst(AllocaInst &I)
Definition: InstVisitor.h:168
RetTy visitCallInst(CallInst &I)
Definition: InstVisitor.h:220
RetTy visitGetElementPtrInst(GetElementPtrInst &I)
Definition: InstVisitor.h:174
void visitInstruction(Instruction &I)
Definition: InstVisitor.h:280
RetTy visitLoadInst(LoadInst &I)
Definition: InstVisitor.h:169
const BasicBlock * getParent() const
Definition: Instruction.h:152
InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
Instruction * user_back()
Specialize the methods defined in Value, as we know that an instruction can only be used by other ins...
Definition: Instruction.h:149
A wrapper class for inspecting calls to intrinsic functions.
Definition: IntrinsicInst.h:47
This is an important class for using LLVM in a threaded context.
Definition: LLVMContext.h:67
An instruction for reading from memory.
Definition: Instructions.h:185
Metadata node.
Definition: Metadata.h:1067
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
Definition: Metadata.h:1541
A single uniqued string.
Definition: Metadata.h:720
static MDString * get(LLVMContext &Context, StringRef Str)
Definition: Metadata.cpp:600
Flags
Flags values. These may be or'd together.
Metadata wrapper in the Value hierarchy.
Definition: Metadata.h:176
static MetadataAsValue * get(LLVMContext &Context, Metadata *MD)
Definition: Metadata.cpp:103
ModulePass class - This class is used to implement unstructured interprocedural optimizations and ana...
Definition: Pass.h:251
virtual bool runOnModule(Module &M)=0
runOnModule - Virtual method overriden by subclasses to process the module being operated on.
A Module instance is used to store all the information related to an LLVM module.
Definition: Module.h:65
PassRegistry - This class manages the registration and intitialization of the pass subsystem as appli...
Definition: PassRegistry.h:37
static PassRegistry * getPassRegistry()
getPassRegistry - Access the global registry object, which is automatically initialized at applicatio...
virtual void getAnalysisUsage(AnalysisUsage &) const
getAnalysisUsage - This function should be overriden by passes that need analysis information to do t...
Definition: Pass.cpp:98
virtual StringRef getPassName() const
getPassName - Return a nice clean name for a pass.
Definition: Pass.cpp:81
static PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Definition: Constants.cpp:1814
void addAssignPtrTypeInstr(Value *Val, CallInst *AssignPtrTyCI)
Type * findDeducedCompositeType(const Value *Val)
void addDeducedElementType(Value *Val, Type *Ty)
void addReturnType(const Function *ArgF, TypedPointerType *DerivedTy)
void addDeducedCompositeType(Value *Val, Type *Ty)
Type * findDeducedElementType(const Value *Val)
CallInst * findAssignPtrTypeInstr(const Value *Val)
bool canUseExtension(SPIRV::Extension::Extension E) const
size_t size() const
Definition: SmallVector.h:91
void push_back(const T &Elt)
Definition: SmallVector.h:426
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Definition: SmallVector.h:1209
An instruction for storing to memory.
Definition: Instructions.h:318
StringRef - Represent a constant reference to a string, i.e.
Definition: StringRef.h:50
bool starts_with(StringRef Prefix) const
Check if this string starts with the given Prefix.
Definition: StringRef.h:258
static StructType * create(LLVMContext &Context, StringRef Name)
This creates an identified struct.
Definition: Type.cpp:513
Multiway switch.
Class to represent target extensions types, which are generally unintrospectable from target-independ...
Definition: DerivedTypes.h:720
The instances of the Type class are immutable: once they are created, they are never changed.
Definition: Type.h:45
bool isVectorTy() const
True if this is an instance of VectorType.
Definition: Type.h:265
StringRef getTargetExtName() const
bool isTargetExtTy() const
Return true if this is a target extension type.
Definition: Type.h:207
bool isAggregateType() const
Return true if the type is an aggregate type.
Definition: Type.h:295
bool isVoidTy() const
Return true if this is 'void'.
Definition: Type.h:140
A few GPU targets, such as DXIL and SPIR-V, have typed pointers.
static TypedPointerType * get(Type *ElementType, unsigned AddressSpace)
This constructs a pointer to an object of the specified type in a numbered address space.
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
Definition: Constants.cpp:1795
This function has undefined behavior.
op_range operands()
Definition: User.h:242
Value * getOperand(unsigned i) const
Definition: User.h:169
static ConstantAsMetadata * getConstant(Value *C)
Definition: Metadata.h:472
LLVM Value Representation.
Definition: Value.h:74
Type * getType() const
All values are typed, get the type of this value.
Definition: Value.h:255
void setName(const Twine &Name)
Change the name of the value.
Definition: Value.cpp:377
iterator_range< user_iterator > users()
Definition: Value.h:421
LLVMContext & getContext() const
All values hold a context through their type.
Definition: Value.cpp:1074
unsigned getNumUses() const
This method computes the number of uses of this Value.
Definition: Value.cpp:255
StringRef getName() const
Return a constant reference to the value's name.
Definition: Value.cpp:309
bool user_empty() const
Definition: Value.h:385
std::pair< iterator, bool > insert(const ValueT &V)
Definition: DenseSet.h:206
bool contains(const_arg_type_t< ValueT > V) const
Check if the set contains the given element.
Definition: DenseSet.h:185
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr char Args[]
Key for Kernel::Metadata::mArgs.
unsigned ID
LLVM IR allows to use arbitrary numbers as calling convention identifiers.
Definition: CallingConv.h:24
@ SPIR_KERNEL
Used for SPIR kernel functions.
Definition: CallingConv.h:144
Type * parseBuiltinCallArgumentBaseType(const StringRef DemangledCall, unsigned ArgIdx, LLVMContext &Ctx)
Parses the provided ArgIdx argument base type in the DemangledCall skeleton.
NodeAddr< FuncNode * > Func
Definition: RDFGraph.h:393
This is an optimization pass for GlobalISel generic memory operations.
Definition: AddressRanges.h:18
void initializeSPIRVEmitIntrinsicsPass(PassRegistry &)
ModulePass * createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM)
unsigned getPointerAddressSpace(const Type *T)
Definition: SPIRVUtils.h:126
AddressSpace
Definition: NVPTXBaseInfo.h:21
std::string getOclOrSpirvBuiltinDemangledName(StringRef Name)
Definition: SPIRVUtils.cpp:335
bool isTypedPointerTy(const Type *T)
Definition: SPIRVUtils.h:110
bool isPointerTy(const Type *T)
Definition: SPIRVUtils.h:120
void report_fatal_error(Error Err, bool gen_crash_diag=true)
Report a serious error, calling any installed error handler.
Definition: Error.cpp:159
@ Ref
The access may reference the value stored in memory.
DWARFExpression::Operation Op
Type * getPointeeTypeByAttr(Argument *Arg)
Definition: SPIRVUtils.h:139
bool hasPointeeTypeAttr(Argument *Arg)
Definition: SPIRVUtils.h:134
void addStringImm(const StringRef &Str, MCInst &Inst)
Definition: SPIRVUtils.cpp:51
bool isUntypedPointerTy(const Type *T)
Definition: SPIRVUtils.h:115
SPIRV::MemorySemantics::MemorySemantics getMemSemantics(AtomicOrdering Ord)
Definition: SPIRVUtils.cpp:236