1//===-- SPIRVEmitIntrinsics.cpp - emit SPIRV intrinsics ---------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// The pass emits SPIRV intrinsics keeping essential high-level information for
10// the translation of LLVM IR to SPIR-V.
11//
12//===----------------------------------------------------------------------===//
13
14#include "SPIRV.h"
15#include "SPIRVBuiltins.h"
16#include "SPIRVMetadata.h"
17#include "SPIRVSubtarget.h"
18#include "SPIRVTargetMachine.h"
19#include "SPIRVUtils.h"
20#include "llvm/IR/IRBuilder.h"
21#include "llvm/IR/InstIterator.h"
22#include "llvm/IR/InstVisitor.h"
23#include "llvm/IR/IntrinsicsSPIRV.h"
24#include "llvm/IR/TypedPointerType.h"
25
26#include <queue>
27
28// This pass performs the following transformation on LLVM IR level required
29// for the following translation to SPIR-V:
30// - replaces direct usages of aggregate constants with target-specific
31// intrinsics;
32// - replaces aggregate-related instructions (extract/insert, ld/st, etc.)
33// with target-specific intrinsics;
34// - emits intrinsics for the global variable initializers since IRTranslator
35// doesn't handle them and it's not very convenient to translate them
36// ourselves;
37// - emits intrinsics to keep track of the string names assigned to the values;
38// - emits intrinsics to keep track of constants (this is necessary to have an
39// LLVM IR constant after the IRTranslation is completed) for their further
40// deduplication;
41// - emits intrinsics to keep track of original LLVM types of the values
42// to be able to emit proper SPIR-V types eventually.
43//
44// TODO: consider removing spv.track.constant in favor of spv.assign.type.
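//
// As an illustration (schematic, not the verbatim output of this pass), an
// untyped pointer definition such as
//   %p = alloca i32
// ends up annotated roughly as
//   %p = call ptr @llvm.spv.alloca(...)
//   call void @llvm.spv.assign.ptr.type.p0(ptr %p, metadata <i32 elem ty>, i32 <addrspace>)
// so that the pointee type (i32 here) is still available when SPIR-V types are
// built after IRTranslation.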
45
46using namespace llvm;
47
48namespace llvm {
49void initializeSPIRVEmitIntrinsicsPass(PassRegistry &);
50} // namespace llvm
51
52namespace {
53class SPIRVEmitIntrinsics
54 : public ModulePass,
55 public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
56 SPIRVTargetMachine *TM = nullptr;
57 SPIRVGlobalRegistry *GR = nullptr;
58 Function *F = nullptr;
59 bool TrackConstants = true;
60 DenseMap<Instruction *, Constant *> AggrConsts;
61 DenseMap<Instruction *, Type *> AggrConstTypes;
62 DenseSet<Instruction *> AggrStores;
63
64 // a registry of created Intrinsic::spv_assign_ptr_type instructions
65 DenseMap<Value *, CallInst *> AssignPtrTypeInstr;
66
67 // deduce element type of untyped pointers
68 Type *deduceElementType(Value *I);
69 Type *deduceElementTypeHelper(Value *I);
70 Type *deduceElementTypeHelper(Value *I, std::unordered_set<Value *> &Visited);
71 Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
72 std::unordered_set<Value *> &Visited);
73 Type *deduceElementTypeByUsersDeep(Value *Op,
74 std::unordered_set<Value *> &Visited);
75
76 // deduce nested types of composites
77 Type *deduceNestedTypeHelper(User *U);
78 Type *deduceNestedTypeHelper(User *U, Type *Ty,
79 std::unordered_set<Value *> &Visited);
80
81 // deduce Types of operands of the Instruction if possible
82 void deduceOperandElementType(Instruction *I);
83
84 void preprocessCompositeConstants(IRBuilder<> &B);
85 void preprocessUndefs(IRBuilder<> &B);
86
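// Build a call to the given intrinsic: Arg is wrapped into metadata (so the
// type or constant it carries survives until instruction selection), Arg2 is
// passed as the first plain operand, and any immediate arguments follow.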
87 CallInst *buildIntrWithMD(Intrinsic::ID IntrID, ArrayRef<Type *> Types,
88 Value *Arg, Value *Arg2, ArrayRef<Constant *> Imms,
89 IRBuilder<> &B) {
90 ConstantAsMetadata *CM = ValueAsMetadata::getConstant(Arg);
91 MDTuple *TyMD = MDNode::get(F->getContext(), CM);
92 MetadataAsValue *VMD = MetadataAsValue::get(F->getContext(), TyMD);
93 SmallVector<Value *, 4> Args;
94 Args.push_back(Arg2);
95 Args.push_back(VMD);
96 for (auto *Imm : Imms)
97 Args.push_back(Imm);
98 return B.CreateIntrinsic(IntrID, {Types}, Args);
99 }
100
101 void buildAssignPtr(IRBuilder<> &B, Type *ElemTy, Value *Arg);
102
103 void replaceMemInstrUses(Instruction *Old, Instruction *New, IRBuilder<> &B);
104 void processInstrAfterVisit(Instruction *I, IRBuilder<> &B);
105 void insertAssignPtrTypeIntrs(Instruction *I, IRBuilder<> &B);
106 void insertAssignTypeIntrs(Instruction *I, IRBuilder<> &B);
107 void insertAssignTypeInstrForTargetExtTypes(TargetExtType *AssignedType,
108 Value *V, IRBuilder<> &B);
109 void replacePointerOperandWithPtrCast(Instruction *I, Value *Pointer,
110 Type *ExpectedElementType,
111 unsigned OperandToReplace,
112 IRBuilder<> &B);
113 void insertPtrCastOrAssignTypeInstr(Instruction *I, IRBuilder<> &B);
114 void insertSpirvDecorations(Instruction *I, IRBuilder<> &B);
115 void processGlobalValue(GlobalVariable &GV, IRBuilder<> &B);
116 void processParamTypes(Function *F, IRBuilder<> &B);
117 void processParamTypesByFunHeader(Function *F, IRBuilder<> &B);
118 Type *deduceFunParamElementType(Function *F, unsigned OpIdx);
119 Type *deduceFunParamElementType(Function *F, unsigned OpIdx,
120 std::unordered_set<Function *> &FVisited);
121
122public:
123 static char ID;
124 SPIRVEmitIntrinsics() : ModulePass(ID) {
125 initializeSPIRVEmitIntrinsicsPass(*PassRegistry::getPassRegistry());
126 }
127 SPIRVEmitIntrinsics(SPIRVTargetMachine *_TM) : ModulePass(ID), TM(_TM) {
128 initializeSPIRVEmitIntrinsicsPass(*PassRegistry::getPassRegistry());
129 }
130 Instruction *visitInstruction(Instruction &I) { return &I; }
131 Instruction *visitSwitchInst(SwitchInst &I);
132 Instruction *visitGetElementPtrInst(GetElementPtrInst &I);
133 Instruction *visitBitCastInst(BitCastInst &I);
134 Instruction *visitInsertElementInst(InsertElementInst &I);
135 Instruction *visitExtractElementInst(ExtractElementInst &I);
136 Instruction *visitInsertValueInst(InsertValueInst &I);
137 Instruction *visitExtractValueInst(ExtractValueInst &I);
138 Instruction *visitLoadInst(LoadInst &I);
139 Instruction *visitStoreInst(StoreInst &I);
140 Instruction *visitAllocaInst(AllocaInst &I);
141 Instruction *visitAtomicCmpXchgInst(AtomicCmpXchgInst &I);
142 Instruction *visitUnreachableInst(UnreachableInst &I);
143
144 StringRef getPassName() const override { return "SPIRV emit intrinsics"; }
145
146 bool runOnModule(Module &M) override;
147 bool runOnFunction(Function &F);
148
149 void getAnalysisUsage(AnalysisUsage &AU) const override {
150 ModulePass::getAnalysisUsage(AU);
151 }
152};
153} // namespace
154
155char SPIRVEmitIntrinsics::ID = 0;
156
157INITIALIZE_PASS(SPIRVEmitIntrinsics, "emit-intrinsics", "SPIRV emit intrinsics",
158 false, false)
159
160static inline bool isAssignTypeInstr(const Instruction *I) {
161 return isa<IntrinsicInst>(I) &&
162 cast<IntrinsicInst>(I)->getIntrinsicID() == Intrinsic::spv_assign_type;
163}
164
165static bool isMemInstrToReplace(Instruction *I) {
166 return isa<StoreInst>(I) || isa<LoadInst>(I) || isa<InsertValueInst>(I) ||
167 isa<ExtractValueInst>(I) || isa<AtomicCmpXchgInst>(I);
168}
169
170static bool isAggrToReplace(const Value *V) {
171 return isa<ConstantAggregate>(V) || isa<ConstantDataArray>(V) ||
172 (isa<ConstantAggregateZero>(V) && !V->getType()->isVectorTy());
173}
174
175static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I) {
176 if (isa<PHINode>(I))
177 B.SetInsertPoint(I->getParent(), I->getParent()->getFirstInsertionPt());
178 else
179 B.SetInsertPoint(I);
180}
181
182static bool requireAssignType(Instruction *I) {
183 IntrinsicInst *Intr = dyn_cast<IntrinsicInst>(I);
184 if (Intr) {
185 switch (Intr->getIntrinsicID()) {
186 case Intrinsic::invariant_start:
187 case Intrinsic::invariant_end:
188 return false;
189 }
190 }
191 return true;
192}
193
194static inline void reportFatalOnTokenType(const Instruction *I) {
195 if (I->getType()->isTokenTy())
196 report_fatal_error("A token is encountered but SPIR-V without extensions "
197 "does not support token type",
198 false);
199}
200
201void SPIRVEmitIntrinsics::buildAssignPtr(IRBuilder<> &B, Type *ElemTy,
202 Value *Arg) {
203 CallInst *AssignPtrTyCI =
204 buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {Arg->getType()},
205 Constant::getNullValue(ElemTy), Arg,
206 {B.getInt32(getPointerAddressSpace(Arg->getType()))}, B);
207 GR->addDeducedElementType(AssignPtrTyCI, ElemTy);
208 GR->addDeducedElementType(Arg, ElemTy);
209 AssignPtrTypeInstr[Arg] = AssignPtrTyCI;
210}
211
212// Set the element pointer type to the given ValueTy and try to refine it
213// further (recursively) from the Operand value, if needed.
214Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
215 Type *ValueTy, Value *Operand, std::unordered_set<Value *> &Visited) {
216 Type *Ty = ValueTy;
217 if (Operand) {
218 if (auto *PtrTy = dyn_cast<PointerType>(Ty)) {
219 if (Type *NestedTy = deduceElementTypeHelper(Operand, Visited))
220 Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
221 } else {
222 Ty = deduceNestedTypeHelper(dyn_cast<User>(Operand), Ty, Visited);
223 }
224 }
225 return Ty;
226}
227
228// Traverse User instructions to deduce an element pointer type of the operand.
229Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
230 Value *Op, std::unordered_set<Value *> &Visited) {
231 if (!Op || !isPointerTy(Op->getType()))
232 return nullptr;
233
234 if (auto PType = dyn_cast<TypedPointerType>(Op->getType()))
235 return PType->getElementType();
236
237 // maybe we already know operand's element type
238 if (Type *KnownTy = GR->findDeducedElementType(Op))
239 return KnownTy;
240
241 for (User *OpU : Op->users()) {
242 if (Instruction *Inst = dyn_cast<Instruction>(OpU)) {
243 if (Type *Ty = deduceElementTypeHelper(Inst, Visited))
244 return Ty;
245 }
246 }
247 return nullptr;
248}
249
250// Implements what we know in advance about intrinsics and builtin calls
251// TODO: consider whether this particular case can be generalized by encoding
252// knowledge about intrinsics and builtin calls in the corresponding
253// specification rules
254static Type *getPointeeTypeByCallInst(StringRef DemangledName,
255 Function *CalledF, unsigned OpIdx) {
256 if ((DemangledName.starts_with("__spirv_ocl_printf(") ||
257 DemangledName.starts_with("printf(")) &&
258 OpIdx == 0)
259 return IntegerType::getInt8Ty(CalledF->getContext());
260 return nullptr;
261}
262
263// Deduce and return a successfully deduced Type of the Instruction,
264// or nullptr otherwise.
265Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(Value *I) {
266 std::unordered_set<Value *> Visited;
267 return deduceElementTypeHelper(I, Visited);
268}
269
270Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
271 Value *I, std::unordered_set<Value *> &Visited) {
272 // allow to pass nullptr as an argument
273 if (!I)
274 return nullptr;
275
276 // maybe already known
277 if (Type *KnownTy = GR->findDeducedElementType(I))
278 return KnownTy;
279
280 // maybe a cycle
281 if (Visited.find(I) != Visited.end())
282 return nullptr;
283 Visited.insert(I);
284
285 // fallback value for the case when we fail to deduce a type
286 Type *Ty = nullptr;
287 // look for known basic patterns of type inference
288 if (auto *Ref = dyn_cast<AllocaInst>(I)) {
289 Ty = Ref->getAllocatedType();
290 } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
291 Ty = Ref->getResultElementType();
292 } else if (auto *Ref = dyn_cast<GlobalValue>(I)) {
293 Ty = deduceElementTypeByValueDeep(
294 Ref->getValueType(),
295 Ref->getNumOperands() > 0 ? Ref->getOperand(0) : nullptr, Visited);
296 } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
297 Ty = deduceElementTypeHelper(Ref->getPointerOperand(), Visited);
298 } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
299 if (Type *Src = Ref->getSrcTy(), *Dest = Ref->getDestTy();
300 isPointerTy(Src) && isPointerTy(Dest))
301 Ty = deduceElementTypeHelper(Ref->getOperand(0), Visited);
302 } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {
303 Value *Op = Ref->getNewValOperand();
304 Ty = deduceElementTypeByValueDeep(Op->getType(), Op, Visited);
305 } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {
306 Value *Op = Ref->getValOperand();
307 Ty = deduceElementTypeByValueDeep(Op->getType(), Op, Visited);
308 } else if (auto *Ref = dyn_cast<PHINode>(I)) {
309 for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
310 Ty = deduceElementTypeByUsersDeep(Ref->getIncomingValue(i), Visited);
311 if (Ty)
312 break;
313 }
314 } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
315 for (Value *Op : {Ref->getTrueValue(), Ref->getFalseValue()}) {
316 Ty = deduceElementTypeByUsersDeep(Op, Visited);
317 if (Ty)
318 break;
319 }
320 }
321
322 // remember the found relationship
323 if (Ty) {
324 // specify nested types if needed, otherwise return unchanged
325 GR->addDeducedElementType(I, Ty);
326 }
327
328 return Ty;
329}
330
331// Re-create a type of the value if it has untyped pointer fields, also nested.
332// Return the original value type if no corrections of untyped pointer
333// information are found or needed.
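// For example (illustrative only), a global whose value type is { ptr, i32 }
// and whose first operand is known to point to floats is recorded in the
// registry as the composite type { typed-ptr-to-float, i32 }.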
334Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(User *U) {
335 std::unordered_set<Value *> Visited;
336 return deduceNestedTypeHelper(U, U->getType(), Visited);
337}
338
339Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
340 User *U, Type *OrigTy, std::unordered_set<Value *> &Visited) {
341 if (!U)
342 return OrigTy;
343
344 // maybe already known
345 if (Type *KnownTy = GR->findDeducedCompositeType(U))
346 return KnownTy;
347
348 // maybe a cycle
349 if (Visited.find(U) != Visited.end())
350 return OrigTy;
351 Visited.insert(U);
352
353 if (isa<StructType>(OrigTy)) {
354 SmallVector<Type *> Tys;
355 bool Change = false;
356 for (unsigned i = 0; i < U->getNumOperands(); ++i) {
357 Value *Op = U->getOperand(i);
358 Type *OpTy = Op->getType();
359 Type *Ty = OpTy;
360 if (Op) {
361 if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
362 if (Type *NestedTy = deduceElementTypeHelper(Op, Visited))
363 Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
364 } else {
365 Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited);
366 }
367 }
368 Tys.push_back(Ty);
369 Change |= Ty != OpTy;
370 }
371 if (Change) {
372 Type *NewTy = StructType::create(Tys);
373 GR->addDeducedCompositeType(U, NewTy);
374 return NewTy;
375 }
376 } else if (auto *ArrTy = dyn_cast<ArrayType>(OrigTy)) {
377 if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
378 Type *OpTy = ArrTy->getElementType();
379 Type *Ty = OpTy;
380 if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
381 if (Type *NestedTy = deduceElementTypeHelper(Op, Visited))
382 Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
383 } else {
384 Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited);
385 }
386 if (Ty != OpTy) {
387 Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
388 GR->addDeducedCompositeType(U, NewTy);
389 return NewTy;
390 }
391 }
392 } else if (auto *VecTy = dyn_cast<VectorType>(OrigTy)) {
393 if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
394 Type *OpTy = VecTy->getElementType();
395 Type *Ty = OpTy;
396 if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
397 if (Type *NestedTy = deduceElementTypeHelper(Op, Visited))
398 Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
399 } else {
400 Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited);
401 }
402 if (Ty != OpTy) {
403 Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
404 GR->addDeducedCompositeType(U, NewTy);
405 return NewTy;
406 }
407 }
408 }
409
410 return OrigTy;
411}
412
413Type *SPIRVEmitIntrinsics::deduceElementType(Value *I) {
414 if (Type *Ty = deduceElementTypeHelper(I))
415 return Ty;
416 return IntegerType::getInt8Ty(I->getContext());
417}
418
419// If the Instruction has Pointer operands with unresolved types, this function
420// tries to deduce them. If the Instruction has Pointer operands with known
421// types which differ from the expected ones, this function tries to insert a
422// pointer cast (spv_ptrcast) to resolve the mismatch.
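// For example (illustrative only), given "%r = icmp eq ptr %a, %b" where only
// %a has a deduced element type, the same element type is propagated to %b:
// a new spv_assign_ptr_type is emitted for %b if it has no deduced type yet,
// or an spv_ptrcast is inserted if %b already has a different one.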
423void SPIRVEmitIntrinsics::deduceOperandElementType(Instruction *I) {
424 SmallVector<std::pair<Value *, unsigned>> Ops;
425 Type *KnownElemTy = nullptr;
426 // look for known basic patterns of type inference
427 if (auto *Ref = dyn_cast<PHINode>(I)) {
428 if (!isPointerTy(I->getType()) ||
429 !(KnownElemTy = GR->findDeducedElementType(I)))
430 return;
431 for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
432 Value *Op = Ref->getIncomingValue(i);
433 if (isPointerTy(Op->getType()))
434 Ops.push_back(std::make_pair(Op, i));
435 }
436 } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
437 if (!isPointerTy(I->getType()) ||
438 !(KnownElemTy = GR->findDeducedElementType(I)))
439 return;
440 for (unsigned i = 0; i < Ref->getNumOperands(); i++) {
441 Value *Op = Ref->getOperand(i);
442 if (isPointerTy(Op->getType()))
443 Ops.push_back(std::make_pair(Op, i));
444 }
445 } else if (auto *Ref = dyn_cast<ReturnInst>(I)) {
446 Type *RetTy = F->getReturnType();
447 if (!isPointerTy(RetTy))
448 return;
449 Value *Op = Ref->getReturnValue();
450 if (!Op)
451 return;
452 if (!(KnownElemTy = GR->findDeducedElementType(F))) {
453 if (Type *OpElemTy = GR->findDeducedElementType(Op)) {
454 GR->addDeducedElementType(F, OpElemTy);
455 TypedPointerType *DerivedTy =
456 TypedPointerType::get(OpElemTy, getPointerAddressSpace(RetTy));
457 GR->addReturnType(F, DerivedTy);
458 }
459 return;
460 }
461 Ops.push_back(std::make_pair(Op, 0));
462 } else if (auto *Ref = dyn_cast<ICmpInst>(I)) {
463 if (!isPointerTy(Ref->getOperand(0)->getType()))
464 return;
465 Value *Op0 = Ref->getOperand(0);
466 Value *Op1 = Ref->getOperand(1);
467 Type *ElemTy0 = GR->findDeducedElementType(Op0);
468 Type *ElemTy1 = GR->findDeducedElementType(Op1);
469 if (ElemTy0) {
470 KnownElemTy = ElemTy0;
471 Ops.push_back(std::make_pair(Op1, 1));
472 } else if (ElemTy1) {
473 KnownElemTy = ElemTy1;
474 Ops.push_back(std::make_pair(Op0, 0));
475 }
476 }
477
478 // There is not enough info to deduce types, or all is valid already.
479 if (!KnownElemTy || Ops.size() == 0)
480 return;
481
482 LLVMContext &Ctx = F->getContext();
483 IRBuilder<> B(Ctx);
484 for (auto &OpIt : Ops) {
485 Value *Op = OpIt.first;
486 if (Op->use_empty())
487 continue;
488 Type *Ty = GR->findDeducedElementType(Op);
489 if (Ty == KnownElemTy)
490 continue;
491 if (Instruction *User = dyn_cast<Instruction>(Op->use_begin()->get()))
492 setInsertPointSkippingPhis(B, User->getNextNode());
493 else
494 B.SetInsertPoint(I);
495 Value *OpTyVal = Constant::getNullValue(KnownElemTy);
496 Type *OpTy = Op->getType();
497 if (!Ty) {
498 GR->addDeducedElementType(Op, KnownElemTy);
499 // check if there is an existing Intrinsic::spv_assign_ptr_type instruction
500 auto It = AssignPtrTypeInstr.find(Op);
501 if (It == AssignPtrTypeInstr.end()) {
502 CallInst *CI =
503 buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {OpTy}, OpTyVal, Op,
504 {B.getInt32(getPointerAddressSpace(OpTy))}, B);
505 AssignPtrTypeInstr[Op] = CI;
506 } else {
507 It->second->setArgOperand(
508 1,
509 MetadataAsValue::get(
510 Ctx, MDNode::get(Ctx, ValueAsMetadata::getConstant(OpTyVal))));
511 }
512 } else {
513 SmallVector<Type *, 2> Types = {OpTy, OpTy};
514 MetadataAsValue *OpTyMD = MetadataAsValue::get(
515 Ctx, MDNode::get(Ctx, ValueAsMetadata::getConstant(OpTyVal)));
516 SmallVector<Value *, 2> Args = {Op, OpTyMD,
517 B.getInt32(getPointerAddressSpace(OpTy))};
518 CallInst *PtrCastI =
519 B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
520 I->setOperand(OpIt.second, PtrCastI);
521 }
522 }
523}
524
525void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
526 Instruction *New,
527 IRBuilder<> &B) {
528 while (!Old->user_empty()) {
529 auto *U = Old->user_back();
530 if (isAssignTypeInstr(U)) {
531 B.SetInsertPoint(U);
532 SmallVector<Value *, 2> Args = {New, U->getOperand(1)};
533 B.CreateIntrinsic(Intrinsic::spv_assign_type, {New->getType()}, Args);
534 U->eraseFromParent();
535 } else if (isMemInstrToReplace(U) || isa<ReturnInst>(U) ||
536 isa<CallInst>(U)) {
537 U->replaceUsesOfWith(Old, New);
538 } else {
539 llvm_unreachable("illegal aggregate intrinsic user");
540 }
541 }
542 Old->eraseFromParent();
543}
544
545void SPIRVEmitIntrinsics::preprocessUndefs(IRBuilder<> &B) {
546 std::queue<Instruction *> Worklist;
547 for (auto &I : instructions(F))
548 Worklist.push(&I);
549
550 while (!Worklist.empty()) {
551 Instruction *I = Worklist.front();
552 Worklist.pop();
553
554 for (auto &Op : I->operands()) {
555 auto *AggrUndef = dyn_cast<UndefValue>(Op);
556 if (!AggrUndef || !Op->getType()->isAggregateType())
557 continue;
558
559 B.SetInsertPoint(I);
560 auto *IntrUndef = B.CreateIntrinsic(Intrinsic::spv_undef, {}, {});
561 Worklist.push(IntrUndef);
562 I->replaceUsesOfWith(Op, IntrUndef);
563 AggrConsts[IntrUndef] = AggrUndef;
564 AggrConstTypes[IntrUndef] = AggrUndef->getType();
565 }
566 }
567}
568
569void SPIRVEmitIntrinsics::preprocessCompositeConstants(IRBuilder<> &B) {
570 std::queue<Instruction *> Worklist;
571 for (auto &I : instructions(F))
572 Worklist.push(&I);
573
574 while (!Worklist.empty()) {
575 auto *I = Worklist.front();
576 assert(I);
577 bool KeepInst = false;
578 for (const auto &Op : I->operands()) {
579 auto BuildCompositeIntrinsic =
580 [](Constant *AggrC, ArrayRef<Value *> Args, Value *Op, Instruction *I,
581 IRBuilder<> &B, std::queue<Instruction *> &Worklist,
582 bool &KeepInst, SPIRVEmitIntrinsics &SEI) {
583 B.SetInsertPoint(I);
584 auto *CCI =
585 B.CreateIntrinsic(Intrinsic::spv_const_composite, {}, {Args});
586 Worklist.push(CCI);
587 I->replaceUsesOfWith(Op, CCI);
588 KeepInst = true;
589 SEI.AggrConsts[CCI] = AggrC;
590 SEI.AggrConstTypes[CCI] = SEI.deduceNestedTypeHelper(AggrC);
591 };
592
593 if (auto *AggrC = dyn_cast<ConstantAggregate>(Op)) {
594 SmallVector<Value *> Args(AggrC->op_begin(), AggrC->op_end());
595 BuildCompositeIntrinsic(AggrC, Args, Op, I, B, Worklist, KeepInst,
596 *this);
597 } else if (auto *AggrC = dyn_cast<ConstantDataArray>(Op)) {
598 SmallVector<Value *> Args;
599 for (unsigned i = 0; i < AggrC->getNumElements(); ++i)
600 Args.push_back(AggrC->getElementAsConstant(i));
601 BuildCompositeIntrinsic(AggrC, Args, Op, I, B, Worklist, KeepInst,
602 *this);
603 } else if (isa<ConstantAggregateZero>(Op) &&
604 !Op->getType()->isVectorTy()) {
605 auto *AggrC = cast<ConstantAggregateZero>(Op);
606 SmallVector<Value *> Args(AggrC->op_begin(), AggrC->op_end());
607 BuildCompositeIntrinsic(AggrC, Args, Op, I, B, Worklist, KeepInst,
608 *this);
609 }
610 }
611 if (!KeepInst)
612 Worklist.pop();
613 }
614}
615
616Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &I) {
617 BasicBlock *ParentBB = I.getParent();
618 IRBuilder<> B(ParentBB);
619 B.SetInsertPoint(&I);
620 SmallVector<Value *, 4> Args;
621 SmallVector<BasicBlock *> BBCases;
622 for (auto &Op : I.operands()) {
623 if (Op.get()->getType()->isSized()) {
624 Args.push_back(Op);
625 } else if (BasicBlock *BB = dyn_cast<BasicBlock>(Op.get())) {
626 BBCases.push_back(BB);
627 Args.push_back(BlockAddress::get(BB->getParent(), BB));
628 } else {
629 report_fatal_error("Unexpected switch operand");
630 }
631 }
632 CallInst *NewI = B.CreateIntrinsic(Intrinsic::spv_switch,
633 {I.getOperand(0)->getType()}, {Args});
634 // remove the switch to avoid its unneeded and undesirable unwrapping into
635 // branches and conditions
636 I.replaceAllUsesWith(NewI);
637 I.eraseFromParent();
638 // insert an artificial and temporary instruction to preserve valid CFG;
639 // it will be removed after the IR translation pass
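// The indirectbr below lists every case block as a destination, so the
// successors of ParentBB (and thus the CFG) stay intact even though the
// switch itself has been replaced by a call.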
640 B.SetInsertPoint(ParentBB);
641 IndirectBrInst *BrI = B.CreateIndirectBr(
642 Constant::getNullValue(PointerType::getUnqual(ParentBB->getContext())),
643 BBCases.size());
644 for (BasicBlock *BBCase : BBCases)
645 BrI->addDestination(BBCase);
646 return BrI;
647}
648
649Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &I) {
650 IRBuilder<> B(I.getParent());
651 B.SetInsertPoint(&I);
652 SmallVector<Type *, 2> Types = {I.getType(), I.getOperand(0)->getType()};
653 SmallVector<Value *, 4> Args;
654 Args.push_back(B.getInt1(I.isInBounds()));
655 for (auto &Op : I.operands())
656 Args.push_back(Op);
657 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
658 I.replaceAllUsesWith(NewI);
659 I.eraseFromParent();
660 return NewI;
661}
662
663Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &I) {
664 IRBuilder<> B(I.getParent());
665 B.SetInsertPoint(&I);
666 Value *Source = I.getOperand(0);
667
668 // SPIR-V, contrary to LLVM 17+ IR, supports bitcasts between pointers of
669 // varying element types. In the case of IR coming from older versions of
670 // LLVM, such bitcasts do not provide sufficient information and should simply
671 // be skipped here; they are handled later in insertPtrCastOrAssignTypeInstr.
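// For example (illustrative only), "%b = bitcast ptr %a to ptr" is folded into
// its source %a here; any element type mismatch is reconciled later by an
// explicit spv_ptrcast.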
672 if (isPointerTy(I.getType())) {
673 I.replaceAllUsesWith(Source);
674 I.eraseFromParent();
675 return nullptr;
676 }
677
678 SmallVector<Type *, 2> Types = {I.getType(), Source->getType()};
679 SmallVector<Value *> Args(I.op_begin(), I.op_end());
680 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_bitcast, {Types}, {Args});
681 std::string InstName = I.hasName() ? I.getName().str() : "";
682 I.replaceAllUsesWith(NewI);
683 I.eraseFromParent();
684 NewI->setName(InstName);
685 return NewI;
686}
687
688void SPIRVEmitIntrinsics::insertAssignTypeInstrForTargetExtTypes(
689 TargetExtType *AssignedType, Value *V, IRBuilder<> &B) {
690 // Do not emit spv_assign_type if the V is of the AssignedType already.
691 if (V->getType() == AssignedType)
692 return;
693
694 // Do not emit spv_assign_type if there is one already targeting V. If the
695 // found spv_assign_type assigns a type different than AssignedType, report an
696 // error. Builtin types cannot be redeclared or cast.
697 for (auto User : V->users()) {
698 auto *II = dyn_cast<IntrinsicInst>(User);
699 if (!II || II->getIntrinsicID() != Intrinsic::spv_assign_type)
700 continue;
701
702 MetadataAsValue *VMD = cast<MetadataAsValue>(II->getOperand(1));
703 Type *BuiltinType =
704 dyn_cast<ConstantAsMetadata>(VMD->getMetadata())->getType();
705 if (BuiltinType != AssignedType)
706 report_fatal_error("Type mismatch " + BuiltinType->getTargetExtName() +
707 "/" + AssignedType->getTargetExtName() +
708 " for value " + V->getName(),
709 false);
710 return;
711 }
712
713 Constant *Const = UndefValue::get(AssignedType);
714 buildIntrWithMD(Intrinsic::spv_assign_type, {V->getType()}, Const, V, {}, B);
715}
716
717void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
718 Instruction *I, Value *Pointer, Type *ExpectedElementType,
719 unsigned OperandToReplace, IRBuilder<> &B) {
720 // If Pointer is the result of nop BitCastInst (ptr -> ptr), use the source
721 // pointer instead. The BitCastInst should be later removed when visited.
722 while (BitCastInst *BC = dyn_cast<BitCastInst>(Pointer))
723 Pointer = BC->getOperand(0);
724
725 // Do not emit spv_ptrcast if Pointer's element type is ExpectedElementType
726 Type *PointerElemTy = deduceElementTypeHelper(Pointer);
727 if (PointerElemTy == ExpectedElementType)
728 return;
729
730 setInsertPointSkippingPhis(B, I);
731 Constant *ExpectedElementTypeConst =
732 Constant::getNullValue(ExpectedElementType);
733 ConstantAsMetadata *CM =
734 ValueAsMetadata::getConstant(ExpectedElementTypeConst);
735 MDTuple *TyMD = MDNode::get(F->getContext(), CM);
736 MetadataAsValue *VMD = MetadataAsValue::get(F->getContext(), TyMD);
737 unsigned AddressSpace = getPointerAddressSpace(Pointer->getType());
738 bool FirstPtrCastOrAssignPtrType = true;
739
740 // Do not emit new spv_ptrcast if equivalent one already exists or when
741 // spv_assign_ptr_type already targets this pointer with the same element
742 // type.
743 for (auto User : Pointer->users()) {
744 auto *II = dyn_cast<IntrinsicInst>(User);
745 if (!II ||
746 (II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
747 II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
748 II->getOperand(0) != Pointer)
749 continue;
750
751 // There is some spv_ptrcast/spv_assign_ptr_type already targeting this
752 // pointer.
753 FirstPtrCastOrAssignPtrType = false;
754 if (II->getOperand(1) != VMD ||
755 dyn_cast<ConstantInt>(II->getOperand(2))->getSExtValue() !=
756 AddressSpace)
757 continue;
758
759 // The spv_ptrcast/spv_assign_ptr_type targeting this pointer is of the same
760 // element type and address space.
761 if (II->getIntrinsicID() != Intrinsic::spv_ptrcast)
762 return;
763
764 // This must be a spv_ptrcast; do not emit a new one if it shares the same
765 // BB as I. Otherwise, search for other spv_ptrcast/spv_assign_ptr_type.
766 if (II->getParent() != I->getParent())
767 continue;
768
769 I->setOperand(OperandToReplace, II);
770 return;
771 }
772
773 // // Do not emit spv_ptrcast if it would cast to the default pointer element
774 // // type (i8) of the same address space.
775 // if (ExpectedElementType->isIntegerTy(8))
776 // return;
777
778 // If this would be the first spv_ptrcast, do not emit spv_ptrcast and emit
779 // spv_assign_ptr_type instead.
780 if (FirstPtrCastOrAssignPtrType &&
781 (isa<Instruction>(Pointer) || isa<Argument>(Pointer))) {
782 CallInst *CI = buildIntrWithMD(
783 Intrinsic::spv_assign_ptr_type, {Pointer->getType()},
784 ExpectedElementTypeConst, Pointer, {B.getInt32(AddressSpace)}, B);
785 GR->addDeducedElementType(CI, ExpectedElementType);
786 GR->addDeducedElementType(Pointer, ExpectedElementType);
787 AssignPtrTypeInstr[Pointer] = CI;
788 return;
789 }
790
791 // Emit spv_ptrcast
792 SmallVector<Type *, 2> Types = {Pointer->getType(), Pointer->getType()};
793 SmallVector<Value *, 4> Args = {Pointer, VMD, B.getInt32(AddressSpace)};
794 auto *PtrCastI = B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
795 I->setOperand(OperandToReplace, PtrCastI);
796}
797
798void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(Instruction *I,
799 IRBuilder<> &B) {
800 // Handle basic instructions:
801 StoreInst *SI = dyn_cast<StoreInst>(I);
802 if (SI && F->getCallingConv() == CallingConv::SPIR_KERNEL &&
803 isPointerTy(SI->getValueOperand()->getType()) &&
804 isa<Argument>(SI->getValueOperand())) {
805 return replacePointerOperandWithPtrCast(
806 I, SI->getValueOperand(), IntegerType::getInt8Ty(F->getContext()), 0,
807 B);
808 } else if (SI) {
809 return replacePointerOperandWithPtrCast(
810 I, SI->getPointerOperand(), SI->getValueOperand()->getType(), 1, B);
811 } else if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
812 return replacePointerOperandWithPtrCast(I, LI->getPointerOperand(),
813 LI->getType(), 0, B);
814 } else if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(I)) {
815 return replacePointerOperandWithPtrCast(I, GEPI->getPointerOperand(),
816 GEPI->getSourceElementType(), 0, B);
817 }
818
819 // Handle calls to builtins (non-intrinsics):
820 CallInst *CI = dyn_cast<CallInst>(I);
821 if (!CI || CI->isIndirectCall() || CI->isInlineAsm() ||
822 !CI->getCalledFunction() || CI->getCalledFunction()->isIntrinsic())
823 return;
824
825 // collect information about formal parameter types
826 std::string DemangledName =
827 getOclOrSpirvBuiltinDemangledName(CI->getCalledFunction()->getName());
828 Function *CalledF = CI->getCalledFunction();
829 SmallVector<Type *, 4> CalledArgTys;
830 bool HaveTypes = false;
831 for (unsigned OpIdx = 0; OpIdx < CalledF->arg_size(); ++OpIdx) {
832 Argument *CalledArg = CalledF->getArg(OpIdx);
833 Type *ArgType = CalledArg->getType();
834 if (!isPointerTy(ArgType)) {
835 CalledArgTys.push_back(nullptr);
836 } else if (isTypedPointerTy(ArgType)) {
837 CalledArgTys.push_back(cast<TypedPointerType>(ArgType)->getElementType());
838 HaveTypes = true;
839 } else {
840 Type *ElemTy = GR->findDeducedElementType(CalledArg);
841 if (!ElemTy && hasPointeeTypeAttr(CalledArg))
842 ElemTy = getPointeeTypeByAttr(CalledArg);
843 if (!ElemTy) {
844 ElemTy = getPointeeTypeByCallInst(DemangledName, CalledF, OpIdx);
845 if (ElemTy) {
846 GR->addDeducedElementType(CalledArg, ElemTy);
847 } else {
848 for (User *U : CalledArg->users()) {
849 if (Instruction *Inst = dyn_cast<Instruction>(U)) {
850 if ((ElemTy = deduceElementTypeHelper(Inst)) != nullptr)
851 break;
852 }
853 }
854 }
855 }
856 HaveTypes |= ElemTy != nullptr;
857 CalledArgTys.push_back(ElemTy);
858 }
859 }
860
861 if (DemangledName.empty() && !HaveTypes)
862 return;
863
864 for (unsigned OpIdx = 0; OpIdx < CI->arg_size(); OpIdx++) {
865 Value *ArgOperand = CI->getArgOperand(OpIdx);
866 if (!isa<PointerType>(ArgOperand->getType()) &&
867 !isa<TypedPointerType>(ArgOperand->getType()))
868 continue;
869
870 // Constants (nulls/undefs) are handled in insertAssignPtrTypeIntrs()
871 if (!isa<Instruction>(ArgOperand) && !isa<Argument>(ArgOperand)) {
872 // However, we may have assumptions about the formal argument's type and
873 // may have a need to insert a ptr cast for the actual parameter of this
874 // call.
875 Argument *CalledArg = CalledF->getArg(OpIdx);
876 if (!GR->findDeducedElementType(CalledArg))
877 continue;
878 }
879
880 Type *ExpectedType =
881 OpIdx < CalledArgTys.size() ? CalledArgTys[OpIdx] : nullptr;
882 if (!ExpectedType && !DemangledName.empty())
883 ExpectedType = SPIRV::parseBuiltinCallArgumentBaseType(
884 DemangledName, OpIdx, I->getContext());
885 if (!ExpectedType)
886 continue;
887
888 if (ExpectedType->isTargetExtTy())
889 insertAssignTypeInstrForTargetExtTypes(cast<TargetExtType>(ExpectedType),
890 ArgOperand, B);
891 else
892 replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType, OpIdx, B);
893 }
894}
895
896Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &I) {
897 SmallVector<Type *, 4> Types = {I.getType(), I.getOperand(0)->getType(),
898 I.getOperand(1)->getType(),
899 I.getOperand(2)->getType()};
900 IRBuilder<> B(I.getParent());
901 B.SetInsertPoint(&I);
902 SmallVector<Value *> Args(I.op_begin(), I.op_end());
903 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_insertelt, {Types}, {Args});
904 std::string InstName = I.hasName() ? I.getName().str() : "";
905 I.replaceAllUsesWith(NewI);
906 I.eraseFromParent();
907 NewI->setName(InstName);
908 return NewI;
909}
910
911Instruction *
912SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &I) {
913 IRBuilder<> B(I.getParent());
914 B.SetInsertPoint(&I);
915 SmallVector<Type *, 3> Types = {I.getType(), I.getVectorOperandType(),
916 I.getIndexOperand()->getType()};
917 SmallVector<Value *, 2> Args = {I.getVectorOperand(), I.getIndexOperand()};
918 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_extractelt, {Types}, {Args});
919 std::string InstName = I.hasName() ? I.getName().str() : "";
920 I.replaceAllUsesWith(NewI);
921 I.eraseFromParent();
922 NewI->setName(InstName);
923 return NewI;
924}
925
926Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &I) {
927 IRBuilder<> B(I.getParent());
928 B.SetInsertPoint(&I);
929 SmallVector<Type *, 1> Types = {I.getInsertedValueOperand()->getType()};
930 SmallVector<Value *> Args;
931 for (auto &Op : I.operands())
932 if (isa<UndefValue>(Op))
933 Args.push_back(UndefValue::get(B.getInt32Ty()));
934 else
935 Args.push_back(Op);
936 for (auto &Op : I.indices())
937 Args.push_back(B.getInt32(Op));
938 Instruction *NewI =
939 B.CreateIntrinsic(Intrinsic::spv_insertv, {Types}, {Args});
940 replaceMemInstrUses(&I, NewI, B);
941 return NewI;
942}
943
944Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &I) {
945 IRBuilder<> B(I.getParent());
946 B.SetInsertPoint(&I);
947 SmallVector<Value *> Args;
948 for (auto &Op : I.operands())
949 Args.push_back(Op);
950 for (auto &Op : I.indices())
951 Args.push_back(B.getInt32(Op));
952 auto *NewI =
953 B.CreateIntrinsic(Intrinsic::spv_extractv, {I.getType()}, {Args});
954 I.replaceAllUsesWith(NewI);
955 I.eraseFromParent();
956 return NewI;
957}
958
959Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &I) {
960 if (!I.getType()->isAggregateType())
961 return &I;
962 IRBuilder<> B(I.getParent());
963 B.SetInsertPoint(&I);
964 TrackConstants = false;
965 const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
966 MachineMemOperand::Flags Flags =
967 TLI->getLoadMemOperandFlags(I, F->getParent()->getDataLayout());
968 auto *NewI =
969 B.CreateIntrinsic(Intrinsic::spv_load, {I.getOperand(0)->getType()},
970 {I.getPointerOperand(), B.getInt16(Flags),
971 B.getInt8(I.getAlign().value())});
972 replaceMemInstrUses(&I, NewI, B);
973 return NewI;
974}
975
976Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &I) {
977 if (!AggrStores.contains(&I))
978 return &I;
979 IRBuilder<> B(I.getParent());
980 B.SetInsertPoint(&I);
981 TrackConstants = false;
982 const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
983 MachineMemOperand::Flags Flags =
984 TLI->getStoreMemOperandFlags(I, F->getParent()->getDataLayout());
985 auto *PtrOp = I.getPointerOperand();
986 auto *NewI = B.CreateIntrinsic(
987 Intrinsic::spv_store, {I.getValueOperand()->getType(), PtrOp->getType()},
988 {I.getValueOperand(), PtrOp, B.getInt16(Flags),
989 B.getInt8(I.getAlign().value())});
990 I.eraseFromParent();
991 return NewI;
992}
993
994Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &I) {
995 Value *ArraySize = nullptr;
996 if (I.isArrayAllocation()) {
997 const SPIRVSubtarget *STI = TM->getSubtargetImpl(*I.getFunction());
998 if (!STI->canUseExtension(
999 SPIRV::Extension::SPV_INTEL_variable_length_array))
1000 report_fatal_error(
1001 "array allocation: this instruction requires the following "
1002 "SPIR-V extension: SPV_INTEL_variable_length_array",
1003 false);
1004 ArraySize = I.getArraySize();
1005 }
1006 IRBuilder<> B(I.getParent());
1007 B.SetInsertPoint(&I);
1008 TrackConstants = false;
1009 Type *PtrTy = I.getType();
1010 auto *NewI =
1011 ArraySize ? B.CreateIntrinsic(Intrinsic::spv_alloca_array,
1012 {PtrTy, ArraySize->getType()}, {ArraySize})
1013 : B.CreateIntrinsic(Intrinsic::spv_alloca, {PtrTy}, {});
1014 std::string InstName = I.hasName() ? I.getName().str() : "";
1015 I.replaceAllUsesWith(NewI);
1016 I.eraseFromParent();
1017 NewI->setName(InstName);
1018 return NewI;
1019}
1020
1021Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &I) {
1022 assert(I.getType()->isAggregateType() && "Aggregate result is expected");
1023 IRBuilder<> B(I.getParent());
1024 B.SetInsertPoint(&I);
1025 SmallVector<Value *> Args;
1026 for (auto &Op : I.operands())
1027 Args.push_back(Op);
1028 Args.push_back(B.getInt32(I.getSyncScopeID()));
1029 Args.push_back(B.getInt32(
1030 static_cast<uint32_t>(getMemSemantics(I.getSuccessOrdering()))));
1031 Args.push_back(B.getInt32(
1032 static_cast<uint32_t>(getMemSemantics(I.getFailureOrdering()))));
1033 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
1034 {I.getPointerOperand()->getType()}, {Args});
1035 replaceMemInstrUses(&I, NewI, B);
1036 return NewI;
1037}
1038
1039Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &I) {
1040 IRBuilder<> B(I.getParent());
1041 B.SetInsertPoint(&I);
1042 B.CreateIntrinsic(Intrinsic::spv_unreachable, {}, {});
1043 return &I;
1044}
1045
1046void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV,
1047 IRBuilder<> &B) {
1048 // Skip the special artificial variable llvm.global.annotations.
1049 if (GV.getName() == "llvm.global.annotations")
1050 return;
1051 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
1052 // Deduce element type and store results in Global Registry.
1053 // The result is ignored here because TypedPointerType is not supported
1054 // by the general LLVM IR logic.
1055 deduceElementTypeHelper(&GV);
1056 Constant *Init = GV.getInitializer();
1057 Type *Ty = isAggrToReplace(Init) ? B.getInt32Ty() : Init->getType();
1058 Constant *Const = isAggrToReplace(Init) ? B.getInt32(1) : Init;
1059 auto *InitInst = B.CreateIntrinsic(Intrinsic::spv_init_global,
1060 {GV.getType(), Ty}, {&GV, Const});
1061 InitInst->setArgOperand(1, Init);
1062 }
1063 if ((!GV.hasInitializer() || isa<UndefValue>(GV.getInitializer())) &&
1064 GV.getNumUses() == 0)
1065 B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.getType(), &GV);
1066}
1067
1068void SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *I,
1069 IRBuilder<> &B) {
1070 reportFatalOnTokenType(I);
1071 if (!isPointerTy(I->getType()) || !requireAssignType(I) ||
1072 isa<BitCastInst>(I))
1073 return;
1074
1075 setInsertPointSkippingPhis(B, I->getNextNode());
1076
1077 Type *ElemTy = deduceElementType(I);
1078 Constant *EltTyConst = UndefValue::get(ElemTy);
1079 unsigned AddressSpace = getPointerAddressSpace(I->getType());
1080 CallInst *CI = buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {I->getType()},
1081 EltTyConst, I, {B.getInt32(AddressSpace)}, B);
1082 GR->addDeducedElementType(CI, ElemTy);
1083 AssignPtrTypeInstr[I] = CI;
1084}
1085
1086void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *I,
1087 IRBuilder<> &B) {
1088 reportFatalOnTokenType(I);
1089 Type *Ty = I->getType();
1090 if (!Ty->isVoidTy() && !isPointerTy(Ty) && requireAssignType(I)) {
1091 setInsertPointSkippingPhis(B, I->getNextNode());
1092 Type *TypeToAssign = Ty;
1093 if (auto *II = dyn_cast<IntrinsicInst>(I)) {
1094 if (II->getIntrinsicID() == Intrinsic::spv_const_composite ||
1095 II->getIntrinsicID() == Intrinsic::spv_undef) {
1096 auto It = AggrConstTypes.find(II);
1097 if (It == AggrConstTypes.end())
1098 report_fatal_error("Unknown composite intrinsic type");
1099 TypeToAssign = It->second;
1100 }
1101 }
1102 Constant *Const = UndefValue::get(TypeToAssign);
1103 buildIntrWithMD(Intrinsic::spv_assign_type, {Ty}, Const, I, {}, B);
1104 }
1105 for (const auto &Op : I->operands()) {
1106 if (isa<ConstantPointerNull>(Op) || isa<UndefValue>(Op) ||
1107 // Check GetElementPtrConstantExpr case.
1108 (isa<ConstantExpr>(Op) && isa<GEPOperator>(Op))) {
1109 setInsertPointSkippingPhis(B, I);
1110 if (isa<UndefValue>(Op) && Op->getType()->isAggregateType())
1111 buildIntrWithMD(Intrinsic::spv_assign_type, {B.getInt32Ty()}, Op,
1112 UndefValue::get(B.getInt32Ty()), {}, B);
1113 else if (!isa<Instruction>(Op)) // TODO: This case could be removed
1114 buildIntrWithMD(Intrinsic::spv_assign_type, {Op->getType()}, Op, Op, {},
1115 B);
1116 }
1117 }
1118}
1119
1120void SPIRVEmitIntrinsics::insertSpirvDecorations(Instruction *I,
1121 IRBuilder<> &B) {
1122 if (MDNode *MD = I->getMetadata("spirv.Decorations")) {
1123 B.SetInsertPoint(I->getNextNode());
1124 B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
1125 {I, MetadataAsValue::get(I->getContext(), MD)});
1126 }
1127}
1128
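// Wrap tracked constants into spv_track_constant calls (both the results of
// spv_const_composite and plain constant/constant-expression operands) and
// emit spv_assign_name for named values, so that this information survives
// IRTranslation.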
1129void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *I,
1130 IRBuilder<> &B) {
1131 auto *II = dyn_cast<IntrinsicInst>(I);
1132 if (II && II->getIntrinsicID() == Intrinsic::spv_const_composite &&
1133 TrackConstants) {
1134 B.SetInsertPoint(I->getNextNode());
1135 Type *Ty = B.getInt32Ty();
1136 auto t = AggrConsts.find(I);
1137 assert(t != AggrConsts.end());
1138 auto *NewOp = buildIntrWithMD(Intrinsic::spv_track_constant, {Ty, Ty},
1139 t->second, I, {}, B);
1140 I->replaceAllUsesWith(NewOp);
1141 NewOp->setArgOperand(0, I);
1142 }
1143 for (const auto &Op : I->operands()) {
1144 if ((isa<ConstantAggregateZero>(Op) && Op->getType()->isVectorTy()) ||
1145 isa<PHINode>(I) || isa<SwitchInst>(I))
1146 TrackConstants = false;
1147 if ((isa<ConstantData>(Op) || isa<ConstantExpr>(Op)) && TrackConstants) {
1148 unsigned OpNo = Op.getOperandNo();
1149 if (II && ((II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
1150 (II->paramHasAttr(OpNo, Attribute::ImmArg))))
1151 continue;
1152 B.SetInsertPoint(I);
1153 Value *OpTyVal = Op;
1154 if (Op->getType()->isTargetExtTy())
1155 OpTyVal = Constant::getNullValue(
1156 IntegerType::get(I->getContext(), GR->getPointerSize()));
1157 auto *NewOp = buildIntrWithMD(Intrinsic::spv_track_constant,
1158 {Op->getType(), OpTyVal->getType()}, Op,
1159 OpTyVal, {}, B);
1160 I->setOperand(OpNo, NewOp);
1161 }
1162 }
1163 if (I->hasName()) {
1164 reportFatalOnTokenType(I);
1165 setInsertPointSkippingPhis(B, I->getNextNode());
1166 std::vector<Value *> Args = {I};
1167 addStringImm(I->getName(), B, Args);
1168 B.CreateIntrinsic(Intrinsic::spv_assign_name, {I->getType()}, Args);
1169 }
1170}
1171
1172Type *SPIRVEmitIntrinsics::deduceFunParamElementType(Function *F,
1173 unsigned OpIdx) {
1174 std::unordered_set<Function *> FVisited;
1175 return deduceFunParamElementType(F, OpIdx, FVisited);
1176}
1177
1178Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
1179 Function *F, unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
1180 // maybe a cycle
1181 if (FVisited.find(F) != FVisited.end())
1182 return nullptr;
1183 FVisited.insert(F);
1184
1185 std::unordered_set<Value *> Visited;
1186 SmallVector<std::pair<Function *, unsigned>> Lookup;
1187 // search in function's call sites
1188 for (User *U : F->users()) {
1189 CallInst *CI = dyn_cast<CallInst>(U);
1190 if (!CI || OpIdx >= CI->arg_size())
1191 continue;
1192 Value *OpArg = CI->getArgOperand(OpIdx);
1193 if (!isPointerTy(OpArg->getType()))
1194 continue;
1195 // maybe we already know operand's element type
1196 if (Type *KnownTy = GR->findDeducedElementType(OpArg))
1197 return KnownTy;
1198 // try to deduce from the operand itself
1199 Visited.clear();
1200 if (Type *Ty = deduceElementTypeHelper(OpArg, Visited))
1201 return Ty;
1202 // search in actual parameter's users
1203 for (User *OpU : OpArg->users()) {
1204 Instruction *Inst = dyn_cast<Instruction>(OpU);
1205 if (!Inst || Inst == CI)
1206 continue;
1207 Visited.clear();
1208 if (Type *Ty = deduceElementTypeHelper(Inst, Visited))
1209 return Ty;
1210 }
1211 // check if it's a formal parameter of the outer function
1212 if (!CI->getParent() || !CI->getParent()->getParent())
1213 continue;
1214 Function *OuterF = CI->getParent()->getParent();
1215 if (FVisited.find(OuterF) != FVisited.end())
1216 continue;
1217 for (unsigned i = 0; i < OuterF->arg_size(); ++i) {
1218 if (OuterF->getArg(i) == OpArg) {
1219 Lookup.push_back(std::make_pair(OuterF, i));
1220 break;
1221 }
1222 }
1223 }
1224
1225 // search in function parameters
1226 for (auto &Pair : Lookup) {
1227 if (Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
1228 return Ty;
1229 }
1230
1231 return nullptr;
1232}
1233
1234void SPIRVEmitIntrinsics::processParamTypesByFunHeader(Function *F,
1235 IRBuilder<> &B) {
1236 B.SetInsertPointPastAllocas(F);
1237 for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {
1238 Argument *Arg = F->getArg(OpIdx);
1239 if (!isUntypedPointerTy(Arg->getType()))
1240 continue;
1241 Type *ElemTy = GR->findDeducedElementType(Arg);
1242 if (!ElemTy && hasPointeeTypeAttr(Arg) &&
1243 (ElemTy = getPointeeTypeByAttr(Arg)) != nullptr)
1244 buildAssignPtr(B, ElemTy, Arg);
1245 }
1246}
1247
1248void SPIRVEmitIntrinsics::processParamTypes(Function *F, IRBuilder<> &B) {
1249 B.SetInsertPointPastAllocas(F);
1250 for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {
1251 Argument *Arg = F->getArg(OpIdx);
1252 if (!isUntypedPointerTy(Arg->getType()))
1253 continue;
1254 Type *ElemTy = GR->findDeducedElementType(Arg);
1255 if (!ElemTy && (ElemTy = deduceFunParamElementType(F, OpIdx)) != nullptr)
1256 buildAssignPtr(B, ElemTy, Arg);
1257 }
1258}
1259
1260bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
1261 if (Func.isDeclaration())
1262 return false;
1263
1264 const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(Func);
1265 GR = ST.getSPIRVGlobalRegistry();
1266
1267 F = &Func;
1268 IRBuilder<> B(Func.getContext());
1269 AggrConsts.clear();
1270 AggrConstTypes.clear();
1271 AggrStores.clear();
1272
1273 processParamTypesByFunHeader(F, B);
1274
1275 // A StoreInst's operand type can be changed by the next transformations,
1276 // so we need to record it in the set. Also record already transformed types.
1277 for (auto &I : instructions(Func)) {
1278 StoreInst *SI = dyn_cast<StoreInst>(&I);
1279 if (!SI)
1280 continue;
1281 Type *ElTy = SI->getValueOperand()->getType();
1282 if (ElTy->isAggregateType() || ElTy->isVectorTy())
1283 AggrStores.insert(&I);
1284 }
1285
1286 B.SetInsertPoint(&Func.getEntryBlock(), Func.getEntryBlock().begin());
1287 for (auto &GV : Func.getParent()->globals())
1288 processGlobalValue(GV, B);
1289
1290 preprocessUndefs(B);
1291 preprocessCompositeConstants(B);
1292 SmallVector<Instruction *> Worklist;
1293 for (auto &I : instructions(Func))
1294 Worklist.push_back(&I);
1295
1296 for (auto &I : Worklist) {
1297 insertAssignPtrTypeIntrs(I, B);
1298 insertAssignTypeIntrs(I, B);
1299 insertPtrCastOrAssignTypeInstr(I, B);
1300 insertSpirvDecorations(I, B);
1301 }
1302
1303 for (auto &I : instructions(Func))
1304 deduceOperandElementType(&I);
1305
1306 for (auto *I : Worklist) {
1307 TrackConstants = true;
1308 if (!I->getType()->isVoidTy() || isa<StoreInst>(I))
1309 B.SetInsertPoint(I->getNextNode());
1310 // Visitors return either the original or a newly created instruction for
1311 // further processing, or nullptr otherwise.
1312 I = visit(*I);
1313 if (!I)
1314 continue;
1315 processInstrAfterVisit(I, B);
1316 }
1317
1318 return true;
1319}
1320
1321bool SPIRVEmitIntrinsics::runOnModule(Module &M) {
1322 bool Changed = false;
1323
1324 for (auto &F : M) {
1325 Changed |= runOnFunction(F);
1326 }
1327
1328 for (auto &F : M) {
1329 // check if function parameter types are set
1330 if (!F.isDeclaration() && !F.isIntrinsic()) {
1331 const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(F);
1332 GR = ST.getSPIRVGlobalRegistry();
1333 IRBuilder<> B(F.getContext());
1334 processParamTypes(&F, B);
1335 }
1336 }
1337
1338 return Changed;
1339}
1340
1341ModulePass *llvm::createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM) {
1342 return new SPIRVEmitIntrinsics(TM);
1343}