SPIRVEmitIntrinsics.cpp
1//===-- SPIRVEmitIntrinsics.cpp - emit SPIRV intrinsics ---------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// The pass emits SPIRV intrinsics that keep essential high-level information
10// for the translation of LLVM IR to SPIR-V.
11//
12//===----------------------------------------------------------------------===//
13
14#include "SPIRV.h"
15#include "SPIRVBuiltins.h"
16#include "SPIRVMetadata.h"
17#include "SPIRVSubtarget.h"
18#include "SPIRVTargetMachine.h"
19#include "SPIRVUtils.h"
20#include "llvm/ADT/DenseSet.h"
21#include "llvm/IR/IRBuilder.h"
23#include "llvm/IR/InstVisitor.h"
24#include "llvm/IR/IntrinsicsSPIRV.h"
26
27#include <queue>
28#include <unordered_set>
29
30// This pass performs the following transformations at the LLVM IR level,
31// required for the subsequent translation to SPIR-V:
32// - replaces direct usages of aggregate constants with target-specific
33// intrinsics;
34// - replaces aggregate-related instructions (extract/insert, ld/st, etc.)
35// with target-specific intrinsics;
36// - emits intrinsics for the global variable initializers since IRTranslator
37// doesn't handle them and translating them ourselves is not very convenient;
39// - emits intrinsics to keep track of the string names assigned to the values;
40// - emits intrinsics to keep track of constants (this is necessary to still
41// have an LLVM IR constant available after the IRTranslator pass completes)
42// so that they can later be deduplicated;
43// - emits intrinsics to keep track of the original LLVM types of values so
44// that proper SPIR-V types can eventually be emitted.
45//
46// TODO: consider removing spv.track.constant in favor of spv.assign.type.
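//
// For illustration only (a rough sketch, not an exact output of this pass):
// given an untyped-pointer load such as
//   %v = load float, ptr %p
// the pass emits, roughly, an annotation of the form
//   call void @llvm.spv.assign.ptr.type.p0(ptr %p, metadata float poison, i32 0)
// so that the pointee type survives until the IR is translated to SPIR-V.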
47
48using namespace llvm;
49
50namespace llvm {
51namespace SPIRV {
52#define GET_BuiltinGroup_DECL
53#include "SPIRVGenTables.inc"
54} // namespace SPIRV
56} // namespace llvm
57
58namespace {
59
60inline MetadataAsValue *buildMD(Value *Arg) {
61 LLVMContext &Ctx = Arg->getContext();
64}
65
66class SPIRVEmitIntrinsics
67 : public ModulePass,
68 public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
69 SPIRVTargetMachine *TM = nullptr;
70 SPIRVGlobalRegistry *GR = nullptr;
71 Function *CurrF = nullptr;
72 bool TrackConstants = true;
73 bool HaveFunPtrs = false;
76 DenseSet<Instruction *> AggrStores;
77 SPIRV::InstructionSet::InstructionSet InstrSet;
78
79 // map of function declarations to <pointer arg index => element type>
81
82 // a registry of Instructions that don't yet have a complete type definition
83 bool CanTodoType = true;
84 unsigned TodoTypeSz = 0;
86 void insertTodoType(Value *Op) {
87 // TODO: add isa<CallInst>(Op) to no-insert
88 if (CanTodoType && !isa<GetElementPtrInst>(Op)) {
89 auto It = TodoType.try_emplace(Op, true);
90 if (It.second)
91 ++TodoTypeSz;
92 }
93 }
94 void eraseTodoType(Value *Op) {
95 auto It = TodoType.find(Op);
96 if (It != TodoType.end() && It->second) {
97 TodoType[Op] = false;
98 --TodoTypeSz;
99 }
100 }
101 bool isTodoType(Value *Op) {
102 if (isa<GetElementPtrInst>(Op))
103 return false;
104 auto It = TodoType.find(Op);
105 return It != TodoType.end() && It->second;
106 }
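 // Note: TodoType entries are tombstoned (flipped to false) rather than
 // erased, so TodoTypeSz, not TodoType.size(), is the number of live entries.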
107 // a registry of Instructions that were visited by deduceOperandElementType()
108 // to validate the operand types of an instruction
109 std::unordered_set<Instruction *> TypeValidated;
110
111 // well known result types of builtins
112 enum WellKnownTypes { Event };
113
114 // deduce element type of untyped pointers
115 Type *deduceElementType(Value *I, bool UnknownElemTypeI8);
116 Type *deduceElementTypeHelper(Value *I, bool UnknownElemTypeI8);
117 Type *deduceElementTypeHelper(Value *I, std::unordered_set<Value *> &Visited,
118 bool UnknownElemTypeI8,
119 bool IgnoreKnownType = false);
120 Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
121 bool UnknownElemTypeI8);
122 Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
123 std::unordered_set<Value *> &Visited,
124 bool UnknownElemTypeI8);
125 Type *deduceElementTypeByUsersDeep(Value *Op,
126 std::unordered_set<Value *> &Visited,
127 bool UnknownElemTypeI8);
128 void maybeAssignPtrType(Type *&Ty, Value *I, Type *RefTy,
129 bool UnknownElemTypeI8);
130
131 // deduce nested types of composites
132 Type *deduceNestedTypeHelper(User *U, bool UnknownElemTypeI8);
133 Type *deduceNestedTypeHelper(User *U, Type *Ty,
134 std::unordered_set<Value *> &Visited,
135 bool UnknownElemTypeI8);
136
137 // deduce Types of operands of the Instruction if possible
138 void deduceOperandElementType(Instruction *I,
139 SmallPtrSet<Instruction *, 4> *UncompleteRets,
140 const SmallPtrSet<Value *, 4> *AskOps = nullptr,
141 bool IsPostprocessing = false);
142
143 void preprocessCompositeConstants(IRBuilder<> &B);
144 void preprocessUndefs(IRBuilder<> &B);
145
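 // Builds a call to the given intrinsic: Arg2 is passed as the first operand,
 // Arg is wrapped into metadata and passed second, followed by any immediate
 // constant operands.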
146 CallInst *buildIntrWithMD(Intrinsic::ID IntrID, ArrayRef<Type *> Types,
147 Value *Arg, Value *Arg2, ArrayRef<Constant *> Imms,
148 IRBuilder<> &B) {
150 Args.push_back(Arg2);
151 Args.push_back(buildMD(Arg));
152 for (auto *Imm : Imms)
153 Args.push_back(Imm);
154 return B.CreateIntrinsic(IntrID, {Types}, Args);
155 }
156
157 Type *reconstructType(Value *Op, bool UnknownElemTypeI8,
158 bool IsPostprocessing);
159
160 void buildAssignType(IRBuilder<> &B, Type *ElemTy, Value *Arg);
161 void buildAssignPtr(IRBuilder<> &B, Type *ElemTy, Value *Arg);
162 void updateAssignType(CallInst *AssignCI, Value *Arg, Value *OfType);
163
164 void replaceMemInstrUses(Instruction *Old, Instruction *New, IRBuilder<> &B);
165 void processInstrAfterVisit(Instruction *I, IRBuilder<> &B);
166 bool insertAssignPtrTypeIntrs(Instruction *I, IRBuilder<> &B,
167 bool UnknownElemTypeI8);
168 void insertAssignTypeIntrs(Instruction *I, IRBuilder<> &B);
169 void insertAssignPtrTypeTargetExt(TargetExtType *AssignedType, Value *V,
170 IRBuilder<> &B);
171 void replacePointerOperandWithPtrCast(Instruction *I, Value *Pointer,
172 Type *ExpectedElementType,
173 unsigned OperandToReplace,
174 IRBuilder<> &B);
175 void insertPtrCastOrAssignTypeInstr(Instruction *I, IRBuilder<> &B);
177 void processGlobalValue(GlobalVariable &GV, IRBuilder<> &B);
178 void processParamTypes(Function *F, IRBuilder<> &B);
179 void processParamTypesByFunHeader(Function *F, IRBuilder<> &B);
180 Type *deduceFunParamElementType(Function *F, unsigned OpIdx);
181 Type *deduceFunParamElementType(Function *F, unsigned OpIdx,
182 std::unordered_set<Function *> &FVisited);
183
184 bool deduceOperandElementTypeCalledFunction(
185 CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
186 Type *&KnownElemTy);
187 void deduceOperandElementTypeFunctionPointer(
188 CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
189 Type *&KnownElemTy, bool IsPostprocessing);
190 bool deduceOperandElementTypeFunctionRet(
192 const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing,
193 Type *&KnownElemTy, Value *Op, Function *F);
194
195 CallInst *buildSpvPtrcast(Function *F, Value *Op, Type *ElemTy);
196 void replaceUsesOfWithSpvPtrcast(Value *Op, Type *ElemTy, Instruction *I,
198 void propagateElemType(Value *Op, Type *ElemTy,
199 DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
200 void
201 propagateElemTypeRec(Value *Op, Type *PtrElemTy, Type *CastElemTy,
202 DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
203 void propagateElemTypeRec(Value *Op, Type *PtrElemTy, Type *CastElemTy,
204 DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
205 std::unordered_set<Value *> &Visited,
207
208 void replaceAllUsesWith(Value *Src, Value *Dest, bool DeleteOld = true);
209 void replaceAllUsesWithAndErase(IRBuilder<> &B, Instruction *Src,
210 Instruction *Dest, bool DeleteOld = true);
211
212 void applyDemangledPtrArgTypes(IRBuilder<> &B);
213
214 bool runOnFunction(Function &F);
215 bool postprocessTypes(Module &M);
216 bool processFunctionPointers(Module &M);
217 void parseFunDeclarations(Module &M);
218
219 void useRoundingMode(ConstrainedFPIntrinsic *FPI, IRBuilder<> &B);
220
221public:
222 static char ID;
223 SPIRVEmitIntrinsics() : ModulePass(ID) {
225 }
226 SPIRVEmitIntrinsics(SPIRVTargetMachine *_TM) : ModulePass(ID), TM(_TM) {
228 }
243
244 StringRef getPassName() const override { return "SPIRV emit intrinsics"; }
245
246 bool runOnModule(Module &M) override;
247
248 void getAnalysisUsage(AnalysisUsage &AU) const override {
250 }
251};
252
253bool isConvergenceIntrinsic(const Instruction *I) {
254 const auto *II = dyn_cast<IntrinsicInst>(I);
255 if (!II)
256 return false;
257
258 return II->getIntrinsicID() == Intrinsic::experimental_convergence_entry ||
259 II->getIntrinsicID() == Intrinsic::experimental_convergence_loop ||
260 II->getIntrinsicID() == Intrinsic::experimental_convergence_anchor;
261}
262
263bool expectIgnoredInIRTranslation(const Instruction *I) {
264 const auto *II = dyn_cast<IntrinsicInst>(I);
265 if (!II)
266 return false;
267 return II->getIntrinsicID() == Intrinsic::invariant_start;
268}
269
270bool allowEmitFakeUse(const Value *Arg) {
271 if (isSpvIntrinsic(Arg))
272 return false;
273 if (dyn_cast<AtomicCmpXchgInst>(Arg) || dyn_cast<InsertValueInst>(Arg) ||
274 dyn_cast<UndefValue>(Arg))
275 return false;
276 if (const auto *LI = dyn_cast<LoadInst>(Arg))
277 if (LI->getType()->isAggregateType())
278 return false;
279 return true;
280}
281
282} // namespace
283
284char SPIRVEmitIntrinsics::ID = 0;
285
286INITIALIZE_PASS(SPIRVEmitIntrinsics, "emit-intrinsics", "SPIRV emit intrinsics",
287 false, false)
288
289static inline bool isAssignTypeInstr(const Instruction *I) {
290 return isa<IntrinsicInst>(I) &&
291 cast<IntrinsicInst>(I)->getIntrinsicID() == Intrinsic::spv_assign_type;
292}
293
295 return isa<StoreInst>(I) || isa<LoadInst>(I) || isa<InsertValueInst>(I) ||
296 isa<ExtractValueInst>(I) || isa<AtomicCmpXchgInst>(I);
297}
298
299static bool isAggrConstForceInt32(const Value *V) {
300 return isa<ConstantArray>(V) || isa<ConstantStruct>(V) ||
301 isa<ConstantDataArray>(V) ||
302 (isa<ConstantAggregateZero>(V) && !V->getType()->isVectorTy());
303}
304
306 if (isa<PHINode>(I))
307 B.SetInsertPoint(I->getParent()->getFirstNonPHIOrDbgOrAlloca());
308 else
309 B.SetInsertPoint(I);
310}
311
313 B.SetCurrentDebugLocation(I->getDebugLoc());
314 if (I->getType()->isVoidTy())
315 B.SetInsertPoint(I->getNextNode());
316 else
317 B.SetInsertPoint(*I->getInsertionPointAfterDef());
318}
319
321 IntrinsicInst *Intr = dyn_cast<IntrinsicInst>(I);
322 if (Intr) {
323 switch (Intr->getIntrinsicID()) {
324 case Intrinsic::invariant_start:
325 case Intrinsic::invariant_end:
326 return false;
327 }
328 }
329 return true;
330}
331
332static inline void reportFatalOnTokenType(const Instruction *I) {
333 if (I->getType()->isTokenTy())
334 report_fatal_error("A token is encountered but SPIR-V without extensions "
335 "does not support token type",
336 false);
337}
338
340 if (!I->hasName() || I->getType()->isAggregateType() ||
341 expectIgnoredInIRTranslation(I))
342 return;
345 std::vector<Value *> Args = {I};
346 addStringImm(I->getName(), B, Args);
347 B.CreateIntrinsic(Intrinsic::spv_assign_name, {I->getType()}, Args);
348}
349
350void SPIRVEmitIntrinsics::replaceAllUsesWith(Value *Src, Value *Dest,
351 bool DeleteOld) {
352 Src->replaceAllUsesWith(Dest);
353 // Update deduced type records
354 GR->updateIfExistDeducedElementType(Src, Dest, DeleteOld);
355 GR->updateIfExistAssignPtrTypeInstr(Src, Dest, DeleteOld);
356 // Update incomplete type records, if any
357 if (isTodoType(Src)) {
358 if (DeleteOld)
359 eraseTodoType(Src);
360 insertTodoType(Dest);
361 }
362}
363
364void SPIRVEmitIntrinsics::replaceAllUsesWithAndErase(IRBuilder<> &B,
365 Instruction *Src,
366 Instruction *Dest,
367 bool DeleteOld) {
368 replaceAllUsesWith(Src, Dest, DeleteOld);
369 std::string Name = Src->hasName() ? Src->getName().str() : "";
370 Src->eraseFromParent();
371 if (!Name.empty()) {
372 Dest->setName(Name);
373 emitAssignName(Dest, B);
374 }
375}
376
378 return SI && F->getCallingConv() == CallingConv::SPIR_KERNEL &&
379 isPointerTy(SI->getValueOperand()->getType()) &&
380 isa<Argument>(SI->getValueOperand());
381}
382
383// Maybe restore original function return type.
385 Type *Ty) {
386 CallInst *CI = dyn_cast<CallInst>(I);
387 if (!CI || CI->isIndirectCall() || CI->isInlineAsm() ||
389 return Ty;
390 if (Type *OriginalTy = GR->findMutated(CI->getCalledFunction()))
391 return OriginalTy;
392 return Ty;
393}
394
395// Reconstruct type with nested element types according to deduced type info.
396// Return nullptr if no detailed type info is available.
397Type *SPIRVEmitIntrinsics::reconstructType(Value *Op, bool UnknownElemTypeI8,
398 bool IsPostprocessing) {
399 Type *Ty = Op->getType();
400 if (auto *OpI = dyn_cast<Instruction>(Op))
401 Ty = restoreMutatedType(GR, OpI, Ty);
402 if (!isUntypedPointerTy(Ty))
403 return Ty;
404 // try to find the pointee type
405 if (Type *NestedTy = GR->findDeducedElementType(Op))
407 // not a pointer according to the type info (e.g., Event object)
409 if (CI) {
410 MetadataAsValue *MD = cast<MetadataAsValue>(CI->getArgOperand(1));
411 return cast<ConstantAsMetadata>(MD->getMetadata())->getType();
412 }
413 if (UnknownElemTypeI8) {
414 if (!IsPostprocessing)
415 insertTodoType(Op);
416 return getTypedPointerWrapper(IntegerType::getInt8Ty(Op->getContext()),
418 }
419 return nullptr;
420}
421
422void SPIRVEmitIntrinsics::buildAssignType(IRBuilder<> &B, Type *Ty,
423 Value *Arg) {
424 Value *OfType = PoisonValue::get(Ty);
425 CallInst *AssignCI = nullptr;
426 if (Arg->getType()->isAggregateType() && Ty->isAggregateType() &&
427 allowEmitFakeUse(Arg)) {
428 LLVMContext &Ctx = Arg->getContext();
431 MDString::get(Ctx, Arg->getName())};
432 B.CreateIntrinsic(Intrinsic::spv_value_md, {},
433 {MetadataAsValue::get(Ctx, MDTuple::get(Ctx, ArgMDs))});
434 AssignCI = B.CreateIntrinsic(Intrinsic::fake_use, {}, {Arg});
435 } else {
436 AssignCI = buildIntrWithMD(Intrinsic::spv_assign_type, {Arg->getType()},
437 OfType, Arg, {}, B);
438 }
439 GR->addAssignPtrTypeInstr(Arg, AssignCI);
440}
441
442void SPIRVEmitIntrinsics::buildAssignPtr(IRBuilder<> &B, Type *ElemTy,
443 Value *Arg) {
444 Value *OfType = PoisonValue::get(ElemTy);
445 CallInst *AssignPtrTyCI = GR->findAssignPtrTypeInstr(Arg);
446 if (AssignPtrTyCI == nullptr ||
447 AssignPtrTyCI->getParent()->getParent() != CurrF) {
448 AssignPtrTyCI = buildIntrWithMD(
449 Intrinsic::spv_assign_ptr_type, {Arg->getType()}, OfType, Arg,
450 {B.getInt32(getPointerAddressSpace(Arg->getType()))}, B);
451 GR->addDeducedElementType(AssignPtrTyCI, ElemTy);
452 GR->addDeducedElementType(Arg, ElemTy);
453 GR->addAssignPtrTypeInstr(Arg, AssignPtrTyCI);
454 } else {
455 updateAssignType(AssignPtrTyCI, Arg, OfType);
456 }
457}
458
459void SPIRVEmitIntrinsics::updateAssignType(CallInst *AssignCI, Value *Arg,
460 Value *OfType) {
461 AssignCI->setArgOperand(1, buildMD(OfType));
462 if (cast<IntrinsicInst>(AssignCI)->getIntrinsicID() !=
463 Intrinsic::spv_assign_ptr_type)
464 return;
465
466 // update association with the pointee type
467 Type *ElemTy = OfType->getType();
468 GR->addDeducedElementType(AssignCI, ElemTy);
469 GR->addDeducedElementType(Arg, ElemTy);
470}
471
472CallInst *SPIRVEmitIntrinsics::buildSpvPtrcast(Function *F, Value *Op,
473 Type *ElemTy) {
474 IRBuilder<> B(Op->getContext());
475 if (auto *OpI = dyn_cast<Instruction>(Op)) {
476 // spv_ptrcast's argument Op denotes an instruction that generates
477 // a value, and we may use getInsertionPointAfterDef()
479 } else if (auto *OpA = dyn_cast<Argument>(Op)) {
480 B.SetInsertPointPastAllocas(OpA->getParent());
481 B.SetCurrentDebugLocation(DebugLoc());
482 } else {
483 B.SetInsertPoint(F->getEntryBlock().getFirstNonPHIOrDbgOrAlloca());
484 }
485 Type *OpTy = Op->getType();
486 SmallVector<Type *, 2> Types = {OpTy, OpTy};
487 SmallVector<Value *, 2> Args = {Op, buildMD(PoisonValue::get(ElemTy)),
488 B.getInt32(getPointerAddressSpace(OpTy))};
489 CallInst *PtrCasted =
490 B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
491 buildAssignPtr(B, ElemTy, PtrCasted);
492 return PtrCasted;
493}
494
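// Replace uses of Op inside instruction I with a spv_ptrcast of Op to ElemTy,
// reusing a cast already created for the same function when one is cached in
// Ptrcasts.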
495void SPIRVEmitIntrinsics::replaceUsesOfWithSpvPtrcast(
496 Value *Op, Type *ElemTy, Instruction *I,
498 Function *F = I->getParent()->getParent();
499 CallInst *PtrCastedI = nullptr;
500 auto It = Ptrcasts.find(F);
501 if (It == Ptrcasts.end()) {
502 PtrCastedI = buildSpvPtrcast(F, Op, ElemTy);
503 Ptrcasts[F] = PtrCastedI;
504 } else {
505 PtrCastedI = It->second;
506 }
507 I->replaceUsesOfWith(Op, PtrCastedI);
508}
509
510void SPIRVEmitIntrinsics::propagateElemType(
511 Value *Op, Type *ElemTy,
512 DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
514 SmallVector<User *> Users(Op->users());
515 for (auto *U : Users) {
516 if (!isa<Instruction>(U) || isSpvIntrinsic(U))
517 continue;
518 if (!VisitedSubst.insert(std::make_pair(U, Op)).second)
519 continue;
520 Instruction *UI = dyn_cast<Instruction>(U);
521 // If the instruction was validated already, we need to keep it valid by
522 // keeping current Op type.
523 if (isa<GetElementPtrInst>(UI) ||
524 TypeValidated.find(UI) != TypeValidated.end())
525 replaceUsesOfWithSpvPtrcast(Op, ElemTy, UI, Ptrcasts);
526 }
527}
528
529void SPIRVEmitIntrinsics::propagateElemTypeRec(
530 Value *Op, Type *PtrElemTy, Type *CastElemTy,
531 DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
532 std::unordered_set<Value *> Visited;
534 propagateElemTypeRec(Op, PtrElemTy, CastElemTy, VisitedSubst, Visited,
535 Ptrcasts);
536}
537
538void SPIRVEmitIntrinsics::propagateElemTypeRec(
539 Value *Op, Type *PtrElemTy, Type *CastElemTy,
540 DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
541 std::unordered_set<Value *> &Visited,
543 if (!Visited.insert(Op).second)
544 return;
545 SmallVector<User *> Users(Op->users());
546 for (auto *U : Users) {
547 if (!isa<Instruction>(U) || isSpvIntrinsic(U))
548 continue;
549 if (!VisitedSubst.insert(std::make_pair(U, Op)).second)
550 continue;
551 Instruction *UI = dyn_cast<Instruction>(U);
552 // If the instruction was validated already, we need to keep it valid by
553 // keeping current Op type.
554 if (isa<GetElementPtrInst>(UI) ||
555 TypeValidated.find(UI) != TypeValidated.end())
556 replaceUsesOfWithSpvPtrcast(Op, CastElemTy, UI, Ptrcasts);
557 }
558}
559
560// Sets the element pointer type of the given value to ValueTy and tries to
561// refine this type further (recursively) based on the Operand value, if needed.
562
563Type *
564SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
565 bool UnknownElemTypeI8) {
566 std::unordered_set<Value *> Visited;
567 return deduceElementTypeByValueDeep(ValueTy, Operand, Visited,
568 UnknownElemTypeI8);
569}
570
571Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
572 Type *ValueTy, Value *Operand, std::unordered_set<Value *> &Visited,
573 bool UnknownElemTypeI8) {
574 Type *Ty = ValueTy;
575 if (Operand) {
576 if (auto *PtrTy = dyn_cast<PointerType>(Ty)) {
577 if (Type *NestedTy =
578 deduceElementTypeHelper(Operand, Visited, UnknownElemTypeI8))
579 Ty = getTypedPointerWrapper(NestedTy, PtrTy->getAddressSpace());
580 } else {
581 Ty = deduceNestedTypeHelper(dyn_cast<User>(Operand), Ty, Visited,
582 UnknownElemTypeI8);
583 }
584 }
585 return Ty;
586}
587
588// Traverse User instructions to deduce an element pointer type of the operand.
589Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
590 Value *Op, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8) {
591 if (!Op || !isPointerTy(Op->getType()) || isa<ConstantPointerNull>(Op) ||
592 isa<UndefValue>(Op))
593 return nullptr;
594
595 if (auto ElemTy = getPointeeType(Op->getType()))
596 return ElemTy;
597
598 // maybe we already know operand's element type
599 if (Type *KnownTy = GR->findDeducedElementType(Op))
600 return KnownTy;
601
602 for (User *OpU : Op->users()) {
603 if (Instruction *Inst = dyn_cast<Instruction>(OpU)) {
604 if (Type *Ty = deduceElementTypeHelper(Inst, Visited, UnknownElemTypeI8))
605 return Ty;
606 }
607 }
608 return nullptr;
609}
610
611// Implements what we know in advance about intrinsics and builtin calls.
612// TODO: consider whether this particular case can be generalized by encoding
613// knowledge about intrinsics and builtin calls in the corresponding
614// specification rules.
616 Function *CalledF, unsigned OpIdx) {
617 if ((DemangledName.starts_with("__spirv_ocl_printf(") ||
618 DemangledName.starts_with("printf(")) &&
619 OpIdx == 0)
620 return IntegerType::getInt8Ty(CalledF->getContext());
621 return nullptr;
622}
623
624// Deduce the element Type of the Instruction and return it,
625// or nullptr if deduction fails.
626Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(Value *I,
627 bool UnknownElemTypeI8) {
628 std::unordered_set<Value *> Visited;
629 return deduceElementTypeHelper(I, Visited, UnknownElemTypeI8);
630}
631
632void SPIRVEmitIntrinsics::maybeAssignPtrType(Type *&Ty, Value *Op, Type *RefTy,
633 bool UnknownElemTypeI8) {
634 if (isUntypedPointerTy(RefTy)) {
635 if (!UnknownElemTypeI8)
636 return;
637 insertTodoType(Op);
638 }
639 Ty = RefTy;
640}
641
642Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
643 Value *I, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8,
644 bool IgnoreKnownType) {
645 // allow passing nullptr as an argument
646 if (!I)
647 return nullptr;
648
649 // maybe already known
650 if (!IgnoreKnownType)
651 if (Type *KnownTy = GR->findDeducedElementType(I))
652 return KnownTy;
653
654 // maybe a cycle
655 if (!Visited.insert(I).second)
656 return nullptr;
657
658 // fallback value in case we fail to deduce a type
659 Type *Ty = nullptr;
660 // look for known basic patterns of type inference
661 if (auto *Ref = dyn_cast<AllocaInst>(I)) {
662 maybeAssignPtrType(Ty, I, Ref->getAllocatedType(), UnknownElemTypeI8);
663 } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
664 // TODO: not sure if GetElementPtrInst::getTypeAtIndex() does anything
665 // useful here
666 if (isNestedPointer(Ref->getSourceElementType())) {
667 Ty = Ref->getSourceElementType();
668 for (Use &U : drop_begin(Ref->indices()))
669 Ty = GetElementPtrInst::getTypeAtIndex(Ty, U.get());
670 } else {
671 Ty = Ref->getResultElementType();
672 }
673 } else if (auto *Ref = dyn_cast<LoadInst>(I)) {
674 Value *Op = Ref->getPointerOperand();
675 Type *KnownTy = GR->findDeducedElementType(Op);
676 if (!KnownTy)
677 KnownTy = Op->getType();
678 if (Type *ElemTy = getPointeeType(KnownTy))
679 maybeAssignPtrType(Ty, I, ElemTy, UnknownElemTypeI8);
680 } else if (auto *Ref = dyn_cast<GlobalValue>(I)) {
681 Ty = deduceElementTypeByValueDeep(
682 Ref->getValueType(),
683 Ref->getNumOperands() > 0 ? Ref->getOperand(0) : nullptr, Visited,
684 UnknownElemTypeI8);
685 } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
686 Type *RefTy = deduceElementTypeHelper(Ref->getPointerOperand(), Visited,
687 UnknownElemTypeI8);
688 maybeAssignPtrType(Ty, I, RefTy, UnknownElemTypeI8);
689 } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
690 if (Type *Src = Ref->getSrcTy(), *Dest = Ref->getDestTy();
691 isPointerTy(Src) && isPointerTy(Dest))
692 Ty = deduceElementTypeHelper(Ref->getOperand(0), Visited,
693 UnknownElemTypeI8);
694 } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {
695 Value *Op = Ref->getNewValOperand();
696 if (isPointerTy(Op->getType()))
697 Ty = deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8);
698 } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {
699 Value *Op = Ref->getValOperand();
700 if (isPointerTy(Op->getType()))
701 Ty = deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8);
702 } else if (auto *Ref = dyn_cast<PHINode>(I)) {
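    // Count how often each element type is deduced across the incoming values
    // and prefer the most frequent candidate.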
703 Type *BestTy = nullptr;
704 unsigned MaxN = 1;
706 for (int i = Ref->getNumIncomingValues() - 1; i >= 0; --i) {
707 Ty = deduceElementTypeByUsersDeep(Ref->getIncomingValue(i), Visited,
708 UnknownElemTypeI8);
709 if (!Ty)
710 continue;
711 auto It = PhiTys.try_emplace(Ty, 1);
712 if (!It.second) {
713 ++It.first->second;
714 if (It.first->second > MaxN) {
715 MaxN = It.first->second;
716 BestTy = Ty;
717 }
718 }
719 }
720 if (BestTy)
721 Ty = BestTy;
722 } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
723 for (Value *Op : {Ref->getTrueValue(), Ref->getFalseValue()}) {
724 Ty = deduceElementTypeByUsersDeep(Op, Visited, UnknownElemTypeI8);
725 if (Ty)
726 break;
727 }
728 } else if (auto *CI = dyn_cast<CallInst>(I)) {
729 static StringMap<unsigned> ResTypeByArg = {
730 {"to_global", 0},
731 {"to_local", 0},
732 {"to_private", 0},
733 {"__spirv_GenericCastToPtr_ToGlobal", 0},
734 {"__spirv_GenericCastToPtr_ToLocal", 0},
735 {"__spirv_GenericCastToPtr_ToPrivate", 0},
736 {"__spirv_GenericCastToPtrExplicit_ToGlobal", 0},
737 {"__spirv_GenericCastToPtrExplicit_ToLocal", 0},
738 {"__spirv_GenericCastToPtrExplicit_ToPrivate", 0}};
739 // TODO: maybe improve performance by caching demangled names
740 if (Function *CalledF = CI->getCalledFunction()) {
741 std::string DemangledName =
742 getOclOrSpirvBuiltinDemangledName(CalledF->getName());
743 if (DemangledName.length() > 0)
744 DemangledName = SPIRV::lookupBuiltinNameHelper(DemangledName);
745 auto AsArgIt = ResTypeByArg.find(DemangledName);
746 if (AsArgIt != ResTypeByArg.end())
747 Ty = deduceElementTypeHelper(CI->getArgOperand(AsArgIt->second),
748 Visited, UnknownElemTypeI8);
749 else if (Type *KnownRetTy = GR->findDeducedElementType(CalledF))
750 Ty = KnownRetTy;
751 }
752 }
753
754 // remember the found relationship
755 if (Ty && !IgnoreKnownType) {
756 // specify nested types if needed, otherwise return unchanged
757 GR->addDeducedElementType(I, Ty);
758 }
759
760 return Ty;
761}
762
763// Re-create the type of the value if it has untyped pointer fields (including
764// nested ones). Return the original value type if no correction of untyped
765// pointer information is found or needed.
766Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(User *U,
767 bool UnknownElemTypeI8) {
768 std::unordered_set<Value *> Visited;
769 return deduceNestedTypeHelper(U, U->getType(), Visited, UnknownElemTypeI8);
770}
771
772Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
773 User *U, Type *OrigTy, std::unordered_set<Value *> &Visited,
774 bool UnknownElemTypeI8) {
775 if (!U)
776 return OrigTy;
777
778 // maybe already known
779 if (Type *KnownTy = GR->findDeducedCompositeType(U))
780 return KnownTy;
781
782 // maybe a cycle
783 if (!Visited.insert(U).second)
784 return OrigTy;
785
786 if (dyn_cast<StructType>(OrigTy)) {
788 bool Change = false;
789 for (unsigned i = 0; i < U->getNumOperands(); ++i) {
790 Value *Op = U->getOperand(i);
791 Type *OpTy = Op->getType();
792 Type *Ty = OpTy;
793 if (Op) {
794 if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
795 if (Type *NestedTy =
796 deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
797 Ty = getTypedPointerWrapper(NestedTy, PtrTy->getAddressSpace());
798 } else {
799 Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited,
800 UnknownElemTypeI8);
801 }
802 }
803 Tys.push_back(Ty);
804 Change |= Ty != OpTy;
805 }
806 if (Change) {
807 Type *NewTy = StructType::create(Tys);
808 GR->addDeducedCompositeType(U, NewTy);
809 return NewTy;
810 }
811 } else if (auto *ArrTy = dyn_cast<ArrayType>(OrigTy)) {
812 if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
813 Type *OpTy = ArrTy->getElementType();
814 Type *Ty = OpTy;
815 if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
816 if (Type *NestedTy =
817 deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
818 Ty = getTypedPointerWrapper(NestedTy, PtrTy->getAddressSpace());
819 } else {
820 Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited,
821 UnknownElemTypeI8);
822 }
823 if (Ty != OpTy) {
824 Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
825 GR->addDeducedCompositeType(U, NewTy);
826 return NewTy;
827 }
828 }
829 } else if (auto *VecTy = dyn_cast<VectorType>(OrigTy)) {
830 if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
831 Type *OpTy = VecTy->getElementType();
832 Type *Ty = OpTy;
833 if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
834 if (Type *NestedTy =
835 deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
836 Ty = getTypedPointerWrapper(NestedTy, PtrTy->getAddressSpace());
837 } else {
838 Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited,
839 UnknownElemTypeI8);
840 }
841 if (Ty != OpTy) {
842 Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
843 GR->addDeducedCompositeType(U, NewTy);
844 return NewTy;
845 }
846 }
847 }
848
849 return OrigTy;
850}
851
852Type *SPIRVEmitIntrinsics::deduceElementType(Value *I, bool UnknownElemTypeI8) {
853 if (Type *Ty = deduceElementTypeHelper(I, UnknownElemTypeI8))
854 return Ty;
855 if (!UnknownElemTypeI8)
856 return nullptr;
857 insertTodoType(I);
858 return IntegerType::getInt8Ty(I->getContext());
859}
860
862 Value *PointerOperand) {
863 Type *PointeeTy = GR->findDeducedElementType(PointerOperand);
864 if (PointeeTy && !isUntypedPointerTy(PointeeTy))
865 return nullptr;
866 auto *PtrTy = dyn_cast<PointerType>(I->getType());
867 if (!PtrTy)
868 return I->getType();
869 if (Type *NestedTy = GR->findDeducedElementType(I))
870 return getTypedPointerWrapper(NestedTy, PtrTy->getAddressSpace());
871 return nullptr;
872}
873
874// Try to deduce element type for a call base. Returns false if this is an
875// indirect function invocation, and true otherwise.
876bool SPIRVEmitIntrinsics::deduceOperandElementTypeCalledFunction(
877 CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
878 Type *&KnownElemTy) {
879 Function *CalledF = CI->getCalledFunction();
880 if (!CalledF)
881 return false;
882 std::string DemangledName =
884 if (DemangledName.length() > 0 &&
885 !StringRef(DemangledName).starts_with("llvm.")) {
886 auto [Grp, Opcode, ExtNo] =
887 SPIRV::mapBuiltinToOpcode(DemangledName, InstrSet);
888 if (Opcode == SPIRV::OpGroupAsyncCopy) {
889 for (unsigned i = 0, PtrCnt = 0; i < CI->arg_size() && PtrCnt < 2; ++i) {
890 Value *Op = CI->getArgOperand(i);
891 if (!isPointerTy(Op->getType()))
892 continue;
893 ++PtrCnt;
894 if (Type *ElemTy = GR->findDeducedElementType(Op))
895 KnownElemTy = ElemTy; // src will rewrite dest if both are defined
896 Ops.push_back(std::make_pair(Op, i));
897 }
898 } else if (Grp == SPIRV::Atomic || Grp == SPIRV::AtomicFloating) {
899 if (CI->arg_size() < 2)
900 return true;
901 Value *Op = CI->getArgOperand(0);
902 if (!isPointerTy(Op->getType()))
903 return true;
904 switch (Opcode) {
905 case SPIRV::OpAtomicLoad:
906 case SPIRV::OpAtomicCompareExchangeWeak:
907 case SPIRV::OpAtomicCompareExchange:
908 case SPIRV::OpAtomicExchange:
909 case SPIRV::OpAtomicIAdd:
910 case SPIRV::OpAtomicISub:
911 case SPIRV::OpAtomicOr:
912 case SPIRV::OpAtomicXor:
913 case SPIRV::OpAtomicAnd:
914 case SPIRV::OpAtomicUMin:
915 case SPIRV::OpAtomicUMax:
916 case SPIRV::OpAtomicSMin:
917 case SPIRV::OpAtomicSMax: {
918 KnownElemTy = getAtomicElemTy(GR, CI, Op);
919 if (!KnownElemTy)
920 return true;
921 Ops.push_back(std::make_pair(Op, 0));
922 } break;
923 }
924 }
925 }
926 return true;
927}
928
929// Try to deduce element type for a function pointer.
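// The deduced type is the function type of the call, with pointer arguments
// and a pointer return type rewritten into typed-pointer wrappers whenever
// their element types are already known.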
930void SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionPointer(
931 CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
932 Type *&KnownElemTy, bool IsPostprocessing) {
933 Value *Op = CI->getCalledOperand();
934 if (!Op || !isPointerTy(Op->getType()))
935 return;
936 Ops.push_back(std::make_pair(Op, std::numeric_limits<unsigned>::max()));
937 FunctionType *FTy = CI->getFunctionType();
938 bool IsNewFTy = false, IsUncomplete = false;
940 for (Value *Arg : CI->args()) {
941 Type *ArgTy = Arg->getType();
942 if (ArgTy->isPointerTy()) {
943 if (Type *ElemTy = GR->findDeducedElementType(Arg)) {
944 IsNewFTy = true;
945 ArgTy = getTypedPointerWrapper(ElemTy, getPointerAddressSpace(ArgTy));
946 if (isTodoType(Arg))
947 IsUncomplete = true;
948 } else {
949 IsUncomplete = true;
950 }
951 }
952 ArgTys.push_back(ArgTy);
953 }
954 Type *RetTy = FTy->getReturnType();
955 if (CI->getType()->isPointerTy()) {
956 if (Type *ElemTy = GR->findDeducedElementType(CI)) {
957 IsNewFTy = true;
958 RetTy =
960 if (isTodoType(CI))
961 IsUncomplete = true;
962 } else {
963 IsUncomplete = true;
964 }
965 }
966 if (!IsPostprocessing && IsUncomplete)
967 insertTodoType(Op);
968 KnownElemTy =
969 IsNewFTy ? FunctionType::get(RetTy, ArgTys, FTy->isVarArg()) : FTy;
970}
971
972bool SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionRet(
974 const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing,
975 Type *&KnownElemTy, Value *Op, Function *F) {
976 KnownElemTy = GR->findDeducedElementType(F);
977 if (KnownElemTy)
978 return false;
979 if (Type *OpElemTy = GR->findDeducedElementType(Op)) {
980 GR->addDeducedElementType(F, OpElemTy);
981 GR->addReturnType(
982 F, TypedPointerType::get(OpElemTy,
983 getPointerAddressSpace(F->getReturnType())));
984 // non-recursive update of types in function uses
985 DenseSet<std::pair<Value *, Value *>> VisitedSubst{std::make_pair(I, Op)};
986 for (User *U : F->users()) {
987 CallInst *CI = dyn_cast<CallInst>(U);
988 if (!CI || CI->getCalledFunction() != F)
989 continue;
990 if (CallInst *AssignCI = GR->findAssignPtrTypeInstr(CI)) {
991 if (Type *PrevElemTy = GR->findDeducedElementType(CI)) {
992 updateAssignType(AssignCI, CI, PoisonValue::get(OpElemTy));
993 propagateElemType(CI, PrevElemTy, VisitedSubst);
994 }
995 }
996 }
997 // Non-recursive update of types in the function's incomplete returns.
998 // This may happen just once per function; the latch is the pair of
999 // findDeducedElementType(F) / addDeducedElementType(F, ...) calls.
1000 // With or without the latch, this is a non-recursive call because
1001 // UncompleteRets is set to nullptr in this call.
1002 if (UncompleteRets)
1003 for (Instruction *UncompleteRetI : *UncompleteRets)
1004 deduceOperandElementType(UncompleteRetI, nullptr, AskOps,
1005 IsPostprocessing);
1006 } else if (UncompleteRets) {
1007 UncompleteRets->insert(I);
1008 }
1009 TypeValidated.insert(I);
1010 return true;
1011}
1012
1013// If the Instruction has Pointer operands with unresolved types, this function
1014// tries to deduce them. If the Instruction has Pointer operands with known
1015// types which differ from expected, this function tries to insert a bitcast to
1016// resolve the issue.
1017void SPIRVEmitIntrinsics::deduceOperandElementType(
1019 const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing) {
1021 Type *KnownElemTy = nullptr;
1022 bool Uncomplete = false;
1023 // look for known basic patterns of type inference
1024 if (auto *Ref = dyn_cast<PHINode>(I)) {
1025 if (!isPointerTy(I->getType()) ||
1026 !(KnownElemTy = GR->findDeducedElementType(I)))
1027 return;
1028 Uncomplete = isTodoType(I);
1029 for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
1030 Value *Op = Ref->getIncomingValue(i);
1031 if (isPointerTy(Op->getType()))
1032 Ops.push_back(std::make_pair(Op, i));
1033 }
1034 } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
1035 KnownElemTy = GR->findDeducedElementType(I);
1036 if (!KnownElemTy)
1037 return;
1038 Uncomplete = isTodoType(I);
1039 Ops.push_back(std::make_pair(Ref->getPointerOperand(), 0));
1040 } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
1041 if (!isPointerTy(I->getType()))
1042 return;
1043 KnownElemTy = GR->findDeducedElementType(I);
1044 if (!KnownElemTy)
1045 return;
1046 Uncomplete = isTodoType(I);
1047 Ops.push_back(std::make_pair(Ref->getOperand(0), 0));
1048 } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
1049 if (GR->findDeducedElementType(Ref->getPointerOperand()))
1050 return;
1051 KnownElemTy = Ref->getSourceElementType();
1052 Ops.push_back(std::make_pair(Ref->getPointerOperand(),
1054 } else if (auto *Ref = dyn_cast<LoadInst>(I)) {
1055 KnownElemTy = I->getType();
1056 if (isUntypedPointerTy(KnownElemTy))
1057 return;
1058 Type *PointeeTy = GR->findDeducedElementType(Ref->getPointerOperand());
1059 if (PointeeTy && !isUntypedPointerTy(PointeeTy))
1060 return;
1061 Ops.push_back(std::make_pair(Ref->getPointerOperand(),
1063 } else if (auto *Ref = dyn_cast<StoreInst>(I)) {
1064 if (!(KnownElemTy =
1065 reconstructType(Ref->getValueOperand(), false, IsPostprocessing)))
1066 return;
1067 Type *PointeeTy = GR->findDeducedElementType(Ref->getPointerOperand());
1068 if (PointeeTy && !isUntypedPointerTy(PointeeTy))
1069 return;
1070 Ops.push_back(std::make_pair(Ref->getPointerOperand(),
1072 } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {
1073 KnownElemTy = getAtomicElemTy(GR, I, Ref->getPointerOperand());
1074 if (!KnownElemTy)
1075 return;
1076 Ops.push_back(std::make_pair(Ref->getPointerOperand(),
1078 } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {
1079 KnownElemTy = getAtomicElemTy(GR, I, Ref->getPointerOperand());
1080 if (!KnownElemTy)
1081 return;
1082 Ops.push_back(std::make_pair(Ref->getPointerOperand(),
1084 } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
1085 if (!isPointerTy(I->getType()) ||
1086 !(KnownElemTy = GR->findDeducedElementType(I)))
1087 return;
1088 Uncomplete = isTodoType(I);
1089 for (unsigned i = 0; i < Ref->getNumOperands(); i++) {
1090 Value *Op = Ref->getOperand(i);
1091 if (isPointerTy(Op->getType()))
1092 Ops.push_back(std::make_pair(Op, i));
1093 }
1094 } else if (auto *Ref = dyn_cast<ReturnInst>(I)) {
1095 if (!isPointerTy(CurrF->getReturnType()))
1096 return;
1097 Value *Op = Ref->getReturnValue();
1098 if (!Op)
1099 return;
1100 if (deduceOperandElementTypeFunctionRet(I, UncompleteRets, AskOps,
1101 IsPostprocessing, KnownElemTy, Op,
1102 CurrF))
1103 return;
1104 Uncomplete = isTodoType(CurrF);
1105 Ops.push_back(std::make_pair(Op, 0));
1106 } else if (auto *Ref = dyn_cast<ICmpInst>(I)) {
1107 if (!isPointerTy(Ref->getOperand(0)->getType()))
1108 return;
1109 Value *Op0 = Ref->getOperand(0);
1110 Value *Op1 = Ref->getOperand(1);
1111 Type *ElemTy0 = GR->findDeducedElementType(Op0);
1112 Type *ElemTy1 = GR->findDeducedElementType(Op1);
1113 if (ElemTy0) {
1114 KnownElemTy = ElemTy0;
1115 Uncomplete = isTodoType(Op0);
1116 Ops.push_back(std::make_pair(Op1, 1));
1117 } else if (ElemTy1) {
1118 KnownElemTy = ElemTy1;
1119 Uncomplete = isTodoType(Op1);
1120 Ops.push_back(std::make_pair(Op0, 0));
1121 }
1122 } else if (CallInst *CI = dyn_cast<CallInst>(I)) {
1123 if (!CI->isIndirectCall())
1124 deduceOperandElementTypeCalledFunction(CI, Ops, KnownElemTy);
1125 else if (HaveFunPtrs)
1126 deduceOperandElementTypeFunctionPointer(CI, Ops, KnownElemTy,
1127 IsPostprocessing);
1128 }
1129
1130 // There is not enough info to deduce types, or everything is already valid.
1131 if (!KnownElemTy || Ops.size() == 0)
1132 return;
1133
1134 LLVMContext &Ctx = CurrF->getContext();
1135 IRBuilder<> B(Ctx);
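  // For each collected operand whose deduced element type disagrees with
  // KnownElemTy, either record the new type and create/update its
  // spv_assign_ptr_type annotation, or insert an explicit spv_ptrcast when the
  // previously validated type has to be preserved.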
1136 for (auto &OpIt : Ops) {
1137 Value *Op = OpIt.first;
1138 if (Op->use_empty())
1139 continue;
1140 if (AskOps && !AskOps->contains(Op))
1141 continue;
1142 Type *AskTy = nullptr;
1143 CallInst *AskCI = nullptr;
1144 if (IsPostprocessing && AskOps) {
1145 AskTy = GR->findDeducedElementType(Op);
1146 AskCI = GR->findAssignPtrTypeInstr(Op);
1147 assert(AskTy && AskCI);
1148 }
1149 Type *Ty = AskTy ? AskTy : GR->findDeducedElementType(Op);
1150 if (Ty == KnownElemTy)
1151 continue;
1152 Value *OpTyVal = PoisonValue::get(KnownElemTy);
1153 Type *OpTy = Op->getType();
1154 if (!Ty || AskTy || isUntypedPointerTy(Ty) || isTodoType(Op)) {
1155 Type *PrevElemTy = GR->findDeducedElementType(Op);
1156 GR->addDeducedElementType(Op, KnownElemTy);
1157 // check if KnownElemTy is complete
1158 if (!Uncomplete)
1159 eraseTodoType(Op);
1160 else if (!IsPostprocessing)
1161 insertTodoType(Op);
1162 // check if there is existing Intrinsic::spv_assign_ptr_type instruction
1163 CallInst *AssignCI = AskCI ? AskCI : GR->findAssignPtrTypeInstr(Op);
1164 if (AssignCI == nullptr) {
1165 Instruction *User = dyn_cast<Instruction>(Op->use_begin()->get());
1166 setInsertPointSkippingPhis(B, User ? User->getNextNode() : I);
1167 CallInst *CI =
1168 buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {OpTy}, OpTyVal, Op,
1169 {B.getInt32(getPointerAddressSpace(OpTy))}, B);
1170 GR->addAssignPtrTypeInstr(Op, CI);
1171 } else {
1172 updateAssignType(AssignCI, Op, OpTyVal);
1174 std::make_pair(I, Op)};
1175 propagateElemTypeRec(Op, KnownElemTy, PrevElemTy, VisitedSubst);
1176 }
1177 } else {
1178 eraseTodoType(Op);
1179 CallInst *PtrCastI =
1180 buildSpvPtrcast(I->getParent()->getParent(), Op, KnownElemTy);
1181 if (OpIt.second == std::numeric_limits<unsigned>::max())
1182 dyn_cast<CallInst>(I)->setCalledOperand(PtrCastI);
1183 else
1184 I->setOperand(OpIt.second, PtrCastI);
1185 }
1186 }
1187 TypeValidated.insert(I);
1188}
1189
1190void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
1191 Instruction *New,
1192 IRBuilder<> &B) {
1193 while (!Old->user_empty()) {
1194 auto *U = Old->user_back();
1195 if (isAssignTypeInstr(U)) {
1196 B.SetInsertPoint(U);
1197 SmallVector<Value *, 2> Args = {New, U->getOperand(1)};
1198 CallInst *AssignCI =
1199 B.CreateIntrinsic(Intrinsic::spv_assign_type, {New->getType()}, Args);
1200 GR->addAssignPtrTypeInstr(New, AssignCI);
1201 U->eraseFromParent();
1202 } else if (isMemInstrToReplace(U) || isa<ReturnInst>(U) ||
1203 isa<CallInst>(U)) {
1204 U->replaceUsesOfWith(Old, New);
1205 } else {
1206 llvm_unreachable("illegal aggregate intrinsic user");
1207 }
1208 }
1209 Old->eraseFromParent();
1210}
1211
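// Replace aggregate-typed undef operands with calls to the spv_undef
// intrinsic, recording the original constants in AggrConsts/AggrConstTypes.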
1212void SPIRVEmitIntrinsics::preprocessUndefs(IRBuilder<> &B) {
1213 std::queue<Instruction *> Worklist;
1214 for (auto &I : instructions(CurrF))
1215 Worklist.push(&I);
1216
1217 while (!Worklist.empty()) {
1218 Instruction *I = Worklist.front();
1219 bool BPrepared = false;
1220 Worklist.pop();
1221
1222 for (auto &Op : I->operands()) {
1223 auto *AggrUndef = dyn_cast<UndefValue>(Op);
1224 if (!AggrUndef || !Op->getType()->isAggregateType())
1225 continue;
1226
1227 if (!BPrepared) {
1229 BPrepared = true;
1230 }
1231 auto *IntrUndef = B.CreateIntrinsic(Intrinsic::spv_undef, {}, {});
1232 Worklist.push(IntrUndef);
1233 I->replaceUsesOfWith(Op, IntrUndef);
1234 AggrConsts[IntrUndef] = AggrUndef;
1235 AggrConstTypes[IntrUndef] = AggrUndef->getType();
1236 }
1237 }
1238}
1239
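// Replace uses of aggregate constants (vectors, arrays, structs) with calls to
// the spv_const_composite intrinsic, recording the original constants in
// AggrConsts and their deduced types in AggrConstTypes.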
1240void SPIRVEmitIntrinsics::preprocessCompositeConstants(IRBuilder<> &B) {
1241 std::queue<Instruction *> Worklist;
1242 for (auto &I : instructions(CurrF))
1243 Worklist.push(&I);
1244
1245 while (!Worklist.empty()) {
1246 auto *I = Worklist.front();
1247 bool IsPhi = isa<PHINode>(I), BPrepared = false;
1248 assert(I);
1249 bool KeepInst = false;
1250 for (const auto &Op : I->operands()) {
1251 Constant *AggrConst = nullptr;
1252 Type *ResTy = nullptr;
1253 if (auto *COp = dyn_cast<ConstantVector>(Op)) {
1254 AggrConst = cast<Constant>(COp);
1255 ResTy = COp->getType();
1256 } else if (auto *COp = dyn_cast<ConstantArray>(Op)) {
1257 AggrConst = cast<Constant>(COp);
1258 ResTy = B.getInt32Ty();
1259 } else if (auto *COp = dyn_cast<ConstantStruct>(Op)) {
1260 AggrConst = cast<Constant>(COp);
1261 ResTy = B.getInt32Ty();
1262 } else if (auto *COp = dyn_cast<ConstantDataArray>(Op)) {
1263 AggrConst = cast<Constant>(COp);
1264 ResTy = B.getInt32Ty();
1265 } else if (auto *COp = dyn_cast<ConstantAggregateZero>(Op)) {
1266 AggrConst = cast<Constant>(COp);
1267 ResTy = Op->getType()->isVectorTy() ? COp->getType() : B.getInt32Ty();
1268 }
1269 if (AggrConst) {
1271 if (auto *COp = dyn_cast<ConstantDataSequential>(Op))
1272 for (unsigned i = 0; i < COp->getNumElements(); ++i)
1273 Args.push_back(COp->getElementAsConstant(i));
1274 else
1275 for (auto &COp : AggrConst->operands())
1276 Args.push_back(COp);
1277 if (!BPrepared) {
1278 IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
1279 : B.SetInsertPoint(I);
1280 BPrepared = true;
1281 }
1282 auto *CI =
1283 B.CreateIntrinsic(Intrinsic::spv_const_composite, {ResTy}, {Args});
1284 Worklist.push(CI);
1285 I->replaceUsesOfWith(Op, CI);
1286 KeepInst = true;
1287 AggrConsts[CI] = AggrConst;
1288 AggrConstTypes[CI] = deduceNestedTypeHelper(AggrConst, false);
1289 }
1290 }
1291 if (!KeepInst)
1292 Worklist.pop();
1293 }
1294}
1295
1297 IRBuilder<> &B) {
1298 LLVMContext &Ctx = I->getContext();
1300 B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
1301 {I, MetadataAsValue::get(Ctx, MDNode::get(Ctx, {Node}))});
1302}
1303
1305 unsigned RoundingModeDeco,
1306 IRBuilder<> &B) {
1307 LLVMContext &Ctx = I->getContext();
1308 Type *Int32Ty = Type::getInt32Ty(Ctx);
1309 MDNode *RoundingModeNode = MDNode::get(
1310 Ctx,
1312 ConstantInt::get(Int32Ty, SPIRV::Decoration::FPRoundingMode)),
1313 ConstantAsMetadata::get(ConstantInt::get(Int32Ty, RoundingModeDeco))});
1314 createDecorationIntrinsic(I, RoundingModeNode, B);
1315}
1316
1318 IRBuilder<> &B) {
1319 LLVMContext &Ctx = I->getContext();
1320 Type *Int32Ty = Type::getInt32Ty(Ctx);
1321 MDNode *SaturatedConversionNode =
1322 MDNode::get(Ctx, {ConstantAsMetadata::get(ConstantInt::get(
1323 Int32Ty, SPIRV::Decoration::SaturatedConversion))});
1324 createDecorationIntrinsic(I, SaturatedConversionNode, B);
1325}
1326
1327Instruction *SPIRVEmitIntrinsics::visitCallInst(CallInst &Call) {
1328 if (!Call.isInlineAsm())
1329 return &Call;
1330
1331 const InlineAsm *IA = cast<InlineAsm>(Call.getCalledOperand());
1332 LLVMContext &Ctx = CurrF->getContext();
1333
1334 Constant *TyC = UndefValue::get(IA->getFunctionType());
1335 MDString *ConstraintString = MDString::get(Ctx, IA->getConstraintString());
1337 buildMD(TyC),
1338 MetadataAsValue::get(Ctx, MDNode::get(Ctx, ConstraintString))};
1339 for (unsigned OpIdx = 0; OpIdx < Call.arg_size(); OpIdx++)
1340 Args.push_back(Call.getArgOperand(OpIdx));
1341
1342 IRBuilder<> B(Call.getParent());
1343 B.SetInsertPoint(&Call);
1344 B.CreateIntrinsic(Intrinsic::spv_inline_asm, {}, {Args});
1345 return &Call;
1346}
1347
1348// Use the hint about the rounding mode to create a decoration.
1349void SPIRVEmitIntrinsics::useRoundingMode(ConstrainedFPIntrinsic *FPI,
1350 IRBuilder<> &B) {
1351 std::optional<RoundingMode> RM = FPI->getRoundingMode();
1352 if (!RM.has_value())
1353 return;
1354 unsigned RoundingModeDeco = std::numeric_limits<unsigned>::max();
1355 switch (RM.value()) {
1356 default:
1357 // ignore unknown rounding modes
1358 break;
1359 case RoundingMode::NearestTiesToEven:
1360 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTE;
1361 break;
1362 case RoundingMode::TowardNegative:
1363 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTN;
1364 break;
1365 case RoundingMode::TowardPositive:
1366 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTP;
1367 break;
1368 case RoundingMode::TowardZero:
1369 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTZ;
1370 break;
1371 case RoundingMode::Dynamic:
1372 case RoundingMode::NearestTiesToAway:
1373 // TODO: check if supported
1374 break;
1375 }
1376 if (RoundingModeDeco == std::numeric_limits<unsigned>::max())
1377 return;
1378 // Convert the rounding mode hint into a decoration record.
1379 createRoundingModeDecoration(FPI, RoundingModeDeco, B);
1380}
1381
1382Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &I) {
1383 BasicBlock *ParentBB = I.getParent();
1384 IRBuilder<> B(ParentBB);
1385 B.SetInsertPoint(&I);
1388 for (auto &Op : I.operands()) {
1389 if (Op.get()->getType()->isSized()) {
1390 Args.push_back(Op);
1391 } else if (BasicBlock *BB = dyn_cast<BasicBlock>(Op.get())) {
1392 BBCases.push_back(BB);
1393 Args.push_back(BlockAddress::get(BB->getParent(), BB));
1394 } else {
1395 report_fatal_error("Unexpected switch operand");
1396 }
1397 }
1398 CallInst *NewI = B.CreateIntrinsic(Intrinsic::spv_switch,
1399 {I.getOperand(0)->getType()}, {Args});
1400 // remove the switch to avoid its unneeded and undesirable unwrapping into
1401 // branches and conditions
1402 replaceAllUsesWith(&I, NewI);
1403 I.eraseFromParent();
1404 // insert an artificial and temporary instruction to preserve a valid CFG;
1405 // it will be removed after the IR translation pass
1406 B.SetInsertPoint(ParentBB);
1407 IndirectBrInst *BrI = B.CreateIndirectBr(
1408 Constant::getNullValue(PointerType::getUnqual(ParentBB->getContext())),
1409 BBCases.size());
1410 for (BasicBlock *BBCase : BBCases)
1411 BrI->addDestination(BBCase);
1412 return BrI;
1413}
1414
1415Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &I) {
1416 IRBuilder<> B(I.getParent());
1417 B.SetInsertPoint(&I);
1418 SmallVector<Type *, 2> Types = {I.getType(), I.getOperand(0)->getType()};
1420 Args.push_back(B.getInt1(I.isInBounds()));
1421 for (auto &Op : I.operands())
1422 Args.push_back(Op);
1423 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
1424 replaceAllUsesWithAndErase(B, &I, NewI);
1425 return NewI;
1426}
1427
1428Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &I) {
1429 IRBuilder<> B(I.getParent());
1430 B.SetInsertPoint(&I);
1431 Value *Source = I.getOperand(0);
1432
1433 // SPIR-V, contrary to LLVM 17+ IR, supports bitcasts between pointers of
1434 // varying element types. In the case of IR coming from older versions of LLVM
1435 // such bitcasts do not provide sufficient information, so they should just be
1436 // skipped here and handled in insertPtrCastOrAssignTypeInstr.
1437 if (isPointerTy(I.getType())) {
1438 replaceAllUsesWith(&I, Source);
1439 I.eraseFromParent();
1440 return nullptr;
1441 }
1442
1443 SmallVector<Type *, 2> Types = {I.getType(), Source->getType()};
1444 SmallVector<Value *> Args(I.op_begin(), I.op_end());
1445 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_bitcast, {Types}, {Args});
1446 replaceAllUsesWithAndErase(B, &I, NewI);
1447 return NewI;
1448}
1449
1450void SPIRVEmitIntrinsics::insertAssignPtrTypeTargetExt(
1451 TargetExtType *AssignedType, Value *V, IRBuilder<> &B) {
1452 Type *VTy = V->getType();
1453
1454 // A couple of sanity checks.
1455 assert((isPointerTy(VTy)) && "Expect a pointer type!");
1456 if (Type *ElemTy = getPointeeType(VTy))
1457 if (ElemTy != AssignedType)
1458 report_fatal_error("Unexpected pointer element type!");
1459
1460 CallInst *AssignCI = GR->findAssignPtrTypeInstr(V);
1461 if (!AssignCI) {
1462 buildAssignType(B, AssignedType, V);
1463 return;
1464 }
1465
1466 Type *CurrentType =
1467 dyn_cast<ConstantAsMetadata>(
1468 cast<MetadataAsValue>(AssignCI->getOperand(1))->getMetadata())
1469 ->getType();
1470 if (CurrentType == AssignedType)
1471 return;
1472
1473 // Builtin types cannot be redeclared or casted.
1474 if (CurrentType->isTargetExtTy())
1475 report_fatal_error("Type mismatch " + CurrentType->getTargetExtName() +
1476 "/" + AssignedType->getTargetExtName() +
1477 " for value " + V->getName(),
1478 false);
1479
1480 // Our previous guess about the type seems to be wrong; let's update the
1481 // inferred type according to the new, more precise type information.
1482 updateAssignType(AssignCI, V, PoisonValue::get(AssignedType));
1483}
1484
1485void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
1486 Instruction *I, Value *Pointer, Type *ExpectedElementType,
1487 unsigned OperandToReplace, IRBuilder<> &B) {
1488 TypeValidated.insert(I);
1489
1490 // Do not emit spv_ptrcast if Pointer's element type is ExpectedElementType
1491 Type *PointerElemTy = deduceElementTypeHelper(Pointer, false);
1492 if (PointerElemTy == ExpectedElementType ||
1493 isEquivalentTypes(PointerElemTy, ExpectedElementType))
1494 return;
1495
1497 Value *ExpectedElementVal = PoisonValue::get(ExpectedElementType);
1498 MetadataAsValue *VMD = buildMD(ExpectedElementVal);
1499 unsigned AddressSpace = getPointerAddressSpace(Pointer->getType());
1500 bool FirstPtrCastOrAssignPtrType = true;
1501
1502 // Do not emit a new spv_ptrcast if an equivalent one already exists or when
1503 // spv_assign_ptr_type already targets this pointer with the same element
1504 // type.
1505 for (auto User : Pointer->users()) {
1506 auto *II = dyn_cast<IntrinsicInst>(User);
1507 if (!II ||
1508 (II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
1509 II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
1510 II->getOperand(0) != Pointer)
1511 continue;
1512
1513 // There is some spv_ptrcast/spv_assign_ptr_type already targeting this
1514 // pointer.
1515 FirstPtrCastOrAssignPtrType = false;
1516 if (II->getOperand(1) != VMD ||
1517 dyn_cast<ConstantInt>(II->getOperand(2))->getSExtValue() !=
1519 continue;
1520
1521 // The spv_ptrcast/spv_assign_ptr_type targeting this pointer is of the same
1522 // element type and address space.
1523 if (II->getIntrinsicID() != Intrinsic::spv_ptrcast)
1524 return;
1525
1526 // This must be a spv_ptrcast; do not emit a new one if this one is in the
1527 // same BB as I. Otherwise, search for another spv_ptrcast/spv_assign_ptr_type.
1528 if (II->getParent() != I->getParent())
1529 continue;
1530
1531 I->setOperand(OperandToReplace, II);
1532 return;
1533 }
1534
1535 if (isa<Instruction>(Pointer) || isa<Argument>(Pointer)) {
1536 if (FirstPtrCastOrAssignPtrType) {
1537 // If this would be the first spv_ptrcast, do not emit spv_ptrcast and
1538 // emit spv_assign_ptr_type instead.
1539 buildAssignPtr(B, ExpectedElementType, Pointer);
1540 return;
1541 } else if (isTodoType(Pointer)) {
1542 eraseTodoType(Pointer);
1543 if (!isa<CallInst>(Pointer) && !isa<GetElementPtrInst>(Pointer)) {
1544 // If this wouldn't be the first spv_ptrcast but the existing type info is
1545 // incomplete, update the spv_assign_ptr_type arguments.
1546 if (CallInst *AssignCI = GR->findAssignPtrTypeInstr(Pointer)) {
1547 Type *PrevElemTy = GR->findDeducedElementType(Pointer);
1548 assert(PrevElemTy);
1550 std::make_pair(I, Pointer)};
1551 updateAssignType(AssignCI, Pointer, ExpectedElementVal);
1552 propagateElemType(Pointer, PrevElemTy, VisitedSubst);
1553 } else {
1554 buildAssignPtr(B, ExpectedElementType, Pointer);
1555 }
1556 return;
1557 }
1558 }
1559 }
1560
1561 // Emit spv_ptrcast
1562 SmallVector<Type *, 2> Types = {Pointer->getType(), Pointer->getType()};
1564 auto *PtrCastI = B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
1565 I->setOperand(OperandToReplace, PtrCastI);
1566 // We need to set up a pointee type for the newly created spv_ptrcast.
1567 buildAssignPtr(B, ExpectedElementType, PtrCastI);
1568}
1569
1570void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(Instruction *I,
1571 IRBuilder<> &B) {
1572 // Handle basic instructions:
1573 StoreInst *SI = dyn_cast<StoreInst>(I);
1574 if (IsKernelArgInt8(CurrF, SI)) {
1575 replacePointerOperandWithPtrCast(
1576 I, SI->getValueOperand(), IntegerType::getInt8Ty(CurrF->getContext()),
1577 0, B);
1578 }
1579 if (SI) {
1580 Value *Op = SI->getValueOperand();
1581 Value *Pointer = SI->getPointerOperand();
1582 Type *OpTy = Op->getType();
1583 if (auto *OpI = dyn_cast<Instruction>(Op))
1584 OpTy = restoreMutatedType(GR, OpI, OpTy);
1585 if (OpTy == Op->getType())
1586 OpTy = deduceElementTypeByValueDeep(OpTy, Op, false);
1587 replacePointerOperandWithPtrCast(I, Pointer, OpTy, 1, B);
1588 return;
1589 }
1590 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
1591 Value *Pointer = LI->getPointerOperand();
1592 Type *OpTy = LI->getType();
1593 if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
1594 if (Type *ElemTy = GR->findDeducedElementType(LI)) {
1595 OpTy = getTypedPointerWrapper(ElemTy, PtrTy->getAddressSpace());
1596 } else {
1597 Type *NewOpTy = OpTy;
1598 OpTy = deduceElementTypeByValueDeep(OpTy, LI, false);
1599 if (OpTy == NewOpTy)
1600 insertTodoType(Pointer);
1601 }
1602 }
1603 replacePointerOperandWithPtrCast(I, Pointer, OpTy, 0, B);
1604 return;
1605 }
1606 if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(I)) {
1607 Value *Pointer = GEPI->getPointerOperand();
1608 Type *OpTy = GEPI->getSourceElementType();
1609 replacePointerOperandWithPtrCast(I, Pointer, OpTy, 0, B);
1610 if (isNestedPointer(OpTy))
1611 insertTodoType(Pointer);
1612 return;
1613 }
1614
1615 // TODO: review and merge with existing logic:
1616 // Handle calls to builtins (non-intrinsics):
1617 CallInst *CI = dyn_cast<CallInst>(I);
1618 if (!CI || CI->isIndirectCall() || CI->isInlineAsm() ||
1620 return;
1621
1622 // collect information about formal parameter types
1623 std::string DemangledName =
1625 Function *CalledF = CI->getCalledFunction();
1626 SmallVector<Type *, 4> CalledArgTys;
1627 bool HaveTypes = false;
1628 for (unsigned OpIdx = 0; OpIdx < CalledF->arg_size(); ++OpIdx) {
1629 Argument *CalledArg = CalledF->getArg(OpIdx);
1630 Type *ArgType = CalledArg->getType();
1631 if (!isPointerTy(ArgType)) {
1632 CalledArgTys.push_back(nullptr);
1633 } else if (Type *ArgTypeElem = getPointeeType(ArgType)) {
1634 CalledArgTys.push_back(ArgTypeElem);
1635 HaveTypes = true;
1636 } else {
1637 Type *ElemTy = GR->findDeducedElementType(CalledArg);
1638 if (!ElemTy && hasPointeeTypeAttr(CalledArg))
1639 ElemTy = getPointeeTypeByAttr(CalledArg);
1640 if (!ElemTy) {
1641 ElemTy = getPointeeTypeByCallInst(DemangledName, CalledF, OpIdx);
1642 if (ElemTy) {
1643 GR->addDeducedElementType(CalledArg, ElemTy);
1644 } else {
1645 for (User *U : CalledArg->users()) {
1646 if (Instruction *Inst = dyn_cast<Instruction>(U)) {
1647 if ((ElemTy = deduceElementTypeHelper(Inst, false)) != nullptr)
1648 break;
1649 }
1650 }
1651 }
1652 }
1653 HaveTypes |= ElemTy != nullptr;
1654 CalledArgTys.push_back(ElemTy);
1655 }
1656 }
1657
1658 if (DemangledName.empty() && !HaveTypes)
1659 return;
1660
1661 for (unsigned OpIdx = 0; OpIdx < CI->arg_size(); OpIdx++) {
1662 Value *ArgOperand = CI->getArgOperand(OpIdx);
1663 if (!isPointerTy(ArgOperand->getType()))
1664 continue;
1665
1666 // Constants (nulls/undefs) are handled in insertAssignPtrTypeIntrs()
1667 if (!isa<Instruction>(ArgOperand) && !isa<Argument>(ArgOperand)) {
1668      // However, we may have assumptions about the formal argument's type
1669      // and may need to insert a ptr cast for the actual parameter of this
1670      // call.
1671 Argument *CalledArg = CalledF->getArg(OpIdx);
1672 if (!GR->findDeducedElementType(CalledArg))
1673 continue;
1674 }
1675
1676 Type *ExpectedType =
1677 OpIdx < CalledArgTys.size() ? CalledArgTys[OpIdx] : nullptr;
1678 if (!ExpectedType && !DemangledName.empty())
1679      ExpectedType = SPIRV::parseBuiltinCallArgumentBaseType(
1680          DemangledName, OpIdx, I->getContext());
1681 if (!ExpectedType || ExpectedType->isVoidTy())
1682 continue;
1683
1684 if (ExpectedType->isTargetExtTy() &&
1685 !isTypedPointerWrapper(cast<TargetExtType>(ExpectedType)))
1686 insertAssignPtrTypeTargetExt(cast<TargetExtType>(ExpectedType),
1687 ArgOperand, B);
1688 else
1689 replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType, OpIdx, B);
1690 }
1691}
1692
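// The instruction visitors below replace aggregate- and memory-related
// instructions with target-specific intrinsics (spv_insertelt, spv_extractelt,
// spv_insertv, spv_extractv, spv_load, spv_store, spv_alloca/spv_alloca_array,
// spv_cmpxchg, spv_unreachable), forwarding the original operand types so the
// following IR translation can rebuild proper SPIR-V types.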
1693Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &I) {
1694 SmallVector<Type *, 4> Types = {I.getType(), I.getOperand(0)->getType(),
1695 I.getOperand(1)->getType(),
1696 I.getOperand(2)->getType()};
1697 IRBuilder<> B(I.getParent());
1698 B.SetInsertPoint(&I);
1699 SmallVector<Value *> Args(I.op_begin(), I.op_end());
1700 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_insertelt, {Types}, {Args});
1701 replaceAllUsesWithAndErase(B, &I, NewI);
1702 return NewI;
1703}
1704
1705Instruction *
1706SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &I) {
1707 IRBuilder<> B(I.getParent());
1708 B.SetInsertPoint(&I);
1709 SmallVector<Type *, 3> Types = {I.getType(), I.getVectorOperandType(),
1710 I.getIndexOperand()->getType()};
1711 SmallVector<Value *, 2> Args = {I.getVectorOperand(), I.getIndexOperand()};
1712 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_extractelt, {Types}, {Args});
1713 replaceAllUsesWithAndErase(B, &I, NewI);
1714 return NewI;
1715}
1716
1717Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &I) {
1718 IRBuilder<> B(I.getParent());
1719 B.SetInsertPoint(&I);
1720 SmallVector<Type *, 1> Types = {I.getInsertedValueOperand()->getType()};
1721  SmallVector<Value *> Args;
1722  for (auto &Op : I.operands())
1723 if (isa<UndefValue>(Op))
1724 Args.push_back(UndefValue::get(B.getInt32Ty()));
1725 else
1726 Args.push_back(Op);
1727 for (auto &Op : I.indices())
1728 Args.push_back(B.getInt32(Op));
1729 Instruction *NewI =
1730 B.CreateIntrinsic(Intrinsic::spv_insertv, {Types}, {Args});
1731 replaceMemInstrUses(&I, NewI, B);
1732 return NewI;
1733}
1734
1735Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &I) {
1736 if (I.getAggregateOperand()->getType()->isAggregateType())
1737 return &I;
1738 IRBuilder<> B(I.getParent());
1739 B.SetInsertPoint(&I);
1740  SmallVector<Value *> Args;
1741  for (auto &Op : I.operands())
1742 Args.push_back(Op);
1743 for (auto &Op : I.indices())
1744 Args.push_back(B.getInt32(Op));
1745 auto *NewI =
1746 B.CreateIntrinsic(Intrinsic::spv_extractv, {I.getType()}, {Args});
1747 replaceAllUsesWithAndErase(B, &I, NewI);
1748 return NewI;
1749}
1750
1751Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &I) {
1752 if (!I.getType()->isAggregateType())
1753 return &I;
1754 IRBuilder<> B(I.getParent());
1755 B.SetInsertPoint(&I);
1756 TrackConstants = false;
1757 const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
1758  MachineMemOperand::Flags Flags =
1759      TLI->getLoadMemOperandFlags(I, CurrF->getDataLayout());
1760 auto *NewI =
1761 B.CreateIntrinsic(Intrinsic::spv_load, {I.getOperand(0)->getType()},
1762 {I.getPointerOperand(), B.getInt16(Flags),
1763 B.getInt8(I.getAlign().value())});
1764 replaceMemInstrUses(&I, NewI, B);
1765 return NewI;
1766}
1767
1768Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &I) {
1769 if (!AggrStores.contains(&I))
1770 return &I;
1771 IRBuilder<> B(I.getParent());
1772 B.SetInsertPoint(&I);
1773 TrackConstants = false;
1774 const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
1775  MachineMemOperand::Flags Flags =
1776      TLI->getStoreMemOperandFlags(I, CurrF->getDataLayout());
1777 auto *PtrOp = I.getPointerOperand();
1778 auto *NewI = B.CreateIntrinsic(
1779 Intrinsic::spv_store, {I.getValueOperand()->getType(), PtrOp->getType()},
1780 {I.getValueOperand(), PtrOp, B.getInt16(Flags),
1781 B.getInt8(I.getAlign().value())});
1782 I.eraseFromParent();
1783 return NewI;
1784}
1785
1786Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &I) {
1787 Value *ArraySize = nullptr;
1788 if (I.isArrayAllocation()) {
1789 const SPIRVSubtarget *STI = TM->getSubtargetImpl(*I.getFunction());
1790 if (!STI->canUseExtension(
1791 SPIRV::Extension::SPV_INTEL_variable_length_array))
1793 "array allocation: this instruction requires the following "
1794 "SPIR-V extension: SPV_INTEL_variable_length_array",
1795 false);
1796 ArraySize = I.getArraySize();
1797 }
1798 IRBuilder<> B(I.getParent());
1799 B.SetInsertPoint(&I);
1800 TrackConstants = false;
1801 Type *PtrTy = I.getType();
1802 auto *NewI =
1803 ArraySize
1804 ? B.CreateIntrinsic(Intrinsic::spv_alloca_array,
1805 {PtrTy, ArraySize->getType()},
1806 {ArraySize, B.getInt8(I.getAlign().value())})
1807 : B.CreateIntrinsic(Intrinsic::spv_alloca, {PtrTy},
1808 {B.getInt8(I.getAlign().value())});
1809 replaceAllUsesWithAndErase(B, &I, NewI);
1810 return NewI;
1811}
1812
1813Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &I) {
1814 assert(I.getType()->isAggregateType() && "Aggregate result is expected");
1815 IRBuilder<> B(I.getParent());
1816 B.SetInsertPoint(&I);
1817  SmallVector<Value *> Args;
1818  for (auto &Op : I.operands())
1819 Args.push_back(Op);
1820 Args.push_back(B.getInt32(
1821 static_cast<uint32_t>(getMemScope(I.getContext(), I.getSyncScopeID()))));
1822 Args.push_back(B.getInt32(
1823 static_cast<uint32_t>(getMemSemantics(I.getSuccessOrdering()))));
1824 Args.push_back(B.getInt32(
1825 static_cast<uint32_t>(getMemSemantics(I.getFailureOrdering()))));
1826 auto *NewI = B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
1827 {I.getPointerOperand()->getType()}, {Args});
1828 replaceMemInstrUses(&I, NewI, B);
1829 return NewI;
1830}
1831
1832Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &I) {
1833 IRBuilder<> B(I.getParent());
1834 B.SetInsertPoint(&I);
1835 B.CreateIntrinsic(Intrinsic::spv_unreachable, {}, {});
1836 return &I;
1837}
1838
1839void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV,
1840 IRBuilder<> &B) {
1841  // Skip the special artificial variable llvm.global.annotations.
1842 if (GV.getName() == "llvm.global.annotations")
1843 return;
1844 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
1845    // Deduce the element type and store the result in the Global Registry.
1846    // The result itself is ignored here, because TypedPointerType is not
1847    // supported by general LLVM IR logic.
1848 deduceElementTypeHelper(&GV, false);
1849    Constant *Init = GV.getInitializer();
1850    Type *Ty = isAggrConstForceInt32(Init) ? B.getInt32Ty() : Init->getType();
1851 Constant *Const = isAggrConstForceInt32(Init) ? B.getInt32(1) : Init;
1852 auto *InitInst = B.CreateIntrinsic(Intrinsic::spv_init_global,
1853 {GV.getType(), Ty}, {&GV, Const});
1854 InitInst->setArgOperand(1, Init);
1855 }
1856 if ((!GV.hasInitializer() || isa<UndefValue>(GV.getInitializer())) &&
1857 GV.getNumUses() == 0)
1858 B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.getType(), &GV);
1859}
1860
1861// Return true if we can't decide on the pointee type now and will get back
1862// to the question later. Return false if spv_assign_ptr_type is not needed
1863// or can be inserted immediately.
1864bool SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *I,
1865 IRBuilder<> &B,
1866 bool UnknownElemTypeI8) {
1868 if (!isPointerTy(I->getType()) || !requireAssignType(I))
1869 return false;
1870
1872 if (Type *ElemTy = deduceElementType(I, UnknownElemTypeI8)) {
1873 buildAssignPtr(B, ElemTy, I);
1874 return false;
1875 }
1876 return true;
1877}
1878
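// Emit spv_assign_type for the instruction's result when it is neither void
// nor a pointer. Calls to builtins with a well-known result type (currently
// the async work-group copies, which return a spirv.Event) are typed directly,
// and floating-point rounding/saturation decorations are attached when the
// demangled builtin name carries them. Constant operands (nulls, undefs and
// constant GEP expressions) get their types recorded here as well.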
1879void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *I,
1880 IRBuilder<> &B) {
1881 // TODO: extend the list of functions with known result types
1882 static StringMap<unsigned> ResTypeWellKnown = {
1883 {"async_work_group_copy", WellKnownTypes::Event},
1884 {"async_work_group_strided_copy", WellKnownTypes::Event},
1885 {"__spirv_GroupAsyncCopy", WellKnownTypes::Event}};
1886
1888
1889 bool IsKnown = false;
1890 if (auto *CI = dyn_cast<CallInst>(I)) {
1891 if (!CI->isIndirectCall() && !CI->isInlineAsm() &&
1892 CI->getCalledFunction() && !CI->getCalledFunction()->isIntrinsic()) {
1893 Function *CalledF = CI->getCalledFunction();
1894 std::string DemangledName =
1895          getOclOrSpirvBuiltinDemangledName(CalledF->getName());
1896      FPDecorationId DecorationId = FPDecorationId::NONE;
1897 if (DemangledName.length() > 0)
1898 DemangledName =
1899 SPIRV::lookupBuiltinNameHelper(DemangledName, &DecorationId);
1900 auto ResIt = ResTypeWellKnown.find(DemangledName);
1901 if (ResIt != ResTypeWellKnown.end()) {
1902 IsKnown = true;
1904 switch (ResIt->second) {
1905 case WellKnownTypes::Event:
1906 buildAssignType(B, TargetExtType::get(I->getContext(), "spirv.Event"),
1907 I);
1908 break;
1909 }
1910 }
1911 // check if a floating rounding mode or saturation info is present
1912 switch (DecorationId) {
1913 default:
1914 break;
1915 case FPDecorationId::SAT:
1916        createSaturatedConversionDecoration(CI, B);
1917        break;
1918      case FPDecorationId::RTE:
1919        createRoundingModeDecoration(
1920            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTE, B);
1921        break;
1922      case FPDecorationId::RTZ:
1923        createRoundingModeDecoration(
1924            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTZ, B);
1925        break;
1926      case FPDecorationId::RTP:
1927        createRoundingModeDecoration(
1928            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTP, B);
1929        break;
1930      case FPDecorationId::RTN:
1931        createRoundingModeDecoration(
1932            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTN, B);
1933 break;
1934 }
1935 }
1936 }
1937
1938 Type *Ty = I->getType();
1939 if (!IsKnown && !Ty->isVoidTy() && !isPointerTy(Ty) && requireAssignType(I)) {
1941 Type *TypeToAssign = Ty;
1942 if (auto *II = dyn_cast<IntrinsicInst>(I)) {
1943 if (II->getIntrinsicID() == Intrinsic::spv_const_composite ||
1944 II->getIntrinsicID() == Intrinsic::spv_undef) {
1945 auto It = AggrConstTypes.find(II);
1946 if (It == AggrConstTypes.end())
1947 report_fatal_error("Unknown composite intrinsic type");
1948 TypeToAssign = It->second;
1949 }
1950 }
1951 TypeToAssign = restoreMutatedType(GR, I, TypeToAssign);
1952 buildAssignType(B, TypeToAssign, I);
1953 }
1954 for (const auto &Op : I->operands()) {
1955 if (isa<ConstantPointerNull>(Op) || isa<UndefValue>(Op) ||
1956 // Check GetElementPtrConstantExpr case.
1957 (isa<ConstantExpr>(Op) && isa<GEPOperator>(Op))) {
1959 Type *OpTy = Op->getType();
1960 if (isa<UndefValue>(Op) && OpTy->isAggregateType()) {
1961 CallInst *AssignCI =
1962 buildIntrWithMD(Intrinsic::spv_assign_type, {B.getInt32Ty()}, Op,
1963 UndefValue::get(B.getInt32Ty()), {}, B);
1964 GR->addAssignPtrTypeInstr(Op, AssignCI);
1965 } else if (!isa<Instruction>(Op)) {
1966 Type *OpTy = Op->getType();
1967 Type *OpTyElem = getPointeeType(OpTy);
1968 if (OpTyElem) {
1969 buildAssignPtr(B, OpTyElem, Op);
1970 } else if (isPointerTy(OpTy)) {
1971 Type *ElemTy = GR->findDeducedElementType(Op);
1972 buildAssignPtr(B, ElemTy ? ElemTy : deduceElementType(Op, true), Op);
1973 } else {
1974 CallInst *AssignCI = buildIntrWithMD(Intrinsic::spv_assign_type,
1975 {OpTy}, Op, Op, {}, B);
1976 GR->addAssignPtrTypeInstr(Op, AssignCI);
1977 }
1978 }
1979 }
1980 }
1981}
1982
1983void SPIRVEmitIntrinsics::insertSpirvDecorations(Instruction *I,
1984 IRBuilder<> &B) {
1985 if (MDNode *MD = I->getMetadata("spirv.Decorations")) {
1987 B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
1988 {I, MetadataAsValue::get(I->getContext(), MD)});
1989 }
1990}
1991
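// Post-visit bookkeeping: wrap aggregate constants and constant operands into
// spv_track_constant calls and, for pointer-typed constant operands whose
// deduced pointee type differs from i8, emit an additional spv_ptrcast so the
// operand carries the expected pointee type. Finally, emit the name-tracking
// intrinsic via emitAssignName().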
1992void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *I,
1993 IRBuilder<> &B) {
1994 auto *II = dyn_cast<IntrinsicInst>(I);
1995 bool IsConstComposite =
1996 II && II->getIntrinsicID() == Intrinsic::spv_const_composite;
1997 if (IsConstComposite && TrackConstants) {
1999 auto t = AggrConsts.find(I);
2000 assert(t != AggrConsts.end());
2001 auto *NewOp =
2002 buildIntrWithMD(Intrinsic::spv_track_constant,
2003 {II->getType(), II->getType()}, t->second, I, {}, B);
2004 replaceAllUsesWith(I, NewOp, false);
2005 NewOp->setArgOperand(0, I);
2006 }
2007 bool IsPhi = isa<PHINode>(I), BPrepared = false;
2008 for (const auto &Op : I->operands()) {
2009 if (isa<PHINode>(I) || isa<SwitchInst>(I))
2010 TrackConstants = false;
2011 if ((isa<ConstantData>(Op) || isa<ConstantExpr>(Op)) && TrackConstants) {
2012 unsigned OpNo = Op.getOperandNo();
2013 if (II && ((II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
2014 (II->paramHasAttr(OpNo, Attribute::ImmArg))))
2015 continue;
2016 if (!BPrepared) {
2017 IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
2018 : B.SetInsertPoint(I);
2019 BPrepared = true;
2020 }
2021 Type *OpTy = Op->getType();
2022 Value *OpTyVal = Op;
2023 if (OpTy->isTargetExtTy())
2024 OpTyVal = PoisonValue::get(OpTy);
2025 CallInst *NewOp =
2026 buildIntrWithMD(Intrinsic::spv_track_constant,
2027 {OpTy, OpTyVal->getType()}, Op, OpTyVal, {}, B);
2028 Type *OpElemTy = nullptr;
2029 if (!IsConstComposite && isPointerTy(OpTy) &&
2030 (OpElemTy = GR->findDeducedElementType(Op)) != nullptr &&
2031 OpElemTy != IntegerType::getInt8Ty(I->getContext())) {
2032 buildAssignPtr(B, IntegerType::getInt8Ty(I->getContext()), NewOp);
2033 SmallVector<Type *, 2> Types = {OpTy, OpTy};
2034        SmallVector<Value *, 2> Args = {
2035            NewOp, buildMD(PoisonValue::get(OpElemTy)),
2036 B.getInt32(getPointerAddressSpace(OpTy))};
2037 CallInst *PtrCasted =
2038 B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
2039 buildAssignPtr(B, OpElemTy, PtrCasted);
2040 NewOp = PtrCasted;
2041 }
2042 I->setOperand(OpNo, NewOp);
2043 }
2044 }
2045 emitAssignName(I, B);
2046}
2047
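// Deduce the pointee type of the OpIdx-th formal parameter of F from its call
// sites: first the actual argument itself, then the argument's other users,
// and, if the argument is itself a formal parameter of the caller, recurse
// into the outer function (FVisited guards against cycles).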
2048Type *SPIRVEmitIntrinsics::deduceFunParamElementType(Function *F,
2049 unsigned OpIdx) {
2050 std::unordered_set<Function *> FVisited;
2051 return deduceFunParamElementType(F, OpIdx, FVisited);
2052}
2053
2054Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
2055 Function *F, unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
2056 // maybe a cycle
2057 if (!FVisited.insert(F).second)
2058 return nullptr;
2059
2060 std::unordered_set<Value *> Visited;
2061  SmallVector<std::pair<Function *, unsigned>> Lookup;
2062  // search in function's call sites
2063 for (User *U : F->users()) {
2064 CallInst *CI = dyn_cast<CallInst>(U);
2065 if (!CI || OpIdx >= CI->arg_size())
2066 continue;
2067 Value *OpArg = CI->getArgOperand(OpIdx);
2068 if (!isPointerTy(OpArg->getType()))
2069 continue;
2070 // maybe we already know operand's element type
2071 if (Type *KnownTy = GR->findDeducedElementType(OpArg))
2072 return KnownTy;
2073 // try to deduce from the operand itself
2074 Visited.clear();
2075 if (Type *Ty = deduceElementTypeHelper(OpArg, Visited, false))
2076 return Ty;
2077 // search in actual parameter's users
2078 for (User *OpU : OpArg->users()) {
2079 Instruction *Inst = dyn_cast<Instruction>(OpU);
2080 if (!Inst || Inst == CI)
2081 continue;
2082 Visited.clear();
2083 if (Type *Ty = deduceElementTypeHelper(Inst, Visited, false))
2084 return Ty;
2085 }
2086 // check if it's a formal parameter of the outer function
2087 if (!CI->getParent() || !CI->getParent()->getParent())
2088 continue;
2089 Function *OuterF = CI->getParent()->getParent();
2090 if (FVisited.find(OuterF) != FVisited.end())
2091 continue;
2092 for (unsigned i = 0; i < OuterF->arg_size(); ++i) {
2093 if (OuterF->getArg(i) == OpArg) {
2094 Lookup.push_back(std::make_pair(OuterF, i));
2095 break;
2096 }
2097 }
2098 }
2099
2100 // search in function parameters
2101 for (auto &Pair : Lookup) {
2102 if (Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
2103 return Ty;
2104 }
2105
2106 return nullptr;
2107}
2108
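// Assign pointee types to untyped pointer arguments of F as early as possible:
// from a pointee-type attribute on the argument, from element types already
// deduced for the corresponding actual arguments at call sites, or, when
// function pointers are supported, from indirect calls made through the
// argument.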
2109void SPIRVEmitIntrinsics::processParamTypesByFunHeader(Function *F,
2110 IRBuilder<> &B) {
2111 B.SetInsertPointPastAllocas(F);
2112 for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {
2113 Argument *Arg = F->getArg(OpIdx);
2114 if (!isUntypedPointerTy(Arg->getType()))
2115 continue;
2116 Type *ElemTy = GR->findDeducedElementType(Arg);
2117 if (ElemTy)
2118 continue;
2119 if (hasPointeeTypeAttr(Arg) &&
2120 (ElemTy = getPointeeTypeByAttr(Arg)) != nullptr) {
2121 buildAssignPtr(B, ElemTy, Arg);
2122 continue;
2123 }
2124 // search in function's call sites
2125 for (User *U : F->users()) {
2126 CallInst *CI = dyn_cast<CallInst>(U);
2127 if (!CI || OpIdx >= CI->arg_size())
2128 continue;
2129 Value *OpArg = CI->getArgOperand(OpIdx);
2130 if (!isPointerTy(OpArg->getType()))
2131 continue;
2132 // maybe we already know operand's element type
2133 if ((ElemTy = GR->findDeducedElementType(OpArg)) != nullptr)
2134 break;
2135 }
2136 if (ElemTy) {
2137 buildAssignPtr(B, ElemTy, Arg);
2138 continue;
2139 }
2140 if (HaveFunPtrs) {
2141 for (User *U : Arg->users()) {
2142 CallInst *CI = dyn_cast<CallInst>(U);
2143 if (CI && !isa<IntrinsicInst>(CI) && CI->isIndirectCall() &&
2144 CI->getCalledOperand() == Arg &&
2145 CI->getParent()->getParent() == CurrF) {
2146          SmallVector<std::pair<Value *, unsigned>> Ops;
2147          deduceOperandElementTypeFunctionPointer(CI, Ops, ElemTy, false);
2148 if (ElemTy) {
2149 buildAssignPtr(B, ElemTy, Arg);
2150 break;
2151 }
2152 }
2153 }
2154 }
2155 }
2156}
2157
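// Late pass over untyped pointer arguments: if an element type can be deduced
// from the call sites after all functions were visited, either update the
// existing spv_assign_ptr_type call (propagating the new type to users) or
// build a fresh one.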
2158void SPIRVEmitIntrinsics::processParamTypes(Function *F, IRBuilder<> &B) {
2159 B.SetInsertPointPastAllocas(F);
2160 for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {
2161 Argument *Arg = F->getArg(OpIdx);
2162 if (!isUntypedPointerTy(Arg->getType()))
2163 continue;
2164 Type *ElemTy = GR->findDeducedElementType(Arg);
2165 if (!ElemTy && (ElemTy = deduceFunParamElementType(F, OpIdx)) != nullptr) {
2166 if (CallInst *AssignCI = GR->findAssignPtrTypeInstr(Arg)) {
2167        DenseSet<std::pair<Value *, Value *>> VisitedSubst;
2168        updateAssignType(AssignCI, Arg, PoisonValue::get(ElemTy));
2169 propagateElemType(Arg, IntegerType::getInt8Ty(F->getContext()),
2170 VisitedSubst);
2171 } else {
2172 buildAssignPtr(B, ElemTy, Arg);
2173 }
2174 }
2175 }
2176}
2177
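// Build a function type mirroring F's signature in which pointer arguments
// with a known deduced element type are replaced by typed-pointer wrappers;
// used as the pointee type for pointers to F.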
2178static FunctionType *getFunctionPointerElemType(Function *F,
2179                                                SPIRVGlobalRegistry *GR) {
2180 FunctionType *FTy = F->getFunctionType();
2181 bool IsNewFTy = false;
2182  SmallVector<Type *, 4> ArgTys;
2183  for (Argument &Arg : F->args()) {
2184 Type *ArgTy = Arg.getType();
2185 if (ArgTy->isPointerTy())
2186 if (Type *ElemTy = GR->findDeducedElementType(&Arg)) {
2187 IsNewFTy = true;
2188 ArgTy = getTypedPointerWrapper(ElemTy, getPointerAddressSpace(ArgTy));
2189 }
2190 ArgTys.push_back(ArgTy);
2191 }
2192 return IsNewFTy
2193 ? FunctionType::get(FTy->getReturnType(), ArgTys, FTy->isVarArg())
2194 : FTy;
2195}
2196
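// Keep indirectly referenced declarations alive and type them: declarations
// used only as function pointers are collected and called from a private
// "service" function with poison arguments. A hypothetical sketch of the
// generated helper (the real name comes from SPIRV_BACKEND_SERVICE_FUN_NAME):
//
//   define private void @<service-fun-name>() {
//   entry:
//     call void @foo(ptr poison)
//     ret void
//   }
//
// Defined functions whose address is taken get their spv_assign_ptr_type /
// spv_ptrcast users updated with the deduced function-pointer element type.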
2197bool SPIRVEmitIntrinsics::processFunctionPointers(Module &M) {
2198 SmallVector<Function *> Worklist;
2199 for (auto &F : M) {
2200 if (F.isIntrinsic())
2201 continue;
2202 if (F.isDeclaration()) {
2203 for (User *U : F.users()) {
2204 CallInst *CI = dyn_cast<CallInst>(U);
2205 if (!CI || CI->getCalledFunction() != &F) {
2206 Worklist.push_back(&F);
2207 break;
2208 }
2209 }
2210 } else {
2211 if (F.user_empty())
2212 continue;
2213 Type *FPElemTy = GR->findDeducedElementType(&F);
2214 if (!FPElemTy)
2215 FPElemTy = getFunctionPointerElemType(&F, GR);
2216 for (User *U : F.users()) {
2217 IntrinsicInst *II = dyn_cast<IntrinsicInst>(U);
2218 if (!II || II->arg_size() != 3 || II->getOperand(0) != &F)
2219 continue;
2220 if (II->getIntrinsicID() == Intrinsic::spv_assign_ptr_type ||
2221 II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
2222 updateAssignType(II, &F, PoisonValue::get(FPElemTy));
2223 break;
2224 }
2225 }
2226 }
2227 }
2228 if (Worklist.empty())
2229 return false;
2230
2231 std::string ServiceFunName = SPIRV_BACKEND_SERVICE_FUN_NAME;
2232 if (!getVacantFunctionName(M, ServiceFunName))
2234 "cannot allocate a name for the internal service function");
2235 LLVMContext &Ctx = M.getContext();
2236 Function *SF =
2237 Function::Create(FunctionType::get(Type::getVoidTy(Ctx), {}, false),
2238 GlobalValue::PrivateLinkage, ServiceFunName, M);
2239  SF->addFnAttr(SPIRV_BACKEND_SERVICE_FUN_NAME, "");
2240  BasicBlock *BB = BasicBlock::Create(Ctx, "entry", SF);
2241 IRBuilder<> IRB(BB);
2242
2243 for (Function *F : Worklist) {
2244    SmallVector<Value *> Args;
2245    for (const auto &Arg : F->args())
2246 Args.push_back(PoisonValue::get(Arg.getType()));
2247 IRB.CreateCall(F, Args);
2248 }
2249 IRB.CreateRetVoid();
2250
2251 return true;
2252}
2253
2254// Apply types parsed from demangled function declarations.
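// For every recorded <argument index, element type> pair, the type is attached
// to the actual parameter at each direct call site: Arguments get
// spv_assign_ptr_type at their function's entry, Instructions are only
// registered in the Global Registry (insertAssignTypeIntrs() finishes the job),
// and other values get the assignment in the caller's entry block. If the
// parameter is produced by a call returning a pointer, the deduced element
// type is also recorded as that callee's return pointee type.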
2255void SPIRVEmitIntrinsics::applyDemangledPtrArgTypes(IRBuilder<> &B) {
2256 for (auto It : FDeclPtrTys) {
2257 Function *F = It.first;
2258 for (auto *U : F->users()) {
2259 CallInst *CI = dyn_cast<CallInst>(U);
2260 if (!CI || CI->getCalledFunction() != F)
2261 continue;
2262 unsigned Sz = CI->arg_size();
2263 for (auto [Idx, ElemTy] : It.second) {
2264 if (Idx >= Sz)
2265 continue;
2266 Value *Param = CI->getArgOperand(Idx);
2267 if (GR->findDeducedElementType(Param) || isa<GlobalValue>(Param))
2268 continue;
2269 if (Argument *Arg = dyn_cast<Argument>(Param)) {
2270 if (!hasPointeeTypeAttr(Arg)) {
2271 B.SetInsertPointPastAllocas(Arg->getParent());
2272 B.SetCurrentDebugLocation(DebugLoc());
2273 buildAssignPtr(B, ElemTy, Arg);
2274 }
2275 } else if (isa<Instruction>(Param)) {
2276 GR->addDeducedElementType(Param, ElemTy);
2277 // insertAssignTypeIntrs() will complete buildAssignPtr()
2278 } else {
2279 B.SetInsertPoint(CI->getParent()
2280 ->getParent()
2281 ->getEntryBlock()
2282 .getFirstNonPHIOrDbgOrAlloca());
2283 buildAssignPtr(B, ElemTy, Param);
2284 }
2285 CallInst *Ref = dyn_cast<CallInst>(Param);
2286 if (!Ref)
2287 continue;
2288 Function *RefF = Ref->getCalledFunction();
2289 if (!RefF || !isPointerTy(RefF->getReturnType()) ||
2290 GR->findDeducedElementType(RefF))
2291 continue;
2292 GR->addDeducedElementType(RefF, ElemTy);
2293 GR->addReturnType(
2294            RefF, TypedPointerType::get(
2295                      ElemTy, getPointerAddressSpace(RefF->getReturnType())));
2296 }
2297 }
2298 }
2299}
2300
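// Per-function driver. After deducing argument pointee types from the header
// and collecting aggregate stores, it processes module globals, preprocesses
// undefs and composite constants, and then runs: a forward pass that assigns
// pointer/result types and inserts pointer casts, a backward pass that deduces
// operand element types from instruction results, a PHI-only forward pass, and
// finally the instruction visitors followed by constant tracking.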
2301bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
2302 if (Func.isDeclaration())
2303 return false;
2304
2305 const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(Func);
2306 GR = ST.getSPIRVGlobalRegistry();
2307 InstrSet = ST.isOpenCLEnv() ? SPIRV::InstructionSet::OpenCL_std
2308 : SPIRV::InstructionSet::GLSL_std_450;
2309
2310 if (!CurrF)
2311 HaveFunPtrs =
2312 ST.canUseExtension(SPIRV::Extension::SPV_INTEL_function_pointers);
2313
2314 CurrF = &Func;
2315 IRBuilder<> B(Func.getContext());
2316 AggrConsts.clear();
2317 AggrConstTypes.clear();
2318 AggrStores.clear();
2319
2320 processParamTypesByFunHeader(CurrF, B);
2321
2322 // StoreInst's operand type can be changed during the next transformations,
2323 // so we need to store it in the set. Also store already transformed types.
2324 for (auto &I : instructions(Func)) {
2325 StoreInst *SI = dyn_cast<StoreInst>(&I);
2326 if (!SI)
2327 continue;
2328 Type *ElTy = SI->getValueOperand()->getType();
2329 if (ElTy->isAggregateType() || ElTy->isVectorTy())
2330 AggrStores.insert(&I);
2331 }
2332
2333 B.SetInsertPoint(&Func.getEntryBlock(), Func.getEntryBlock().begin());
2334 for (auto &GV : Func.getParent()->globals())
2335 processGlobalValue(GV, B);
2336
2337 preprocessUndefs(B);
2338 preprocessCompositeConstants(B);
2339  SmallVector<Instruction *> Worklist;
2340  for (auto &I : instructions(Func))
2341 Worklist.push_back(&I);
2342
2343 applyDemangledPtrArgTypes(B);
2344
2345  // Pass forward: use operands to deduce the instruction's result.
2346 for (auto &I : Worklist) {
2347    // Don't emit intrinsics for convergence intrinsics.
2348 if (isConvergenceIntrinsic(I))
2349 continue;
2350
2351 bool Postpone = insertAssignPtrTypeIntrs(I, B, false);
2352 // if Postpone is true, we can't decide on pointee type yet
2353 insertAssignTypeIntrs(I, B);
2354 insertPtrCastOrAssignTypeInstr(I, B);
2356    // If the instruction requires a pointee type to be set, check whether we
2357    // already know it, and force it to i8 if we don't.
2358 if (Postpone && !GR->findAssignPtrTypeInstr(I))
2359 insertAssignPtrTypeIntrs(I, B, true);
2360
2361 if (auto *FPI = dyn_cast<ConstrainedFPIntrinsic>(I))
2362 useRoundingMode(FPI, B);
2363 }
2364
2365  // Pass backward: use instruction results to specify/update/cast operands
2366 // where needed.
2367 SmallPtrSet<Instruction *, 4> UncompleteRets;
2368 for (auto &I : llvm::reverse(instructions(Func)))
2369 deduceOperandElementType(&I, &UncompleteRets);
2370
2371  // Forward pass for PHIs only: their operands do not necessarily precede the
2372  // instruction in the order given by `instructions(Func)`.
2373 for (BasicBlock &BB : Func)
2374 for (PHINode &Phi : BB.phis())
2375 if (isPointerTy(Phi.getType()))
2376 deduceOperandElementType(&Phi, nullptr);
2377
2378 for (auto *I : Worklist) {
2379 TrackConstants = true;
2380 if (!I->getType()->isVoidTy() || isa<StoreInst>(I))
2381      setInsertPointAfterDef(B, I);
2382    // Visitors return either the original or a newly created instruction for
2383    // further processing, or nullptr otherwise.
2384 I = visit(*I);
2385 if (!I)
2386 continue;
2387
2388 // Don't emit intrinsics for convergence operations.
2389 if (isConvergenceIntrinsic(I))
2390 continue;
2391
2392 processInstrAfterVisit(I, B);
2393 }
2394
2395 return true;
2396}
2397
2398// Try to deduce a better pointee type for untyped pointers.
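// Instructions recorded in TodoType still have only a provisional pointee
// type; re-run element type deduction now that the whole module has been seen
// and, where an improved type is found, update the assignment and propagate it
// to users before giving deduceOperandElementType() a final chance.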
2399bool SPIRVEmitIntrinsics::postprocessTypes(Module &M) {
2400 if (!GR || TodoTypeSz == 0)
2401 return false;
2402
2403 unsigned SzTodo = TodoTypeSz;
2405 for (auto [Op, Enabled] : TodoType) {
2406 // TODO: add isa<CallInst>(Op) to continue
2407 if (!Enabled || isa<GetElementPtrInst>(Op))
2408 continue;
2409 CallInst *AssignCI = GR->findAssignPtrTypeInstr(Op);
2410 Type *KnownTy = GR->findDeducedElementType(Op);
2411 if (!KnownTy || !AssignCI)
2412 continue;
2413 assert(Op == AssignCI->getArgOperand(0));
2414 // Try to improve the type deduced after all Functions are processed.
2415 if (auto *CI = dyn_cast<Instruction>(Op)) {
2416 CurrF = CI->getParent()->getParent();
2417 std::unordered_set<Value *> Visited;
2418 if (Type *ElemTy = deduceElementTypeHelper(Op, Visited, false, true)) {
2419 if (ElemTy != KnownTy) {
2421 propagateElemType(CI, ElemTy, VisitedSubst);
2422 eraseTodoType(Op);
2423 continue;
2424 }
2425 }
2426 }
2427 for (User *U : Op->users()) {
2428 Instruction *Inst = dyn_cast<Instruction>(U);
2429 if (Inst && !isa<IntrinsicInst>(Inst))
2430 ToProcess[Inst].insert(Op);
2431 }
2432 }
2433 if (TodoTypeSz == 0)
2434 return true;
2435
2436 for (auto &F : M) {
2437 CurrF = &F;
2438 SmallPtrSet<Instruction *, 4> UncompleteRets;
2439 for (auto &I : llvm::reverse(instructions(F))) {
2440 auto It = ToProcess.find(&I);
2441 if (It == ToProcess.end())
2442 continue;
2443 It->second.remove_if([this](Value *V) { return !isTodoType(V); });
2444 if (It->second.size() == 0)
2445 continue;
2446 deduceOperandElementType(&I, &UncompleteRets, &It->second, true);
2447 if (TodoTypeSz == 0)
2448 return true;
2449 }
2450 }
2451
2452 return SzTodo > TodoTypeSz;
2453}
2454
2455// Parse and store argument types of function declarations where needed.
2456void SPIRVEmitIntrinsics::parseFunDeclarations(Module &M) {
2457 for (auto &F : M) {
2458 if (!F.isDeclaration() || F.isIntrinsic())
2459 continue;
2460 // get the demangled name
2461 std::string DemangledName = getOclOrSpirvBuiltinDemangledName(F.getName());
2462 if (DemangledName.empty())
2463 continue;
2464 // allow only OpGroupAsyncCopy use case at the moment
2465 auto [Grp, Opcode, ExtNo] =
2466 SPIRV::mapBuiltinToOpcode(DemangledName, InstrSet);
2467 if (Opcode != SPIRV::OpGroupAsyncCopy)
2468 continue;
2469 // find pointer arguments
2470    SmallVector<unsigned> Idxs;
2471    for (unsigned OpIdx = 0; OpIdx < F.arg_size(); ++OpIdx) {
2472 Argument *Arg = F.getArg(OpIdx);
2473 if (isPointerTy(Arg->getType()) && !hasPointeeTypeAttr(Arg))
2474 Idxs.push_back(OpIdx);
2475 }
2476 if (!Idxs.size())
2477 continue;
2478 // parse function arguments
2479 LLVMContext &Ctx = F.getContext();
2480    SmallVector<StringRef, 10> TypeStrs;
2481    SPIRV::parseBuiltinTypeStr(TypeStrs, DemangledName, Ctx);
2482 if (!TypeStrs.size())
2483 continue;
2484 // find type info for pointer arguments
2485 for (unsigned Idx : Idxs) {
2486 if (Idx >= TypeStrs.size())
2487 continue;
2488 if (Type *ElemTy =
2489 SPIRV::parseBuiltinCallArgumentType(TypeStrs[Idx].trim(), Ctx))
2490        if (TypedPointerType::isValidElementType(ElemTy) &&
2491            !ElemTy->isTargetExtTy())
2492 FDeclPtrTys[&F].push_back(std::make_pair(Idx, ElemTy));
2493 }
2494 }
2495}
2496
2497bool SPIRVEmitIntrinsics::runOnModule(Module &M) {
2498 bool Changed = false;
2499
2500 parseFunDeclarations(M);
2501
2502 TodoType.clear();
2503 for (auto &F : M)
2504 Changed |= runOnFunction(F);
2505
2506 // Specify function parameters after all functions were processed.
2507 for (auto &F : M) {
2508 // check if function parameter types are set
2509 CurrF = &F;
2510 if (!F.isDeclaration() && !F.isIntrinsic()) {
2511 IRBuilder<> B(F.getContext());
2512 processParamTypes(&F, B);
2513 }
2514 }
2515
2516 CanTodoType = false;
2517 Changed |= postprocessTypes(M);
2518
2519 if (HaveFunPtrs)
2520 Changed |= processFunctionPointers(M);
2521
2522 return Changed;
2523}
2524
2525ModulePass *llvm::createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM) {
2526  return new SPIRVEmitIntrinsics(TM);
2527}