TypeMetadataUtils.cpp
//===- TypeMetadataUtils.cpp - Utilities related to type metadata --------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains functions that make it easier to manipulate type metadata
// for devirtualization.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"

using namespace llvm;

// Search for virtual calls that call FPtr and add them to DevirtCalls.
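// For illustration only (value names are invented): FPtr is typically a
// function pointer loaded from a vtable slot, and the uses of interest look
// roughly like
//
//   %fptr = load ptr, ptr %vtable_slot
//   call void %fptr(ptr %obj)        ; recorded as {Offset, call site}
//
// Bitcasts of FPtr are looked through, calls and invokes are recorded, and
// any other use only sets *HasNonCallUses when the caller asked for it.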
static void
findCallsAtConstantOffset(SmallVectorImpl<DevirtCallSite> &DevirtCalls,
                          bool *HasNonCallUses, Value *FPtr, uint64_t Offset,
                          const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : FPtr->uses()) {
    Instruction *User = cast<Instruction>(U.getUser());
    // Ignore this instruction if it is not dominated by the type intrinsic
    // being analyzed. Otherwise we may transform a call sharing the same
    // vtable pointer incorrectly. Specifically, this situation can arise
    // after indirect call promotion and inlining, where we may have uses
    // of the vtable pointer guarded by a function pointer check, and a fallback
    // indirect call.
    if (!DT.dominates(CI, User))
      continue;
    if (isa<BitCastInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, HasNonCallUses, User, Offset, CI,
                                DT);
    } else if (auto *CI = dyn_cast<CallInst>(User)) {
      DevirtCalls.push_back({Offset, *CI});
    } else if (auto *II = dyn_cast<InvokeInst>(User)) {
      DevirtCalls.push_back({Offset, *II});
    } else if (HasNonCallUses) {
      *HasNonCallUses = true;
    }
  }
}

// Search for virtual calls that load from VPtr and add them to DevirtCalls.
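// For illustration only (value names are invented): VPtr is the vtable
// pointer the type intrinsic was applied to, and a virtual call through the
// slot at byte offset 8 would typically look like
//
//   %slot = getelementptr i8, ptr %vtable, i64 8
//   %fptr = load ptr, ptr %slot
//   call void %fptr(ptr %obj)
//
// Constant GEP offsets are accumulated into Offset before recursing, and each
// load of a slot is handed to findCallsAtConstantOffset above.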
static void findLoadCallsAtConstantOffset(
    const Module *M, SmallVectorImpl<DevirtCallSite> &DevirtCalls, Value *VPtr,
    int64_t Offset, const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : VPtr->uses()) {
    Value *User = U.getUser();
    if (isa<BitCastInst>(User)) {
      findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset, CI, DT);
    } else if (isa<LoadInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, nullptr, User, Offset, CI, DT);
    } else if (auto GEP = dyn_cast<GetElementPtrInst>(User)) {
      // Take into account the GEP offset.
      if (VPtr == GEP->getPointerOperand() && GEP->hasAllConstantIndices()) {
        SmallVector<Value *, 8> Indices(drop_begin(GEP->operands()));
        int64_t GEPOffset = M->getDataLayout().getIndexedOffsetInType(
            GEP->getSourceElementType(), Indices);
        findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset + GEPOffset,
                                      CI, DT);
      }
    }
  }
}

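// For illustration only (value names and the type identifier are invented):
// the pattern this entry point is typically applied to looks roughly like
//
//   %vtable = load ptr, ptr %obj
//   %p = call i1 @llvm.type.test(ptr %vtable, metadata !"_ZTS1A")
//   call void @llvm.assume(i1 %p)
//   %fptr = load ptr, ptr %vtable       ; vtable slot at offset 0
//   call void %fptr(ptr %obj)           ; devirtualizable call site
//
// The llvm.assume users of CI are collected into Assumes, and the loads of
// the tested pointer are followed to the virtual calls they feed.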
void llvm::findDevirtualizableCallsForTypeTest(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<CallInst *> &Assumes, const CallInst *CI,
    DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() == Intrinsic::type_test ||
         CI->getCalledFunction()->getIntrinsicID() ==
             Intrinsic::public_type_test);

  const Module *M = CI->getParent()->getParent()->getParent();

  // Find llvm.assume intrinsics for this llvm.type.test call.
  for (const Use &CIU : CI->uses())
    if (auto *Assume = dyn_cast<AssumeInst>(CIU.getUser()))
      Assumes.push_back(Assume);

  // If we found any, search for virtual calls based on %p and add them to
  // DevirtCalls.
  if (!Assumes.empty())
    findLoadCallsAtConstantOffset(
        M, DevirtCalls, CI->getArgOperand(0)->stripPointerCasts(), 0, CI, DT);
}

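// For illustration only (value names and the type identifier are invented):
// a use of llvm.type.checked.load that this entry point can analyze looks
// roughly like
//
//   %pair = call {ptr, i1} @llvm.type.checked.load(ptr %vtable, i32 8,
//                                                  metadata !"_ZTS1A")
//   %fptr = extractvalue {ptr, i1} %pair, 0   ; collected in LoadedPtrs
//   %ok   = extractvalue {ptr, i1} %pair, 1   ; collected in Preds
//   br i1 %ok, label %call, label %fallback
// call:
//   call void %fptr(ptr %obj)                 ; devirtualizable, at offset 8
//
// Any other use of the intrinsic's result sets HasNonCallUses.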
void llvm::findDevirtualizableCallsForTypeCheckedLoad(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<Instruction *> &LoadedPtrs,
    SmallVectorImpl<Instruction *> &Preds, bool &HasNonCallUses,
    const CallInst *CI, DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() ==
             Intrinsic::type_checked_load ||
         CI->getCalledFunction()->getIntrinsicID() ==
             Intrinsic::type_checked_load_relative);

  auto *Offset = dyn_cast<ConstantInt>(CI->getArgOperand(1));
  if (!Offset) {
    HasNonCallUses = true;
    return;
  }

  for (const Use &U : CI->uses()) {
    auto CIU = U.getUser();
    if (auto EVI = dyn_cast<ExtractValueInst>(CIU)) {
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 0) {
        LoadedPtrs.push_back(EVI);
        continue;
      }
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 1) {
        Preds.push_back(EVI);
        continue;
      }
    }
    HasNonCallUses = true;
  }

  for (Value *LoadedPtr : LoadedPtrs)
    findCallsAtConstantOffset(DevirtCalls, &HasNonCallUses, LoadedPtr,
                              Offset->getZExtValue(), CI, DT);
}

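// For illustration only (global names are invented, assuming 8-byte
// pointers): given a vtable initializer such as
//
//   @vt = constant { [3 x ptr] } { [3 x ptr] [ptr null, ptr @rtti, ptr @vfn] }
//
// getPointerAtOffset(Initializer, /*Offset=*/16, M) descends through the
// struct and array layers and returns the "ptr @vfn" constant. TopLevelGlobal
// is only needed for the relative-pointer case handled further below.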
Constant *llvm::getPointerAtOffset(Constant *I, uint64_t Offset, Module &M,
                                   Constant *TopLevelGlobal) {
  if (I->getType()->isPointerTy()) {
    if (Offset == 0)
      return I;
    return nullptr;
  }

  const DataLayout &DL = M.getDataLayout();

  if (auto *C = dyn_cast<ConstantStruct>(I)) {
    const StructLayout *SL = DL.getStructLayout(C->getType());
    if (Offset >= SL->getSizeInBytes())
      return nullptr;

    unsigned Op = SL->getElementContainingOffset(Offset);
    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset - SL->getElementOffset(Op), M,
                              TopLevelGlobal);
  }
  if (auto *C = dyn_cast<ConstantArray>(I)) {
    ArrayType *VTableTy = C->getType();
    uint64_t ElemSize = DL.getTypeAllocSize(VTableTy->getElementType());

    unsigned Op = Offset / ElemSize;
    if (Op >= C->getNumOperands())
      return nullptr;

    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset % ElemSize, M, TopLevelGlobal);
  }

  // (Swift-specific) relative-pointer support starts here.
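  // For illustration only (names invented): a relative-pointer vtable slot is
  // typically a 32-bit offset from the vtable itself to the target function,
  // e.g.
  //
  //   i32 trunc (i64 sub (i64 ptrtoint (ptr @vfn to i64),
  //                       i64 ptrtoint (ptr @vt  to i64)) to i32)
  //
  // The Trunc/PtrToInt/Sub cases below peel this pattern back to @vfn, after
  // checking that the subtrahend really is the enclosing TopLevelGlobal.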
  if (auto *CI = dyn_cast<ConstantInt>(I)) {
    if (Offset == 0 && CI->isZero()) {
      return I;
    }
  }
  if (auto *C = dyn_cast<ConstantExpr>(I)) {
    switch (C->getOpcode()) {
    case Instruction::Trunc:
    case Instruction::PtrToInt:
      return getPointerAtOffset(cast<Constant>(C->getOperand(0)), Offset, M,
                                TopLevelGlobal);
    case Instruction::Sub: {
      auto *Operand0 = cast<Constant>(C->getOperand(0));
      auto *Operand1 = cast<Constant>(C->getOperand(1));

      auto StripGEP = [](Constant *C) {
        auto *CE = dyn_cast<ConstantExpr>(C);
        if (!CE)
          return C;
        if (CE->getOpcode() != Instruction::GetElementPtr)
          return C;
        return CE->getOperand(0);
      };
      auto *Operand1TargetGlobal = StripGEP(getPointerAtOffset(Operand1, 0, M));

      // Check that in the "sub (@a, @b)" expression, @b points back to the top
      // level global (or a GEP thereof) that we're processing. Otherwise bail.
      if (Operand1TargetGlobal != TopLevelGlobal)
        return nullptr;

      return getPointerAtOffset(Operand0, Offset, M, TopLevelGlobal);
    }
    default:
      return nullptr;
    }
  }
  return nullptr;
}

void llvm::replaceRelativePointerUsersWithZero(Function *F) {
  for (auto *U : F->users()) {
    auto *PtrExpr = dyn_cast<ConstantExpr>(U);
    if (!PtrExpr || PtrExpr->getOpcode() != Instruction::PtrToInt)
      continue;

    for (auto *PtrToIntUser : PtrExpr->users()) {
      auto *SubExpr = dyn_cast<ConstantExpr>(PtrToIntUser);
      if (!SubExpr || SubExpr->getOpcode() != Instruction::Sub)
        continue;

      SubExpr->replaceNonMetadataUsesWith(
          ConstantInt::get(SubExpr->getType(), 0));
    }
  }
}
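// A minimal usage sketch, not part of this file (the surrounding pass, the
// TypeTestCall value, and the DevirtCallSite member names are assumptions):
//
//   SmallVector<DevirtCallSite, 1> DevirtCalls;
//   SmallVector<CallInst *, 1> Assumes;
//   // DT is the DominatorTree of the function containing TypeTestCall.
//   findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, TypeTestCall, DT);
//   for (const DevirtCallSite &Site : DevirtCalls) {
//     // Site.Offset is the byte offset of the vtable slot; Site.CB is the
//     // call or invoke instruction that could be devirtualized.
//   }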