LLVM 17.0.0git
TypeMetadataUtils.cpp
Go to the documentation of this file.
1//===- TypeMetadataUtils.cpp - Utilities related to type metadata ---------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file contains functions that make it easier to manipulate type metadata
10// for devirtualization.
11//
12//===----------------------------------------------------------------------===//
13
#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
20
21using namespace llvm;
22
23// Search for virtual calls that call FPtr and add them to DevirtCalls.
24static void
26 bool *HasNonCallUses, Value *FPtr, uint64_t Offset,
27 const CallInst *CI, DominatorTree &DT) {
28 for (const Use &U : FPtr->uses()) {
29 Instruction *User = cast<Instruction>(U.getUser());
30 // Ignore this instruction if it is not dominated by the type intrinsic
31 // being analyzed. Otherwise we may transform a call sharing the same
32 // vtable pointer incorrectly. Specifically, this situation can arise
33 // after indirect call promotion and inlining, where we may have uses
34 // of the vtable pointer guarded by a function pointer check, and a fallback
35 // indirect call.
36 if (!DT.dominates(CI, User))
37 continue;
38 if (isa<BitCastInst>(User)) {
39 findCallsAtConstantOffset(DevirtCalls, HasNonCallUses, User, Offset, CI,
40 DT);
41 } else if (auto *CI = dyn_cast<CallInst>(User)) {
42 DevirtCalls.push_back({Offset, *CI});
43 } else if (auto *II = dyn_cast<InvokeInst>(User)) {
44 DevirtCalls.push_back({Offset, *II});
45 } else if (HasNonCallUses) {
46 *HasNonCallUses = true;
47 }
48 }
49}
50
51// Search for virtual calls that load from VPtr and add them to DevirtCalls.
53 const Module *M, SmallVectorImpl<DevirtCallSite> &DevirtCalls, Value *VPtr,
54 int64_t Offset, const CallInst *CI, DominatorTree &DT) {
55 for (const Use &U : VPtr->uses()) {
56 Value *User = U.getUser();
57 if (isa<BitCastInst>(User)) {
58 findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset, CI, DT);
59 } else if (isa<LoadInst>(User)) {
60 findCallsAtConstantOffset(DevirtCalls, nullptr, User, Offset, CI, DT);
61 } else if (auto GEP = dyn_cast<GetElementPtrInst>(User)) {
62 // Take into account the GEP offset.
63 if (VPtr == GEP->getPointerOperand() && GEP->hasAllConstantIndices()) {
64 SmallVector<Value *, 8> Indices(drop_begin(GEP->operands()));
65 int64_t GEPOffset = M->getDataLayout().getIndexedOffsetInType(
66 GEP->getSourceElementType(), Indices);
67 findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset + GEPOffset,
68 CI, DT);
69 }
70 }
71 }
72}
73
76 SmallVectorImpl<CallInst *> &Assumes, const CallInst *CI,
77 DominatorTree &DT) {
78 assert(CI->getCalledFunction()->getIntrinsicID() == Intrinsic::type_test ||
80 Intrinsic::public_type_test);
81
82 const Module *M = CI->getParent()->getParent()->getParent();
83
84 // Find llvm.assume intrinsics for this llvm.type.test call.
85 for (const Use &CIU : CI->uses())
86 if (auto *Assume = dyn_cast<AssumeInst>(CIU.getUser()))
87 Assumes.push_back(Assume);
88
89 // If we found any, search for virtual calls based on %p and add them to
90 // DevirtCalls.
91 if (!Assumes.empty())
93 M, DevirtCalls, CI->getArgOperand(0)->stripPointerCasts(), 0, CI, DT);
94}
95
99 SmallVectorImpl<Instruction *> &Preds, bool &HasNonCallUses,
100 const CallInst *CI, DominatorTree &DT) {
102 Intrinsic::type_checked_load);
103
104 auto *Offset = dyn_cast<ConstantInt>(CI->getArgOperand(1));
105 if (!Offset) {
106 HasNonCallUses = true;
107 return;
108 }
109
110 for (const Use &U : CI->uses()) {
111 auto CIU = U.getUser();
112 if (auto EVI = dyn_cast<ExtractValueInst>(CIU)) {
113 if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 0) {
114 LoadedPtrs.push_back(EVI);
115 continue;
116 }
117 if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 1) {
118 Preds.push_back(EVI);
119 continue;
120 }
121 }
122 HasNonCallUses = true;
123 }
124
125 for (Value *LoadedPtr : LoadedPtrs)
126 findCallsAtConstantOffset(DevirtCalls, &HasNonCallUses, LoadedPtr,
127 Offset->getZExtValue(), CI, DT);
128}
129
131 Constant *TopLevelGlobal) {
132 if (I->getType()->isPointerTy()) {
133 if (Offset == 0)
134 return I;
135 return nullptr;
136 }
137
138 const DataLayout &DL = M.getDataLayout();
139
140 if (auto *C = dyn_cast<ConstantStruct>(I)) {
141 const StructLayout *SL = DL.getStructLayout(C->getType());
142 if (Offset >= SL->getSizeInBytes())
143 return nullptr;
144
145 unsigned Op = SL->getElementContainingOffset(Offset);
146 return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
147 Offset - SL->getElementOffset(Op), M,
148 TopLevelGlobal);
149 }
150 if (auto *C = dyn_cast<ConstantArray>(I)) {
151 ArrayType *VTableTy = C->getType();
152 uint64_t ElemSize = DL.getTypeAllocSize(VTableTy->getElementType());
153
154 unsigned Op = Offset / ElemSize;
155 if (Op >= C->getNumOperands())
156 return nullptr;
157
158 return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
159 Offset % ElemSize, M, TopLevelGlobal);
160 }
161
162 // (Swift-specific) relative-pointer support starts here.
163 if (auto *CI = dyn_cast<ConstantInt>(I)) {
164 if (Offset == 0 && CI->getZExtValue() == 0) {
165 return I;
166 }
167 }
168 if (auto *C = dyn_cast<ConstantExpr>(I)) {
169 switch (C->getOpcode()) {
170 case Instruction::Trunc:
171 case Instruction::PtrToInt:
172 return getPointerAtOffset(cast<Constant>(C->getOperand(0)), Offset, M,
173 TopLevelGlobal);
174 case Instruction::Sub: {
175 auto *Operand0 = cast<Constant>(C->getOperand(0));
176 auto *Operand1 = cast<Constant>(C->getOperand(1));
177
178 auto StripGEP = [](Constant *C) {
179 auto *CE = dyn_cast<ConstantExpr>(C);
180 if (!CE)
181 return C;
182 if (CE->getOpcode() != Instruction::GetElementPtr)
183 return C;
184 return CE->getOperand(0);
185 };
186 auto *Operand1TargetGlobal = StripGEP(getPointerAtOffset(Operand1, 0, M));
187
188 // Check that in the "sub (@a, @b)" expression, @b points back to the top
189 // level global (or a GEP thereof) that we're processing. Otherwise bail.
190 if (Operand1TargetGlobal != TopLevelGlobal)
191 return nullptr;
192
193 return getPointerAtOffset(Operand0, Offset, M, TopLevelGlobal);
194 }
195 default:
196 return nullptr;
197 }
198 }
199 return nullptr;
200}
201
203 for (auto *U : F->users()) {
204 auto *PtrExpr = dyn_cast<ConstantExpr>(U);
205 if (!PtrExpr || PtrExpr->getOpcode() != Instruction::PtrToInt)
206 continue;
207
208 for (auto *PtrToIntUser : PtrExpr->users()) {
209 auto *SubExpr = dyn_cast<ConstantExpr>(PtrToIntUser);
210 if (!SubExpr || SubExpr->getOpcode() != Instruction::Sub)
211 continue;
212
213 SubExpr->replaceNonMetadataUsesWith(
214 ConstantInt::get(SubExpr->getType(), 0));
215 }
216 }
217}
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
This file contains the declarations for the subclasses of Constant, which represent the different flavors of constant values that live in LLVM.
Hexagon Common GEP
#define F(x, y, z)
Definition: MD5.cpp:55
#define I(x, y, z)
Definition: MD5.cpp:58
Module.h This file contains the declarations for the Module class.
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
static void findLoadCallsAtConstantOffset(const Module *M, SmallVectorImpl< DevirtCallSite > &DevirtCalls, Value *VPtr, int64_t Offset, const CallInst *CI, DominatorTree &DT)
static void findCallsAtConstantOffset(SmallVectorImpl< DevirtCallSite > &DevirtCalls, bool *HasNonCallUses, Value *FPtr, uint64_t Offset, const CallInst *CI, DominatorTree &DT)
Class to represent array types.
Definition: DerivedTypes.h:357
Type * getElementType() const
Definition: DerivedTypes.h:370
const Function * getParent() const
Return the enclosing method, or null if none.
Definition: BasicBlock.h:112
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signature does not match the call signature.
Definition: InstrTypes.h:1406
Value * getArgOperand(unsigned i) const
Definition: InstrTypes.h:1351
This class represents a function call, abstracting a target machine's calling convention.
static Constant * get(Type *Ty, uint64_t V, bool IsSigned=false)
If Ty is a vector type, return a Constant with a splat of the given value.
Definition: Constants.cpp:887
This is an important base class in LLVM.
Definition: Constant.h:41
A parsed version of the target data layout string in and methods for querying it.
Definition: DataLayout.h:114
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
Definition: Dominators.h:166
bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
Definition: Dominators.cpp:122
Intrinsic::ID getIntrinsicID() const LLVM_READONLY
getIntrinsicID - This method returns the ID number of the specified function, or Intrinsic::not_intrinsic if the function is not an intrinsic.
Definition: Function.h:204
Module * getParent()
Get the module that this global value is contained inside of...
Definition: GlobalValue.h:652
const BasicBlock * getParent() const
Definition: Instruction.h:90
A Module instance is used to store all the information related to an LLVM module.
Definition: Module.h:65
bool empty() const
Definition: SmallVector.h:94
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
Definition: SmallVector.h:577
void push_back(const T &Elt)
Definition: SmallVector.h:416
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Definition: SmallVector.h:1200
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
Definition: DataLayout.h:625
uint64_t getSizeInBytes() const
Definition: DataLayout.h:632
uint64_t getElementOffset(unsigned Idx) const
Definition: DataLayout.h:655
unsigned getElementContainingOffset(uint64_t Offset) const
Given a valid byte offset into the structure, returns the structure index that contains it.
Definition: DataLayout.cpp:83
A Use represents the edge between a Value definition and its users.
Definition: Use.h:43
LLVM Value Representation.
Definition: Value.h:74
const Value * stripPointerCasts() const
Strip off pointer casts, all-zero GEPs and address space casts.
Definition: Value.cpp:685
iterator_range< use_iterator > uses()
Definition: Value.h:376
@ C
The default llvm calling convention, compatible with C.
Definition: CallingConv.h:34
This is an optimization pass for GlobalISel generic memory operations.
Definition: AddressRanges.h:18
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
Definition: STLExtras.h:386
@ Offset
Definition: DWP.cpp:406
void replaceRelativePointerUsersWithZero(Function *F)
Finds the same "relative pointer" pattern as described above, where the target is F,...
void findDevirtualizableCallsForTypeCheckedLoad(SmallVectorImpl< DevirtCallSite > &DevirtCalls, SmallVectorImpl< Instruction * > &LoadedPtrs, SmallVectorImpl< Instruction * > &Preds, bool &HasNonCallUses, const CallInst *CI, DominatorTree &DT)
Given a call to the intrinsic @llvm.type.checked.load, find all devirtualizable call sites based on t...
void findDevirtualizableCallsForTypeTest(SmallVectorImpl< DevirtCallSite > &DevirtCalls, SmallVectorImpl< CallInst * > &Assumes, const CallInst *CI, DominatorTree &DT)
Given a call to the intrinsic @llvm.type.test, find all devirtualizable call sites based on the call ...
Constant * getPointerAtOffset(Constant *I, uint64_t Offset, Module &M, Constant *TopLevelGlobal=nullptr)
Processes a Constant recursively looking into elements of arrays, structs and expressions to find a t...