Bug Summary

File: lib/Transforms/IPO/WholeProgramDevirt.cpp
Warning: line 1148, column 20
Access to field 'TheKind' results in a dereference of a null pointer (loaded from variable 'Res')
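
The path the analyzer reports: DevirtModule::tryVirtualConstProp initializes ResByArg to null (line 1232) and only assigns it when Res is non-null (lines 1233-1234); on the reported path Res is null, so a null ResByArg is passed as the 4th parameter of DevirtModule::tryUniqueRetValOpt (line 1240), which dereferences it unconditionally once CSInfo.isExported() holds (line 1148).

The following standalone sketch reproduces the shape of that path. The names echo the source, but the types are simplified stand-ins, not the real LLVM classes:

  #include <cstdint>

  struct ByArg { int TheKind = 0; uint64_t Info = 0; };

  struct CallSiteInfo {
    bool SummaryHasTypeTestAssumeUsers = false;
    bool isExported() const { return SummaryHasTypeTestAssumeUsers; }
  };

  // Mirrors tryUniqueRetValOpt: Res is dereferenced whenever the call
  // sites are exported, with no null check (cf. line 1148).
  bool tryUniqueRetValOpt(CallSiteInfo &CSInfo, ByArg *Res) {
    if (CSInfo.isExported()) {
      Res->TheKind = 1; // null dereference when Res == nullptr
      Res->Info = 1;
    }
    return true;
  }

  // Mirrors tryVirtualConstProp: ResByArg stays null when the caller has
  // no resolution object to fill in (cf. lines 1232-1240).
  bool tryVirtualConstProp(ByArg *MaybeRes, CallSiteInfo &CSInfo) {
    ByArg *ResByArg = nullptr;
    if (MaybeRes)
      ResByArg = MaybeRes; // not taken on the reported path
    return tryUniqueRetValOpt(CSInfo, ResByArg);
  }

  int main() {
    CallSiteInfo CSInfo;
    CSInfo.SummaryHasTypeTestAssumeUsers = true; // isExported() == true
    return tryVirtualConstProp(nullptr, CSInfo); // crashes as reported
  }

In the real pass the two conditions appear to be correlated (call sites are only marked exported while an export summary, and hence a non-null Res, is in use), so this may be a false positive guarded by an invariant the analyzer cannot see; a note after tryVirtualConstProp (line 1296) below sketches one way to make that invariant explicit.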

Annotated Source Code


clang -cc1 -triple x86_64-pc-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name WholeProgramDevirt.cpp -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -mrelocation-model pic -pic-level 2 -mthread-model posix -fmath-errno -masm-verbose -mconstructor-aliases -munwind-tables -fuse-init-array -target-cpu x86-64 -dwarf-column-info -debugger-tuning=gdb -momit-leaf-frame-pointer -ffunction-sections -fdata-sections -resource-dir /usr/lib/llvm-8/lib/clang/8.0.0 -D _DEBUG -D _GNU_SOURCE -D __STDC_CONSTANT_MACROS -D __STDC_FORMAT_MACROS -D __STDC_LIMIT_MACROS -I /build/llvm-toolchain-snapshot-8~svn345461/build-llvm/lib/Transforms/IPO -I /build/llvm-toolchain-snapshot-8~svn345461/lib/Transforms/IPO -I /build/llvm-toolchain-snapshot-8~svn345461/build-llvm/include -I /build/llvm-toolchain-snapshot-8~svn345461/include -U NDEBUG -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/6.3.0/../../../../include/c++/6.3.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/6.3.0/../../../../include/x86_64-linux-gnu/c++/6.3.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/6.3.0/../../../../include/x86_64-linux-gnu/c++/6.3.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/6.3.0/../../../../include/c++/6.3.0/backward -internal-isystem /usr/include/clang/8.0.0/include/ -internal-isystem /usr/local/include -internal-isystem /usr/lib/llvm-8/lib/clang/8.0.0/include -internal-externc-isystem /usr/include/x86_64-linux-gnu -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-unused-parameter -Wwrite-strings -Wno-missing-field-initializers -Wno-long-long -Wno-maybe-uninitialized -Wno-comment -std=c++11 -fdeprecated-macro -fdebug-compilation-dir /build/llvm-toolchain-snapshot-8~svn345461/build-llvm/lib/Transforms/IPO -ferror-limit 19 -fmessage-length 0 -fvisibility-inlines-hidden -fobjc-runtime=gcc -fdiagnostics-show-option -vectorize-loops -vectorize-slp -analyzer-output=html -analyzer-config stable-report-filename=true -o /tmp/scan-build-2018-10-27-211344-32123-1 -x c++ /build/llvm-toolchain-snapshot-8~svn345461/lib/Transforms/IPO/WholeProgramDevirt.cpp -faddrsig
1//===- WholeProgramDevirt.cpp - Whole program virtual call optimization ---===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This pass implements whole program optimization of virtual calls in cases
11// where we know (via !type metadata) that the list of callees is fixed. This
12// includes the following:
13// - Single implementation devirtualization: if a virtual call has a single
14// possible callee, replace all calls with a direct call to that callee.
15// - Virtual constant propagation: if the virtual function's return type is an
16// integer <=64 bits and all possible callees are readnone, for each class and
17// each list of constant arguments: evaluate the function, store the return
18// value alongside the virtual table, and rewrite each virtual call as a load
19// from the virtual table.
20// - Uniform return value optimization: if the conditions for virtual constant
21// propagation hold and each function returns the same constant value, replace
22// each virtual call with that constant.
23// - Unique return value optimization for i1 return values: if the conditions
24// for virtual constant propagation hold and a single vtable's function
25// returns 0, or a single vtable's function returns 1, replace each virtual
26// call with a comparison of the vptr against that vtable's address.
27//
28// This pass is intended to be used during the regular and thin LTO pipelines.
29// During regular LTO, the pass determines the best optimization for each
30// virtual call and applies the resolutions directly to virtual calls that are
31// eligible for virtual call optimization (i.e. calls that use either of the
32// llvm.assume(llvm.type.test) or llvm.type.checked.load intrinsics). During
33// ThinLTO, the pass operates in two phases:
34// - Export phase: this is run during the thin link over a single merged module
35// that contains all vtables with !type metadata that participate in the link.
36// The pass computes a resolution for each virtual call and stores it in the
37// type identifier summary.
38// - Import phase: this is run during the thin backends over the individual
39// modules. The pass applies the resolutions previously computed during the
40// export phase to each eligible virtual call.
41//
42//===----------------------------------------------------------------------===//
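As a concrete illustration of the transformations described in the comment above (an editorial example, not code from this file): given C++ like

  struct A { virtual int f(); };
  struct B : A { int f() override { return 42; } };
  int call(A *a) { return a->f(); }

if whole-program analysis proves B::f is the only possible callee, single implementation devirtualization rewrites the indirect call through the vtable into a direct call to B::f; and since B::f is readnone and returns a constant that fits in 64 bits, uniform return value optimization could go further and replace the call with the constant 42.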
43
44#include "llvm/Transforms/IPO/WholeProgramDevirt.h"
45#include "llvm/ADT/ArrayRef.h"
46#include "llvm/ADT/DenseMap.h"
47#include "llvm/ADT/DenseMapInfo.h"
48#include "llvm/ADT/DenseSet.h"
49#include "llvm/ADT/MapVector.h"
50#include "llvm/ADT/SmallVector.h"
51#include "llvm/ADT/iterator_range.h"
52#include "llvm/Analysis/AliasAnalysis.h"
53#include "llvm/Analysis/BasicAliasAnalysis.h"
54#include "llvm/Analysis/OptimizationRemarkEmitter.h"
55#include "llvm/Analysis/TypeMetadataUtils.h"
56#include "llvm/IR/CallSite.h"
57#include "llvm/IR/Constants.h"
58#include "llvm/IR/DataLayout.h"
59#include "llvm/IR/DebugLoc.h"
60#include "llvm/IR/DerivedTypes.h"
61#include "llvm/IR/Dominators.h"
62#include "llvm/IR/Function.h"
63#include "llvm/IR/GlobalAlias.h"
64#include "llvm/IR/GlobalVariable.h"
65#include "llvm/IR/IRBuilder.h"
66#include "llvm/IR/InstrTypes.h"
67#include "llvm/IR/Instruction.h"
68#include "llvm/IR/Instructions.h"
69#include "llvm/IR/Intrinsics.h"
70#include "llvm/IR/LLVMContext.h"
71#include "llvm/IR/Metadata.h"
72#include "llvm/IR/Module.h"
73#include "llvm/IR/ModuleSummaryIndexYAML.h"
74#include "llvm/Pass.h"
75#include "llvm/PassRegistry.h"
76#include "llvm/PassSupport.h"
77#include "llvm/Support/Casting.h"
78#include "llvm/Support/Error.h"
79#include "llvm/Support/FileSystem.h"
80#include "llvm/Support/MathExtras.h"
81#include "llvm/Transforms/IPO.h"
82#include "llvm/Transforms/IPO/FunctionAttrs.h"
83#include "llvm/Transforms/Utils/Evaluator.h"
84#include <algorithm>
85#include <cstddef>
86#include <map>
87#include <set>
88#include <string>
89
90using namespace llvm;
91using namespace wholeprogramdevirt;
92
93#define DEBUG_TYPE "wholeprogramdevirt"
94
95static cl::opt<PassSummaryAction> ClSummaryAction(
96 "wholeprogramdevirt-summary-action",
97 cl::desc("What to do with the summary when running this pass"),
98 cl::values(clEnumValN(PassSummaryAction::None, "none", "Do nothing"),
99 clEnumValN(PassSummaryAction::Import, "import",
100 "Import typeid resolutions from summary and globals"),
101 clEnumValN(PassSummaryAction::Export, "export",
102 "Export typeid resolutions to summary and globals")),
103 cl::Hidden);
104
105static cl::opt<std::string> ClReadSummary(
106 "wholeprogramdevirt-read-summary",
107 cl::desc("Read summary from given YAML file before running pass"),
108 cl::Hidden);
109
110static cl::opt<std::string> ClWriteSummary(
111 "wholeprogramdevirt-write-summary",
112 cl::desc("Write summary to given YAML file after running pass"),
113 cl::Hidden);
114
115static cl::opt<unsigned>
116 ClThreshold("wholeprogramdevirt-branch-funnel-threshold", cl::Hidden,
117 cl::init(10), cl::ZeroOrMore,
118 cl::desc("Maximum number of call targets per "
119 "call site to enable branch funnels"));
120
121// Find the minimum offset that we may store a value of size Size bits at. If
122// IsAfter is set, look for an offset after the object, otherwise look for an
123// offset before the object.
124uint64_t
125wholeprogramdevirt::findLowestOffset(ArrayRef<VirtualCallTarget> Targets,
126 bool IsAfter, uint64_t Size) {
127 // Find a minimum offset taking into account only vtable sizes.
128 uint64_t MinByte = 0;
129 for (const VirtualCallTarget &Target : Targets) {
130 if (IsAfter)
131 MinByte = std::max(MinByte, Target.minAfterBytes());
132 else
133 MinByte = std::max(MinByte, Target.minBeforeBytes());
134 }
135
136 // Build a vector of arrays of bytes covering, for each target, a slice of the
137 // used region (see AccumBitVector::BytesUsed in
138 // llvm/Transforms/IPO/WholeProgramDevirt.h) starting at MinByte. Effectively,
139 // this aligns the used regions to start at MinByte.
140 //
141 // In this example, A, B and C are vtables, # is a byte already allocated for
142 // a virtual function pointer, AAAA... (etc.) are the used regions for the
143 // vtables and Offset(X) is the value computed for the Offset variable below
144 // for X.
145 //
146 // Offset(A)
147 // | |
148 // |MinByte
149 // A: ################AAAAAAAA|AAAAAAAA
150 // B: ########BBBBBBBBBBBBBBBB|BBBB
151 // C: ########################|CCCCCCCCCCCCCCCC
152 // | Offset(B) |
153 //
154 // This code produces the slices of A, B and C that appear after the divider
155 // at MinByte.
156 std::vector<ArrayRef<uint8_t>> Used;
157 for (const VirtualCallTarget &Target : Targets) {
158 ArrayRef<uint8_t> VTUsed = IsAfter ? Target.TM->Bits->After.BytesUsed
159 : Target.TM->Bits->Before.BytesUsed;
160 uint64_t Offset = IsAfter ? MinByte - Target.minAfterBytes()
161 : MinByte - Target.minBeforeBytes();
162
163 // Disregard used regions that are smaller than Offset. These are
164 // effectively all-free regions that do not need to be checked.
165 if (VTUsed.size() > Offset)
166 Used.push_back(VTUsed.slice(Offset));
167 }
168
169 if (Size == 1) {
170 // Find a free bit in each member of Used.
171 for (unsigned I = 0;; ++I) {
172 uint8_t BitsUsed = 0;
173 for (auto &&B : Used)
174 if (I < B.size())
175 BitsUsed |= B[I];
176 if (BitsUsed != 0xff)
177 return (MinByte + I) * 8 +
178 countTrailingZeros(uint8_t(~BitsUsed), ZB_Undefined);
179 }
180 } else {
181 // Find a free (Size/8) byte region in each member of Used.
182 // FIXME: see if alignment helps.
183 for (unsigned I = 0;; ++I) {
184 for (auto &&B : Used) {
185 unsigned Byte = 0;
186 while ((I + Byte) < B.size() && Byte < (Size / 8)) {
187 if (B[I + Byte])
188 goto NextI;
189 ++Byte;
190 }
191 }
192 return (MinByte + I) * 8;
193 NextI:;
194 }
195 }
196}
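Illustrative numbers for the Size == 1 case above: if at some byte I the used masks of all targets OR together to BitsUsed = 0xF7 (binary 11110111), then ~BitsUsed = 0x08, countTrailingZeros yields 3, and the function returns (MinByte + I) * 8 + 3, i.e. bit 3 of that byte is free in every vtable.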
197
198void wholeprogramdevirt::setBeforeReturnValues(
199 MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocBefore,
200 unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
201 if (BitWidth == 1)
202 OffsetByte = -(AllocBefore / 8 + 1);
203 else
204 OffsetByte = -((AllocBefore + 7) / 8 + (BitWidth + 7) / 8);
205 OffsetBit = AllocBefore % 8;
206
207 for (VirtualCallTarget &Target : Targets) {
208 if (BitWidth == 1)
209 Target.setBeforeBit(AllocBefore);
210 else
211 Target.setBeforeBytes(AllocBefore, (BitWidth + 7) / 8);
212 }
213}
214
215void wholeprogramdevirt::setAfterReturnValues(
216 MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocAfter,
217 unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
218 if (BitWidth == 1)
219 OffsetByte = AllocAfter / 8;
220 else
221 OffsetByte = (AllocAfter + 7) / 8;
222 OffsetBit = AllocAfter % 8;
223
224 for (VirtualCallTarget &Target : Targets) {
225 if (BitWidth == 1)
226 Target.setAfterBit(AllocAfter);
227 else
228 Target.setAfterBytes(AllocAfter, (BitWidth + 7) / 8);
229 }
230}
231
232VirtualCallTarget::VirtualCallTarget(Function *Fn, const TypeMemberInfo *TM)
233 : Fn(Fn), TM(TM),
234 IsBigEndian(Fn->getParent()->getDataLayout().isBigEndian()), WasDevirt(false) {}
235
236namespace {
237
238// A slot in a set of virtual tables. The TypeID identifies the set of virtual
239// tables, and the ByteOffset is the offset in bytes from the address point to
240// the virtual function pointer.
241struct VTableSlot {
242 Metadata *TypeID;
243 uint64_t ByteOffset;
244};
245
246} // end anonymous namespace
247
248namespace llvm {
249
250template <> struct DenseMapInfo<VTableSlot> {
251 static VTableSlot getEmptyKey() {
252 return {DenseMapInfo<Metadata *>::getEmptyKey(),
253 DenseMapInfo<uint64_t>::getEmptyKey()};
254 }
255 static VTableSlot getTombstoneKey() {
256 return {DenseMapInfo<Metadata *>::getTombstoneKey(),
257 DenseMapInfo<uint64_t>::getTombstoneKey()};
258 }
259 static unsigned getHashValue(const VTableSlot &I) {
260 return DenseMapInfo<Metadata *>::getHashValue(I.TypeID) ^
261 DenseMapInfo<uint64_t>::getHashValue(I.ByteOffset);
262 }
263 static bool isEqual(const VTableSlot &LHS,
264 const VTableSlot &RHS) {
265 return LHS.TypeID == RHS.TypeID && LHS.ByteOffset == RHS.ByteOffset;
266 }
267};
268
269} // end namespace llvm
270
271namespace {
272
273// A virtual call site. VTable is the loaded virtual table pointer, and CS is
274// the indirect virtual call.
275struct VirtualCallSite {
276 Value *VTable;
277 CallSite CS;
278
279 // If non-null, this field points to the associated unsafe use count stored in
280 // the DevirtModule::NumUnsafeUsesForTypeTest map below. See the description
281 // of that field for details.
282 unsigned *NumUnsafeUses;
283
284 void
285 emitRemark(const StringRef OptName, const StringRef TargetName,
286 function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter) {
287 Function *F = CS.getCaller();
288 DebugLoc DLoc = CS->getDebugLoc();
289 BasicBlock *Block = CS.getParent();
290
291 using namespace ore;
292 OREGetter(F).emit(OptimizationRemark(DEBUG_TYPE, OptName, DLoc, Block)
293 << NV("Optimization", OptName)
294 << ": devirtualized a call to "
295 << NV("FunctionName", TargetName));
296 }
297
298 void replaceAndErase(
299 const StringRef OptName, const StringRef TargetName, bool RemarksEnabled,
300 function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
301 Value *New) {
302 if (RemarksEnabled)
303 emitRemark(OptName, TargetName, OREGetter);
304 CS->replaceAllUsesWith(New);
305 if (auto II = dyn_cast<InvokeInst>(CS.getInstruction())) {
306 BranchInst::Create(II->getNormalDest(), CS.getInstruction());
307 II->getUnwindDest()->removePredecessor(II->getParent());
308 }
309 CS->eraseFromParent();
310 // This use is no longer unsafe.
311 if (NumUnsafeUses)
312 --*NumUnsafeUses;
313 }
314};
315
316// Call site information collected for a specific VTableSlot and possibly a list
317// of constant integer arguments. The grouping by arguments is handled by the
318// VTableSlotInfo class.
319struct CallSiteInfo {
320 /// The set of call sites for this slot. Used during regular LTO and the
321 /// import phase of ThinLTO (as well as the export phase of ThinLTO for any
322 /// call sites that appear in the merged module itself); in each of these
323 /// cases we are directly operating on the call sites at the IR level.
324 std::vector<VirtualCallSite> CallSites;
325
326 /// Whether all call sites represented by this CallSiteInfo, including those
327 /// in summaries, have been devirtualized. This starts off as true because a
328 /// default constructed CallSiteInfo represents no call sites.
329 bool AllCallSitesDevirted = true;
330
331 // These fields are used during the export phase of ThinLTO and reflect
332 // information collected from function summaries.
333
334 /// Whether any function summary contains an llvm.assume(llvm.type.test) for
335 /// this slot.
336 bool SummaryHasTypeTestAssumeUsers = false;
337
338 /// CFI-specific: a vector containing the list of function summaries that use
339 /// the llvm.type.checked.load intrinsic and therefore will require
340 /// resolutions for llvm.type.test in order to implement CFI checks if
341 /// devirtualization was unsuccessful. If devirtualization was successful, the
342 /// pass will clear this vector by calling markDevirt(). If at the end of the
343 /// pass the vector is non-empty, we will need to add a use of llvm.type.test
344 /// to each of the function summaries in the vector.
345 std::vector<FunctionSummary *> SummaryTypeCheckedLoadUsers;
346
347 bool isExported() const {
348 return SummaryHasTypeTestAssumeUsers ||
349 !SummaryTypeCheckedLoadUsers.empty();
350 }
351
352 void markSummaryHasTypeTestAssumeUsers() {
353 SummaryHasTypeTestAssumeUsers = true;
354 AllCallSitesDevirted = false;
355 }
356
357 void addSummaryTypeCheckedLoadUser(FunctionSummary *FS) {
358 SummaryTypeCheckedLoadUsers.push_back(FS);
359 AllCallSitesDevirted = false;
360 }
361
362 void markDevirt() {
363 AllCallSitesDevirted = true;
364
365 // As explained in the comment for SummaryTypeCheckedLoadUsers.
366 SummaryTypeCheckedLoadUsers.clear();
367 }
368};
369
370// Call site information collected for a specific VTableSlot.
371struct VTableSlotInfo {
372 // The set of call sites which do not have all constant integer arguments
373 // (excluding "this").
374 CallSiteInfo CSInfo;
375
376 // The set of call sites with all constant integer arguments (excluding
377 // "this"), grouped by argument list.
378 std::map<std::vector<uint64_t>, CallSiteInfo> ConstCSInfo;
379
380 void addCallSite(Value *VTable, CallSite CS, unsigned *NumUnsafeUses);
381
382private:
383 CallSiteInfo &findCallSiteInfo(CallSite CS);
384};
385
386CallSiteInfo &VTableSlotInfo::findCallSiteInfo(CallSite CS) {
387 std::vector<uint64_t> Args;
388 auto *CI = dyn_cast<IntegerType>(CS.getType());
389 if (!CI || CI->getBitWidth() > 64 || CS.arg_empty())
390 return CSInfo;
391 for (auto &&Arg : make_range(CS.arg_begin() + 1, CS.arg_end())) {
392 auto *CI = dyn_cast<ConstantInt>(Arg);
393 if (!CI || CI->getBitWidth() > 64)
394 return CSInfo;
395 Args.push_back(CI->getZExtValue());
396 }
397 return ConstCSInfo[Args];
398}
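For example (a hypothetical call site): a virtual call p->f(2, 3) returning i32 with both explicit arguments constant is filed under ConstCSInfo[{2, 3}] (the implicit 'this' argument is skipped), whereas p->f(x, 3) with a non-constant first argument, or a call returning a non-integer or wider-than-64-bit type, falls back to the ungrouped CSInfo set.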
399
400void VTableSlotInfo::addCallSite(Value *VTable, CallSite CS,
401 unsigned *NumUnsafeUses) {
402 auto &CSI = findCallSiteInfo(CS);
403 CSI.AllCallSitesDevirted = false;
404 CSI.CallSites.push_back({VTable, CS, NumUnsafeUses});
405}
406
407struct DevirtModule {
408 Module &M;
409 function_ref<AAResults &(Function &)> AARGetter;
410 function_ref<DominatorTree &(Function &)> LookupDomTree;
411
412 ModuleSummaryIndex *ExportSummary;
413 const ModuleSummaryIndex *ImportSummary;
414
415 IntegerType *Int8Ty;
416 PointerType *Int8PtrTy;
417 IntegerType *Int32Ty;
418 IntegerType *Int64Ty;
419 IntegerType *IntPtrTy;
420
421 bool RemarksEnabled;
422 function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter;
423
424 MapVector<VTableSlot, VTableSlotInfo> CallSlots;
425
426 // This map keeps track of the number of "unsafe" uses of a loaded function
427 // pointer. The key is the associated llvm.type.test intrinsic call generated
428 // by this pass. An unsafe use is one that calls the loaded function pointer
429 // directly. Every time we eliminate an unsafe use (for example, by
430 // devirtualizing it or by applying virtual constant propagation), we
431 // decrement the value stored in this map. If a value reaches zero, we can
432 // eliminate the type check by RAUWing the associated llvm.type.test call with
433 // true.
434 std::map<CallInst *, unsigned> NumUnsafeUsesForTypeTest;
435
436 DevirtModule(Module &M, function_ref<AAResults &(Function &)> AARGetter,
437 function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
438 function_ref<DominatorTree &(Function &)> LookupDomTree,
439 ModuleSummaryIndex *ExportSummary,
440 const ModuleSummaryIndex *ImportSummary)
441 : M(M), AARGetter(AARGetter), LookupDomTree(LookupDomTree),
442 ExportSummary(ExportSummary), ImportSummary(ImportSummary),
443 Int8Ty(Type::getInt8Ty(M.getContext())),
444 Int8PtrTy(Type::getInt8PtrTy(M.getContext())),
445 Int32Ty(Type::getInt32Ty(M.getContext())),
446 Int64Ty(Type::getInt64Ty(M.getContext())),
447 IntPtrTy(M.getDataLayout().getIntPtrType(M.getContext(), 0)),
448 RemarksEnabled(areRemarksEnabled()), OREGetter(OREGetter) {
449 assert(!(ExportSummary && ImportSummary));
450 }
451
452 bool areRemarksEnabled();
453
454 void scanTypeTestUsers(Function *TypeTestFunc, Function *AssumeFunc);
455 void scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc);
456
457 void buildTypeIdentifierMap(
458 std::vector<VTableBits> &Bits,
459 DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap);
460 Constant *getPointerAtOffset(Constant *I, uint64_t Offset);
461 bool
462 tryFindVirtualCallTargets(std::vector<VirtualCallTarget> &TargetsForSlot,
463 const std::set<TypeMemberInfo> &TypeMemberInfos,
464 uint64_t ByteOffset);
465
466 void applySingleImplDevirt(VTableSlotInfo &SlotInfo, Constant *TheFn,
467 bool &IsExported);
468 bool trySingleImplDevirt(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
469 VTableSlotInfo &SlotInfo,
470 WholeProgramDevirtResolution *Res);
471
472 void applyICallBranchFunnel(VTableSlotInfo &SlotInfo, Constant *JT,
473 bool &IsExported);
474 void tryICallBranchFunnel(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
475 VTableSlotInfo &SlotInfo,
476 WholeProgramDevirtResolution *Res, VTableSlot Slot);
477
478 bool tryEvaluateFunctionsWithArgs(
479 MutableArrayRef<VirtualCallTarget> TargetsForSlot,
480 ArrayRef<uint64_t> Args);
481
482 void applyUniformRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
483 uint64_t TheRetVal);
484 bool tryUniformRetValOpt(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
485 CallSiteInfo &CSInfo,
486 WholeProgramDevirtResolution::ByArg *Res);
487
488 // Returns the global symbol name that is used to export information about the
489 // given vtable slot and list of arguments.
490 std::string getGlobalName(VTableSlot Slot, ArrayRef<uint64_t> Args,
491 StringRef Name);
492
493 bool shouldExportConstantsAsAbsoluteSymbols();
494
495 // This function is called during the export phase to create a symbol
496 // definition containing information about the given vtable slot and list of
497 // arguments.
498 void exportGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args, StringRef Name,
499 Constant *C);
500 void exportConstant(VTableSlot Slot, ArrayRef<uint64_t> Args, StringRef Name,
501 uint32_t Const, uint32_t &Storage);
502
503 // This function is called during the import phase to create a reference to
504 // the symbol definition created during the export phase.
505 Constant *importGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args,
506 StringRef Name);
507 Constant *importConstant(VTableSlot Slot, ArrayRef<uint64_t> Args,
508 StringRef Name, IntegerType *IntTy,
509 uint32_t Storage);
510
511 Constant *getMemberAddr(const TypeMemberInfo *M);
512
513 void applyUniqueRetValOpt(CallSiteInfo &CSInfo, StringRef FnName, bool IsOne,
514 Constant *UniqueMemberAddr);
515 bool tryUniqueRetValOpt(unsigned BitWidth,
516 MutableArrayRef<VirtualCallTarget> TargetsForSlot,
517 CallSiteInfo &CSInfo,
518 WholeProgramDevirtResolution::ByArg *Res,
519 VTableSlot Slot, ArrayRef<uint64_t> Args);
520
521 void applyVirtualConstProp(CallSiteInfo &CSInfo, StringRef FnName,
522 Constant *Byte, Constant *Bit);
523 bool tryVirtualConstProp(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
524 VTableSlotInfo &SlotInfo,
525 WholeProgramDevirtResolution *Res, VTableSlot Slot);
526
527 void rebuildGlobal(VTableBits &B);
528
529 // Apply the summary resolution for Slot to all virtual calls in SlotInfo.
530 void importResolution(VTableSlot Slot, VTableSlotInfo &SlotInfo);
531
532 // If we were able to eliminate all unsafe uses for a type checked load,
533 // eliminate the associated type tests by replacing them with true.
534 void removeRedundantTypeTests();
535
536 bool run();
537
538 // Lower the module using the action and summary passed as command line
539 // arguments. For testing purposes only.
540 static bool
541 runForTesting(Module &M, function_ref<AAResults &(Function &)> AARGetter,
542 function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
543 function_ref<DominatorTree &(Function &)> LookupDomTree);
544};
545
546struct WholeProgramDevirt : public ModulePass {
547 static char ID;
548
549 bool UseCommandLine = false;
550
551 ModuleSummaryIndex *ExportSummary;
552 const ModuleSummaryIndex *ImportSummary;
553
554 WholeProgramDevirt() : ModulePass(ID), UseCommandLine(true) {
555 initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
556 }
557
558 WholeProgramDevirt(ModuleSummaryIndex *ExportSummary,
559 const ModuleSummaryIndex *ImportSummary)
560 : ModulePass(ID), ExportSummary(ExportSummary),
561 ImportSummary(ImportSummary) {
562 initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
563 }
564
565 bool runOnModule(Module &M) override {
566 if (skipModule(M))
567 return false;
568
569 // In the new pass manager, we can request the optimization
570 // remark emitter pass on a per-function-basis, which the
571 // OREGetter will do for us.
572 // In the old pass manager, this is harder, so we just build
573 // an optimization remark emitter on the fly, when we need it.
574 std::unique_ptr<OptimizationRemarkEmitter> ORE;
575 auto OREGetter = [&](Function *F) -> OptimizationRemarkEmitter & {
576 ORE = make_unique<OptimizationRemarkEmitter>(F);
577 return *ORE;
578 };
579
580 auto LookupDomTree = [this](Function &F) -> DominatorTree & {
581 return this->getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
582 };
583
584 if (UseCommandLine)
585 return DevirtModule::runForTesting(M, LegacyAARGetter(*this), OREGetter,
586 LookupDomTree);
587
588 return DevirtModule(M, LegacyAARGetter(*this), OREGetter, LookupDomTree,
589 ExportSummary, ImportSummary)
590 .run();
591 }
592
593 void getAnalysisUsage(AnalysisUsage &AU) const override {
594 AU.addRequired<AssumptionCacheTracker>();
595 AU.addRequired<TargetLibraryInfoWrapperPass>();
596 AU.addRequired<DominatorTreeWrapperPass>();
597 }
598};
599
600} // end anonymous namespace
601
602INITIALIZE_PASS_BEGIN(WholeProgramDevirt, "wholeprogramdevirt",
603 "Whole program devirtualization", false, false)
604INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
605INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
606INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
607INITIALIZE_PASS_END(WholeProgramDevirt, "wholeprogramdevirt",
608 "Whole program devirtualization", false, false)
609char WholeProgramDevirt::ID = 0;
610
611ModulePass *
612llvm::createWholeProgramDevirtPass(ModuleSummaryIndex *ExportSummary,
613 const ModuleSummaryIndex *ImportSummary) {
614 return new WholeProgramDevirt(ExportSummary, ImportSummary);
615}
616
617PreservedAnalyses WholeProgramDevirtPass::run(Module &M,
618 ModuleAnalysisManager &AM) {
619 auto &FAM = AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
620 auto AARGetter = [&](Function &F) -> AAResults & {
621 return FAM.getResult<AAManager>(F);
622 };
623 auto OREGetter = [&](Function *F) -> OptimizationRemarkEmitter & {
624 return FAM.getResult<OptimizationRemarkEmitterAnalysis>(*F);
625 };
626 auto LookupDomTree = [&FAM](Function &F) -> DominatorTree & {
627 return FAM.getResult<DominatorTreeAnalysis>(F);
628 };
629 if (!DevirtModule(M, AARGetter, OREGetter, LookupDomTree, ExportSummary,
630 ImportSummary)
631 .run())
632 return PreservedAnalyses::all();
633 return PreservedAnalyses::none();
634}
635
636bool DevirtModule::runForTesting(
637 Module &M, function_ref<AAResults &(Function &)> AARGetter,
638 function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
639 function_ref<DominatorTree &(Function &)> LookupDomTree) {
640 ModuleSummaryIndex Summary(/*HaveGVs=*/false);
641
642 // Handle the command-line summary arguments. This code is for testing
643 // purposes only, so we handle errors directly.
644 if (!ClReadSummary.empty()) {
645 ExitOnError ExitOnErr("-wholeprogramdevirt-read-summary: " + ClReadSummary +
646 ": ");
647 auto ReadSummaryFile =
648 ExitOnErr(errorOrToExpected(MemoryBuffer::getFile(ClReadSummary)));
649
650 yaml::Input In(ReadSummaryFile->getBuffer());
651 In >> Summary;
652 ExitOnErr(errorCodeToError(In.error()));
653 }
654
655 bool Changed =
656 DevirtModule(
657 M, AARGetter, OREGetter, LookupDomTree,
658 ClSummaryAction == PassSummaryAction::Export ? &Summary : nullptr,
659 ClSummaryAction == PassSummaryAction::Import ? &Summary : nullptr)
660 .run();
661
662 if (!ClWriteSummary.empty()) {
663 ExitOnError ExitOnErr(
664 "-wholeprogramdevirt-write-summary: " + ClWriteSummary + ": ");
665 std::error_code EC;
666 raw_fd_ostream OS(ClWriteSummary, EC, sys::fs::F_Text);
667 ExitOnErr(errorCodeToError(EC));
668
669 yaml::Output Out(OS);
670 Out << Summary;
671 }
672
673 return Changed;
674}
675
676void DevirtModule::buildTypeIdentifierMap(
677 std::vector<VTableBits> &Bits,
678 DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap) {
679 DenseMap<GlobalVariable *, VTableBits *> GVToBits;
680 Bits.reserve(M.getGlobalList().size());
681 SmallVector<MDNode *, 2> Types;
682 for (GlobalVariable &GV : M.globals()) {
683 Types.clear();
684 GV.getMetadata(LLVMContext::MD_type, Types);
685 if (GV.isDeclaration() || Types.empty())
686 continue;
687
688 VTableBits *&BitsPtr = GVToBits[&GV];
689 if (!BitsPtr) {
690 Bits.emplace_back();
691 Bits.back().GV = &GV;
692 Bits.back().ObjectSize =
693 M.getDataLayout().getTypeAllocSize(GV.getInitializer()->getType());
694 BitsPtr = &Bits.back();
695 }
696
697 for (MDNode *Type : Types) {
698 auto TypeID = Type->getOperand(1).get();
699
700 uint64_t Offset =
701 cast<ConstantInt>(
702 cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
703 ->getZExtValue();
704
705 TypeIdMap[TypeID].insert({BitsPtr, Offset});
706 }
707 }
708}
709
710Constant *DevirtModule::getPointerAtOffset(Constant *I, uint64_t Offset) {
711 if (I->getType()->isPointerTy()) {
712 if (Offset == 0)
713 return I;
714 return nullptr;
715 }
716
717 const DataLayout &DL = M.getDataLayout();
718
719 if (auto *C = dyn_cast<ConstantStruct>(I)) {
720 const StructLayout *SL = DL.getStructLayout(C->getType());
721 if (Offset >= SL->getSizeInBytes())
722 return nullptr;
723
724 unsigned Op = SL->getElementContainingOffset(Offset);
725 return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
726 Offset - SL->getElementOffset(Op));
727 }
728 if (auto *C = dyn_cast<ConstantArray>(I)) {
729 ArrayType *VTableTy = C->getType();
730 uint64_t ElemSize = DL.getTypeAllocSize(VTableTy->getElementType());
731
732 unsigned Op = Offset / ElemSize;
733 if (Op >= C->getNumOperands())
734 return nullptr;
735
736 return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
737 Offset % ElemSize);
738 }
739 return nullptr;
740}
741
742bool DevirtModule::tryFindVirtualCallTargets(
743 std::vector<VirtualCallTarget> &TargetsForSlot,
744 const std::set<TypeMemberInfo> &TypeMemberInfos, uint64_t ByteOffset) {
745 for (const TypeMemberInfo &TM : TypeMemberInfos) {
746 if (!TM.Bits->GV->isConstant())
747 return false;
748
749 Constant *Ptr = getPointerAtOffset(TM.Bits->GV->getInitializer(),
750 TM.Offset + ByteOffset);
751 if (!Ptr)
752 return false;
753
754 auto Fn = dyn_cast<Function>(Ptr->stripPointerCasts());
755 if (!Fn)
756 return false;
757
758 // We can disregard __cxa_pure_virtual as a possible call target, as
759 // calls to pure virtuals are UB.
760 if (Fn->getName() == "__cxa_pure_virtual")
761 continue;
762
763 TargetsForSlot.push_back({Fn, &TM});
764 }
765
766 // Give up if we couldn't find any targets.
767 return !TargetsForSlot.empty();
768}
769
770void DevirtModule::applySingleImplDevirt(VTableSlotInfo &SlotInfo,
771 Constant *TheFn, bool &IsExported) {
772 auto Apply = [&](CallSiteInfo &CSInfo) {
773 for (auto &&VCallSite : CSInfo.CallSites) {
774 if (RemarksEnabled)
775 VCallSite.emitRemark("single-impl",
776 TheFn->stripPointerCasts()->getName(), OREGetter);
777 VCallSite.CS.setCalledFunction(ConstantExpr::getBitCast(
778 TheFn, VCallSite.CS.getCalledValue()->getType()));
779 // This use is no longer unsafe.
780 if (VCallSite.NumUnsafeUses)
781 --*VCallSite.NumUnsafeUses;
782 }
783 if (CSInfo.isExported())
784 IsExported = true;
785 CSInfo.markDevirt();
786 };
787 Apply(SlotInfo.CSInfo);
788 for (auto &P : SlotInfo.ConstCSInfo)
789 Apply(P.second);
790}
791
792bool DevirtModule::trySingleImplDevirt(
793 MutableArrayRef<VirtualCallTarget> TargetsForSlot,
794 VTableSlotInfo &SlotInfo, WholeProgramDevirtResolution *Res) {
795 // See if the program contains a single implementation of this virtual
796 // function.
797 Function *TheFn = TargetsForSlot[0].Fn;
798 for (auto &&Target : TargetsForSlot)
799 if (TheFn != Target.Fn)
800 return false;
801
802 // If so, update each call site to call that implementation directly.
803 if (RemarksEnabled)
804 TargetsForSlot[0].WasDevirt = true;
805
806 bool IsExported = false;
807 applySingleImplDevirt(SlotInfo, TheFn, IsExported);
808 if (!IsExported)
809 return false;
810
811 // If the only implementation has local linkage, we must promote to external
812 // to make it visible to thin LTO objects. We can only get here during the
813 // ThinLTO export phase.
814 if (TheFn->hasLocalLinkage()) {
815 std::string NewName = (TheFn->getName() + "$merged").str();
816
817 // Since we are renaming the function, any comdats with the same name must
818 // also be renamed. This is required when targeting COFF, as the comdat name
819 // must match one of the names of the symbols in the comdat.
820 if (Comdat *C = TheFn->getComdat()) {
821 if (C->getName() == TheFn->getName()) {
822 Comdat *NewC = M.getOrInsertComdat(NewName);
823 NewC->setSelectionKind(C->getSelectionKind());
824 for (GlobalObject &GO : M.global_objects())
825 if (GO.getComdat() == C)
826 GO.setComdat(NewC);
827 }
828 }
829
830 TheFn->setLinkage(GlobalValue::ExternalLinkage);
831 TheFn->setVisibility(GlobalValue::HiddenVisibility);
832 TheFn->setName(NewName);
833 }
834
835 Res->TheKind = WholeProgramDevirtResolution::SingleImpl;
836 Res->SingleImplName = TheFn->getName();
837
838 return true;
839}
840
841void DevirtModule::tryICallBranchFunnel(
842 MutableArrayRef<VirtualCallTarget> TargetsForSlot, VTableSlotInfo &SlotInfo,
843 WholeProgramDevirtResolution *Res, VTableSlot Slot) {
844 Triple T(M.getTargetTriple());
845 if (T.getArch() != Triple::x86_64)
846 return;
847
848 if (TargetsForSlot.size() > ClThreshold)
849 return;
850
851 bool HasNonDevirt = !SlotInfo.CSInfo.AllCallSitesDevirted;
852 if (!HasNonDevirt)
853 for (auto &P : SlotInfo.ConstCSInfo)
854 if (!P.second.AllCallSitesDevirted) {
855 HasNonDevirt = true;
856 break;
857 }
858
859 if (!HasNonDevirt)
860 return;
861
862 FunctionType *FT =
863 FunctionType::get(Type::getVoidTy(M.getContext()), {Int8PtrTy}, true);
864 Function *JT;
865 if (isa<MDString>(Slot.TypeID)) {
866 JT = Function::Create(FT, Function::ExternalLinkage,
867 getGlobalName(Slot, {}, "branch_funnel"), &M);
868 JT->setVisibility(GlobalValue::HiddenVisibility);
869 } else {
870 JT = Function::Create(FT, Function::InternalLinkage, "branch_funnel", &M);
871 }
872 JT->addAttribute(1, Attribute::Nest);
873
874 std::vector<Value *> JTArgs;
875 JTArgs.push_back(JT->arg_begin());
876 for (auto &T : TargetsForSlot) {
877 JTArgs.push_back(getMemberAddr(T.TM));
878 JTArgs.push_back(T.Fn);
879 }
880
881 BasicBlock *BB = BasicBlock::Create(M.getContext(), "", JT, nullptr);
882 Constant *Intr =
883 Intrinsic::getDeclaration(&M, llvm::Intrinsic::icall_branch_funnel, {});
884
885 auto *CI = CallInst::Create(Intr, JTArgs, "", BB);
886 CI->setTailCallKind(CallInst::TCK_MustTail);
887 ReturnInst::Create(M.getContext(), nullptr, BB);
888
889 bool IsExported = false;
890 applyICallBranchFunnel(SlotInfo, JT, IsExported);
891 if (IsExported)
892 Res->TheKind = WholeProgramDevirtResolution::BranchFunnel;
893}
894
895void DevirtModule::applyICallBranchFunnel(VTableSlotInfo &SlotInfo,
896 Constant *JT, bool &IsExported) {
897 auto Apply = [&](CallSiteInfo &CSInfo) {
898 if (CSInfo.isExported())
899 IsExported = true;
900 if (CSInfo.AllCallSitesDevirted)
901 return;
902 for (auto &&VCallSite : CSInfo.CallSites) {
903 CallSite CS = VCallSite.CS;
904
905 // Jump tables are only profitable if the retpoline mitigation is enabled.
906 Attribute FSAttr = CS.getCaller()->getFnAttribute("target-features");
907 if (FSAttr.hasAttribute(Attribute::None) ||
908 !FSAttr.getValueAsString().contains("+retpoline"))
909 continue;
910
911 if (RemarksEnabled)
912 VCallSite.emitRemark("branch-funnel",
913 JT->stripPointerCasts()->getName(), OREGetter);
914
915 // Pass the address of the vtable in the nest register, which is r10 on
916 // x86_64.
917 std::vector<Type *> NewArgs;
918 NewArgs.push_back(Int8PtrTy);
919 for (Type *T : CS.getFunctionType()->params())
920 NewArgs.push_back(T);
921 PointerType *NewFT = PointerType::getUnqual(
922 FunctionType::get(CS.getFunctionType()->getReturnType(), NewArgs,
923 CS.getFunctionType()->isVarArg()));
924
925 IRBuilder<> IRB(CS.getInstruction());
926 std::vector<Value *> Args;
927 Args.push_back(IRB.CreateBitCast(VCallSite.VTable, Int8PtrTy));
928 for (unsigned I = 0; I != CS.getNumArgOperands(); ++I)
929 Args.push_back(CS.getArgOperand(I));
930
931 CallSite NewCS;
932 if (CS.isCall())
933 NewCS = IRB.CreateCall(IRB.CreateBitCast(JT, NewFT), Args);
934 else
935 NewCS = IRB.CreateInvoke(
936 IRB.CreateBitCast(JT, NewFT),
937 cast<InvokeInst>(CS.getInstruction())->getNormalDest(),
938 cast<InvokeInst>(CS.getInstruction())->getUnwindDest(), Args);
939 NewCS.setCallingConv(CS.getCallingConv());
940
941 AttributeList Attrs = CS.getAttributes();
942 std::vector<AttributeSet> NewArgAttrs;
943 NewArgAttrs.push_back(AttributeSet::get(
944 M.getContext(), ArrayRef<Attribute>{Attribute::get(
945 M.getContext(), Attribute::Nest)}));
946 for (unsigned I = 0; I + 2 < Attrs.getNumAttrSets(); ++I)
947 NewArgAttrs.push_back(Attrs.getParamAttributes(I));
948 NewCS.setAttributes(
949 AttributeList::get(M.getContext(), Attrs.getFnAttributes(),
950 Attrs.getRetAttributes(), NewArgAttrs));
951
952 CS->replaceAllUsesWith(NewCS.getInstruction());
953 CS->eraseFromParent();
954
955 // This use is no longer unsafe.
956 if (VCallSite.NumUnsafeUses)
957 --*VCallSite.NumUnsafeUses;
958 }
959 // Don't mark as devirtualized because there may be callers compiled without
960 // retpoline mitigation, which would mean that they are lowered to
961 // llvm.type.test and therefore require an llvm.type.test resolution for the
962 // type identifier.
963 };
964 Apply(SlotInfo.CSInfo);
965 for (auto &P : SlotInfo.ConstCSInfo)
966 Apply(P.second);
967}
968
969bool DevirtModule::tryEvaluateFunctionsWithArgs(
970 MutableArrayRef<VirtualCallTarget> TargetsForSlot,
971 ArrayRef<uint64_t> Args) {
972 // Evaluate each function and store the result in each target's RetVal
973 // field.
974 for (VirtualCallTarget &Target : TargetsForSlot) {
975 if (Target.Fn->arg_size() != Args.size() + 1)
976 return false;
977
978 Evaluator Eval(M.getDataLayout(), nullptr);
979 SmallVector<Constant *, 2> EvalArgs;
980 EvalArgs.push_back(
981 Constant::getNullValue(Target.Fn->getFunctionType()->getParamType(0)));
982 for (unsigned I = 0; I != Args.size(); ++I) {
983 auto *ArgTy = dyn_cast<IntegerType>(
984 Target.Fn->getFunctionType()->getParamType(I + 1));
985 if (!ArgTy)
986 return false;
987 EvalArgs.push_back(ConstantInt::get(ArgTy, Args[I]));
988 }
989
990 Constant *RetVal;
991 if (!Eval.EvaluateFunction(Target.Fn, RetVal, EvalArgs) ||
992 !isa<ConstantInt>(RetVal))
993 return false;
994 Target.RetVal = cast<ConstantInt>(RetVal)->getZExtValue();
995 }
996 return true;
997}
998
999void DevirtModule::applyUniformRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
1000 uint64_t TheRetVal) {
1001 for (auto Call : CSInfo.CallSites)
1002 Call.replaceAndErase(
1003 "uniform-ret-val", FnName, RemarksEnabled, OREGetter,
1004 ConstantInt::get(cast<IntegerType>(Call.CS.getType()), TheRetVal));
1005 CSInfo.markDevirt();
1006}
1007
1008bool DevirtModule::tryUniformRetValOpt(
1009 MutableArrayRef<VirtualCallTarget> TargetsForSlot, CallSiteInfo &CSInfo,
1010 WholeProgramDevirtResolution::ByArg *Res) {
1011 // Uniform return value optimization. If all functions return the same
1012 // constant, replace all calls with that constant.
1013 uint64_t TheRetVal = TargetsForSlot[0].RetVal;
1014 for (const VirtualCallTarget &Target : TargetsForSlot)
1015 if (Target.RetVal != TheRetVal)
1016 return false;
1017
1018 if (CSInfo.isExported()) {
1019 Res->TheKind = WholeProgramDevirtResolution::ByArg::UniformRetVal;
1020 Res->Info = TheRetVal;
1021 }
1022
1023 applyUniformRetValOpt(CSInfo, TargetsForSlot[0].Fn->getName(), TheRetVal);
1024 if (RemarksEnabled)
1025 for (auto &&Target : TargetsForSlot)
1026 Target.WasDevirt = true;
1027 return true;
1028}
1029
1030std::string DevirtModule::getGlobalName(VTableSlot Slot,
1031 ArrayRef<uint64_t> Args,
1032 StringRef Name) {
1033 std::string FullName = "__typeid_";
1034 raw_string_ostream OS(FullName);
1035 OS << cast<MDString>(Slot.TypeID)->getString() << '_' << Slot.ByteOffset;
1036 for (uint64_t Arg : Args)
1037 OS << '_' << Arg;
1038 OS << '_' << Name;
1039 return OS.str();
1040}
1041
1042bool DevirtModule::shouldExportConstantsAsAbsoluteSymbols() {
1043 Triple T(M.getTargetTriple());
1044 return (T.getArch() == Triple::x86 || T.getArch() == Triple::x86_64) &&
1045 T.getObjectFormat() == Triple::ELF;
1046}
1047
1048void DevirtModule::exportGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args,
1049 StringRef Name, Constant *C) {
1050 GlobalAlias *GA = GlobalAlias::create(Int8Ty, 0, GlobalValue::ExternalLinkage,
1051 getGlobalName(Slot, Args, Name), C, &M);
1052 GA->setVisibility(GlobalValue::HiddenVisibility);
1053}
1054
1055void DevirtModule::exportConstant(VTableSlot Slot, ArrayRef<uint64_t> Args,
1056 StringRef Name, uint32_t Const,
1057 uint32_t &Storage) {
1058 if (shouldExportConstantsAsAbsoluteSymbols()) {
1059 exportGlobal(
1060 Slot, Args, Name,
1061 ConstantExpr::getIntToPtr(ConstantInt::get(Int32Ty, Const), Int8PtrTy));
1062 return;
1063 }
1064
1065 Storage = Const;
1066}
1067
1068Constant *DevirtModule::importGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args,
1069 StringRef Name) {
1070 Constant *C = M.getOrInsertGlobal(getGlobalName(Slot, Args, Name), Int8Ty);
1071 auto *GV = dyn_cast<GlobalVariable>(C);
1072 if (GV)
1073 GV->setVisibility(GlobalValue::HiddenVisibility);
1074 return C;
1075}
1076
1077Constant *DevirtModule::importConstant(VTableSlot Slot, ArrayRef<uint64_t> Args,
1078 StringRef Name, IntegerType *IntTy,
1079 uint32_t Storage) {
1080 if (!shouldExportConstantsAsAbsoluteSymbols())
1081 return ConstantInt::get(IntTy, Storage);
1082
1083 Constant *C = importGlobal(Slot, Args, Name);
1084 auto *GV = cast<GlobalVariable>(C->stripPointerCasts());
1085 C = ConstantExpr::getPtrToInt(C, IntTy);
1086
1087 // We only need to set metadata if the global is newly created, in which
1088 // case it would not have hidden visibility.
1089 if (GV->hasMetadata(LLVMContext::MD_absolute_symbol))
1090 return C;
1091
1092 auto SetAbsRange = [&](uint64_t Min, uint64_t Max) {
1093 auto *MinC = ConstantAsMetadata::get(ConstantInt::get(IntPtrTy, Min));
1094 auto *MaxC = ConstantAsMetadata::get(ConstantInt::get(IntPtrTy, Max));
1095 GV->setMetadata(LLVMContext::MD_absolute_symbol,
1096 MDNode::get(M.getContext(), {MinC, MaxC}));
1097 };
1098 unsigned AbsWidth = IntTy->getBitWidth();
1099 if (AbsWidth == IntPtrTy->getBitWidth())
1100 SetAbsRange(~0ull, ~0ull); // Full set.
1101 else
1102 SetAbsRange(0, 1ull << AbsWidth);
1103 return C;
1104}
1105
1106void DevirtModule::applyUniqueRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
1107 bool IsOne,
1108 Constant *UniqueMemberAddr) {
1109 for (auto &&Call : CSInfo.CallSites) {
1110 IRBuilder<> B(Call.CS.getInstruction());
1111 Value *Cmp =
1112 B.CreateICmp(IsOne ? ICmpInst::ICMP_EQ : ICmpInst::ICMP_NE,
1113 B.CreateBitCast(Call.VTable, Int8PtrTy), UniqueMemberAddr);
1114 Cmp = B.CreateZExt(Cmp, Call.CS->getType());
1115 Call.replaceAndErase("unique-ret-val", FnName, RemarksEnabled, OREGetter,
1116 Cmp);
1117 }
1118 CSInfo.markDevirt();
1119}
1120
1121Constant *DevirtModule::getMemberAddr(const TypeMemberInfo *M) {
1122 Constant *C = ConstantExpr::getBitCast(M->Bits->GV, Int8PtrTy);
1123 return ConstantExpr::getGetElementPtr(Int8Ty, C,
1124 ConstantInt::get(Int64Ty, M->Offset));
1125}
1126
1127bool DevirtModule::tryUniqueRetValOpt(
1128 unsigned BitWidth, MutableArrayRef<VirtualCallTarget> TargetsForSlot,
1129 CallSiteInfo &CSInfo, WholeProgramDevirtResolution::ByArg *Res,
1130 VTableSlot Slot, ArrayRef<uint64_t> Args) {
1131 // IsOne controls whether we look for a 0 or a 1.
1132 auto tryUniqueRetValOptFor = [&](bool IsOne) {
1133 const TypeMemberInfo *UniqueMember = nullptr;
1134 for (const VirtualCallTarget &Target : TargetsForSlot) {
27. Assuming '__begin1' is not equal to '__end1'
1135 if (Target.RetVal == (IsOne ? 1 : 0)) {
28. '?' condition is true
29. Assuming the condition is true
30. Taking true branch
1136 if (UniqueMember)
31. Taking false branch
1137 return false;
1138 UniqueMember = Target.TM;
1139 }
1140 }
1141
1142 // We should have found a unique member or bailed out by now. We already
1143 // checked for a uniform return value in tryUniformRetValOpt.
1144 assert(UniqueMember);
1145
1146 Constant *UniqueMemberAddr = getMemberAddr(UniqueMember);
1147 if (CSInfo.isExported()) {
32. Taking true branch
1148 Res->TheKind = WholeProgramDevirtResolution::ByArg::UniqueRetVal;
33. Access to field 'TheKind' results in a dereference of a null pointer (loaded from variable 'Res')
1149 Res->Info = IsOne;
1150
1151 exportGlobal(Slot, Args, "unique_member", UniqueMemberAddr);
1152 }
1153
1154 // Replace each call with the comparison.
1155 applyUniqueRetValOpt(CSInfo, TargetsForSlot[0].Fn->getName(), IsOne,
1156 UniqueMemberAddr);
1157
1158 // Update devirtualization statistics for targets.
1159 if (RemarksEnabled)
1160 for (auto &&Target : TargetsForSlot)
1161 Target.WasDevirt = true;
1162
1163 return true;
1164 };
1165
1166 if (BitWidth == 1) {
24. Assuming 'BitWidth' is equal to 1
25. Taking true branch
1167 if (tryUniqueRetValOptFor(true))
26. Calling 'operator()'
1168 return true;
1169 if (tryUniqueRetValOptFor(false))
1170 return true;
1171 }
1172 return false;
1173}
1174
1175void DevirtModule::applyVirtualConstProp(CallSiteInfo &CSInfo, StringRef FnName,
1176 Constant *Byte, Constant *Bit) {
1177 for (auto Call : CSInfo.CallSites) {
1178 auto *RetType = cast<IntegerType>(Call.CS.getType());
1179 IRBuilder<> B(Call.CS.getInstruction());
1180 Value *Addr =
1181 B.CreateGEP(Int8Ty, B.CreateBitCast(Call.VTable, Int8PtrTy), Byte);
1182 if (RetType->getBitWidth() == 1) {
1183 Value *Bits = B.CreateLoad(Addr);
1184 Value *BitsAndBit = B.CreateAnd(Bits, Bit);
1185 auto IsBitSet = B.CreateICmpNE(BitsAndBit, ConstantInt::get(Int8Ty, 0));
1186 Call.replaceAndErase("virtual-const-prop-1-bit", FnName, RemarksEnabled,
1187 OREGetter, IsBitSet);
1188 } else {
1189 Value *ValAddr = B.CreateBitCast(Addr, RetType->getPointerTo());
1190 Value *Val = B.CreateLoad(RetType, ValAddr);
1191 Call.replaceAndErase("virtual-const-prop", FnName, RemarksEnabled,
1192 OREGetter, Val);
1193 }
1194 }
1195 CSInfo.markDevirt();
1196}
1197
1198bool DevirtModule::tryVirtualConstProp(
1199 MutableArrayRef<VirtualCallTarget> TargetsForSlot, VTableSlotInfo &SlotInfo,
1200 WholeProgramDevirtResolution *Res, VTableSlot Slot) {
1201 // This only works if the function returns an integer.
1202 auto RetType = dyn_cast<IntegerType>(TargetsForSlot[0].Fn->getReturnType());
1203 if (!RetType)
14. Taking false branch
1204 return false;
1205 unsigned BitWidth = RetType->getBitWidth();
1206 if (BitWidth > 64)
15. Assuming 'BitWidth' is <= 64
16. Taking false branch
1207 return false;
1208
1209 // Make sure that each function is defined, does not access memory, takes at
1210 // least one argument, does not use its first argument (which we assume is
1211 // 'this'), and has the same return type.
1212 //
1213 // Note that we test whether this copy of the function is readnone, rather
1214 // than testing function attributes, which must hold for any copy of the
1215 // function, even a less optimized version substituted at link time. This is
1216 // sound because the virtual constant propagation optimizations effectively
1217 // inline all implementations of the virtual function into each call site,
1218 // rather than using function attributes to perform local optimization.
1219 for (VirtualCallTarget &Target : TargetsForSlot) {
17. Assuming '__begin1' is equal to '__end1'
1220 if (Target.Fn->isDeclaration() ||
1221 computeFunctionBodyMemoryAccess(*Target.Fn, AARGetter(*Target.Fn)) !=
1222 MAK_ReadNone ||
1223 Target.Fn->arg_empty() || !Target.Fn->arg_begin()->use_empty() ||
1224 Target.Fn->getReturnType() != RetType)
1225 return false;
1226 }
1227
1228 for (auto &&CSByConstantArg : SlotInfo.ConstCSInfo) {
1229 if (!tryEvaluateFunctionsWithArgs(TargetsForSlot, CSByConstantArg.first))
18. Taking false branch
1230 continue;
1231
1232 WholeProgramDevirtResolution::ByArg *ResByArg = nullptr;
19. 'ResByArg' initialized to a null pointer value
1233 if (Res)
20. Taking false branch
1234 ResByArg = &Res->ResByArg[CSByConstantArg.first];
1235
1236 if (tryUniformRetValOpt(TargetsForSlot, CSByConstantArg.second, ResByArg))
21. Taking false branch
1237 continue;
1238
1239 if (tryUniqueRetValOpt(BitWidth, TargetsForSlot, CSByConstantArg.second,
23. Calling 'DevirtModule::tryUniqueRetValOpt'
1240 ResByArg, Slot, CSByConstantArg.first))
22. Passing null pointer value via 4th parameter 'Res'
1241 continue;
1242
1243 // Find an allocation offset in bits in all vtables associated with the
1244 // type.
1245 uint64_t AllocBefore =
1246 findLowestOffset(TargetsForSlot, /*IsAfter=*/false, BitWidth);
1247 uint64_t AllocAfter =
1248 findLowestOffset(TargetsForSlot, /*IsAfter=*/true, BitWidth);
1249
1250 // Calculate the total amount of padding needed to store a value at both
1251 // ends of the object.
1252 uint64_t TotalPaddingBefore = 0, TotalPaddingAfter = 0;
1253 for (auto &&Target : TargetsForSlot) {
1254 TotalPaddingBefore += std::max<int64_t>(
1255 (AllocBefore + 7) / 8 - Target.allocatedBeforeBytes() - 1, 0);
1256 TotalPaddingAfter += std::max<int64_t>(
1257 (AllocAfter + 7) / 8 - Target.allocatedAfterBytes() - 1, 0);
1258 }
1259
1260 // If the amount of padding is too large, give up.
1261 // FIXME: do something smarter here.
1262 if (std::min(TotalPaddingBefore, TotalPaddingAfter) > 128)
1263 continue;
1264
1265 // Calculate the offset to the value as a (possibly negative) byte offset
1266 // and (if applicable) a bit offset, and store the values in the targets.
1267 int64_t OffsetByte;
1268 uint64_t OffsetBit;
1269 if (TotalPaddingBefore <= TotalPaddingAfter)
1270 setBeforeReturnValues(TargetsForSlot, AllocBefore, BitWidth, OffsetByte,
1271 OffsetBit);
1272 else
1273 setAfterReturnValues(TargetsForSlot, AllocAfter, BitWidth, OffsetByte,
1274 OffsetBit);
1275
1276 if (RemarksEnabled)
1277 for (auto &&Target : TargetsForSlot)
1278 Target.WasDevirt = true;
1279
1280
1281 if (CSByConstantArg.second.isExported()) {
1282 ResByArg->TheKind = WholeProgramDevirtResolution::ByArg::VirtualConstProp;
1283 exportConstant(Slot, CSByConstantArg.first, "byte", OffsetByte,
1284 ResByArg->Byte);
1285 exportConstant(Slot, CSByConstantArg.first, "bit", 1ULL << OffsetBit,
1286 ResByArg->Bit);
1287 }
1288
1289 // Rewrite each call to a load from OffsetByte/OffsetBit.
1290 Constant *ByteConst = ConstantInt::get(Int32Ty, OffsetByte);
1291 Constant *BitConst = ConstantInt::get(Int8Ty, 1ULL << OffsetBit);
1292 applyVirtualConstProp(CSByConstantArg.second,
1293 TargetsForSlot[0].Fn->getName(), ByteConst, BitConst);
1294 }
1295 return true;
1296}
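One way to address the warning (an editorial sketch, not necessarily the fix adopted upstream) is to state the invariant explicitly where the resolution is written, so the null path is either impossible or visibly checked, e.g. in tryUniqueRetValOpt:

  if (CSInfo.isExported()) {
    assert(Res && "exported call sites require a resolution");
    Res->TheKind = WholeProgramDevirtResolution::ByArg::UniqueRetVal;
    Res->Info = IsOne;
    exportGlobal(Slot, Args, "unique_member", UniqueMemberAddr);
  }

The same pattern recurs in tryUniformRetValOpt (line 1019) and at line 1282 above, where ResByArg is dereferenced under CSByConstantArg.second.isExported() after being initialized to null at line 1232.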
1297
1298void DevirtModule::rebuildGlobal(VTableBits &B) {
1299 if (B.Before.Bytes.empty() && B.After.Bytes.empty())
1300 return;
1301
1302 // Align each byte array to pointer width.
1303 unsigned PointerSize = M.getDataLayout().getPointerSize();
1304 B.Before.Bytes.resize(alignTo(B.Before.Bytes.size(), PointerSize));
1305 B.After.Bytes.resize(alignTo(B.After.Bytes.size(), PointerSize));
1306
1307 // Before was stored in reverse order; flip it now.
1308 for (size_t I = 0, Size = B.Before.Bytes.size(); I != Size / 2; ++I)
1309 std::swap(B.Before.Bytes[I], B.Before.Bytes[Size - 1 - I]);
1310
1311 // Build an anonymous global containing the before bytes, followed by the
1312 // original initializer, followed by the after bytes.
1313 auto NewInit = ConstantStruct::getAnon(
1314 {ConstantDataArray::get(M.getContext(), B.Before.Bytes),
1315 B.GV->getInitializer(),
1316 ConstantDataArray::get(M.getContext(), B.After.Bytes)});
1317 auto NewGV =
1318 new GlobalVariable(M, NewInit->getType(), B.GV->isConstant(),
1319 GlobalVariable::PrivateLinkage, NewInit, "", B.GV);
1320 NewGV->setSection(B.GV->getSection());
1321 NewGV->setComdat(B.GV->getComdat());
1322
1323 // Copy the original vtable's metadata to the anonymous global, adjusting
1324 // offsets as required.
1325 NewGV->copyMetadata(B.GV, B.Before.Bytes.size());
1326
1327 // Build an alias named after the original global, pointing at the second
1328 // element (the original initializer).
1329 auto Alias = GlobalAlias::create(
1330 B.GV->getInitializer()->getType(), 0, B.GV->getLinkage(), "",
1331 ConstantExpr::getGetElementPtr(
1332 NewInit->getType(), NewGV,
1333 ArrayRef<Constant *>{ConstantInt::get(Int32Ty, 0),
1334 ConstantInt::get(Int32Ty, 1)}),
1335 &M);
1336 Alias->setVisibility(B.GV->getVisibility());
1337 Alias->takeName(B.GV);
1338
1339 B.GV->replaceAllUsesWith(Alias);
1340 B.GV->eraseFromParent();
1341}
1342
1343bool DevirtModule::areRemarksEnabled() {
1344 const auto &FL = M.getFunctionList();
1345 for (const Function &Fn : FL) {
1346 const auto &BBL = Fn.getBasicBlockList();
1347 if (BBL.empty())
1348 continue;
1349 auto DI = OptimizationRemark(DEBUG_TYPE, "", DebugLoc(), &BBL.front());
1350 return DI.isEnabled();
1351 }
1352 return false;
1353}
1354
1355void DevirtModule::scanTypeTestUsers(Function *TypeTestFunc,
1356 Function *AssumeFunc) {
1357 // Find all virtual calls via a virtual table pointer %p under an assumption
1358 // of the form llvm.assume(llvm.type.test(%p, %md)). This indicates that %p
1359 // points to a member of the type identifier %md. Group calls by (type ID,
1360 // offset) pair (effectively the identity of the virtual function) and store
1361 // to CallSlots.
1362 DenseSet<CallSite> SeenCallSites;
1363 for (auto I = TypeTestFunc->use_begin(), E = TypeTestFunc->use_end();
1364 I != E;) {
1365 auto CI = dyn_cast<CallInst>(I->getUser());
1366 ++I;
1367 if (!CI)
1368 continue;
1369
1370 // Search for virtual calls based on %p and add them to DevirtCalls.
1371 SmallVector<DevirtCallSite, 1> DevirtCalls;
1372 SmallVector<CallInst *, 1> Assumes;
1373 auto &DT = LookupDomTree(*CI->getFunction());
1374 findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, CI, DT);
1375
1376 // If we found any, add them to CallSlots.
1377 if (!Assumes.empty()) {
1378 Metadata *TypeId =
1379 cast<MetadataAsValue>(CI->getArgOperand(1))->getMetadata();
1380 Value *Ptr = CI->getArgOperand(0)->stripPointerCasts();
1381 for (DevirtCallSite Call : DevirtCalls) {
1382 // Only add this CallSite if we haven't seen it before. The vtable
1383 // pointer may have been CSE'd with pointers from other call sites,
1384 // and we don't want to process call sites multiple times. We can't
1385 // just skip the vtable Ptr if it has been seen before, however, since
1386 // it may be shared by type tests that dominate different calls.
1387 if (SeenCallSites.insert(Call.CS).second)
1388 CallSlots[{TypeId, Call.Offset}].addCallSite(Ptr, Call.CS, nullptr);
1389 }
1390 }
1391
1392 // We no longer need the assumes or the type test.
1393 for (auto Assume : Assumes)
1394 Assume->eraseFromParent();
1395 // We can't use RecursivelyDeleteTriviallyDeadInstructions here because we
1396 // may use the vtable argument later.
1397 if (CI->use_empty())
1398 CI->eraseFromParent();
1399 }
1400}
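
The grouping described by the comment at lines 1357-1361 reduces to a map keyed by the (type ID, offset) pair. A condensed model with placeholder types (the real keys are llvm::Metadata* and a byte offset; deduplication via SeenCallSites is omitted):

    #include <cstdint>
    #include <map>
    #include <utility>
    #include <vector>

    // Sketch of CallSlots: one bucket per virtual function, identified by
    // (type identifier, vtable offset); each bucket collects the call sites
    // that can be devirtualized together.
    using TypeIdKey = const void *; // stands in for llvm::Metadata *
    using CallSiteRef = void *;     // stands in for llvm::CallSite

    static std::map<std::pair<TypeIdKey, uint64_t>, std::vector<CallSiteRef>>
        CallSlotsModel;

    static void recordCall(TypeIdKey Id, uint64_t Offset, CallSiteRef CS) {
      CallSlotsModel[{Id, Offset}].push_back(CS);
    }
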
1401
1402void DevirtModule::scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc) {
1403 Function *TypeTestFunc = Intrinsic::getDeclaration(&M, Intrinsic::type_test);
1404
1405 for (auto I = TypeCheckedLoadFunc->use_begin(),
1406 E = TypeCheckedLoadFunc->use_end();
1407 I != E;) {
1408 auto CI = dyn_cast<CallInst>(I->getUser());
1409 ++I;
1410 if (!CI)
1411 continue;
1412
1413 Value *Ptr = CI->getArgOperand(0);
1414 Value *Offset = CI->getArgOperand(1);
1415 Value *TypeIdValue = CI->getArgOperand(2);
1416 Metadata *TypeId = cast<MetadataAsValue>(TypeIdValue)->getMetadata();
1417
1418 SmallVector<DevirtCallSite, 1> DevirtCalls;
1419 SmallVector<Instruction *, 1> LoadedPtrs;
1420 SmallVector<Instruction *, 1> Preds;
1421 bool HasNonCallUses = false;
1422 auto &DT = LookupDomTree(*CI->getFunction());
1423 findDevirtualizableCallsForTypeCheckedLoad(DevirtCalls, LoadedPtrs, Preds,
1424 HasNonCallUses, CI, DT);
1425
1426 // Start by generating "pessimistic" code that explicitly loads the function
1427 // pointer from the vtable and performs the type check. If possible, we will
1428 // eliminate the load and the type check later.
1429
1430 // If possible, only generate the load at the point where it is used.
1431 // This helps avoid unnecessary spills.
1432 IRBuilder<> LoadB(
1433 (LoadedPtrs.size() == 1 && !HasNonCallUses) ? LoadedPtrs[0] : CI);
1434 Value *GEP = LoadB.CreateGEP(Int8Ty, Ptr, Offset);
1435 Value *GEPPtr = LoadB.CreateBitCast(GEP, PointerType::getUnqual(Int8PtrTy));
1436 Value *LoadedValue = LoadB.CreateLoad(Int8PtrTy, GEPPtr);
1437
1438 for (Instruction *LoadedPtr : LoadedPtrs) {
1439 LoadedPtr->replaceAllUsesWith(LoadedValue);
1440 LoadedPtr->eraseFromParent();
1441 }
1442
1443 // Likewise for the type test.
1444 IRBuilder<> CallB((Preds.size() == 1 && !HasNonCallUses) ? Preds[0] : CI);
1445 CallInst *TypeTestCall = CallB.CreateCall(TypeTestFunc, {Ptr, TypeIdValue});
1446
1447 for (Instruction *Pred : Preds) {
1448 Pred->replaceAllUsesWith(TypeTestCall);
1449 Pred->eraseFromParent();
1450 }
1451
1452 // We have already erased any extractvalue instructions that refer to the
1453 // intrinsic call, but the intrinsic may have other non-extractvalue uses
1454 // (although this is unlikely). In that case, explicitly build a pair and
1455 // RAUW it.
1456 if (!CI->use_empty()) {
1457 Value *Pair = UndefValue::get(CI->getType());
1458 IRBuilder<> B(CI);
1459 Pair = B.CreateInsertValue(Pair, LoadedValue, {0});
1460 Pair = B.CreateInsertValue(Pair, TypeTestCall, {1});
1461 CI->replaceAllUsesWith(Pair);
1462 }
1463
1464    // The number of unsafe uses is initially the number of devirtualizable
1465    // call sites found above.
1465 auto &NumUnsafeUses = NumUnsafeUsesForTypeTest[TypeTestCall];
1466 NumUnsafeUses = DevirtCalls.size();
1467
1468 // If the function pointer has a non-call user, we cannot eliminate the type
1469 // check, as one of those users may eventually call the pointer. Increment
1470 // the unsafe use count to make sure it cannot reach zero.
1471 if (HasNonCallUses)
1472 ++NumUnsafeUses;
1473 for (DevirtCallSite Call : DevirtCalls) {
1474 CallSlots[{TypeId, Call.Offset}].addCallSite(Ptr, Call.CS,
1475 &NumUnsafeUses);
1476 }
1477
1478 CI->eraseFromParent();
1479 }
1480}
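
What the loop above lowers is easiest to state as the intrinsic's contract: llvm.type.checked.load yields both the function pointer loaded from (vtable + offset) and the result of the type test, which is exactly the pair rebuilt at lines 1456-1461. A C++ analogue, with the check abstracted as a callback (an assumption of this sketch):

    #include <cstdint>
    #include <cstring>
    #include <utility>

    // Sketch: element 0 is the loaded function pointer ("pessimistic" load),
    // element 1 is the type-test result.
    static std::pair<void *, bool>
    typeCheckedLoadModel(uint8_t *VTable, uint64_t Offset,
                         bool (*TypeTest)(uint8_t *)) {
      void *Fn = nullptr;
      std::memcpy(&Fn, VTable + Offset, sizeof(Fn)); // load from the vtable
      bool Passed = TypeTest(VTable);                // the explicit type test
      return {Fn, Passed};
    }
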
1481
1482void DevirtModule::importResolution(VTableSlot Slot, VTableSlotInfo &SlotInfo) {
1483 const TypeIdSummary *TidSummary =
1484 ImportSummary->getTypeIdSummary(cast<MDString>(Slot.TypeID)->getString());
1485 if (!TidSummary)
1486 return;
1487 auto ResI = TidSummary->WPDRes.find(Slot.ByteOffset);
1488 if (ResI == TidSummary->WPDRes.end())
1489 return;
1490 const WholeProgramDevirtResolution &Res = ResI->second;
1491
1492 if (Res.TheKind == WholeProgramDevirtResolution::SingleImpl) {
1493 // The type of the function in the declaration is irrelevant because every
1494 // call site will cast it to the correct type.
1495 auto *SingleImpl = M.getOrInsertFunction(
1496 Res.SingleImplName, Type::getVoidTy(M.getContext()));
1497
1498 // This is the import phase so we should not be exporting anything.
1499 bool IsExported = false;
1500 applySingleImplDevirt(SlotInfo, SingleImpl, IsExported);
1501    assert(!IsExported);
1502 }
1503
1504 for (auto &CSByConstantArg : SlotInfo.ConstCSInfo) {
1505 auto I = Res.ResByArg.find(CSByConstantArg.first);
1506 if (I == Res.ResByArg.end())
1507 continue;
1508 auto &ResByArg = I->second;
1509 // FIXME: We should figure out what to do about the "function name" argument
1510 // to the apply* functions, as the function names are unavailable during the
1511 // importing phase. For now we just pass the empty string. This does not
1512 // impact correctness because the function names are just used for remarks.
1513 switch (ResByArg.TheKind) {
1514 case WholeProgramDevirtResolution::ByArg::UniformRetVal:
1515 applyUniformRetValOpt(CSByConstantArg.second, "", ResByArg.Info);
1516 break;
1517 case WholeProgramDevirtResolution::ByArg::UniqueRetVal: {
1518 Constant *UniqueMemberAddr =
1519 importGlobal(Slot, CSByConstantArg.first, "unique_member");
1520 applyUniqueRetValOpt(CSByConstantArg.second, "", ResByArg.Info,
1521 UniqueMemberAddr);
1522 break;
1523 }
1524 case WholeProgramDevirtResolution::ByArg::VirtualConstProp: {
1525 Constant *Byte = importConstant(Slot, CSByConstantArg.first, "byte",
1526 Int32Ty, ResByArg.Byte);
1527 Constant *Bit = importConstant(Slot, CSByConstantArg.first, "bit", Int8Ty,
1528 ResByArg.Bit);
1529 applyVirtualConstProp(CSByConstantArg.second, "", Byte, Bit);
1530 break;
1531 }
1532 default:
1533 break;
1534 }
1535 }
1536
1537 if (Res.TheKind == WholeProgramDevirtResolution::BranchFunnel) {
1538 auto *JT = M.getOrInsertFunction(getGlobalName(Slot, {}, "branch_funnel"),
1539 Type::getVoidTy(M.getContext()));
1540 bool IsExported = false;
1541 applyICallBranchFunnel(SlotInfo, JT, IsExported);
1542    assert(!IsExported);
1543 }
1544}
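
In short, the importer replays a decision recorded in the summary without seeing the callee bodies. A sketch of the semantics of the two simplest by-argument resolutions it replays, with illustrative names only:

    #include <cstdint>

    // UniformRetVal: every implementation returns the same constant, so the
    // call collapses to that constant (carried in ResByArg.Info).
    static uint64_t applyUniformModel(uint64_t Info) { return Info; }

    // UniqueRetVal: exactly one implementation returns true (or false), so
    // the call collapses to a vtable-pointer comparison against that unique
    // member; IsOne says which polarity the unique member has.
    static bool applyUniqueModel(const void *VTablePtr,
                                 const void *UniqueMemberAddr, bool IsOne) {
      return (VTablePtr == UniqueMemberAddr) == IsOne;
    }
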
1545
1546void DevirtModule::removeRedundantTypeTests() {
1547 auto True = ConstantInt::getTrue(M.getContext());
1548 for (auto &&U : NumUnsafeUsesForTypeTest) {
1549 if (U.second == 0) {
1550 U.first->replaceAllUsesWith(True);
1551 U.first->eraseFromParent();
1552 }
1553 }
1554}
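
A sketch of the counter protocol behind this function, with placeholder types: each rewritten llvm.type.checked.load leaves behind a type test plus a count of uses that still rely on it; devirtualizing a call site decrements the count, and a test whose count reaches zero is safe to fold away.

    #include <map>

    struct TypeTestModel { bool FoldedToTrue = false; };

    static std::map<TypeTestModel *, unsigned> UnsafeUses;

    static void removeRedundantModel() {
      for (auto &U : UnsafeUses)
        if (U.second == 0)
          U.first->FoldedToTrue = true; // stands in for RAUW with i1 true
    }
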
1555
1556bool DevirtModule::run() {
1557 Function *TypeTestFunc =
1558 M.getFunction(Intrinsic::getName(Intrinsic::type_test));
1559 Function *TypeCheckedLoadFunc =
1560 M.getFunction(Intrinsic::getName(Intrinsic::type_checked_load));
1561 Function *AssumeFunc = M.getFunction(Intrinsic::getName(Intrinsic::assume));
1562
1563 // Normally if there are no users of the devirtualization intrinsics in the
1564 // module, this pass has nothing to do. But if we are exporting, we also need
1565 // to handle any users that appear only in the function summaries.
1566 if (!ExportSummary &&
1
Assuming the condition is false
1567 (!TypeTestFunc || TypeTestFunc->use_empty() || !AssumeFunc ||
1568 AssumeFunc->use_empty()) &&
1569 (!TypeCheckedLoadFunc || TypeCheckedLoadFunc->use_empty()))
1570 return false;
1571
1572 if (TypeTestFunc && AssumeFunc)
2
Assuming 'TypeTestFunc' is null
1573 scanTypeTestUsers(TypeTestFunc, AssumeFunc);
1574
1575 if (TypeCheckedLoadFunc)
3
Assuming 'TypeCheckedLoadFunc' is null
4
Taking false branch
1576 scanTypeCheckedLoadUsers(TypeCheckedLoadFunc);
1577
1578 if (ImportSummary) {
5
Assuming the condition is false
6
Taking false branch
1579 for (auto &S : CallSlots)
1580 importResolution(S.first, S.second);
1581
1582 removeRedundantTypeTests();
1583
1584 // The rest of the code is only necessary when exporting or during regular
1585 // LTO, so we are done.
1586 return true;
1587 }
1588
1589 // Rebuild type metadata into a map for easy lookup.
1590 std::vector<VTableBits> Bits;
1591 DenseMap<Metadata *, std::set<TypeMemberInfo>> TypeIdMap;
1592 buildTypeIdentifierMap(Bits, TypeIdMap);
1593 if (TypeIdMap.empty())
7
Assuming the condition is false
8
Taking false branch
1594 return true;
1595
1596 // Collect information from summary about which calls to try to devirtualize.
1597 if (ExportSummary) {
9
Taking true branch
1598 DenseMap<GlobalValue::GUID, TinyPtrVector<Metadata *>> MetadataByGUID;
1599 for (auto &P : TypeIdMap) {
1600 if (auto *TypeId = dyn_cast<MDString>(P.first))
1601 MetadataByGUID[GlobalValue::getGUID(TypeId->getString())].push_back(
1602 TypeId);
1603 }
1604
1605 for (auto &P : *ExportSummary) {
1606 for (auto &S : P.second.SummaryList) {
1607 auto *FS = dyn_cast<FunctionSummary>(S.get());
1608 if (!FS)
1609 continue;
1610 // FIXME: Only add live functions.
1611 for (FunctionSummary::VFuncId VF : FS->type_test_assume_vcalls()) {
1612 for (Metadata *MD : MetadataByGUID[VF.GUID]) {
1613 CallSlots[{MD, VF.Offset}]
1614 .CSInfo.markSummaryHasTypeTestAssumeUsers();
1615 }
1616 }
1617 for (FunctionSummary::VFuncId VF : FS->type_checked_load_vcalls()) {
1618 for (Metadata *MD : MetadataByGUID[VF.GUID]) {
1619 CallSlots[{MD, VF.Offset}].CSInfo.addSummaryTypeCheckedLoadUser(FS);
1620 }
1621 }
1622 for (const FunctionSummary::ConstVCall &VC :
1623 FS->type_test_assume_const_vcalls()) {
1624 for (Metadata *MD : MetadataByGUID[VC.VFunc.GUID]) {
1625 CallSlots[{MD, VC.VFunc.Offset}]
1626 .ConstCSInfo[VC.Args]
1627 .markSummaryHasTypeTestAssumeUsers();
1628 }
1629 }
1630 for (const FunctionSummary::ConstVCall &VC :
1631 FS->type_checked_load_const_vcalls()) {
1632 for (Metadata *MD : MetadataByGUID[VC.VFunc.GUID]) {
1633 CallSlots[{MD, VC.VFunc.Offset}]
1634 .ConstCSInfo[VC.Args]
1635 .addSummaryTypeCheckedLoadUser(FS);
1636 }
1637 }
1638 }
1639 }
1640 }
1641
1642 // For each (type, offset) pair:
1643 bool DidVirtualConstProp = false;
1644 std::map<std::string, Function*> DevirtTargets;
1645 for (auto &S : CallSlots) {
1646 // Search each of the members of the type identifier for the virtual
1647 // function implementation at offset S.first.ByteOffset, and add to
1648 // TargetsForSlot.
1649 std::vector<VirtualCallTarget> TargetsForSlot;
1650 if (tryFindVirtualCallTargets(TargetsForSlot, TypeIdMap[S.first.TypeID],
10
Taking true branch
1651 S.first.ByteOffset)) {
1652 WholeProgramDevirtResolution *Res = nullptr;
1653 if (ExportSummary && isa<MDString>(S.first.TypeID))
11
Assuming the condition is false
1654 Res = &ExportSummary
1655 ->getOrInsertTypeIdSummary(
1656 cast<MDString>(S.first.TypeID)->getString())
1657 .WPDRes[S.first.ByteOffset];
1658
1659 if (!trySingleImplDevirt(TargetsForSlot, S.second, Res)) {
12
Taking true branch
1660 DidVirtualConstProp |=
1661 tryVirtualConstProp(TargetsForSlot, S.second, Res, S.first);
13
Calling 'DevirtModule::tryVirtualConstProp'
1662
1663 tryICallBranchFunnel(TargetsForSlot, S.second, Res, S.first);
1664 }
1665
1666    // Collect functions devirtualized for at least one call site, for stats.
1667 if (RemarksEnabled)
1668 for (const auto &T : TargetsForSlot)
1669 if (T.WasDevirt)
1670 DevirtTargets[T.Fn->getName()] = T.Fn;
1671 }
1672
1673 // CFI-specific: if we are exporting and any llvm.type.checked.load
1674 // intrinsics were *not* devirtualized, we need to add the resulting
1675 // llvm.type.test intrinsics to the function summaries so that the
1676 // LowerTypeTests pass will export them.
1677 if (ExportSummary && isa<MDString>(S.first.TypeID)) {
1678 auto GUID =
1679 GlobalValue::getGUID(cast<MDString>(S.first.TypeID)->getString());
1680 for (auto FS : S.second.CSInfo.SummaryTypeCheckedLoadUsers)
1681 FS->addTypeTest(GUID);
1682 for (auto &CCS : S.second.ConstCSInfo)
1683 for (auto FS : CCS.second.SummaryTypeCheckedLoadUsers)
1684 FS->addTypeTest(GUID);
1685 }
1686 }
1687
1688 if (RemarksEnabled) {
1689 // Generate remarks for each devirtualized function.
1690 for (const auto &DT : DevirtTargets) {
1691 Function *F = DT.second;
1692
1693 using namespace ore;
1694      OREGetter(F).emit(OptimizationRemark(DEBUG_TYPE, "Devirtualized", F)
1695 << "devirtualized "
1696 << NV("FunctionName", F->getName()));
1697 }
1698 }
1699
1700 removeRedundantTypeTests();
1701
1702 // Rebuild each global we touched as part of virtual constant propagation to
1703 // include the before and after bytes.
1704 if (DidVirtualConstProp)
1705 for (VTableBits &B : Bits)
1706 rebuildGlobal(B);
1707
1708 return true;
1709}
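
Distilling the path the analyzer follows above: step 11 assumes the export condition is false, so Res remains nullptr when step 13 enters tryVirtualConstProp, which then writes through Res under a per-call-site condition that does not imply Res was ever bound. A hypothetical distillation of that shape (not the pass's code; names are illustrative):

    struct ResolutionModel { int TheKind = 0; };

    static void tryVirtualConstPropModel(ResolutionModel *Res,
                                         bool IsExported) {
      if (IsExported)     // analogous to CSByConstantArg.second.isExported()
        Res->TheKind = 1; // null dereference when the caller never bound Res
    }

    static void runModel(bool HaveExportSummary) {
      ResolutionModel Storage;
      ResolutionModel *Res = nullptr;
      if (HaveExportSummary) // step 11: assumed false, so Res stays null
        Res = &Storage;
      tryVirtualConstPropModel(Res, /*IsExported=*/true); // step 13
    }
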