WholeProgramDevirt.cpp (LLVM 9.0.0svn)
1 //===- WholeProgramDevirt.cpp - Whole program virtual call optimization ---===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This pass implements whole program optimization of virtual calls in cases
10 // where we know (via !type metadata) that the list of callees is fixed. This
11 // includes the following:
12 // - Single implementation devirtualization: if a virtual call has a single
13 // possible callee, replace all calls with a direct call to that callee.
14 // - Virtual constant propagation: if the virtual function's return type is an
15 // integer <=64 bits and all possible callees are readnone, for each class and
16 // each list of constant arguments: evaluate the function, store the return
17 // value alongside the virtual table, and rewrite each virtual call as a load
18 // from the virtual table.
19 // - Uniform return value optimization: if the conditions for virtual constant
20 // propagation hold and each function returns the same constant value, replace
21 // each virtual call with that constant.
22 // - Unique return value optimization for i1 return values: if the conditions
23 // for virtual constant propagation hold and a single vtable's function
24 // returns 0, or a single vtable's function returns 1, replace each virtual
25 // call with a comparison of the vptr against that vtable's address.
26 //
27 // This pass is intended to be used during the regular and thin LTO pipelines.
28 // During regular LTO, the pass determines the best optimization for each
29 // virtual call and applies the resolutions directly to virtual calls that are
30 // eligible for virtual call optimization (i.e. calls that use either of the
31 // llvm.assume(llvm.type.test) or llvm.type.checked.load intrinsics). During
32 // ThinLTO, the pass operates in two phases:
33 // - Export phase: this is run during the thin link over a single merged module
34 // that contains all vtables with !type metadata that participate in the link.
35 // The pass computes a resolution for each virtual call and stores it in the
36 // type identifier summary.
37 // - Import phase: this is run during the thin backends over the individual
38 // modules. The pass applies the resolutions previously computed during the
39 // export phase to each eligible virtual call.
40 //
41 //===----------------------------------------------------------------------===//
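// Editor's note: the example below is illustrative only and is not part of
// the original source; the class and function names are hypothetical.
//
//   struct Animal { virtual int legs() = 0; };
//   struct Dog final : Animal { int legs() override { return 4; } };
//
// If Dog::legs() is the only possible callee of a->legs() in the whole
// program, single implementation devirtualization rewrites the indirect call
// into a direct call to Dog::legs(). If several implementations exist but all
// are readnone and return small integer constants, virtual constant
// propagation evaluates each one, stores the result alongside its vtable, and
// turns the call into a load; the uniform and unique return value
// optimizations handle the special cases where every callee returns the same
// constant, or exactly one vtable's callee returns a given i1 value.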
42 
43 #include "llvm/Transforms/IPO/WholeProgramDevirt.h"
44 #include "llvm/ADT/ArrayRef.h"
45 #include "llvm/ADT/DenseMap.h"
46 #include "llvm/ADT/DenseMapInfo.h"
47 #include "llvm/ADT/DenseSet.h"
48 #include "llvm/ADT/MapVector.h"
49 #include "llvm/ADT/SmallVector.h"
50 #include "llvm/ADT/iterator_range.h"
51 #include "llvm/Analysis/AliasAnalysis.h"
52 #include "llvm/Analysis/BasicAliasAnalysis.h"
53 #include "llvm/Analysis/OptimizationRemarkEmitter.h"
54 #include "llvm/Analysis/TypeMetadataUtils.h"
55 #include "llvm/IR/CallSite.h"
56 #include "llvm/IR/Constants.h"
57 #include "llvm/IR/DataLayout.h"
58 #include "llvm/IR/DebugLoc.h"
59 #include "llvm/IR/DerivedTypes.h"
60 #include "llvm/IR/Dominators.h"
61 #include "llvm/IR/Function.h"
62 #include "llvm/IR/GlobalAlias.h"
63 #include "llvm/IR/GlobalVariable.h"
64 #include "llvm/IR/IRBuilder.h"
65 #include "llvm/IR/InstrTypes.h"
66 #include "llvm/IR/Instruction.h"
67 #include "llvm/IR/Instructions.h"
68 #include "llvm/IR/Intrinsics.h"
69 #include "llvm/IR/LLVMContext.h"
70 #include "llvm/IR/Metadata.h"
71 #include "llvm/IR/Module.h"
72 #include "llvm/IR/ModuleSummaryIndexYAML.h"
73 #include "llvm/Pass.h"
74 #include "llvm/PassRegistry.h"
75 #include "llvm/PassSupport.h"
76 #include "llvm/Support/Casting.h"
77 #include "llvm/Support/Error.h"
78 #include "llvm/Support/FileSystem.h"
79 #include "llvm/Support/MathExtras.h"
80 #include "llvm/Transforms/IPO.h"
81 #include "llvm/Transforms/IPO/FunctionAttrs.h"
82 #include "llvm/Transforms/Utils/Evaluator.h"
83 #include <algorithm>
84 #include <cstddef>
85 #include <map>
86 #include <set>
87 #include <string>
88 
89 using namespace llvm;
90 using namespace wholeprogramdevirt;
91 
92 #define DEBUG_TYPE "wholeprogramdevirt"
93 
94 static cl::opt<PassSummaryAction> ClSummaryAction(
95  "wholeprogramdevirt-summary-action",
96  cl::desc("What to do with the summary when running this pass"),
97  cl::values(clEnumValN(PassSummaryAction::None, "none", "Do nothing"),
98  clEnumValN(PassSummaryAction::Import, "import",
99  "Import typeid resolutions from summary and globals"),
100  clEnumValN(PassSummaryAction::Export, "export",
101  "Export typeid resolutions to summary and globals")),
102  cl::Hidden);
103 
104 static cl::opt<std::string> ClReadSummary(
105  "wholeprogramdevirt-read-summary",
106  cl::desc("Read summary from given YAML file before running pass"),
107  cl::Hidden);
108 
109 static cl::opt<std::string> ClWriteSummary(
110  "wholeprogramdevirt-write-summary",
111  cl::desc("Write summary to given YAML file after running pass"),
112  cl::Hidden);
113 
114 static cl::opt<unsigned>
115  ClThreshold("wholeprogramdevirt-branch-funnel-threshold", cl::Hidden,
116  cl::init(10), cl::ZeroOrMore,
117  cl::desc("Maximum number of call targets per "
118  "call site to enable branch funnels"));
119 
120 // Find the minimum offset that we may store a value of size Size bits at. If
121 // IsAfter is set, look for an offset after the object, otherwise look for an
122 // offset before the object.
123 uint64_t
124 wholeprogramdevirt::findLowestOffset(ArrayRef<VirtualCallTarget> Targets,
125  bool IsAfter, uint64_t Size) {
126  // Find a minimum offset taking into account only vtable sizes.
127  uint64_t MinByte = 0;
128  for (const VirtualCallTarget &Target : Targets) {
129  if (IsAfter)
130  MinByte = std::max(MinByte, Target.minAfterBytes());
131  else
132  MinByte = std::max(MinByte, Target.minBeforeBytes());
133  }
134 
135  // Build a vector of arrays of bytes covering, for each target, a slice of the
136  // used region (see AccumBitVector::BytesUsed in
137  // llvm/Transforms/IPO/WholeProgramDevirt.h) starting at MinByte. Effectively,
138  // this aligns the used regions to start at MinByte.
139  //
140  // In this example, A, B and C are vtables, # is a byte already allocated for
141  // a virtual function pointer, AAAA... (etc.) are the used regions for the
142  // vtables and Offset(X) is the value computed for the Offset variable below
143  // for X.
144  //
145  // Offset(A)
146  // | |
147  // |MinByte
148  // A: ################AAAAAAAA|AAAAAAAA
149  // B: ########BBBBBBBBBBBBBBBB|BBBB
150  // C: ########################|CCCCCCCCCCCCCCCC
151  // | Offset(B) |
152  //
153  // This code produces the slices of A, B and C that appear after the divider
154  // at MinByte.
155  std::vector<ArrayRef<uint8_t>> Used;
156  for (const VirtualCallTarget &Target : Targets) {
157  ArrayRef<uint8_t> VTUsed = IsAfter ? Target.TM->Bits->After.BytesUsed
158  : Target.TM->Bits->Before.BytesUsed;
159  uint64_t Offset = IsAfter ? MinByte - Target.minAfterBytes()
160  : MinByte - Target.minBeforeBytes();
161 
162  // Disregard used regions that are smaller than Offset. These are
163  // effectively all-free regions that do not need to be checked.
164  if (VTUsed.size() > Offset)
165  Used.push_back(VTUsed.slice(Offset));
166  }
167 
168  if (Size == 1) {
169  // Find a free bit in each member of Used.
170  for (unsigned I = 0;; ++I) {
171  uint8_t BitsUsed = 0;
172  for (auto &&B : Used)
173  if (I < B.size())
174  BitsUsed |= B[I];
175  if (BitsUsed != 0xff)
176  return (MinByte + I) * 8 +
177  countTrailingZeros(uint8_t(~BitsUsed), ZB_Undefined);
178  }
179  } else {
180  // Find a free (Size/8) byte region in each member of Used.
181  // FIXME: see if alignment helps.
182  for (unsigned I = 0;; ++I) {
183  for (auto &&B : Used) {
184  unsigned Byte = 0;
185  while ((I + Byte) < B.size() && Byte < (Size / 8)) {
186  if (B[I + Byte])
187  goto NextI;
188  ++Byte;
189  }
190  }
191  return (MinByte + I) * 8;
192  NextI:;
193  }
194  }
195 }
196 
197 void wholeprogramdevirt::setBeforeReturnValues(
198  MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocBefore,
199  unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
200  if (BitWidth == 1)
201  OffsetByte = -(AllocBefore / 8 + 1);
202  else
203  OffsetByte = -((AllocBefore + 7) / 8 + (BitWidth + 7) / 8);
204  OffsetBit = AllocBefore % 8;
205 
206  for (VirtualCallTarget &Target : Targets) {
207  if (BitWidth == 1)
208  Target.setBeforeBit(AllocBefore);
209  else
210  Target.setBeforeBytes(AllocBefore, (BitWidth + 7) / 8);
211  }
212 }
213 
214 void wholeprogramdevirt::setAfterReturnValues(
215  MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocAfter,
216  unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
217  if (BitWidth == 1)
218  OffsetByte = AllocAfter / 8;
219  else
220  OffsetByte = (AllocAfter + 7) / 8;
221  OffsetBit = AllocAfter % 8;
222 
223  for (VirtualCallTarget &Target : Targets) {
224  if (BitWidth == 1)
225  Target.setAfterBit(AllocAfter);
226  else
227  Target.setAfterBytes(AllocAfter, (BitWidth + 7) / 8);
228  }
229 }
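// Editor's note: illustrative arithmetic, not part of the original source.
// With AllocAfter == 9 and BitWidth == 1, setAfterReturnValues computes
// OffsetByte == 9 / 8 == 1 and OffsetBit == 9 % 8 == 1, i.e. the evaluated
// return value lives in bit 1 of the byte at offset 1 from the vtable's
// address point. With AllocBefore == 9 and BitWidth == 32,
// setBeforeReturnValues computes
// OffsetByte == -((9 + 7) / 8 + (32 + 7) / 8) == -6, a negative byte offset
// from the address point (the bit offset is only meaningful for the one-bit
// case).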
230 
231 VirtualCallTarget::VirtualCallTarget(Function *Fn, const TypeMemberInfo *TM)
232  : Fn(Fn), TM(TM),
233  IsBigEndian(Fn->getParent()->getDataLayout().isBigEndian()), WasDevirt(false) {}
234 
235 namespace {
236 
237 // A slot in a set of virtual tables. The TypeID identifies the set of virtual
238 // tables, and the ByteOffset is the offset in bytes from the address point to
239 // the virtual function pointer.
240 struct VTableSlot {
241  Metadata *TypeID;
242  uint64_t ByteOffset;
243 };
244 
245 } // end anonymous namespace
246 
247 namespace llvm {
248 
249 template <> struct DenseMapInfo<VTableSlot> {
250  static VTableSlot getEmptyKey() {
251  return {DenseMapInfo<Metadata *>::getEmptyKey(),
252  DenseMapInfo<uint64_t>::getEmptyKey()};
253  }
254  static VTableSlot getTombstoneKey() {
255  return {DenseMapInfo<Metadata *>::getTombstoneKey(),
256  DenseMapInfo<uint64_t>::getTombstoneKey()};
257  }
258  static unsigned getHashValue(const VTableSlot &I) {
259  return DenseMapInfo<Metadata *>::getHashValue(I.TypeID) ^
260  DenseMapInfo<uint64_t>::getHashValue(I.ByteOffset);
261  }
262  static bool isEqual(const VTableSlot &LHS,
263  const VTableSlot &RHS) {
264  return LHS.TypeID == RHS.TypeID && LHS.ByteOffset == RHS.ByteOffset;
265  }
266 };
267 
268 } // end namespace llvm
269 
270 namespace {
271 
272 // A virtual call site. VTable is the loaded virtual table pointer, and CS is
273 // the indirect virtual call.
274 struct VirtualCallSite {
275  Value *VTable;
276  CallSite CS;
277 
278  // If non-null, this field points to the associated unsafe use count stored in
279  // the DevirtModule::NumUnsafeUsesForTypeTest map below. See the description
280  // of that field for details.
281  unsigned *NumUnsafeUses;
282 
283  void
284  emitRemark(const StringRef OptName, const StringRef TargetName,
285  function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter) {
286  Function *F = CS.getCaller();
287  DebugLoc DLoc = CS->getDebugLoc();
288  BasicBlock *Block = CS.getParent();
289 
290  using namespace ore;
291  OREGetter(F).emit(OptimizationRemark(DEBUG_TYPE, OptName, DLoc, Block)
292  << NV("Optimization", OptName)
293  << ": devirtualized a call to "
294  << NV("FunctionName", TargetName));
295  }
296 
297  void replaceAndErase(
298  const StringRef OptName, const StringRef TargetName, bool RemarksEnabled,
299  function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
300  Value *New) {
301  if (RemarksEnabled)
302  emitRemark(OptName, TargetName, OREGetter);
303  CS->replaceAllUsesWith(New);
304  if (auto II = dyn_cast<InvokeInst>(CS.getInstruction())) {
305  BranchInst::Create(II->getNormalDest(), CS.getInstruction());
306  II->getUnwindDest()->removePredecessor(II->getParent());
307  }
308  CS->eraseFromParent();
309  // This use is no longer unsafe.
310  if (NumUnsafeUses)
311  --*NumUnsafeUses;
312  }
313 };
314 
315 // Call site information collected for a specific VTableSlot and possibly a list
316 // of constant integer arguments. The grouping by arguments is handled by the
317 // VTableSlotInfo class.
318 struct CallSiteInfo {
319  /// The set of call sites for this slot. Used during regular LTO and the
320  /// import phase of ThinLTO (as well as the export phase of ThinLTO for any
321  /// call sites that appear in the merged module itself); in each of these
322  /// cases we are directly operating on the call sites at the IR level.
323  std::vector<VirtualCallSite> CallSites;
324 
325  /// Whether all call sites represented by this CallSiteInfo, including those
326  /// in summaries, have been devirtualized. This starts off as true because a
327  /// default constructed CallSiteInfo represents no call sites.
328  bool AllCallSitesDevirted = true;
329 
330  // These fields are used during the export phase of ThinLTO and reflect
331  // information collected from function summaries.
332 
333  /// Whether any function summary contains an llvm.assume(llvm.type.test) for
334  /// this slot.
335  bool SummaryHasTypeTestAssumeUsers = false;
336 
337  /// CFI-specific: a vector containing the list of function summaries that use
338  /// the llvm.type.checked.load intrinsic and therefore will require
339  /// resolutions for llvm.type.test in order to implement CFI checks if
340  /// devirtualization was unsuccessful. If devirtualization was successful, the
341  /// pass will clear this vector by calling markDevirt(). If at the end of the
342  /// pass the vector is non-empty, we will need to add a use of llvm.type.test
343  /// to each of the function summaries in the vector.
344  std::vector<FunctionSummary *> SummaryTypeCheckedLoadUsers;
345 
346  bool isExported() const {
347  return SummaryHasTypeTestAssumeUsers ||
348  !SummaryTypeCheckedLoadUsers.empty();
349  }
350 
351  void markSummaryHasTypeTestAssumeUsers() {
352  SummaryHasTypeTestAssumeUsers = true;
353  AllCallSitesDevirted = false;
354  }
355 
356  void addSummaryTypeCheckedLoadUser(FunctionSummary *FS) {
357  SummaryTypeCheckedLoadUsers.push_back(FS);
358  AllCallSitesDevirted = false;
359  }
360 
361  void markDevirt() {
362  AllCallSitesDevirted = true;
363 
364  // As explained in the comment for SummaryTypeCheckedLoadUsers.
365  SummaryTypeCheckedLoadUsers.clear();
366  }
367 };
368 
369 // Call site information collected for a specific VTableSlot.
370 struct VTableSlotInfo {
371  // The set of call sites which do not have all constant integer arguments
372  // (excluding "this").
373  CallSiteInfo CSInfo;
374 
375  // The set of call sites with all constant integer arguments (excluding
376  // "this"), grouped by argument list.
377  std::map<std::vector<uint64_t>, CallSiteInfo> ConstCSInfo;
378 
379  void addCallSite(Value *VTable, CallSite CS, unsigned *NumUnsafeUses);
380 
381 private:
382  CallSiteInfo &findCallSiteInfo(CallSite CS);
383 };
384 
385 CallSiteInfo &VTableSlotInfo::findCallSiteInfo(CallSite CS) {
386  std::vector<uint64_t> Args;
387  auto *CI = dyn_cast<IntegerType>(CS.getType());
388  if (!CI || CI->getBitWidth() > 64 || CS.arg_empty())
389  return CSInfo;
390  for (auto &&Arg : make_range(CS.arg_begin() + 1, CS.arg_end())) {
391  auto *CI = dyn_cast<ConstantInt>(Arg);
392  if (!CI || CI->getBitWidth() > 64)
393  return CSInfo;
394  Args.push_back(CI->getZExtValue());
395  }
396  return ConstCSInfo[Args];
397 }
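// Editor's note: illustrative example, not part of the original source.
// For a call whose return type is an integer of at most 64 bits and whose
// non-'this' arguments are all constant integers, e.g. p->f(2, 3),
// findCallSiteInfo returns ConstCSInfo[{2, 3}], so every call site passing
// (2, 3) shares one CallSiteInfo for virtual constant propagation. Calls with
// a non-constant or over-wide integer argument, or whose return type is not a
// suitable integer, fall back to the generic CSInfo bucket.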
398 
399 void VTableSlotInfo::addCallSite(Value *VTable, CallSite CS,
400  unsigned *NumUnsafeUses) {
401  auto &CSI = findCallSiteInfo(CS);
402  CSI.AllCallSitesDevirted = false;
403  CSI.CallSites.push_back({VTable, CS, NumUnsafeUses});
404 }
405 
406 struct DevirtModule {
407  Module &M;
408  function_ref<AAResults &(Function &)> AARGetter;
409  function_ref<DominatorTree &(Function &)> LookupDomTree;
410 
411  ModuleSummaryIndex *ExportSummary;
412  const ModuleSummaryIndex *ImportSummary;
413 
414  IntegerType *Int8Ty;
415  PointerType *Int8PtrTy;
416  IntegerType *Int32Ty;
417  IntegerType *Int64Ty;
418  IntegerType *IntPtrTy;
419 
420  bool RemarksEnabled;
421  function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter;
422 
423  MapVector<VTableSlot, VTableSlotInfo> CallSlots;
424 
425  // This map keeps track of the number of "unsafe" uses of a loaded function
426  // pointer. The key is the associated llvm.type.test intrinsic call generated
427  // by this pass. An unsafe use is one that calls the loaded function pointer
428  // directly. Every time we eliminate an unsafe use (for example, by
429  // devirtualizing it or by applying virtual constant propagation), we
430  // decrement the value stored in this map. If a value reaches zero, we can
431  // eliminate the type check by RAUWing the associated llvm.type.test call with
432  // true.
433  std::map<CallInst *, unsigned> NumUnsafeUsesForTypeTest;
434 
435  DevirtModule(Module &M, function_ref<AAResults &(Function &)> AARGetter,
436  function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
437  function_ref<DominatorTree &(Function &)> LookupDomTree,
438  ModuleSummaryIndex *ExportSummary,
439  const ModuleSummaryIndex *ImportSummary)
440  : M(M), AARGetter(AARGetter), LookupDomTree(LookupDomTree),
441  ExportSummary(ExportSummary), ImportSummary(ImportSummary),
442  Int8Ty(Type::getInt8Ty(M.getContext())),
443  Int8PtrTy(Type::getInt8PtrTy(M.getContext())),
444  Int32Ty(Type::getInt32Ty(M.getContext())),
445  Int64Ty(Type::getInt64Ty(M.getContext())),
446  IntPtrTy(M.getDataLayout().getIntPtrType(M.getContext(), 0)),
447  RemarksEnabled(areRemarksEnabled()), OREGetter(OREGetter) {
448  assert(!(ExportSummary && ImportSummary));
449  }
450 
451  bool areRemarksEnabled();
452 
453  void scanTypeTestUsers(Function *TypeTestFunc, Function *AssumeFunc);
454  void scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc);
455 
456  void buildTypeIdentifierMap(
457  std::vector<VTableBits> &Bits,
458  DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap);
459  Constant *getPointerAtOffset(Constant *I, uint64_t Offset);
460  bool
461  tryFindVirtualCallTargets(std::vector<VirtualCallTarget> &TargetsForSlot,
462  const std::set<TypeMemberInfo> &TypeMemberInfos,
463  uint64_t ByteOffset);
464 
465  void applySingleImplDevirt(VTableSlotInfo &SlotInfo, Constant *TheFn,
466  bool &IsExported);
467  bool trySingleImplDevirt(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
468  VTableSlotInfo &SlotInfo,
469  WholeProgramDevirtResolution *Res);
470 
471  void applyICallBranchFunnel(VTableSlotInfo &SlotInfo, Constant *JT,
472  bool &IsExported);
473  void tryICallBranchFunnel(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
474  VTableSlotInfo &SlotInfo,
475  WholeProgramDevirtResolution *Res, VTableSlot Slot);
476 
477  bool tryEvaluateFunctionsWithArgs(
478  MutableArrayRef<VirtualCallTarget> TargetsForSlot,
479  ArrayRef<uint64_t> Args);
480 
481  void applyUniformRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
482  uint64_t TheRetVal);
483  bool tryUniformRetValOpt(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
484  CallSiteInfo &CSInfo,
485  WholeProgramDevirtResolution::ByArg *Res);
486 
487  // Returns the global symbol name that is used to export information about the
488  // given vtable slot and list of arguments.
489  std::string getGlobalName(VTableSlot Slot, ArrayRef<uint64_t> Args,
490  StringRef Name);
491 
492  bool shouldExportConstantsAsAbsoluteSymbols();
493 
494  // This function is called during the export phase to create a symbol
495  // definition containing information about the given vtable slot and list of
496  // arguments.
497  void exportGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args, StringRef Name,
498  Constant *C);
499  void exportConstant(VTableSlot Slot, ArrayRef<uint64_t> Args, StringRef Name,
500  uint32_t Const, uint32_t &Storage);
501 
502  // This function is called during the import phase to create a reference to
503  // the symbol definition created during the export phase.
504  Constant *importGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args,
505  StringRef Name);
506  Constant *importConstant(VTableSlot Slot, ArrayRef<uint64_t> Args,
507  StringRef Name, IntegerType *IntTy,
508  uint32_t Storage);
509 
510  Constant *getMemberAddr(const TypeMemberInfo *M);
511 
512  void applyUniqueRetValOpt(CallSiteInfo &CSInfo, StringRef FnName, bool IsOne,
513  Constant *UniqueMemberAddr);
514  bool tryUniqueRetValOpt(unsigned BitWidth,
515  MutableArrayRef<VirtualCallTarget> TargetsForSlot,
516  CallSiteInfo &CSInfo,
517  WholeProgramDevirtResolution::ByArg *Res,
518  VTableSlot Slot, ArrayRef<uint64_t> Args);
519 
520  void applyVirtualConstProp(CallSiteInfo &CSInfo, StringRef FnName,
521  Constant *Byte, Constant *Bit);
522  bool tryVirtualConstProp(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
523  VTableSlotInfo &SlotInfo,
524  WholeProgramDevirtResolution *Res, VTableSlot Slot);
525 
526  void rebuildGlobal(VTableBits &B);
527 
528  // Apply the summary resolution for Slot to all virtual calls in SlotInfo.
529  void importResolution(VTableSlot Slot, VTableSlotInfo &SlotInfo);
530 
531  // If we were able to eliminate all unsafe uses for a type checked load,
532  // eliminate the associated type tests by replacing them with true.
533  void removeRedundantTypeTests();
534 
535  bool run();
536 
537  // Lower the module using the action and summary passed as command line
538  // arguments. For testing purposes only.
539  static bool
540  runForTesting(Module &M, function_ref<AAResults &(Function &)> AARGetter,
541  function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
542  function_ref<DominatorTree &(Function &)> LookupDomTree);
543 };
544 
545 struct WholeProgramDevirt : public ModulePass {
546  static char ID;
547 
548  bool UseCommandLine = false;
549 
550  ModuleSummaryIndex *ExportSummary;
551  const ModuleSummaryIndex *ImportSummary;
552 
553  WholeProgramDevirt() : ModulePass(ID), UseCommandLine(true) {
554  initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
555  }
556 
557  WholeProgramDevirt(ModuleSummaryIndex *ExportSummary,
558  const ModuleSummaryIndex *ImportSummary)
559  : ModulePass(ID), ExportSummary(ExportSummary),
560  ImportSummary(ImportSummary) {
561  initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
562  }
563 
564  bool runOnModule(Module &M) override {
565  if (skipModule(M))
566  return false;
567 
568  // In the new pass manager, we can request the optimization
569  // remark emitter pass on a per-function-basis, which the
570  // OREGetter will do for us.
571  // In the old pass manager, this is harder, so we just build
572  // an optimization remark emitter on the fly, when we need it.
573  std::unique_ptr<OptimizationRemarkEmitter> ORE;
574  auto OREGetter = [&](Function *F) -> OptimizationRemarkEmitter & {
575  ORE = make_unique<OptimizationRemarkEmitter>(F);
576  return *ORE;
577  };
578 
579  auto LookupDomTree = [this](Function &F) -> DominatorTree & {
580  return this->getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
581  };
582 
583  if (UseCommandLine)
584  return DevirtModule::runForTesting(M, LegacyAARGetter(*this), OREGetter,
585  LookupDomTree);
586 
587  return DevirtModule(M, LegacyAARGetter(*this), OREGetter, LookupDomTree,
588  ExportSummary, ImportSummary)
589  .run();
590  }
591 
592  void getAnalysisUsage(AnalysisUsage &AU) const override {
593  AU.addRequired<AssumptionCacheTracker>();
594  AU.addRequired<TargetLibraryInfoWrapperPass>();
595  AU.addRequired<DominatorTreeWrapperPass>();
596  }
597 };
598 
599 } // end anonymous namespace
600 
601 INITIALIZE_PASS_BEGIN(WholeProgramDevirt, "wholeprogramdevirt",
602  "Whole program devirtualization", false, false)
603 INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
604 INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
605 INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
606 INITIALIZE_PASS_END(WholeProgramDevirt, "wholeprogramdevirt",
607  "Whole program devirtualization", false, false)
608 char WholeProgramDevirt::ID = 0;
609 
610 ModulePass *
611 llvm::createWholeProgramDevirtPass(ModuleSummaryIndex *ExportSummary,
612  const ModuleSummaryIndex *ImportSummary) {
613  return new WholeProgramDevirt(ExportSummary, ImportSummary);
614 }
615 
616 PreservedAnalyses WholeProgramDevirtPass::run(Module &M,
617  ModuleAnalysisManager &AM) {
618  auto &FAM = AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
619  auto AARGetter = [&](Function &F) -> AAResults & {
620  return FAM.getResult<AAManager>(F);
621  };
622  auto OREGetter = [&](Function *F) -> OptimizationRemarkEmitter & {
623  return FAM.getResult<OptimizationRemarkEmitterAnalysis>(*F);
624  };
625  auto LookupDomTree = [&FAM](Function &F) -> DominatorTree & {
626  return FAM.getResult<DominatorTreeAnalysis>(F);
627  };
628  if (!DevirtModule(M, AARGetter, OREGetter, LookupDomTree, ExportSummary,
629  ImportSummary)
630  .run())
631  return PreservedAnalyses::all();
632  return PreservedAnalyses::none();
633 }
634 
635 bool DevirtModule::runForTesting(
636  Module &M, function_ref<AAResults &(Function &)> AARGetter,
637  function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
638  function_ref<DominatorTree &(Function &)> LookupDomTree) {
639  ModuleSummaryIndex Summary(/*HaveGVs=*/false);
640 
641  // Handle the command-line summary arguments. This code is for testing
642  // purposes only, so we handle errors directly.
643  if (!ClReadSummary.empty()) {
644  ExitOnError ExitOnErr("-wholeprogramdevirt-read-summary: " + ClReadSummary +
645  ": ");
646  auto ReadSummaryFile =
647  ExitOnErr(errorOrToExpected(MemoryBuffer::getFile(ClReadSummary)));
648 
649  yaml::Input In(ReadSummaryFile->getBuffer());
650  In >> Summary;
651  ExitOnErr(errorCodeToError(In.error()));
652  }
653 
654  bool Changed =
655  DevirtModule(
656  M, AARGetter, OREGetter, LookupDomTree,
657  ClSummaryAction == PassSummaryAction::Export ? &Summary : nullptr,
658  ClSummaryAction == PassSummaryAction::Import ? &Summary : nullptr)
659  .run();
660 
661  if (!ClWriteSummary.empty()) {
662  ExitOnError ExitOnErr(
663  "-wholeprogramdevirt-write-summary: " + ClWriteSummary + ": ");
664  std::error_code EC;
665  raw_fd_ostream OS(ClWriteSummary, EC, sys::fs::OF_Text);
666  ExitOnErr(errorCodeToError(EC));
667 
668  yaml::Output Out(OS);
669  Out << Summary;
670  }
671 
672  return Changed;
673 }
674 
675 void DevirtModule::buildTypeIdentifierMap(
676  std::vector<VTableBits> &Bits,
677  DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap) {
678  DenseMap<GlobalVariable *, VTableBits *> GVToBits;
679  Bits.reserve(M.getGlobalList().size());
680  SmallVector<MDNode *, 2> Types;
681  for (GlobalVariable &GV : M.globals()) {
682  Types.clear();
683  GV.getMetadata(LLVMContext::MD_type, Types);
684  if (GV.isDeclaration() || Types.empty())
685  continue;
686 
687  VTableBits *&BitsPtr = GVToBits[&GV];
688  if (!BitsPtr) {
689  Bits.emplace_back();
690  Bits.back().GV = &GV;
691  Bits.back().ObjectSize =
692  M.getDataLayout().getTypeAllocSize(GV.getInitializer()->getType());
693  BitsPtr = &Bits.back();
694  }
695 
696  for (MDNode *Type : Types) {
697  auto TypeID = Type->getOperand(1).get();
698 
699  uint64_t Offset =
700  cast<ConstantInt>(
701  cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
702  ->getZExtValue();
703 
704  TypeIdMap[TypeID].insert({BitsPtr, Offset});
705  }
706  }
707 }
708 
709 Constant *DevirtModule::getPointerAtOffset(Constant *I, uint64_t Offset) {
710  if (I->getType()->isPointerTy()) {
711  if (Offset == 0)
712  return I;
713  return nullptr;
714  }
715 
716  const DataLayout &DL = M.getDataLayout();
717 
718  if (auto *C = dyn_cast<ConstantStruct>(I)) {
719  const StructLayout *SL = DL.getStructLayout(C->getType());
720  if (Offset >= SL->getSizeInBytes())
721  return nullptr;
722 
723  unsigned Op = SL->getElementContainingOffset(Offset);
724  return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
725  Offset - SL->getElementOffset(Op));
726  }
727  if (auto *C = dyn_cast<ConstantArray>(I)) {
728  ArrayType *VTableTy = C->getType();
729  uint64_t ElemSize = DL.getTypeAllocSize(VTableTy->getElementType());
730 
731  unsigned Op = Offset / ElemSize;
732  if (Op >= C->getNumOperands())
733  return nullptr;
734 
735  return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
736  Offset % ElemSize);
737  }
738  return nullptr;
739 }
740 
741 bool DevirtModule::tryFindVirtualCallTargets(
742  std::vector<VirtualCallTarget> &TargetsForSlot,
743  const std::set<TypeMemberInfo> &TypeMemberInfos, uint64_t ByteOffset) {
744  for (const TypeMemberInfo &TM : TypeMemberInfos) {
745  if (!TM.Bits->GV->isConstant())
746  return false;
747 
748  Constant *Ptr = getPointerAtOffset(TM.Bits->GV->getInitializer(),
749  TM.Offset + ByteOffset);
750  if (!Ptr)
751  return false;
752 
753  auto Fn = dyn_cast<Function>(Ptr->stripPointerCasts());
754  if (!Fn)
755  return false;
756 
757  // We can disregard __cxa_pure_virtual as a possible call target, as
758  // calls to pure virtuals are UB.
759  if (Fn->getName() == "__cxa_pure_virtual")
760  continue;
761 
762  TargetsForSlot.push_back({Fn, &TM});
763  }
764 
765  // Give up if we couldn't find any targets.
766  return !TargetsForSlot.empty();
767 }
768 
769 void DevirtModule::applySingleImplDevirt(VTableSlotInfo &SlotInfo,
770  Constant *TheFn, bool &IsExported) {
771  auto Apply = [&](CallSiteInfo &CSInfo) {
772  for (auto &&VCallSite : CSInfo.CallSites) {
773  if (RemarksEnabled)
774  VCallSite.emitRemark("single-impl",
775  TheFn->stripPointerCasts()->getName(), OREGetter);
776  VCallSite.CS.setCalledFunction(ConstantExpr::getBitCast(
777  TheFn, VCallSite.CS.getCalledValue()->getType()));
778  // This use is no longer unsafe.
779  if (VCallSite.NumUnsafeUses)
780  --*VCallSite.NumUnsafeUses;
781  }
782  if (CSInfo.isExported())
783  IsExported = true;
784  CSInfo.markDevirt();
785  };
786  Apply(SlotInfo.CSInfo);
787  for (auto &P : SlotInfo.ConstCSInfo)
788  Apply(P.second);
789 }
790 
791 bool DevirtModule::trySingleImplDevirt(
792  MutableArrayRef<VirtualCallTarget> TargetsForSlot,
793  VTableSlotInfo &SlotInfo, WholeProgramDevirtResolution *Res) {
794  // See if the program contains a single implementation of this virtual
795  // function.
796  Function *TheFn = TargetsForSlot[0].Fn;
797  for (auto &&Target : TargetsForSlot)
798  if (TheFn != Target.Fn)
799  return false;
800 
801  // If so, update each call site to call that implementation directly.
802  if (RemarksEnabled)
803  TargetsForSlot[0].WasDevirt = true;
804 
805  bool IsExported = false;
806  applySingleImplDevirt(SlotInfo, TheFn, IsExported);
807  if (!IsExported)
808  return false;
809 
810  // If the only implementation has local linkage, we must promote to external
811  // to make it visible to thin LTO objects. We can only get here during the
812  // ThinLTO export phase.
813  if (TheFn->hasLocalLinkage()) {
814  std::string NewName = (TheFn->getName() + "$merged").str();
815 
816  // Since we are renaming the function, any comdats with the same name must
817  // also be renamed. This is required when targeting COFF, as the comdat name
818  // must match one of the names of the symbols in the comdat.
819  if (Comdat *C = TheFn->getComdat()) {
820  if (C->getName() == TheFn->getName()) {
821  Comdat *NewC = M.getOrInsertComdat(NewName);
822  NewC->setSelectionKind(C->getSelectionKind());
823  for (GlobalObject &GO : M.global_objects())
824  if (GO.getComdat() == C)
825  GO.setComdat(NewC);
826  }
827  }
828 
829  TheFn->setLinkage(GlobalValue::ExternalLinkage);
830  TheFn->setVisibility(GlobalValue::HiddenVisibility);
831  TheFn->setName(NewName);
832  }
833 
834  Res->TheKind = WholeProgramDevirtResolution::SingleImpl;
835  Res->SingleImplName = TheFn->getName();
836 
837  return true;
838 }
839 
840 void DevirtModule::tryICallBranchFunnel(
841  MutableArrayRef<VirtualCallTarget> TargetsForSlot, VTableSlotInfo &SlotInfo,
842  WholeProgramDevirtResolution *Res, VTableSlot Slot) {
843  Triple T(M.getTargetTriple());
844  if (T.getArch() != Triple::x86_64)
845  return;
846 
847  if (TargetsForSlot.size() > ClThreshold)
848  return;
849 
850  bool HasNonDevirt = !SlotInfo.CSInfo.AllCallSitesDevirted;
851  if (!HasNonDevirt)
852  for (auto &P : SlotInfo.ConstCSInfo)
853  if (!P.second.AllCallSitesDevirted) {
854  HasNonDevirt = true;
855  break;
856  }
857 
858  if (!HasNonDevirt)
859  return;
860 
861  FunctionType *FT =
862  FunctionType::get(Type::getVoidTy(M.getContext()), {Int8PtrTy}, true);
863  Function *JT;
864  if (isa<MDString>(Slot.TypeID)) {
865  JT = Function::Create(FT, Function::ExternalLinkage,
866  M.getDataLayout().getProgramAddressSpace(),
867  getGlobalName(Slot, {}, "branch_funnel"), &M);
868  JT->setVisibility(GlobalValue::HiddenVisibility);
869  } else {
870  JT = Function::Create(FT, Function::InternalLinkage,
871  M.getDataLayout().getProgramAddressSpace(),
872  "branch_funnel", &M);
873  }
874  JT->addAttribute(1, Attribute::Nest);
875 
876  std::vector<Value *> JTArgs;
877  JTArgs.push_back(JT->arg_begin());
878  for (auto &T : TargetsForSlot) {
879  JTArgs.push_back(getMemberAddr(T.TM));
880  JTArgs.push_back(T.Fn);
881  }
882 
883  BasicBlock *BB = BasicBlock::Create(M.getContext(), "", JT, nullptr);
884  Function *Intr =
885  Intrinsic::getDeclaration(&M, llvm::Intrinsic::icall_branch_funnel, {});
886 
887  auto *CI = CallInst::Create(Intr, JTArgs, "", BB);
888  CI->setTailCallKind(CallInst::TCK_MustTail);
889  ReturnInst::Create(M.getContext(), nullptr, BB);
890 
891  bool IsExported = false;
892  applyICallBranchFunnel(SlotInfo, JT, IsExported);
893  if (IsExported)
894  Res->TheKind = WholeProgramDevirtResolution::BranchFunnel;
895 }
896 
897 void DevirtModule::applyICallBranchFunnel(VTableSlotInfo &SlotInfo,
898  Constant *JT, bool &IsExported) {
899  auto Apply = [&](CallSiteInfo &CSInfo) {
900  if (CSInfo.isExported())
901  IsExported = true;
902  if (CSInfo.AllCallSitesDevirted)
903  return;
904  for (auto &&VCallSite : CSInfo.CallSites) {
905  CallSite CS = VCallSite.CS;
906 
907  // Jump tables are only profitable if the retpoline mitigation is enabled.
908  Attribute FSAttr = CS.getCaller()->getFnAttribute("target-features");
909  if (FSAttr.hasAttribute(Attribute::None) ||
910  !FSAttr.getValueAsString().contains("+retpoline"))
911  continue;
912 
913  if (RemarksEnabled)
914  VCallSite.emitRemark("branch-funnel",
915  JT->stripPointerCasts()->getName(), OREGetter);
916 
917  // Pass the address of the vtable in the nest register, which is r10 on
918  // x86_64.
919  std::vector<Type *> NewArgs;
920  NewArgs.push_back(Int8PtrTy);
921  for (Type *T : CS.getFunctionType()->params())
922  NewArgs.push_back(T);
923  FunctionType *NewFT =
924  FunctionType::get(CS.getFunctionType()->getReturnType(), NewArgs,
925  CS.getFunctionType()->isVarArg());
926  PointerType *NewFTPtr = PointerType::getUnqual(NewFT);
927 
928  IRBuilder<> IRB(CS.getInstruction());
929  std::vector<Value *> Args;
930  Args.push_back(IRB.CreateBitCast(VCallSite.VTable, Int8PtrTy));
931  for (unsigned I = 0; I != CS.getNumArgOperands(); ++I)
932  Args.push_back(CS.getArgOperand(I));
933 
934  CallSite NewCS;
935  if (CS.isCall())
936  NewCS = IRB.CreateCall(NewFT, IRB.CreateBitCast(JT, NewFTPtr), Args);
937  else
938  NewCS = IRB.CreateInvoke(
939  NewFT, IRB.CreateBitCast(JT, NewFTPtr),
940  cast<InvokeInst>(CS.getInstruction())->getNormalDest(),
941  cast<InvokeInst>(CS.getInstruction())->getUnwindDest(), Args);
942  NewCS.setCallingConv(CS.getCallingConv());
943 
945  std::vector<AttributeSet> NewArgAttrs;
946  NewArgAttrs.push_back(AttributeSet::get(
948  M.getContext(), Attribute::Nest)}));
949  for (unsigned I = 0; I + 2 < Attrs.getNumAttrSets(); ++I)
950  NewArgAttrs.push_back(Attrs.getParamAttributes(I));
951  NewCS.setAttributes(
952  AttributeList::get(M.getContext(), Attrs.getFnAttributes(),
953  Attrs.getRetAttributes(), NewArgAttrs));
954 
955  CS->replaceAllUsesWith(NewCS.getInstruction());
956  CS->eraseFromParent();
957 
958  // This use is no longer unsafe.
959  if (VCallSite.NumUnsafeUses)
960  --*VCallSite.NumUnsafeUses;
961  }
962  // Don't mark as devirtualized because there may be callers compiled without
963  // retpoline mitigation, which would mean that they are lowered to
964  // llvm.type.test and therefore require an llvm.type.test resolution for the
965  // type identifier.
966  };
967  Apply(SlotInfo.CSInfo);
968  for (auto &P : SlotInfo.ConstCSInfo)
969  Apply(P.second);
970 }
971 
972 bool DevirtModule::tryEvaluateFunctionsWithArgs(
973  MutableArrayRef<VirtualCallTarget> TargetsForSlot,
974  ArrayRef<uint64_t> Args) {
975  // Evaluate each function and store the result in each target's RetVal
976  // field.
977  for (VirtualCallTarget &Target : TargetsForSlot) {
978  if (Target.Fn->arg_size() != Args.size() + 1)
979  return false;
980 
981  Evaluator Eval(M.getDataLayout(), nullptr);
982  SmallVector<Constant *, 2> EvalArgs;
983  EvalArgs.push_back(
984  Constant::getNullValue(Target.Fn->getFunctionType()->getParamType(0)));
985  for (unsigned I = 0; I != Args.size(); ++I) {
986  auto *ArgTy = dyn_cast<IntegerType>(
987  Target.Fn->getFunctionType()->getParamType(I + 1));
988  if (!ArgTy)
989  return false;
990  EvalArgs.push_back(ConstantInt::get(ArgTy, Args[I]));
991  }
992 
993  Constant *RetVal;
994  if (!Eval.EvaluateFunction(Target.Fn, RetVal, EvalArgs) ||
995  !isa<ConstantInt>(RetVal))
996  return false;
997  Target.RetVal = cast<ConstantInt>(RetVal)->getZExtValue();
998  }
999  return true;
1000 }
1001 
1002 void DevirtModule::applyUniformRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
1003  uint64_t TheRetVal) {
1004  for (auto Call : CSInfo.CallSites)
1005  Call.replaceAndErase(
1006  "uniform-ret-val", FnName, RemarksEnabled, OREGetter,
1007  ConstantInt::get(cast<IntegerType>(Call.CS.getType()), TheRetVal));
1008  CSInfo.markDevirt();
1009 }
1010 
1011 bool DevirtModule::tryUniformRetValOpt(
1012  MutableArrayRef<VirtualCallTarget> TargetsForSlot, CallSiteInfo &CSInfo,
1013  WholeProgramDevirtResolution::ByArg *Res) {
1014  // Uniform return value optimization. If all functions return the same
1015  // constant, replace all calls with that constant.
1016  uint64_t TheRetVal = TargetsForSlot[0].RetVal;
1017  for (const VirtualCallTarget &Target : TargetsForSlot)
1018  if (Target.RetVal != TheRetVal)
1019  return false;
1020 
1021  if (CSInfo.isExported()) {
1022  Res->TheKind = WholeProgramDevirtResolution::ByArg::UniformRetVal;
1023  Res->Info = TheRetVal;
1024  }
1025 
1026  applyUniformRetValOpt(CSInfo, TargetsForSlot[0].Fn->getName(), TheRetVal);
1027  if (RemarksEnabled)
1028  for (auto &&Target : TargetsForSlot)
1029  Target.WasDevirt = true;
1030  return true;
1031 }
1032 
1033 std::string DevirtModule::getGlobalName(VTableSlot Slot,
1034  ArrayRef<uint64_t> Args,
1035  StringRef Name) {
1036  std::string FullName = "__typeid_";
1037  raw_string_ostream OS(FullName);
1038  OS << cast<MDString>(Slot.TypeID)->getString() << '_' << Slot.ByteOffset;
1039  for (uint64_t Arg : Args)
1040  OS << '_' << Arg;
1041  OS << '_' << Name;
1042  return OS.str();
1043 }
1044 
1045 bool DevirtModule::shouldExportConstantsAsAbsoluteSymbols() {
1046  Triple T(M.getTargetTriple());
1047  return (T.getArch() == Triple::x86 || T.getArch() == Triple::x86_64) &&
1048  T.getObjectFormat() == Triple::ELF;
1049 }
1050 
1051 void DevirtModule::exportGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args,
1052  StringRef Name, Constant *C) {
1053  GlobalAlias *GA = GlobalAlias::create(Int8Ty, 0, GlobalValue::ExternalLinkage,
1054  getGlobalName(Slot, Args, Name), C, &M);
1055  GA->setVisibility(GlobalValue::HiddenVisibility);
1056 }
1057 
1058 void DevirtModule::exportConstant(VTableSlot Slot, ArrayRef<uint64_t> Args,
1059  StringRef Name, uint32_t Const,
1060  uint32_t &Storage) {
1061  if (shouldExportConstantsAsAbsoluteSymbols()) {
1062  exportGlobal(
1063  Slot, Args, Name,
1064  ConstantExpr::getIntToPtr(ConstantInt::get(Int32Ty, Const), Int8PtrTy));
1065  return;
1066  }
1067 
1068  Storage = Const;
1069 }
1070 
1071 Constant *DevirtModule::importGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args,
1072  StringRef Name) {
1073  Constant *C = M.getOrInsertGlobal(getGlobalName(Slot, Args, Name), Int8Ty);
1074  auto *GV = dyn_cast<GlobalVariable>(C);
1075  if (GV)
1076  GV->setVisibility(GlobalValue::HiddenVisibility);
1077  return C;
1078 }
1079 
1080 Constant *DevirtModule::importConstant(VTableSlot Slot, ArrayRef<uint64_t> Args,
1081  StringRef Name, IntegerType *IntTy,
1082  uint32_t Storage) {
1083  if (!shouldExportConstantsAsAbsoluteSymbols())
1084  return ConstantInt::get(IntTy, Storage);
1085 
1086  Constant *C = importGlobal(Slot, Args, Name);
1087  auto *GV = cast<GlobalVariable>(C->stripPointerCasts());
1088  C = ConstantExpr::getPtrToInt(C, IntTy);
1089 
1090  // We only need to set metadata if the global is newly created, in which
1091  // case it would not have hidden visibility.
1092  if (GV->hasMetadata(LLVMContext::MD_absolute_symbol))
1093  return C;
1094 
1095  auto SetAbsRange = [&](uint64_t Min, uint64_t Max) {
1096  auto *MinC = ConstantAsMetadata::get(ConstantInt::get(IntPtrTy, Min));
1097  auto *MaxC = ConstantAsMetadata::get(ConstantInt::get(IntPtrTy, Max));
1098  GV->setMetadata(LLVMContext::MD_absolute_symbol,
1099  MDNode::get(M.getContext(), {MinC, MaxC}));
1100  };
1101  unsigned AbsWidth = IntTy->getBitWidth();
1102  if (AbsWidth == IntPtrTy->getBitWidth())
1103  SetAbsRange(~0ull, ~0ull); // Full set.
1104  else
1105  SetAbsRange(0, 1ull << AbsWidth);
1106  return C;
1107 }
1108 
1109 void DevirtModule::applyUniqueRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
1110  bool IsOne,
1111  Constant *UniqueMemberAddr) {
1112  for (auto &&Call : CSInfo.CallSites) {
1113  IRBuilder<> B(Call.CS.getInstruction());
1114  Value *Cmp =
1115  B.CreateICmp(IsOne ? ICmpInst::ICMP_EQ : ICmpInst::ICMP_NE,
1116  B.CreateBitCast(Call.VTable, Int8PtrTy), UniqueMemberAddr);
1117  Cmp = B.CreateZExt(Cmp, Call.CS->getType());
1118  Call.replaceAndErase("unique-ret-val", FnName, RemarksEnabled, OREGetter,
1119  Cmp);
1120  }
1121  CSInfo.markDevirt();
1122 }
1123 
1124 Constant *DevirtModule::getMemberAddr(const TypeMemberInfo *M) {
1125  Constant *C = ConstantExpr::getBitCast(M->Bits->GV, Int8PtrTy);
1126  return ConstantExpr::getGetElementPtr(Int8Ty, C,
1127  ConstantInt::get(Int64Ty, M->Offset));
1128 }
1129 
1130 bool DevirtModule::tryUniqueRetValOpt(
1131  unsigned BitWidth, MutableArrayRef<VirtualCallTarget> TargetsForSlot,
1132  CallSiteInfo &CSInfo, WholeProgramDevirtResolution::ByArg *Res,
1133  VTableSlot Slot, ArrayRef<uint64_t> Args) {
1134  // IsOne controls whether we look for a 0 or a 1.
1135  auto tryUniqueRetValOptFor = [&](bool IsOne) {
1136  const TypeMemberInfo *UniqueMember = nullptr;
1137  for (const VirtualCallTarget &Target : TargetsForSlot) {
1138  if (Target.RetVal == (IsOne ? 1 : 0)) {
1139  if (UniqueMember)
1140  return false;
1141  UniqueMember = Target.TM;
1142  }
1143  }
1144 
1145  // We should have found a unique member or bailed out by now. We already
1146  // checked for a uniform return value in tryUniformRetValOpt.
1147  assert(UniqueMember);
1148 
1149  Constant *UniqueMemberAddr = getMemberAddr(UniqueMember);
1150  if (CSInfo.isExported()) {
1151  Res->TheKind = WholeProgramDevirtResolution::ByArg::UniqueRetVal;
1152  Res->Info = IsOne;
1153 
1154  exportGlobal(Slot, Args, "unique_member", UniqueMemberAddr);
1155  }
1156 
1157  // Replace each call with the comparison.
1158  applyUniqueRetValOpt(CSInfo, TargetsForSlot[0].Fn->getName(), IsOne,
1159  UniqueMemberAddr);
1160 
1161  // Update devirtualization statistics for targets.
1162  if (RemarksEnabled)
1163  for (auto &&Target : TargetsForSlot)
1164  Target.WasDevirt = true;
1165 
1166  return true;
1167  };
1168 
1169  if (BitWidth == 1) {
1170  if (tryUniqueRetValOptFor(true))
1171  return true;
1172  if (tryUniqueRetValOptFor(false))
1173  return true;
1174  }
1175  return false;
1176 }
1177 
1178 void DevirtModule::applyVirtualConstProp(CallSiteInfo &CSInfo, StringRef FnName,
1179  Constant *Byte, Constant *Bit) {
1180  for (auto Call : CSInfo.CallSites) {
1181  auto *RetType = cast<IntegerType>(Call.CS.getType());
1182  IRBuilder<> B(Call.CS.getInstruction());
1183  Value *Addr =
1184  B.CreateGEP(Int8Ty, B.CreateBitCast(Call.VTable, Int8PtrTy), Byte);
1185  if (RetType->getBitWidth() == 1) {
1186  Value *Bits = B.CreateLoad(Int8Ty, Addr);
1187  Value *BitsAndBit = B.CreateAnd(Bits, Bit);
1188  auto IsBitSet = B.CreateICmpNE(BitsAndBit, ConstantInt::get(Int8Ty, 0));
1189  Call.replaceAndErase("virtual-const-prop-1-bit", FnName, RemarksEnabled,
1190  OREGetter, IsBitSet);
1191  } else {
1192  Value *ValAddr = B.CreateBitCast(Addr, RetType->getPointerTo());
1193  Value *Val = B.CreateLoad(RetType, ValAddr);
1194  Call.replaceAndErase("virtual-const-prop", FnName, RemarksEnabled,
1195  OREGetter, Val);
1196  }
1197  }
1198  CSInfo.markDevirt();
1199 }
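// Editor's note: illustrative IR sketch, not part of the original source.
// After applyVirtualConstProp, a devirtualizable call such as
//   %ret = call i32 %fptr(i8* %obj)     ; %fptr was loaded from the vtable
// is conceptually replaced by
//   %addr = getelementptr i8, i8* %vtable, i32 <OffsetByte>
//   %ret  = load i32, i32* <bitcast of %addr>
// so the precomputed return value is read from memory laid out next to the
// vtable instead of calling the virtual function. For i1 returns, a single
// byte is loaded and tested against the Bit mask.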
1200 
1201 bool DevirtModule::tryVirtualConstProp(
1202  MutableArrayRef<VirtualCallTarget> TargetsForSlot, VTableSlotInfo &SlotInfo,
1203  WholeProgramDevirtResolution *Res, VTableSlot Slot) {
1204  // This only works if the function returns an integer.
1205  auto RetType = dyn_cast<IntegerType>(TargetsForSlot[0].Fn->getReturnType());
1206  if (!RetType)
1207  return false;
1208  unsigned BitWidth = RetType->getBitWidth();
1209  if (BitWidth > 64)
1210  return false;
1211 
1212  // Make sure that each function is defined, does not access memory, takes at
1213  // least one argument, does not use its first argument (which we assume is
1214  // 'this'), and has the same return type.
1215  //
1216  // Note that we test whether this copy of the function is readnone, rather
1217  // than testing function attributes, which must hold for any copy of the
1218  // function, even a less optimized version substituted at link time. This is
1219  // sound because the virtual constant propagation optimizations effectively
1220  // inline all implementations of the virtual function into each call site,
1221  // rather than using function attributes to perform local optimization.
1222  for (VirtualCallTarget &Target : TargetsForSlot) {
1223  if (Target.Fn->isDeclaration() ||
1224  computeFunctionBodyMemoryAccess(*Target.Fn, AARGetter(*Target.Fn)) !=
1225  MAK_ReadNone ||
1226  Target.Fn->arg_empty() || !Target.Fn->arg_begin()->use_empty() ||
1227  Target.Fn->getReturnType() != RetType)
1228  return false;
1229  }
1230 
1231  for (auto &&CSByConstantArg : SlotInfo.ConstCSInfo) {
1232  if (!tryEvaluateFunctionsWithArgs(TargetsForSlot, CSByConstantArg.first))
1233  continue;
1234 
1235  WholeProgramDevirtResolution::ByArg *ResByArg = nullptr;
1236  if (Res)
1237  ResByArg = &Res->ResByArg[CSByConstantArg.first];
1238 
1239  if (tryUniformRetValOpt(TargetsForSlot, CSByConstantArg.second, ResByArg))
1240  continue;
1241 
1242  if (tryUniqueRetValOpt(BitWidth, TargetsForSlot, CSByConstantArg.second,
1243  ResByArg, Slot, CSByConstantArg.first))
1244  continue;
1245 
1246  // Find an allocation offset in bits in all vtables associated with the
1247  // type.
1248  uint64_t AllocBefore =
1249  findLowestOffset(TargetsForSlot, /*IsAfter=*/false, BitWidth);
1250  uint64_t AllocAfter =
1251  findLowestOffset(TargetsForSlot, /*IsAfter=*/true, BitWidth);
1252 
1253  // Calculate the total amount of padding needed to store a value at both
1254  // ends of the object.
1255  uint64_t TotalPaddingBefore = 0, TotalPaddingAfter = 0;
1256  for (auto &&Target : TargetsForSlot) {
1257  TotalPaddingBefore += std::max<int64_t>(
1258  (AllocBefore + 7) / 8 - Target.allocatedBeforeBytes() - 1, 0);
1259  TotalPaddingAfter += std::max<int64_t>(
1260  (AllocAfter + 7) / 8 - Target.allocatedAfterBytes() - 1, 0);
1261  }
1262 
1263  // If the amount of padding is too large, give up.
1264  // FIXME: do something smarter here.
1265  if (std::min(TotalPaddingBefore, TotalPaddingAfter) > 128)
1266  continue;
1267 
1268  // Calculate the offset to the value as a (possibly negative) byte offset
1269  // and (if applicable) a bit offset, and store the values in the targets.
1270  int64_t OffsetByte;
1271  uint64_t OffsetBit;
1272  if (TotalPaddingBefore <= TotalPaddingAfter)
1273  setBeforeReturnValues(TargetsForSlot, AllocBefore, BitWidth, OffsetByte,
1274  OffsetBit);
1275  else
1276  setAfterReturnValues(TargetsForSlot, AllocAfter, BitWidth, OffsetByte,
1277  OffsetBit);
1278 
1279  if (RemarksEnabled)
1280  for (auto &&Target : TargetsForSlot)
1281  Target.WasDevirt = true;
1282 
1283 
1284  if (CSByConstantArg.second.isExported()) {
1285  ResByArg->TheKind = WholeProgramDevirtResolution::ByArg::VirtualConstProp;
1286  exportConstant(Slot, CSByConstantArg.first, "byte", OffsetByte,
1287  ResByArg->Byte);
1288  exportConstant(Slot, CSByConstantArg.first, "bit", 1ULL << OffsetBit,
1289  ResByArg->Bit);
1290  }
1291 
1292  // Rewrite each call to a load from OffsetByte/OffsetBit.
1293  Constant *ByteConst = ConstantInt::get(Int32Ty, OffsetByte);
1294  Constant *BitConst = ConstantInt::get(Int8Ty, 1ULL << OffsetBit);
1295  applyVirtualConstProp(CSByConstantArg.second,
1296  TargetsForSlot[0].Fn->getName(), ByteConst, BitConst);
1297  }
1298  return true;
1299 }
1300 
1301 void DevirtModule::rebuildGlobal(VTableBits &B) {
1302  if (B.Before.Bytes.empty() && B.After.Bytes.empty())
1303  return;
1304 
1305  // Align each byte array to pointer width.
1306  unsigned PointerSize = M.getDataLayout().getPointerSize();
1307  B.Before.Bytes.resize(alignTo(B.Before.Bytes.size(), PointerSize));
1308  B.After.Bytes.resize(alignTo(B.After.Bytes.size(), PointerSize));
1309 
1310  // Before was stored in reverse order; flip it now.
1311  for (size_t I = 0, Size = B.Before.Bytes.size(); I != Size / 2; ++I)
1312  std::swap(B.Before.Bytes[I], B.Before.Bytes[Size - 1 - I]);
1313 
1314  // Build an anonymous global containing the before bytes, followed by the
1315  // original initializer, followed by the after bytes.
1316  auto NewInit = ConstantStruct::getAnon(
1317  {ConstantDataArray::get(M.getContext(), B.Before.Bytes),
1318  B.GV->getInitializer(),
1319  ConstantDataArray::get(M.getContext(), B.After.Bytes)});
1320  auto NewGV =
1321  new GlobalVariable(M, NewInit->getType(), B.GV->isConstant(),
1322  GlobalVariable::PrivateLinkage, NewInit, "", B.GV);
1323  NewGV->setSection(B.GV->getSection());
1324  NewGV->setComdat(B.GV->getComdat());
1325 
1326  // Copy the original vtable's metadata to the anonymous global, adjusting
1327  // offsets as required.
1328  NewGV->copyMetadata(B.GV, B.Before.Bytes.size());
1329 
1330  // Build an alias named after the original global, pointing at the second
1331  // element (the original initializer).
1332  auto Alias = GlobalAlias::create(
1333  B.GV->getInitializer()->getType(), 0, B.GV->getLinkage(), "",
1334  ConstantExpr::getGetElementPtr(
1335  NewInit->getType(), NewGV,
1336  ArrayRef<Constant *>{ConstantInt::get(Int32Ty, 0),
1337  ConstantInt::get(Int32Ty, 1)}),
1338  &M);
1339  Alias->setVisibility(B.GV->getVisibility());
1340  Alias->takeName(B.GV);
1341 
1342  B.GV->replaceAllUsesWith(Alias);
1343  B.GV->eraseFromParent();
1344 }
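// Editor's note: illustrative layout, not part of the original source.
// For a hypothetical vtable global @vt whose evaluated constants need NBefore
// bytes before and NAfter bytes after the original data, rebuildGlobal emits
// an anonymous private global shaped like
//   { [NBefore x i8], <original initializer of @vt>, [NAfter x i8] }
// and turns @vt itself into an alias of the middle element, so existing
// references and the address point stay valid while the new constants occupy
// the surrounding padding.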
1345 
1346 bool DevirtModule::areRemarksEnabled() {
1347  const auto &FL = M.getFunctionList();
1348  for (const Function &Fn : FL) {
1349  const auto &BBL = Fn.getBasicBlockList();
1350  if (BBL.empty())
1351  continue;
1352  auto DI = OptimizationRemark(DEBUG_TYPE, "", DebugLoc(), &BBL.front());
1353  return DI.isEnabled();
1354  }
1355  return false;
1356 }
1357 
1358 void DevirtModule::scanTypeTestUsers(Function *TypeTestFunc,
1359  Function *AssumeFunc) {
1360  // Find all virtual calls via a virtual table pointer %p under an assumption
1361  // of the form llvm.assume(llvm.type.test(%p, %md)). This indicates that %p
1362  // points to a member of the type identifier %md. Group calls by (type ID,
1363  // offset) pair (effectively the identity of the virtual function) and store
1364  // to CallSlots.
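// Editor's note: illustrative IR sketch, not part of the original source.
// The pattern being scanned for looks roughly like:
//   %vtable = load i8*, i8** %vtableptr
//   %p = call i1 @llvm.type.test(i8* %vtable, metadata !"typeid")
//   call void @llvm.assume(i1 %p)
//   ; ...a function pointer is loaded from %vtable at some offset and called
// Each virtual call found this way is keyed in CallSlots by the pair
// (type identifier, byte offset of the loaded slot).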
1365  DenseSet<CallSite> SeenCallSites;
1366  for (auto I = TypeTestFunc->use_begin(), E = TypeTestFunc->use_end();
1367  I != E;) {
1368  auto CI = dyn_cast<CallInst>(I->getUser());
1369  ++I;
1370  if (!CI)
1371  continue;
1372 
1373  // Search for virtual calls based on %p and add them to DevirtCalls.
1374  SmallVector<DevirtCallSite, 1> DevirtCalls;
1375  SmallVector<CallInst *, 1> Assumes;
1376  auto &DT = LookupDomTree(*CI->getFunction());
1377  findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, CI, DT);
1378 
1379  // If we found any, add them to CallSlots.
1380  if (!Assumes.empty()) {
1381  Metadata *TypeId =
1382  cast<MetadataAsValue>(CI->getArgOperand(1))->getMetadata();
1383  Value *Ptr = CI->getArgOperand(0)->stripPointerCasts();
1384  for (DevirtCallSite Call : DevirtCalls) {
1385  // Only add this CallSite if we haven't seen it before. The vtable
1386  // pointer may have been CSE'd with pointers from other call sites,
1387  // and we don't want to process call sites multiple times. We can't
1388  // just skip the vtable Ptr if it has been seen before, however, since
1389  // it may be shared by type tests that dominate different calls.
1390  if (SeenCallSites.insert(Call.CS).second)
1391  CallSlots[{TypeId, Call.Offset}].addCallSite(Ptr, Call.CS, nullptr);
1392  }
1393  }
1394 
1395  // We no longer need the assumes or the type test.
1396  for (auto Assume : Assumes)
1397  Assume->eraseFromParent();
1398  // We can't use RecursivelyDeleteTriviallyDeadInstructions here because we
1399  // may use the vtable argument later.
1400  if (CI->use_empty())
1401  CI->eraseFromParent();
1402  }
1403 }
1404 
1405 void DevirtModule::scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc) {
1406  Function *TypeTestFunc = Intrinsic::getDeclaration(&M, Intrinsic::type_test);
1407 
1408  for (auto I = TypeCheckedLoadFunc->use_begin(),
1409  E = TypeCheckedLoadFunc->use_end();
1410  I != E;) {
1411  auto CI = dyn_cast<CallInst>(I->getUser());
1412  ++I;
1413  if (!CI)
1414  continue;
1415 
1416  Value *Ptr = CI->getArgOperand(0);
1417  Value *Offset = CI->getArgOperand(1);
1418  Value *TypeIdValue = CI->getArgOperand(2);
1419  Metadata *TypeId = cast<MetadataAsValue>(TypeIdValue)->getMetadata();
1420 
1421  SmallVector<DevirtCallSite, 1> DevirtCalls;
1422  SmallVector<Instruction *, 1> LoadedPtrs;
1423  SmallVector<Instruction *, 1> Preds;
1424  bool HasNonCallUses = false;
1425  auto &DT = LookupDomTree(*CI->getFunction());
1426  findDevirtualizableCallsForTypeCheckedLoad(DevirtCalls, LoadedPtrs, Preds,
1427  HasNonCallUses, CI, DT);
1428 
1429  // Start by generating "pessimistic" code that explicitly loads the function
1430  // pointer from the vtable and performs the type check. If possible, we will
1431  // eliminate the load and the type check later.
1432 
1433  // If possible, only generate the load at the point where it is used.
1434  // This helps avoid unnecessary spills.
1435  IRBuilder<> LoadB(
1436  (LoadedPtrs.size() == 1 && !HasNonCallUses) ? LoadedPtrs[0] : CI);
1437  Value *GEP = LoadB.CreateGEP(Int8Ty, Ptr, Offset);
1438  Value *GEPPtr = LoadB.CreateBitCast(GEP, PointerType::getUnqual(Int8PtrTy));
1439  Value *LoadedValue = LoadB.CreateLoad(Int8PtrTy, GEPPtr);
1440 
1441  for (Instruction *LoadedPtr : LoadedPtrs) {
1442  LoadedPtr->replaceAllUsesWith(LoadedValue);
1443  LoadedPtr->eraseFromParent();
1444  }
1445 
1446  // Likewise for the type test.
1447  IRBuilder<> CallB((Preds.size() == 1 && !HasNonCallUses) ? Preds[0] : CI);
1448  CallInst *TypeTestCall = CallB.CreateCall(TypeTestFunc, {Ptr, TypeIdValue});
1449 
1450  for (Instruction *Pred : Preds) {
1451  Pred->replaceAllUsesWith(TypeTestCall);
1452  Pred->eraseFromParent();
1453  }
1454 
1455  // We have already erased any extractvalue instructions that refer to the
1456  // intrinsic call, but the intrinsic may have other non-extractvalue uses
1457  // (although this is unlikely). In that case, explicitly build a pair and
1458  // RAUW it.
1459  if (!CI->use_empty()) {
1460  Value *Pair = UndefValue::get(CI->getType());
1461  IRBuilder<> B(CI);
1462  Pair = B.CreateInsertValue(Pair, LoadedValue, {0});
1463  Pair = B.CreateInsertValue(Pair, TypeTestCall, {1});
1464  CI->replaceAllUsesWith(Pair);
1465  }
1466 
1467  // The number of unsafe uses is initially the number of uses.
1468  auto &NumUnsafeUses = NumUnsafeUsesForTypeTest[TypeTestCall];
1469  NumUnsafeUses = DevirtCalls.size();
1470 
1471  // If the function pointer has a non-call user, we cannot eliminate the type
1472  // check, as one of those users may eventually call the pointer. Increment
1473  // the unsafe use count to make sure it cannot reach zero.
1474  if (HasNonCallUses)
1475  ++NumUnsafeUses;
1476  for (DevirtCallSite Call : DevirtCalls) {
1477  CallSlots[{TypeId, Call.Offset}].addCallSite(Ptr, Call.CS,
1478  &NumUnsafeUses);
1479  }
1480 
1481  CI->eraseFromParent();
1482  }
1483 }
1484 
1485 void DevirtModule::importResolution(VTableSlot Slot, VTableSlotInfo &SlotInfo) {
1486  const TypeIdSummary *TidSummary =
1487  ImportSummary->getTypeIdSummary(cast<MDString>(Slot.TypeID)->getString());
1488  if (!TidSummary)
1489  return;
1490  auto ResI = TidSummary->WPDRes.find(Slot.ByteOffset);
1491  if (ResI == TidSummary->WPDRes.end())
1492  return;
1493  const WholeProgramDevirtResolution &Res = ResI->second;
1494 
1495  if (Res.TheKind == WholeProgramDevirtResolution::SingleImpl) {
1496  // The type of the function in the declaration is irrelevant because every
1497  // call site will cast it to the correct type.
1498  Constant *SingleImpl =
1499  cast<Constant>(M.getOrInsertFunction(Res.SingleImplName,
1500  Type::getVoidTy(M.getContext()))
1501  .getCallee());
1502 
1503  // This is the import phase so we should not be exporting anything.
1504  bool IsExported = false;
1505  applySingleImplDevirt(SlotInfo, SingleImpl, IsExported);
1506  assert(!IsExported);
1507  }
1508 
1509  for (auto &CSByConstantArg : SlotInfo.ConstCSInfo) {
1510  auto I = Res.ResByArg.find(CSByConstantArg.first);
1511  if (I == Res.ResByArg.end())
1512  continue;
1513  auto &ResByArg = I->second;
1514  // FIXME: We should figure out what to do about the "function name" argument
1515  // to the apply* functions, as the function names are unavailable during the
1516  // importing phase. For now we just pass the empty string. This does not
1517  // impact correctness because the function names are just used for remarks.
1518  switch (ResByArg.TheKind) {
1519  case WholeProgramDevirtResolution::ByArg::UniformRetVal:
1520  applyUniformRetValOpt(CSByConstantArg.second, "", ResByArg.Info);
1521  break;
1522  case WholeProgramDevirtResolution::ByArg::UniqueRetVal: {
1523  Constant *UniqueMemberAddr =
1524  importGlobal(Slot, CSByConstantArg.first, "unique_member");
1525  applyUniqueRetValOpt(CSByConstantArg.second, "", ResByArg.Info,
1526  UniqueMemberAddr);
1527  break;
1528  }
1529  case WholeProgramDevirtResolution::ByArg::VirtualConstProp: {
1530  Constant *Byte = importConstant(Slot, CSByConstantArg.first, "byte",
1531  Int32Ty, ResByArg.Byte);
1532  Constant *Bit = importConstant(Slot, CSByConstantArg.first, "bit", Int8Ty,
1533  ResByArg.Bit);
1534  applyVirtualConstProp(CSByConstantArg.second, "", Byte, Bit);
1535  break;
1536  }
1537  default:
1538  break;
1539  }
1540  }
1541 
1542  if (Res.TheKind == WholeProgramDevirtResolution::BranchFunnel) {
1543  // The type of the function is irrelevant, because it's bitcast at calls
1544  // anyhow.
1545  Constant *JT = cast<Constant>(
1546  M.getOrInsertFunction(getGlobalName(Slot, {}, "branch_funnel"),
1547  Type::getVoidTy(M.getContext()))
1548  .getCallee());
1549  bool IsExported = false;
1550  applyICallBranchFunnel(SlotInfo, JT, IsExported);
1551  assert(!IsExported);
1552  }
1553 }
1554 
1555 void DevirtModule::removeRedundantTypeTests() {
1556  auto True = ConstantInt::getTrue(M.getContext());
1557  for (auto &&U : NumUnsafeUsesForTypeTest) {
1558  if (U.second == 0) {
1559  U.first->replaceAllUsesWith(True);
1560  U.first->eraseFromParent();
1561  }
1562  }
1563 }
1564 
1565 bool DevirtModule::run() {
1566  // If only some of the modules were split, we cannot correctly perform
1567  // this transformation. We already checked for the presence of type tests
1568  // with partially split modules during the thin link, and would have emitted
1569  // an error if any were found, so here we can simply return.
1570  if ((ExportSummary && ExportSummary->partiallySplitLTOUnits()) ||
1571  (ImportSummary && ImportSummary->partiallySplitLTOUnits()))
1572  return false;
1573 
1574  Function *TypeTestFunc =
1575  M.getFunction(Intrinsic::getName(Intrinsic::type_test));
1576  Function *TypeCheckedLoadFunc =
1577  M.getFunction(Intrinsic::getName(Intrinsic::type_checked_load));
1578  Function *AssumeFunc = M.getFunction(Intrinsic::getName(Intrinsic::assume));
1579 
1580  // Normally if there are no users of the devirtualization intrinsics in the
1581  // module, this pass has nothing to do. But if we are exporting, we also need
1582  // to handle any users that appear only in the function summaries.
1583  if (!ExportSummary &&
1584  (!TypeTestFunc || TypeTestFunc->use_empty() || !AssumeFunc ||
1585  AssumeFunc->use_empty()) &&
1586  (!TypeCheckedLoadFunc || TypeCheckedLoadFunc->use_empty()))
1587  return false;
1588 
1589  if (TypeTestFunc && AssumeFunc)
1590  scanTypeTestUsers(TypeTestFunc, AssumeFunc);
1591 
1592  if (TypeCheckedLoadFunc)
1593  scanTypeCheckedLoadUsers(TypeCheckedLoadFunc);
1594 
1595  if (ImportSummary) {
1596  for (auto &S : CallSlots)
1597  importResolution(S.first, S.second);
1598 
1599  removeRedundantTypeTests();
1600 
1601  // The rest of the code is only necessary when exporting or during regular
1602  // LTO, so we are done.
1603  return true;
1604  }
1605 
1606  // Rebuild type metadata into a map for easy lookup.
1607  std::vector<VTableBits> Bits;
1608  DenseMap<Metadata *, std::set<TypeMemberInfo>> TypeIdMap;
1609  buildTypeIdentifierMap(Bits, TypeIdMap);
1610  if (TypeIdMap.empty())
1611  return true;
1612 
1613  // Collect information from summary about which calls to try to devirtualize.
1614  if (ExportSummary) {
1615  DenseMap<GlobalValue::GUID, TinyPtrVector<Metadata *>> MetadataByGUID;
1616  for (auto &P : TypeIdMap) {
1617  if (auto *TypeId = dyn_cast<MDString>(P.first))
1618  MetadataByGUID[GlobalValue::getGUID(TypeId->getString())].push_back(
1619  TypeId);
1620  }
1621 
1622  for (auto &P : *ExportSummary) {
1623  for (auto &S : P.second.SummaryList) {
1624  auto *FS = dyn_cast<FunctionSummary>(S.get());
1625  if (!FS)
1626  continue;
1627  // FIXME: Only add live functions.
1628  for (FunctionSummary::VFuncId VF : FS->type_test_assume_vcalls()) {
1629  for (Metadata *MD : MetadataByGUID[VF.GUID]) {
1630  CallSlots[{MD, VF.Offset}]
1631  .CSInfo.markSummaryHasTypeTestAssumeUsers();
1632  }
1633  }
1634  for (FunctionSummary::VFuncId VF : FS->type_checked_load_vcalls()) {
1635  for (Metadata *MD : MetadataByGUID[VF.GUID]) {
1636  CallSlots[{MD, VF.Offset}].CSInfo.addSummaryTypeCheckedLoadUser(FS);
1637  }
1638  }
1639  for (const FunctionSummary::ConstVCall &VC :
1640  FS->type_test_assume_const_vcalls()) {
1641  for (Metadata *MD : MetadataByGUID[VC.VFunc.GUID]) {
1642  CallSlots[{MD, VC.VFunc.Offset}]
1643  .ConstCSInfo[VC.Args]
1644  .markSummaryHasTypeTestAssumeUsers();
1645  }
1646  }
1647  for (const FunctionSummary::ConstVCall &VC :
1648  FS->type_checked_load_const_vcalls()) {
1649  for (Metadata *MD : MetadataByGUID[VC.VFunc.GUID]) {
1650  CallSlots[{MD, VC.VFunc.Offset}]
1651  .ConstCSInfo[VC.Args]
1652  .addSummaryTypeCheckedLoadUser(FS);
1653  }
1654  }
1655  }
1656  }
1657  }
1658 
1659  // For each (type, offset) pair:
1660  bool DidVirtualConstProp = false;
1661  std::map<std::string, Function*> DevirtTargets;
1662  for (auto &S : CallSlots) {
1663  // Search each of the members of the type identifier for the virtual
1664  // function implementation at offset S.first.ByteOffset, and add to
1665  // TargetsForSlot.
1666  std::vector<VirtualCallTarget> TargetsForSlot;
1667  if (tryFindVirtualCallTargets(TargetsForSlot, TypeIdMap[S.first.TypeID],
1668  S.first.ByteOffset)) {
1669  WholeProgramDevirtResolution *Res = nullptr;
1670  if (ExportSummary && isa<MDString>(S.first.TypeID))
1671  Res = &ExportSummary
1672  ->getOrInsertTypeIdSummary(
1673  cast<MDString>(S.first.TypeID)->getString())
1674  .WPDRes[S.first.ByteOffset];
1675 
1676  if (!trySingleImplDevirt(TargetsForSlot, S.second, Res)) {
1677  DidVirtualConstProp |=
1678  tryVirtualConstProp(TargetsForSlot, S.second, Res, S.first);
1679 
1680  tryICallBranchFunnel(TargetsForSlot, S.second, Res, S.first);
1681  }
1682 
1683  // Collect functions devirtualized for at least one call site, for stats.
1684  if (RemarksEnabled)
1685  for (const auto &T : TargetsForSlot)
1686  if (T.WasDevirt)
1687  DevirtTargets[T.Fn->getName()] = T.Fn;
1688  }
1689 
1690  // CFI-specific: if we are exporting and any llvm.type.checked.load
1691  // intrinsics were *not* devirtualized, we need to add the resulting
1692  // llvm.type.test intrinsics to the function summaries so that the
1693  // LowerTypeTests pass will export them.
1694  if (ExportSummary && isa<MDString>(S.first.TypeID)) {
1695  auto GUID =
1696  GlobalValue::getGUID(cast<MDString>(S.first.TypeID)->getString());
1697  for (auto FS : S.second.CSInfo.SummaryTypeCheckedLoadUsers)
1698  FS->addTypeTest(GUID);
1699  for (auto &CCS : S.second.ConstCSInfo)
1700  for (auto FS : CCS.second.SummaryTypeCheckedLoadUsers)
1701  FS->addTypeTest(GUID);
1702  }
1703  }
1704 
1705  if (RemarksEnabled) {
1706  // Generate remarks for each devirtualized function.
1707  for (const auto &DT : DevirtTargets) {
1708  Function *F = DT.second;
1709 
1710  using namespace ore;
1711  OREGetter(F).emit(OptimizationRemark(DEBUG_TYPE, "Devirtualized", F)
1712  << "devirtualized "
1713  << NV("FunctionName", F->getName()));
1714  }
1715  }
1716 
1717  removeRedundantTypeTests();
1718 
1719  // Rebuild each global we touched as part of virtual constant propagation to
1720  // include the before and after bytes.
1721  if (DidVirtualConstProp)
1722  for (VTableBits &B : Bits)
1723  rebuildGlobal(B);
1724 
1725  return true;
1726 }
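A hedged usage sketch (not from this file): driving the pass through the legacy pass manager via createWholeProgramDevirtPass(), with null summaries as in the regular-LTO, in-module case. The helper name runWholeProgramDevirt is illustrative.

    #include "llvm/IR/LegacyPassManager.h"
    #include "llvm/IR/Module.h"
    #include "llvm/Transforms/IPO.h"

    // Run whole-program devirtualization over M without ThinLTO summaries.
    void runWholeProgramDevirt(llvm::Module &M) {
      llvm::legacy::PassManager PM;
      PM.add(llvm::createWholeProgramDevirtPass(/*ExportSummary=*/nullptr,
                                                /*ImportSummary=*/nullptr));
      PM.run(M);
    }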