LLVM  4.0.0
WholeProgramDevirt.cpp
1 //===- WholeProgramDevirt.cpp - Whole program virtual call optimization ---===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This pass implements whole program optimization of virtual calls in cases
11 // where we know (via !type metadata) that the list of callees is fixed. This
12 // includes the following:
13 // - Single implementation devirtualization: if a virtual call has a single
14 // possible callee, replace all calls with a direct call to that callee.
15 // - Virtual constant propagation: if the virtual function's return type is an
16 // integer <=64 bits and all possible callees are readnone, for each class and
17 // each list of constant arguments: evaluate the function, store the return
18 // value alongside the virtual table, and rewrite each virtual call as a load
19 // from the virtual table.
20 // - Uniform return value optimization: if the conditions for virtual constant
21 // propagation hold and each function returns the same constant value, replace
22 // each virtual call with that constant.
23 // - Unique return value optimization for i1 return values: if the conditions
24 // for virtual constant propagation hold and a single vtable's function
25 // returns 0, or a single vtable's function returns 1, replace each virtual
26 // call with a comparison of the vptr against that vtable's address.
27 //
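// As a small source-level illustration (not taken from any test in this
// file; the names are made up): given
//
//   struct Base { virtual int f(); };
//   struct Impl : Base { int f() override { return 1; } };
//
// a call "p->f()" through a Base* normally compiles to an indirect call
// through the vtable. If !type metadata shows Impl::f is the only possible
// callee, single implementation devirtualization turns it into a direct call
// to Impl::f, and the return value optimizations above may then fold the
// call to the constant 1.
//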
28 //===----------------------------------------------------------------------===//
29 
30 #include "llvm/Transforms/IPO/WholeProgramDevirt.h"
31 #include "llvm/ADT/ArrayRef.h"
32 #include "llvm/ADT/DenseMap.h"
33 #include "llvm/ADT/DenseMapInfo.h"
34 #include "llvm/ADT/DenseSet.h"
35 #include "llvm/ADT/iterator_range.h"
36 #include "llvm/ADT/MapVector.h"
37 #include "llvm/ADT/SmallVector.h"
38 #include "llvm/Analysis/TypeMetadataUtils.h"
39 #include "llvm/IR/CallSite.h"
40 #include "llvm/IR/Constants.h"
41 #include "llvm/IR/DataLayout.h"
42 #include "llvm/IR/DebugInfoMetadata.h"
43 #include "llvm/IR/DebugLoc.h"
44 #include "llvm/IR/DerivedTypes.h"
45 #include "llvm/IR/DiagnosticInfo.h"
46 #include "llvm/IR/Function.h"
47 #include "llvm/IR/GlobalAlias.h"
48 #include "llvm/IR/GlobalVariable.h"
49 #include "llvm/IR/IRBuilder.h"
50 #include "llvm/IR/InstrTypes.h"
51 #include "llvm/IR/Instruction.h"
52 #include "llvm/IR/Instructions.h"
53 #include "llvm/IR/Intrinsics.h"
54 #include "llvm/IR/LLVMContext.h"
55 #include "llvm/IR/Metadata.h"
56 #include "llvm/IR/Module.h"
57 #include "llvm/Pass.h"
58 #include "llvm/PassRegistry.h"
59 #include "llvm/PassSupport.h"
60 #include "llvm/Support/Casting.h"
61 #include "llvm/Support/MathExtras.h"
62 #include "llvm/Transforms/IPO.h"
63 #include "llvm/Transforms/Utils/Evaluator.h"
64 #include <algorithm>
65 #include <cstddef>
66 #include <map>
67 #include <set>
68 #include <string>
69 
70 using namespace llvm;
71 using namespace wholeprogramdevirt;
72 
73 #define DEBUG_TYPE "wholeprogramdevirt"
74 
75 // Find the minimum offset that we may store a value of size Size bits at. If
76 // IsAfter is set, look for an offset after the object, otherwise look for an
77 // offset before the object.
78 uint64_t
79 wholeprogramdevirt::findLowestOffset(ArrayRef<VirtualCallTarget> Targets,
80  bool IsAfter, uint64_t Size) {
81  // Find a minimum offset taking into account only vtable sizes.
82  uint64_t MinByte = 0;
83  for (const VirtualCallTarget &Target : Targets) {
84  if (IsAfter)
85  MinByte = std::max(MinByte, Target.minAfterBytes());
86  else
87  MinByte = std::max(MinByte, Target.minBeforeBytes());
88  }
89 
90  // Build a vector of arrays of bytes covering, for each target, a slice of the
91  // used region (see AccumBitVector::BytesUsed in
92  // llvm/Transforms/IPO/WholeProgramDevirt.h) starting at MinByte. Effectively,
93  // this aligns the used regions to start at MinByte.
94  //
95  // In this example, A, B and C are vtables, # is a byte already allocated for
96  // a virtual function pointer, AAAA... (etc.) are the used regions for the
97  // vtables and Offset(X) is the value computed for the Offset variable below
98  // for X.
99  //
100  //                    Offset(A)
101  //                    |       |
102  //                            |MinByte
103  // A: ################AAAAAAAA|AAAAAAAA
104  // B: ########BBBBBBBBBBBBBBBB|BBBB
105  // C: ########################|CCCCCCCCCCCCCCCC
106  //            |   Offset(B)   |
107  //
108  // This code produces the slices of A, B and C that appear after the divider
109  // at MinByte.
110  std::vector<ArrayRef<uint8_t>> Used;
111  for (const VirtualCallTarget &Target : Targets) {
112  ArrayRef<uint8_t> VTUsed = IsAfter ? Target.TM->Bits->After.BytesUsed
113  : Target.TM->Bits->Before.BytesUsed;
114  uint64_t Offset = IsAfter ? MinByte - Target.minAfterBytes()
115  : MinByte - Target.minBeforeBytes();
116 
117  // Disregard used regions that are smaller than Offset. These are
118  // effectively all-free regions that do not need to be checked.
119  if (VTUsed.size() > Offset)
120  Used.push_back(VTUsed.slice(Offset));
121  }
122 
123  if (Size == 1) {
124  // Find a free bit in each member of Used.
125  for (unsigned I = 0;; ++I) {
126  uint8_t BitsUsed = 0;
127  for (auto &&B : Used)
128  if (I < B.size())
129  BitsUsed |= B[I];
130  if (BitsUsed != 0xff)
131  return (MinByte + I) * 8 +
132  countTrailingZeros(uint8_t(~BitsUsed), ZB_Undefined);
133  }
134  } else {
135  // Find a free (Size/8) byte region in each member of Used.
136  // FIXME: see if alignment helps.
137  for (unsigned I = 0;; ++I) {
138  for (auto &&B : Used) {
139  unsigned Byte = 0;
140  while ((I + Byte) < B.size() && Byte < (Size / 8)) {
141  if (B[I + Byte])
142  goto NextI;
143  ++Byte;
144  }
145  }
146  return (MinByte + I) * 8;
147  NextI:;
148  }
149  }
150 }
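// Illustrative walk-through of findLowestOffset (hypothetical inputs, not
// derived from a real vtable): with Size == 1, MinByte == 2 and a single
// used slice of {0xff, 0x07}, byte 2 is fully used and byte 3 has its low
// three bits used, so the loop above returns (2 + 1) * 8 + 3 == 27, the
// first free bit at or after MinByte.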
151 
152 void wholeprogramdevirt::setBeforeReturnValues(
153  MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocBefore,
154  unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
155  if (BitWidth == 1)
156  OffsetByte = -(AllocBefore / 8 + 1);
157  else
158  OffsetByte = -((AllocBefore + 7) / 8 + (BitWidth + 7) / 8);
159  OffsetBit = AllocBefore % 8;
160 
161  for (VirtualCallTarget &Target : Targets) {
162  if (BitWidth == 1)
163  Target.setBeforeBit(AllocBefore);
164  else
165  Target.setBeforeBytes(AllocBefore, (BitWidth + 7) / 8);
166  }
167 }
168 
169 void wholeprogramdevirt::setAfterReturnValues(
170  MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocAfter,
171  unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
172  if (BitWidth == 1)
173  OffsetByte = AllocAfter / 8;
174  else
175  OffsetByte = (AllocAfter + 7) / 8;
176  OffsetBit = AllocAfter % 8;
177 
178  for (VirtualCallTarget &Target : Targets) {
179  if (BitWidth == 1)
180  Target.setAfterBit(AllocAfter);
181  else
182  Target.setAfterBytes(AllocAfter, (BitWidth + 7) / 8);
183  }
184 }
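// Worked example of the offset arithmetic above (hypothetical values): for a
// 32-bit return value with AllocBefore == 48 bits, setBeforeReturnValues
// yields OffsetByte == -((48 + 7) / 8 + (32 + 7) / 8) == -10 and
// OffsetBit == 0; with AllocAfter == 48 bits, setAfterReturnValues yields
// OffsetByte == (48 + 7) / 8 == 6. tryVirtualConstProp below uses OffsetByte
// as a GEP offset from the vtable pointer at each rewritten call site.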
185 
186 VirtualCallTarget::VirtualCallTarget(Function *Fn, const TypeMemberInfo *TM)
187  : Fn(Fn), TM(TM),
188  IsBigEndian(Fn->getParent()->getDataLayout().isBigEndian()), WasDevirt(false) {}
189 
190 namespace {
191 
192 // A slot in a set of virtual tables. The TypeID identifies the set of virtual
193 // tables, and the ByteOffset is the offset in bytes from the address point to
194 // the virtual function pointer.
195 struct VTableSlot {
196  Metadata *TypeID;
197  uint64_t ByteOffset;
198 };
199 
200 } // end anonymous namespace
201 
202 namespace llvm {
203 
204 template <> struct DenseMapInfo<VTableSlot> {
205  static VTableSlot getEmptyKey() {
206  return {DenseMapInfo<Metadata *>::getEmptyKey(),
207  DenseMapInfo<uint64_t>::getEmptyKey()};
208  }
209  static VTableSlot getTombstoneKey() {
210  return {DenseMapInfo<Metadata *>::getTombstoneKey(),
211  DenseMapInfo<uint64_t>::getTombstoneKey()};
212  }
213  static unsigned getHashValue(const VTableSlot &I) {
214  return DenseMapInfo<Metadata *>::getHashValue(I.TypeID) ^
215  DenseMapInfo<uint64_t>::getHashValue(I.ByteOffset);
216  }
217  static bool isEqual(const VTableSlot &LHS,
218  const VTableSlot &RHS) {
219  return LHS.TypeID == RHS.TypeID && LHS.ByteOffset == RHS.ByteOffset;
220  }
221 };
222 
223 } // end namespace llvm
224 
225 namespace {
226 
227 // A virtual call site. VTable is the loaded virtual table pointer, and CS is
228 // the indirect virtual call.
229 struct VirtualCallSite {
230  Value *VTable;
231  CallSite CS;
232 
233  // If non-null, this field points to the associated unsafe use count stored in
234  // the DevirtModule::NumUnsafeUsesForTypeTest map below. See the description
235  // of that field for details.
236  unsigned *NumUnsafeUses;
237 
238  void emitRemark(const Twine &OptName, const Twine &TargetName) {
239  Function *F = CS.getCaller();
240  emitOptimizationRemark(
241  F->getContext(), DEBUG_TYPE, *F,
242  CS.getInstruction()->getDebugLoc(),
243  OptName + ": devirtualized a call to " + TargetName);
244  }
245 
246  void replaceAndErase(const Twine &OptName, const Twine &TargetName,
247  bool RemarksEnabled, Value *New) {
248  if (RemarksEnabled)
249  emitRemark(OptName, TargetName);
250  CS->replaceAllUsesWith(New);
251  if (auto II = dyn_cast<InvokeInst>(CS.getInstruction())) {
252  BranchInst::Create(II->getNormalDest(), CS.getInstruction());
253  II->getUnwindDest()->removePredecessor(II->getParent());
254  }
255  CS->eraseFromParent();
256  // This use is no longer unsafe.
257  if (NumUnsafeUses)
258  --*NumUnsafeUses;
259  }
260 };
261 
262 struct DevirtModule {
263  Module &M;
264  IntegerType *Int8Ty;
265  PointerType *Int8PtrTy;
266  IntegerType *Int32Ty;
267 
268  bool RemarksEnabled;
269 
270  MapVector<VTableSlot, std::vector<VirtualCallSite>> CallSlots;
271 
272  // This map keeps track of the number of "unsafe" uses of a loaded function
273  // pointer. The key is the associated llvm.type.test intrinsic call generated
274  // by this pass. An unsafe use is one that calls the loaded function pointer
275  // directly. Every time we eliminate an unsafe use (for example, by
276  // devirtualizing it or by applying virtual constant propagation), we
277  // decrement the value stored in this map. If a value reaches zero, we can
278  // eliminate the type check by RAUWing the associated llvm.type.test call with
279  // true.
280  std::map<CallInst *, unsigned> NumUnsafeUsesForTypeTest;
281 
282  DevirtModule(Module &M)
283  : M(M), Int8Ty(Type::getInt8Ty(M.getContext())),
284  Int8PtrTy(Type::getInt8PtrTy(M.getContext())),
285  Int32Ty(Type::getInt32Ty(M.getContext())),
286  RemarksEnabled(areRemarksEnabled()) {}
287 
288  bool areRemarksEnabled();
289 
290  void scanTypeTestUsers(Function *TypeTestFunc, Function *AssumeFunc);
291  void scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc);
292 
293  void buildTypeIdentifierMap(
294  std::vector<VTableBits> &Bits,
295  DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap);
296  Constant *getPointerAtOffset(Constant *I, uint64_t Offset);
297  bool
298  tryFindVirtualCallTargets(std::vector<VirtualCallTarget> &TargetsForSlot,
299  const std::set<TypeMemberInfo> &TypeMemberInfos,
300  uint64_t ByteOffset);
301  bool trySingleImplDevirt(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
302  ArrayRef<VirtualCallSite> CallSites);
303  bool tryEvaluateFunctionsWithArgs(
304  MutableArrayRef<VirtualCallTarget> TargetsForSlot,
305  ArrayRef<ConstantInt *> Args);
306  bool tryUniformRetValOpt(IntegerType *RetType,
307  MutableArrayRef<VirtualCallTarget> TargetsForSlot,
308  ArrayRef<VirtualCallSite> CallSites);
309  bool tryUniqueRetValOpt(unsigned BitWidth,
310  MutableArrayRef<VirtualCallTarget> TargetsForSlot,
311  ArrayRef<VirtualCallSite> CallSites);
312  bool tryVirtualConstProp(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
313  ArrayRef<VirtualCallSite> CallSites);
314 
315  void rebuildGlobal(VTableBits &B);
316 
317  bool run();
318 };
319 
320 struct WholeProgramDevirt : public ModulePass {
321  static char ID;
322 
323  WholeProgramDevirt() : ModulePass(ID) {
324  initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
325  }
326 
327  bool runOnModule(Module &M) override {
328  if (skipModule(M))
329  return false;
330 
331  return DevirtModule(M).run();
332  }
333 };
334 
335 } // end anonymous namespace
336 
337 INITIALIZE_PASS(WholeProgramDevirt, "wholeprogramdevirt",
338  "Whole program devirtualization", false, false)
339 char WholeProgramDevirt::ID = 0;
340 
342  return new WholeProgramDevirt;
343 }
344 
345 PreservedAnalyses WholeProgramDevirtPass::run(Module &M,
346  ModuleAnalysisManager &) {
347  if (!DevirtModule(M).run())
348  return PreservedAnalyses::all();
349  return PreservedAnalyses::none();
350 }
351 
352 void DevirtModule::buildTypeIdentifierMap(
353  std::vector<VTableBits> &Bits,
354  DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap) {
355  DenseMap<GlobalVariable *, VTableBits *> GVToBits;
356  Bits.reserve(M.getGlobalList().size());
357  SmallVector<MDNode *, 2> Types;
358  for (GlobalVariable &GV : M.globals()) {
359  Types.clear();
360  GV.getMetadata(LLVMContext::MD_type, Types);
361  if (Types.empty())
362  continue;
363 
364  VTableBits *&BitsPtr = GVToBits[&GV];
365  if (!BitsPtr) {
366  Bits.emplace_back();
367  Bits.back().GV = &GV;
368  Bits.back().ObjectSize =
369  M.getDataLayout().getTypeAllocSize(GV.getInitializer()->getType());
370  BitsPtr = &Bits.back();
371  }
372 
373  for (MDNode *Type : Types) {
374  auto TypeID = Type->getOperand(1).get();
375 
376  uint64_t Offset =
377  cast<ConstantInt>(
378  cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
379  ->getZExtValue();
380 
381  TypeIdMap[TypeID].insert({BitsPtr, Offset});
382  }
383  }
384 }
385 
386 Constant *DevirtModule::getPointerAtOffset(Constant *I, uint64_t Offset) {
387  if (I->getType()->isPointerTy()) {
388  if (Offset == 0)
389  return I;
390  return nullptr;
391  }
392 
393  const DataLayout &DL = M.getDataLayout();
394 
395  if (auto *C = dyn_cast<ConstantStruct>(I)) {
396  const StructLayout *SL = DL.getStructLayout(C->getType());
397  if (Offset >= SL->getSizeInBytes())
398  return nullptr;
399 
400  unsigned Op = SL->getElementContainingOffset(Offset);
401  return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
402  Offset - SL->getElementOffset(Op));
403  }
404  if (auto *C = dyn_cast<ConstantArray>(I)) {
405  ArrayType *VTableTy = C->getType();
406  uint64_t ElemSize = DL.getTypeAllocSize(VTableTy->getElementType());
407 
408  unsigned Op = Offset / ElemSize;
409  if (Op >= C->getNumOperands())
410  return nullptr;
411 
412  return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
413  Offset % ElemSize);
414  }
415  return nullptr;
416 }
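// For intuition, a typical Itanium-ABI vtable global looks roughly like this
// (illustrative IR; the mangled names are made up):
//
//   @_ZTV1A = constant { [3 x i8*] } { [3 x i8*]
//       [i8* null, i8* bitcast (i8** @_ZTI1A to i8*),
//        i8* bitcast (i32 (%struct.A*)* @_ZN1A1fEv to i8*)] }, !type !0
//   !0 = !{i64 16, !"_ZTS1A"}
//
// With Offset == 16 (the address point) getPointerAtOffset descends through
// the struct and the array and returns the bitcast of @_ZN1A1fEv, which
// tryFindVirtualCallTargets below strips back to the Function.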
417 
418 bool DevirtModule::tryFindVirtualCallTargets(
419  std::vector<VirtualCallTarget> &TargetsForSlot,
420  const std::set<TypeMemberInfo> &TypeMemberInfos, uint64_t ByteOffset) {
421  for (const TypeMemberInfo &TM : TypeMemberInfos) {
422  if (!TM.Bits->GV->isConstant())
423  return false;
424 
425  Constant *Ptr = getPointerAtOffset(TM.Bits->GV->getInitializer(),
426  TM.Offset + ByteOffset);
427  if (!Ptr)
428  return false;
429 
430  auto Fn = dyn_cast<Function>(Ptr->stripPointerCasts());
431  if (!Fn)
432  return false;
433 
434  // We can disregard __cxa_pure_virtual as a possible call target, as
435  // calls to pure virtuals are UB.
436  if (Fn->getName() == "__cxa_pure_virtual")
437  continue;
438 
439  TargetsForSlot.push_back({Fn, &TM});
440  }
441 
442  // Give up if we couldn't find any targets.
443  return !TargetsForSlot.empty();
444 }
445 
446 bool DevirtModule::trySingleImplDevirt(
447  MutableArrayRef<VirtualCallTarget> TargetsForSlot,
448  ArrayRef<VirtualCallSite> CallSites) {
449  // See if the program contains a single implementation of this virtual
450  // function.
451  Function *TheFn = TargetsForSlot[0].Fn;
452  for (auto &&Target : TargetsForSlot)
453  if (TheFn != Target.Fn)
454  return false;
455 
456  if (RemarksEnabled)
457  TargetsForSlot[0].WasDevirt = true;
458  // If so, update each call site to call that implementation directly.
459  for (auto &&VCallSite : CallSites) {
460  if (RemarksEnabled)
461  VCallSite.emitRemark("single-impl", TheFn->getName());
462  VCallSite.CS.setCalledFunction(ConstantExpr::getBitCast(
463  TheFn, VCallSite.CS.getCalledValue()->getType()));
464  // This use is no longer unsafe.
465  if (VCallSite.NumUnsafeUses)
466  --*VCallSite.NumUnsafeUses;
467  }
468  return true;
469 }
470 
471 bool DevirtModule::tryEvaluateFunctionsWithArgs(
472  MutableArrayRef<VirtualCallTarget> TargetsForSlot,
473  ArrayRef<ConstantInt *> Args) {
474  // Evaluate each function and store the result in each target's RetVal
475  // field.
476  for (VirtualCallTarget &Target : TargetsForSlot) {
477  if (Target.Fn->arg_size() != Args.size() + 1)
478  return false;
479  for (unsigned I = 0; I != Args.size(); ++I)
480  if (Target.Fn->getFunctionType()->getParamType(I + 1) !=
481  Args[I]->getType())
482  return false;
483 
484  Evaluator Eval(M.getDataLayout(), nullptr);
485  SmallVector<Constant *, 2> EvalArgs;
486  EvalArgs.push_back(
487  Constant::getNullValue(Target.Fn->getFunctionType()->getParamType(0)));
488  EvalArgs.insert(EvalArgs.end(), Args.begin(), Args.end());
489  Constant *RetVal;
490  if (!Eval.EvaluateFunction(Target.Fn, RetVal, EvalArgs) ||
491  !isa<ConstantInt>(RetVal))
492  return false;
493  Target.RetVal = cast<ConstantInt>(RetVal)->getZExtValue();
494  }
495  return true;
496 }
497 
498 bool DevirtModule::tryUniformRetValOpt(
499  IntegerType *RetType, MutableArrayRef<VirtualCallTarget> TargetsForSlot,
500  ArrayRef<VirtualCallSite> CallSites) {
501  // Uniform return value optimization. If all functions return the same
502  // constant, replace all calls with that constant.
503  uint64_t TheRetVal = TargetsForSlot[0].RetVal;
504  for (const VirtualCallTarget &Target : TargetsForSlot)
505  if (Target.RetVal != TheRetVal)
506  return false;
507 
508  auto TheRetValConst = ConstantInt::get(RetType, TheRetVal);
509  for (auto Call : CallSites)
510  Call.replaceAndErase("uniform-ret-val", TargetsForSlot[0].Fn->getName(),
511  RemarksEnabled, TheRetValConst);
512  if (RemarksEnabled)
513  for (auto &&Target : TargetsForSlot)
514  Target.WasDevirt = true;
515  return true;
516 }
517 
518 bool DevirtModule::tryUniqueRetValOpt(
519  unsigned BitWidth, MutableArrayRef<VirtualCallTarget> TargetsForSlot,
520  ArrayRef<VirtualCallSite> CallSites) {
521  // IsOne controls whether we look for a 0 or a 1.
522  auto tryUniqueRetValOptFor = [&](bool IsOne) {
523  const TypeMemberInfo *UniqueMember = nullptr;
524  for (const VirtualCallTarget &Target : TargetsForSlot) {
525  if (Target.RetVal == (IsOne ? 1 : 0)) {
526  if (UniqueMember)
527  return false;
528  UniqueMember = Target.TM;
529  }
530  }
531 
532  // We should have found a unique member or bailed out by now. We already
533  // checked for a uniform return value in tryUniformRetValOpt.
534  assert(UniqueMember);
535 
536  // Replace each call with the comparison.
537  for (auto &&Call : CallSites) {
538  IRBuilder<> B(Call.CS.getInstruction());
539  Value *OneAddr = B.CreateBitCast(UniqueMember->Bits->GV, Int8PtrTy);
540  OneAddr = B.CreateConstGEP1_64(OneAddr, UniqueMember->Offset);
541  Value *Cmp = B.CreateICmp(IsOne ? ICmpInst::ICMP_EQ : ICmpInst::ICMP_NE,
542  Call.VTable, OneAddr);
543  Call.replaceAndErase("unique-ret-val", TargetsForSlot[0].Fn->getName(),
544  RemarksEnabled, Cmp);
545  }
546  // Update devirtualization statistics for targets.
547  if (RemarksEnabled)
548  for (auto &&Target : TargetsForSlot)
549  Target.WasDevirt = true;
550 
551  return true;
552  };
553 
554  if (BitWidth == 1) {
555  if (tryUniqueRetValOptFor(true))
556  return true;
557  if (tryUniqueRetValOptFor(false))
558  return true;
559  }
560  return false;
561 }
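// Sketch of the rewrite this produces (illustrative only): for an i1 slot
// where exactly one vtable's implementation returns true, a virtual call
//   %ret = call zeroext i1 %fptr(i8* %obj)
// becomes roughly
//   %ret = icmp eq i8* %vtable, <address point of the unique vtable>
// so the result is derived from the vptr alone, with no indirect call.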
562 
563 bool DevirtModule::tryVirtualConstProp(
564  MutableArrayRef<VirtualCallTarget> TargetsForSlot,
565  ArrayRef<VirtualCallSite> CallSites) {
566  // This only works if the function returns an integer.
567  auto RetType = dyn_cast<IntegerType>(TargetsForSlot[0].Fn->getReturnType());
568  if (!RetType)
569  return false;
570  unsigned BitWidth = RetType->getBitWidth();
571  if (BitWidth > 64)
572  return false;
573 
574  // Make sure that each function does not access memory, takes at least one
575  // argument, does not use its first argument (which we assume is 'this'),
576  // and has the same return type.
577  for (VirtualCallTarget &Target : TargetsForSlot) {
578  if (!Target.Fn->doesNotAccessMemory() || Target.Fn->arg_empty() ||
579  !Target.Fn->arg_begin()->use_empty() ||
580  Target.Fn->getReturnType() != RetType)
581  return false;
582  }
583 
584  // Group call sites by the list of constant arguments they pass.
585  // The comparator ensures deterministic ordering.
586  struct ByAPIntValue {
587  bool operator()(const std::vector<ConstantInt *> &A,
588  const std::vector<ConstantInt *> &B) const {
589  return std::lexicographical_compare(
590  A.begin(), A.end(), B.begin(), B.end(),
591  [](ConstantInt *AI, ConstantInt *BI) {
592  return AI->getValue().ult(BI->getValue());
593  });
594  }
595  };
596  std::map<std::vector<ConstantInt *>, std::vector<VirtualCallSite>,
597  ByAPIntValue>
598  VCallSitesByConstantArg;
599  for (auto &&VCallSite : CallSites) {
600  std::vector<ConstantInt *> Args;
601  if (VCallSite.CS.getType() != RetType)
602  continue;
603  for (auto &&Arg :
604  make_range(VCallSite.CS.arg_begin() + 1, VCallSite.CS.arg_end())) {
605  if (!isa<ConstantInt>(Arg))
606  break;
607  Args.push_back(cast<ConstantInt>(&Arg));
608  }
609  if (Args.size() + 1 != VCallSite.CS.arg_size())
610  continue;
611 
612  VCallSitesByConstantArg[Args].push_back(VCallSite);
613  }
614 
615  for (auto &&CSByConstantArg : VCallSitesByConstantArg) {
616  if (!tryEvaluateFunctionsWithArgs(TargetsForSlot, CSByConstantArg.first))
617  continue;
618 
619  if (tryUniformRetValOpt(RetType, TargetsForSlot, CSByConstantArg.second))
620  continue;
621 
622  if (tryUniqueRetValOpt(BitWidth, TargetsForSlot, CSByConstantArg.second))
623  continue;
624 
625  // Find an allocation offset in bits in all vtables associated with the
626  // type.
627  uint64_t AllocBefore =
628  findLowestOffset(TargetsForSlot, /*IsAfter=*/false, BitWidth);
629  uint64_t AllocAfter =
630  findLowestOffset(TargetsForSlot, /*IsAfter=*/true, BitWidth);
631 
632  // Calculate the total amount of padding needed to store a value at both
633  // ends of the object.
634  uint64_t TotalPaddingBefore = 0, TotalPaddingAfter = 0;
635  for (auto &&Target : TargetsForSlot) {
636  TotalPaddingBefore += std::max<int64_t>(
637  (AllocBefore + 7) / 8 - Target.allocatedBeforeBytes() - 1, 0);
638  TotalPaddingAfter += std::max<int64_t>(
639  (AllocAfter + 7) / 8 - Target.allocatedAfterBytes() - 1, 0);
640  }
641 
642  // If the amount of padding is too large, give up.
643  // FIXME: do something smarter here.
644  if (std::min(TotalPaddingBefore, TotalPaddingAfter) > 128)
645  continue;
646 
647  // Calculate the offset to the value as a (possibly negative) byte offset
648  // and (if applicable) a bit offset, and store the values in the targets.
649  int64_t OffsetByte;
650  uint64_t OffsetBit;
651  if (TotalPaddingBefore <= TotalPaddingAfter)
652  setBeforeReturnValues(TargetsForSlot, AllocBefore, BitWidth, OffsetByte,
653  OffsetBit);
654  else
655  setAfterReturnValues(TargetsForSlot, AllocAfter, BitWidth, OffsetByte,
656  OffsetBit);
657 
658  if (RemarksEnabled)
659  for (auto &&Target : TargetsForSlot)
660  Target.WasDevirt = true;
661 
662  // Rewrite each call to a load from OffsetByte/OffsetBit.
663  for (auto Call : CSByConstantArg.second) {
664  IRBuilder<> B(Call.CS.getInstruction());
665  Value *Addr = B.CreateConstGEP1_64(Call.VTable, OffsetByte);
666  if (BitWidth == 1) {
667  Value *Bits = B.CreateLoad(Addr);
668  Value *Bit = ConstantInt::get(Int8Ty, 1ULL << OffsetBit);
669  Value *BitsAndBit = B.CreateAnd(Bits, Bit);
670  auto IsBitSet = B.CreateICmpNE(BitsAndBit, ConstantInt::get(Int8Ty, 0));
671  Call.replaceAndErase("virtual-const-prop-1-bit",
672  TargetsForSlot[0].Fn->getName(),
673  RemarksEnabled, IsBitSet);
674  } else {
675  Value *ValAddr = B.CreateBitCast(Addr, RetType->getPointerTo());
676  Value *Val = B.CreateLoad(RetType, ValAddr);
677  Call.replaceAndErase("virtual-const-prop",
678  TargetsForSlot[0].Fn->getName(),
679  RemarksEnabled, Val);
680  }
681  }
682  }
683  return true;
684 }
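// Sketch of the resulting IR for a multi-bit return type (illustrative
// only): a devirtualizable call such as
//   %ret = call i32 %fptr(i8* %obj, i32 5)
// becomes roughly
//   %gep  = getelementptr i8, i8* %vtable, i64 <OffsetByte>
//   %cast = bitcast i8* %gep to i32*
//   %ret  = load i32, i32* %cast
// where the loaded constant is placed next to each vtable by rebuildGlobal
// below.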
685 
686 void DevirtModule::rebuildGlobal(VTableBits &B) {
687  if (B.Before.Bytes.empty() && B.After.Bytes.empty())
688  return;
689 
690  // Align each byte array to pointer width.
691  unsigned PointerSize = M.getDataLayout().getPointerSize();
692  B.Before.Bytes.resize(alignTo(B.Before.Bytes.size(), PointerSize));
693  B.After.Bytes.resize(alignTo(B.After.Bytes.size(), PointerSize));
694 
695  // Before was stored in reverse order; flip it now.
696  for (size_t I = 0, Size = B.Before.Bytes.size(); I != Size / 2; ++I)
697  std::swap(B.Before.Bytes[I], B.Before.Bytes[Size - 1 - I]);
698 
699  // Build an anonymous global containing the before bytes, followed by the
700  // original initializer, followed by the after bytes.
701  auto NewInit = ConstantStruct::getAnon(
702  {ConstantDataArray::get(M.getContext(), B.Before.Bytes),
703  B.GV->getInitializer(),
704  ConstantDataArray::get(M.getContext(), B.After.Bytes)});
705  auto NewGV =
706  new GlobalVariable(M, NewInit->getType(), B.GV->isConstant(),
707  GlobalVariable::PrivateLinkage, NewInit, "", B.GV);
708  NewGV->setSection(B.GV->getSection());
709  NewGV->setComdat(B.GV->getComdat());
710 
711  // Copy the original vtable's metadata to the anonymous global, adjusting
712  // offsets as required.
713  NewGV->copyMetadata(B.GV, B.Before.Bytes.size());
714 
715  // Build an alias named after the original global, pointing at the second
716  // element (the original initializer).
717  auto Alias = GlobalAlias::create(
718  B.GV->getInitializer()->getType(), 0, B.GV->getLinkage(), "",
719  ConstantExpr::getGetElementPtr(
720  NewInit->getType(), NewGV,
721  ArrayRef<Constant *>{ConstantInt::get(Int32Ty, 0),
722  ConstantInt::get(Int32Ty, 1)}),
723  &M);
724  Alias->setVisibility(B.GV->getVisibility());
725  Alias->takeName(B.GV);
726 
727  B.GV->replaceAllUsesWith(Alias);
728  B.GV->eraseFromParent();
729 }
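// After rebuildGlobal, a vtable @vt that received constants on both sides is
// laid out roughly as follows (illustrative): an anonymous private global
// holding { <before bytes>, <original initializer>, <after bytes> }, with
// @vt turned into an alias to the middle element so that existing references
// and address-point offsets into the original vtable remain valid.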
730 
731 bool DevirtModule::areRemarksEnabled() {
732  const auto &FL = M.getFunctionList();
733  if (FL.empty())
734  return false;
735  const Function &Fn = FL.front();
736  auto DI = OptimizationRemark(DEBUG_TYPE, Fn, DebugLoc(), "");
737  return DI.isEnabled();
738 }
739 
740 void DevirtModule::scanTypeTestUsers(Function *TypeTestFunc,
741  Function *AssumeFunc) {
742  // Find all virtual calls via a virtual table pointer %p under an assumption
743  // of the form llvm.assume(llvm.type.test(%p, %md)). This indicates that %p
744  // points to a member of the type identifier %md. Group calls by (type ID,
745  // offset) pair (effectively the identity of the virtual function) and store
746  // to CallSlots.
747  DenseSet<Value *> SeenPtrs;
748  for (auto I = TypeTestFunc->use_begin(), E = TypeTestFunc->use_end();
749  I != E;) {
750  auto CI = dyn_cast<CallInst>(I->getUser());
751  ++I;
752  if (!CI)
753  continue;
754 
755  // Search for virtual calls based on %p and add them to DevirtCalls.
756  SmallVector<DevirtCallSite, 1> DevirtCalls;
757  SmallVector<CallInst *, 1> Assumes;
758  findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, CI);
759 
760  // If we found any, add them to CallSlots. Only do this if we haven't seen
761  // the vtable pointer before, as it may have been CSE'd with pointers from
762  // other call sites, and we don't want to process call sites multiple times.
763  if (!Assumes.empty()) {
764  Metadata *TypeId =
765  cast<MetadataAsValue>(CI->getArgOperand(1))->getMetadata();
766  Value *Ptr = CI->getArgOperand(0)->stripPointerCasts();
767  if (SeenPtrs.insert(Ptr).second) {
768  for (DevirtCallSite Call : DevirtCalls) {
769  CallSlots[{TypeId, Call.Offset}].push_back(
770  {CI->getArgOperand(0), Call.CS, nullptr});
771  }
772  }
773  }
774 
775  // We no longer need the assumes or the type test.
776  for (auto Assume : Assumes)
777  Assume->eraseFromParent();
778  // We can't use RecursivelyDeleteTriviallyDeadInstructions here because we
779  // may use the vtable argument later.
780  if (CI->use_empty())
781  CI->eraseFromParent();
782  }
783 }
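// The pattern recognized above looks roughly like this in IR (illustrative;
// the type identifier name is made up):
//   %vtable = load i8*, i8** %vtableptr
//   %p = call i1 @llvm.type.test(i8* %vtable, metadata !"_ZTS1A")
//   call void @llvm.assume(i1 %p)
//   ... indirect call through a pointer loaded from %vtable ...
// Each such virtual call is recorded in CallSlots keyed by the pair
// (!"_ZTS1A", byte offset of the slot).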
784 
785 void DevirtModule::scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc) {
786  Function *TypeTestFunc = Intrinsic::getDeclaration(&M, Intrinsic::type_test);
787 
788  for (auto I = TypeCheckedLoadFunc->use_begin(),
789  E = TypeCheckedLoadFunc->use_end();
790  I != E;) {
791  auto CI = dyn_cast<CallInst>(I->getUser());
792  ++I;
793  if (!CI)
794  continue;
795 
796  Value *Ptr = CI->getArgOperand(0);
797  Value *Offset = CI->getArgOperand(1);
798  Value *TypeIdValue = CI->getArgOperand(2);
799  Metadata *TypeId = cast<MetadataAsValue>(TypeIdValue)->getMetadata();
800 
801  SmallVector<DevirtCallSite, 1> DevirtCalls;
802  SmallVector<Instruction *, 1> LoadedPtrs;
803  SmallVector<Instruction *, 1> Preds;
804  bool HasNonCallUses = false;
805  findDevirtualizableCallsForTypeCheckedLoad(DevirtCalls, LoadedPtrs, Preds,
806  HasNonCallUses, CI);
807 
808  // Start by generating "pessimistic" code that explicitly loads the function
809  // pointer from the vtable and performs the type check. If possible, we will
810  // eliminate the load and the type check later.
811 
812  // If possible, only generate the load at the point where it is used.
813  // This helps avoid unnecessary spills.
814  IRBuilder<> LoadB(
815  (LoadedPtrs.size() == 1 && !HasNonCallUses) ? LoadedPtrs[0] : CI);
816  Value *GEP = LoadB.CreateGEP(Int8Ty, Ptr, Offset);
817  Value *GEPPtr = LoadB.CreateBitCast(GEP, PointerType::getUnqual(Int8PtrTy));
818  Value *LoadedValue = LoadB.CreateLoad(Int8PtrTy, GEPPtr);
819 
820  for (Instruction *LoadedPtr : LoadedPtrs) {
821  LoadedPtr->replaceAllUsesWith(LoadedValue);
822  LoadedPtr->eraseFromParent();
823  }
824 
825  // Likewise for the type test.
826  IRBuilder<> CallB((Preds.size() == 1 && !HasNonCallUses) ? Preds[0] : CI);
827  CallInst *TypeTestCall = CallB.CreateCall(TypeTestFunc, {Ptr, TypeIdValue});
828 
829  for (Instruction *Pred : Preds) {
830  Pred->replaceAllUsesWith(TypeTestCall);
831  Pred->eraseFromParent();
832  }
833 
834  // We have already erased any extractvalue instructions that refer to the
835  // intrinsic call, but the intrinsic may have other non-extractvalue uses
836  // (although this is unlikely). In that case, explicitly build a pair and
837  // RAUW it.
838  if (!CI->use_empty()) {
839  Value *Pair = UndefValue::get(CI->getType());
840  IRBuilder<> B(CI);
841  Pair = B.CreateInsertValue(Pair, LoadedValue, {0});
842  Pair = B.CreateInsertValue(Pair, TypeTestCall, {1});
843  CI->replaceAllUsesWith(Pair);
844  }
845 
846  // The number of unsafe uses is initially the number of uses.
847  auto &NumUnsafeUses = NumUnsafeUsesForTypeTest[TypeTestCall];
848  NumUnsafeUses = DevirtCalls.size();
849 
850  // If the function pointer has a non-call user, we cannot eliminate the type
851  // check, as one of those users may eventually call the pointer. Increment
852  // the unsafe use count to make sure it cannot reach zero.
853  if (HasNonCallUses)
854  ++NumUnsafeUses;
855  for (DevirtCallSite Call : DevirtCalls) {
856  CallSlots[{TypeId, Call.Offset}].push_back(
857  {Ptr, Call.CS, &NumUnsafeUses});
858  }
859 
860  CI->eraseFromParent();
861  }
862 }
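// In other words, each call of the form (illustrative)
//   %pair = call {i8*, i1} @llvm.type.checked.load(i8* %vtable, i32 %off,
//                                                  metadata !"_ZTS1A")
// is expanded here into an explicit vtable load plus a plain llvm.type.test
// call, and the uses of the pair are rewired to those values. run() below
// can then drop the type test entirely once its NumUnsafeUsesForTypeTest
// count reaches zero.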
863 
864 bool DevirtModule::run() {
865  Function *TypeTestFunc =
866  M.getFunction(Intrinsic::getName(Intrinsic::type_test));
867  Function *TypeCheckedLoadFunc =
868  M.getFunction(Intrinsic::getName(Intrinsic::type_checked_load));
869  Function *AssumeFunc = M.getFunction(Intrinsic::getName(Intrinsic::assume));
870 
871  if ((!TypeTestFunc || TypeTestFunc->use_empty() || !AssumeFunc ||
872  AssumeFunc->use_empty()) &&
873  (!TypeCheckedLoadFunc || TypeCheckedLoadFunc->use_empty()))
874  return false;
875 
876  if (TypeTestFunc && AssumeFunc)
877  scanTypeTestUsers(TypeTestFunc, AssumeFunc);
878 
879  if (TypeCheckedLoadFunc)
880  scanTypeCheckedLoadUsers(TypeCheckedLoadFunc);
881 
882  // Rebuild type metadata into a map for easy lookup.
883  std::vector<VTableBits> Bits;
884  DenseMap<Metadata *, std::set<TypeMemberInfo>> TypeIdMap;
885  buildTypeIdentifierMap(Bits, TypeIdMap);
886  if (TypeIdMap.empty())
887  return true;
888 
889  // For each (type, offset) pair:
890  bool DidVirtualConstProp = false;
891  std::map<std::string, Function*> DevirtTargets;
892  for (auto &S : CallSlots) {
893  // Search each of the members of the type identifier for the virtual
894  // function implementation at offset S.first.ByteOffset, and add to
895  // TargetsForSlot.
896  std::vector<VirtualCallTarget> TargetsForSlot;
897  if (!tryFindVirtualCallTargets(TargetsForSlot, TypeIdMap[S.first.TypeID],
898  S.first.ByteOffset))
899  continue;
900 
901  if (!trySingleImplDevirt(TargetsForSlot, S.second) &&
902  tryVirtualConstProp(TargetsForSlot, S.second))
903  DidVirtualConstProp = true;
904 
905  // Collect functions devirtualized at least for one call site for stats.
906  if (RemarksEnabled)
907  for (const auto &T : TargetsForSlot)
908  if (T.WasDevirt)
909  DevirtTargets[T.Fn->getName()] = T.Fn;
910  }
911 
912  if (RemarksEnabled) {
913  // Generate remarks for each devirtualized function.
914  for (const auto &DT : DevirtTargets) {
915  Function *F = DT.second;
916  DISubprogram *SP = F->getSubprogram();
917  DebugLoc DL = SP ? DebugLoc::get(SP->getScopeLine(), 0, SP) : DebugLoc();
918  emitOptimizationRemark(F->getContext(), DEBUG_TYPE, *F, DL,
919  Twine("devirtualized ") + F->getName());
920  }
921  }
922 
923  // If we were able to eliminate all unsafe uses for a type checked load,
924  // eliminate the type test by replacing it with true.
925  if (TypeCheckedLoadFunc) {
926  auto True = ConstantInt::getTrue(M.getContext());
927  for (auto &&U : NumUnsafeUsesForTypeTest) {
928  if (U.second == 0) {
929  U.first->replaceAllUsesWith(True);
930  U.first->eraseFromParent();
931  }
932  }
933  }
934 
935  // Rebuild each global we touched as part of virtual constant propagation to
936  // include the before and after bytes.
937  if (DidVirtualConstProp)
938  for (VTableBits &B : Bits)
939  rebuildGlobal(B);
940 
941  return true;
942 }