| File: | build/source/llvm/lib/Transforms/IPO/WholeProgramDevirt.cpp |
| Warning: | line 1381, column 18 Access to field 'TheKind' results in a dereference of a null pointer (loaded from variable 'Res') |
Press '?' to see keyboard shortcuts
Keyboard shortcuts:
| 1 | //===- WholeProgramDevirt.cpp - Whole program virtual call optimization ---===// | ||||
| 2 | // | ||||
| 3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. | ||||
| 4 | // See https://llvm.org/LICENSE.txt for license information. | ||||
| 5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception | ||||
| 6 | // | ||||
| 7 | //===----------------------------------------------------------------------===// | ||||
| 8 | // | ||||
| 9 | // This pass implements whole program optimization of virtual calls in cases | ||||
| 10 | // where we know (via !type metadata) that the list of callees is fixed. This | ||||
| 11 | // includes the following: | ||||
| 12 | // - Single implementation devirtualization: if a virtual call has a single | ||||
| 13 | // possible callee, replace all calls with a direct call to that callee. | ||||
| 14 | // - Virtual constant propagation: if the virtual function's return type is an | ||||
| 15 | // integer <=64 bits and all possible callees are readnone, for each class and | ||||
| 16 | // each list of constant arguments: evaluate the function, store the return | ||||
| 17 | // value alongside the virtual table, and rewrite each virtual call as a load | ||||
| 18 | // from the virtual table. | ||||
| 19 | // - Uniform return value optimization: if the conditions for virtual constant | ||||
| 20 | // propagation hold and each function returns the same constant value, replace | ||||
| 21 | // each virtual call with that constant. | ||||
| 22 | // - Unique return value optimization for i1 return values: if the conditions | ||||
| 23 | // for virtual constant propagation hold and a single vtable's function | ||||
| 24 | // returns 0, or a single vtable's function returns 1, replace each virtual | ||||
| 25 | // call with a comparison of the vptr against that vtable's address. | ||||
| 26 | // | ||||
| 27 | // This pass is intended to be used during the regular and thin LTO pipelines: | ||||
| 28 | // | ||||
| 29 | // During regular LTO, the pass determines the best optimization for each | ||||
| 30 | // virtual call and applies the resolutions directly to virtual calls that are | ||||
| 31 | // eligible for virtual call optimization (i.e. calls that use either of the | ||||
| 32 | // llvm.assume(llvm.type.test) or llvm.type.checked.load intrinsics). | ||||
| 33 | // | ||||
| 34 | // During hybrid Regular/ThinLTO, the pass operates in two phases: | ||||
| 35 | // - Export phase: this is run during the thin link over a single merged module | ||||
| 36 | // that contains all vtables with !type metadata that participate in the link. | ||||
| 37 | // The pass computes a resolution for each virtual call and stores it in the | ||||
| 38 | // type identifier summary. | ||||
| 39 | // - Import phase: this is run during the thin backends over the individual | ||||
| 40 | // modules. The pass applies the resolutions previously computed during the | ||||
| 41 | // import phase to each eligible virtual call. | ||||
| 42 | // | ||||
| 43 | // During ThinLTO, the pass operates in two phases: | ||||
| 44 | // - Export phase: this is run during the thin link over the index which | ||||
| 45 | // contains a summary of all vtables with !type metadata that participate in | ||||
| 46 | // the link. It computes a resolution for each virtual call and stores it in | ||||
| 47 | // the type identifier summary. Only single implementation devirtualization | ||||
| 48 | // is supported. | ||||
| 49 | // - Import phase: (same as with hybrid case above). | ||||
| 50 | // | ||||
| 51 | //===----------------------------------------------------------------------===// | ||||
| 52 | |||||
| 53 | #include "llvm/Transforms/IPO/WholeProgramDevirt.h" | ||||
| 54 | #include "llvm/ADT/ArrayRef.h" | ||||
| 55 | #include "llvm/ADT/DenseMap.h" | ||||
| 56 | #include "llvm/ADT/DenseMapInfo.h" | ||||
| 57 | #include "llvm/ADT/DenseSet.h" | ||||
| 58 | #include "llvm/ADT/MapVector.h" | ||||
| 59 | #include "llvm/ADT/SmallVector.h" | ||||
| 60 | #include "llvm/ADT/Statistic.h" | ||||
| 61 | #include "llvm/ADT/iterator_range.h" | ||||
| 62 | #include "llvm/Analysis/AssumptionCache.h" | ||||
| 63 | #include "llvm/Analysis/BasicAliasAnalysis.h" | ||||
| 64 | #include "llvm/Analysis/OptimizationRemarkEmitter.h" | ||||
| 65 | #include "llvm/Analysis/TypeMetadataUtils.h" | ||||
| 66 | #include "llvm/Bitcode/BitcodeReader.h" | ||||
| 67 | #include "llvm/Bitcode/BitcodeWriter.h" | ||||
| 68 | #include "llvm/IR/Constants.h" | ||||
| 69 | #include "llvm/IR/DataLayout.h" | ||||
| 70 | #include "llvm/IR/DebugLoc.h" | ||||
| 71 | #include "llvm/IR/DerivedTypes.h" | ||||
| 72 | #include "llvm/IR/Dominators.h" | ||||
| 73 | #include "llvm/IR/Function.h" | ||||
| 74 | #include "llvm/IR/GlobalAlias.h" | ||||
| 75 | #include "llvm/IR/GlobalVariable.h" | ||||
| 76 | #include "llvm/IR/IRBuilder.h" | ||||
| 77 | #include "llvm/IR/InstrTypes.h" | ||||
| 78 | #include "llvm/IR/Instruction.h" | ||||
| 79 | #include "llvm/IR/Instructions.h" | ||||
| 80 | #include "llvm/IR/Intrinsics.h" | ||||
| 81 | #include "llvm/IR/LLVMContext.h" | ||||
| 82 | #include "llvm/IR/MDBuilder.h" | ||||
| 83 | #include "llvm/IR/Metadata.h" | ||||
| 84 | #include "llvm/IR/Module.h" | ||||
| 85 | #include "llvm/IR/ModuleSummaryIndexYAML.h" | ||||
| 86 | #include "llvm/Support/Casting.h" | ||||
| 87 | #include "llvm/Support/CommandLine.h" | ||||
| 88 | #include "llvm/Support/Errc.h" | ||||
| 89 | #include "llvm/Support/Error.h" | ||||
| 90 | #include "llvm/Support/FileSystem.h" | ||||
| 91 | #include "llvm/Support/GlobPattern.h" | ||||
| 92 | #include "llvm/Support/MathExtras.h" | ||||
| 93 | #include "llvm/TargetParser/Triple.h" | ||||
| 94 | #include "llvm/Transforms/IPO.h" | ||||
| 95 | #include "llvm/Transforms/IPO/FunctionAttrs.h" | ||||
| 96 | #include "llvm/Transforms/Utils/BasicBlockUtils.h" | ||||
| 97 | #include "llvm/Transforms/Utils/CallPromotionUtils.h" | ||||
| 98 | #include "llvm/Transforms/Utils/Evaluator.h" | ||||
| 99 | #include <algorithm> | ||||
| 100 | #include <cstddef> | ||||
| 101 | #include <map> | ||||
| 102 | #include <set> | ||||
| 103 | #include <string> | ||||
| 104 | |||||
| 105 | using namespace llvm; | ||||
| 106 | using namespace wholeprogramdevirt; | ||||
| 107 | |||||
#define DEBUG_TYPE "wholeprogramdevirt"
| 109 | |||||
| 110 | STATISTIC(NumDevirtTargets, "Number of whole program devirtualization targets")static llvm::Statistic NumDevirtTargets = {"wholeprogramdevirt" , "NumDevirtTargets", "Number of whole program devirtualization targets" }; | ||||
| 111 | STATISTIC(NumSingleImpl, "Number of single implementation devirtualizations")static llvm::Statistic NumSingleImpl = {"wholeprogramdevirt", "NumSingleImpl", "Number of single implementation devirtualizations" }; | ||||
| 112 | STATISTIC(NumBranchFunnel, "Number of branch funnels")static llvm::Statistic NumBranchFunnel = {"wholeprogramdevirt" , "NumBranchFunnel", "Number of branch funnels"}; | ||||
| 113 | STATISTIC(NumUniformRetVal, "Number of uniform return value optimizations")static llvm::Statistic NumUniformRetVal = {"wholeprogramdevirt" , "NumUniformRetVal", "Number of uniform return value optimizations" }; | ||||
| 114 | STATISTIC(NumUniqueRetVal, "Number of unique return value optimizations")static llvm::Statistic NumUniqueRetVal = {"wholeprogramdevirt" , "NumUniqueRetVal", "Number of unique return value optimizations" }; | ||||
| 115 | STATISTIC(NumVirtConstProp1Bit,static llvm::Statistic NumVirtConstProp1Bit = {"wholeprogramdevirt" , "NumVirtConstProp1Bit", "Number of 1 bit virtual constant propagations" } | ||||
| 116 | "Number of 1 bit virtual constant propagations")static llvm::Statistic NumVirtConstProp1Bit = {"wholeprogramdevirt" , "NumVirtConstProp1Bit", "Number of 1 bit virtual constant propagations" }; | ||||
| 117 | STATISTIC(NumVirtConstProp, "Number of virtual constant propagations")static llvm::Statistic NumVirtConstProp = {"wholeprogramdevirt" , "NumVirtConstProp", "Number of virtual constant propagations" }; | ||||
| 118 | |||||
| 119 | static cl::opt<PassSummaryAction> ClSummaryAction( | ||||
| 120 | "wholeprogramdevirt-summary-action", | ||||
| 121 | cl::desc("What to do with the summary when running this pass"), | ||||
| 122 | cl::values(clEnumValN(PassSummaryAction::None, "none", "Do nothing")llvm::cl::OptionEnumValue { "none", int(PassSummaryAction::None ), "Do nothing" }, | ||||
| 123 | clEnumValN(PassSummaryAction::Import, "import",llvm::cl::OptionEnumValue { "import", int(PassSummaryAction:: Import), "Import typeid resolutions from summary and globals" } | ||||
| 124 | "Import typeid resolutions from summary and globals")llvm::cl::OptionEnumValue { "import", int(PassSummaryAction:: Import), "Import typeid resolutions from summary and globals" }, | ||||
| 125 | clEnumValN(PassSummaryAction::Export, "export",llvm::cl::OptionEnumValue { "export", int(PassSummaryAction:: Export), "Export typeid resolutions to summary and globals" } | ||||
| 126 | "Export typeid resolutions to summary and globals")llvm::cl::OptionEnumValue { "export", int(PassSummaryAction:: Export), "Export typeid resolutions to summary and globals" }), | ||||
| 127 | cl::Hidden); | ||||
| 128 | |||||
| 129 | static cl::opt<std::string> ClReadSummary( | ||||
| 130 | "wholeprogramdevirt-read-summary", | ||||
| 131 | cl::desc( | ||||
| 132 | "Read summary from given bitcode or YAML file before running pass"), | ||||
| 133 | cl::Hidden); | ||||
| 134 | |||||
| 135 | static cl::opt<std::string> ClWriteSummary( | ||||
| 136 | "wholeprogramdevirt-write-summary", | ||||
| 137 | cl::desc("Write summary to given bitcode or YAML file after running pass. " | ||||
| 138 | "Output file format is deduced from extension: *.bc means writing " | ||||
| 139 | "bitcode, otherwise YAML"), | ||||
| 140 | cl::Hidden); | ||||
| 141 | |||||
| 142 | static cl::opt<unsigned> | ||||
| 143 | ClThreshold("wholeprogramdevirt-branch-funnel-threshold", cl::Hidden, | ||||
| 144 | cl::init(10), | ||||
| 145 | cl::desc("Maximum number of call targets per " | ||||
| 146 | "call site to enable branch funnels")); | ||||
| 147 | |||||
| 148 | static cl::opt<bool> | ||||
| 149 | PrintSummaryDevirt("wholeprogramdevirt-print-index-based", cl::Hidden, | ||||
| 150 | cl::desc("Print index-based devirtualization messages")); | ||||
| 151 | |||||
| 152 | /// Provide a way to force enable whole program visibility in tests. | ||||
| 153 | /// This is needed to support legacy tests that don't contain | ||||
| 154 | /// !vcall_visibility metadata (the mere presense of type tests | ||||
| 155 | /// previously implied hidden visibility). | ||||
| 156 | static cl::opt<bool> | ||||
| 157 | WholeProgramVisibility("whole-program-visibility", cl::Hidden, | ||||
| 158 | cl::desc("Enable whole program visibility")); | ||||
| 159 | |||||
| 160 | /// Provide a way to force disable whole program for debugging or workarounds, | ||||
| 161 | /// when enabled via the linker. | ||||
| 162 | static cl::opt<bool> DisableWholeProgramVisibility( | ||||
| 163 | "disable-whole-program-visibility", cl::Hidden, | ||||
| 164 | cl::desc("Disable whole program visibility (overrides enabling options)")); | ||||
| 165 | |||||
| 166 | /// Provide way to prevent certain function from being devirtualized | ||||
| 167 | static cl::list<std::string> | ||||
| 168 | SkipFunctionNames("wholeprogramdevirt-skip", | ||||
| 169 | cl::desc("Prevent function(s) from being devirtualized"), | ||||
| 170 | cl::Hidden, cl::CommaSeparated); | ||||
| 171 | |||||
| 172 | /// Mechanism to add runtime checking of devirtualization decisions, optionally | ||||
| 173 | /// trapping or falling back to indirect call on any that are not correct. | ||||
| 174 | /// Trapping mode is useful for debugging undefined behavior leading to failures | ||||
| 175 | /// with WPD. Fallback mode is useful for ensuring safety when whole program | ||||
| 176 | /// visibility may be compromised. | ||||
| 177 | enum WPDCheckMode { None, Trap, Fallback }; | ||||
| 178 | static cl::opt<WPDCheckMode> DevirtCheckMode( | ||||
| 179 | "wholeprogramdevirt-check", cl::Hidden, | ||||
| 180 | cl::desc("Type of checking for incorrect devirtualizations"), | ||||
| 181 | cl::values(clEnumValN(WPDCheckMode::None, "none", "No checking")llvm::cl::OptionEnumValue { "none", int(WPDCheckMode::None), "No checking" }, | ||||
| 182 | clEnumValN(WPDCheckMode::Trap, "trap", "Trap when incorrect")llvm::cl::OptionEnumValue { "trap", int(WPDCheckMode::Trap), "Trap when incorrect" }, | ||||
| 183 | clEnumValN(WPDCheckMode::Fallback, "fallback",llvm::cl::OptionEnumValue { "fallback", int(WPDCheckMode::Fallback ), "Fallback to indirect when incorrect" } | ||||
| 184 | "Fallback to indirect when incorrect")llvm::cl::OptionEnumValue { "fallback", int(WPDCheckMode::Fallback ), "Fallback to indirect when incorrect" })); | ||||
| 185 | |||||
| 186 | namespace { | ||||
| 187 | struct PatternList { | ||||
| 188 | std::vector<GlobPattern> Patterns; | ||||
| 189 | template <class T> void init(const T &StringList) { | ||||
| 190 | for (const auto &S : StringList) | ||||
| 191 | if (Expected<GlobPattern> Pat = GlobPattern::create(S)) | ||||
| 192 | Patterns.push_back(std::move(*Pat)); | ||||
| 193 | } | ||||
| 194 | bool match(StringRef S) { | ||||
| 195 | for (const GlobPattern &P : Patterns) | ||||
| 196 | if (P.match(S)) | ||||
| 197 | return true; | ||||
| 198 | return false; | ||||
| 199 | } | ||||
| 200 | }; | ||||
| 201 | } // namespace | ||||
| 202 | |||||
// Find the minimum offset that we may store a value of size Size bits at. If
// IsAfter is set, look for an offset before the object, otherwise look for an
// offset after the object.
uint64_t
wholeprogramdevirt::findLowestOffset(ArrayRef<VirtualCallTarget> Targets,
                                     bool IsAfter, uint64_t Size) {
  // Find a minimum offset taking into account only vtable sizes.
  uint64_t MinByte = 0;
  for (const VirtualCallTarget &Target : Targets) {
    if (IsAfter)
      MinByte = std::max(MinByte, Target.minAfterBytes());
    else
      MinByte = std::max(MinByte, Target.minBeforeBytes());
  }

  // Build a vector of arrays of bytes covering, for each target, a slice of the
  // used region (see AccumBitVector::BytesUsed in
  // llvm/Transforms/IPO/WholeProgramDevirt.h) starting at MinByte. Effectively,
  // this aligns the used regions to start at MinByte.
  //
  // In this example, A, B and C are vtables, # is a byte already allocated for
  // a virtual function pointer, AAAA... (etc.) are the used regions for the
  // vtables and Offset(X) is the value computed for the Offset variable below
  // for X.
  //
  //                    Offset(A)
  //                    |       |
  //                            |MinByte
  // A: ################AAAAAAAA|AAAAAAAA
  // B: ########BBBBBBBBBBBBBBBB|BBBB
  // C: ########################|CCCCCCCCCCCCCCCC
  //            |   Offset(B)   |
  //
  // This code produces the slices of A, B and C that appear after the divider
  // at MinByte.
  std::vector<ArrayRef<uint8_t>> Used;
  for (const VirtualCallTarget &Target : Targets) {
    ArrayRef<uint8_t> VTUsed = IsAfter ? Target.TM->Bits->After.BytesUsed
                                       : Target.TM->Bits->Before.BytesUsed;
    uint64_t Offset = IsAfter ? MinByte - Target.minAfterBytes()
                              : MinByte - Target.minBeforeBytes();

    // Disregard used regions that are smaller than Offset. These are
    // effectively all-free regions that do not need to be checked.
    if (VTUsed.size() > Offset)
      Used.push_back(VTUsed.slice(Offset));
  }

  if (Size == 1) {
    // Find a free bit in each member of Used. The outer loop has no bound
    // because once I runs past the end of every used region, every bit is
    // free and the return below must fire.
    for (unsigned I = 0;; ++I) {
      uint8_t BitsUsed = 0;
      for (auto &&B : Used)
        if (I < B.size())
          BitsUsed |= B[I];
      if (BitsUsed != 0xff)
        // Return the bit offset of the lowest clear bit in this byte.
        return (MinByte + I) * 8 + llvm::countr_zero(uint8_t(~BitsUsed));
    }
  } else {
    // Find a free (Size/8) byte region in each member of Used. As above, the
    // loop is unbounded: past the end of all used regions everything is free.
    // FIXME: see if alignment helps.
    for (unsigned I = 0;; ++I) {
      for (auto &&B : Used) {
        unsigned Byte = 0;
        // Scan Size/8 consecutive bytes starting at I; any used byte means
        // this candidate offset fails for all targets, so try I+1.
        while ((I + Byte) < B.size() && Byte < (Size / 8)) {
          if (B[I + Byte])
            goto NextI;
          ++Byte;
        }
      }
      return (MinByte + I) * 8;
    NextI:;
    }
  }
}
| 278 | |||||
| 279 | void wholeprogramdevirt::setBeforeReturnValues( | ||||
| 280 | MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocBefore, | ||||
| 281 | unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) { | ||||
| 282 | if (BitWidth == 1) | ||||
| 283 | OffsetByte = -(AllocBefore / 8 + 1); | ||||
| 284 | else | ||||
| 285 | OffsetByte = -((AllocBefore + 7) / 8 + (BitWidth + 7) / 8); | ||||
| 286 | OffsetBit = AllocBefore % 8; | ||||
| 287 | |||||
| 288 | for (VirtualCallTarget &Target : Targets) { | ||||
| 289 | if (BitWidth == 1) | ||||
| 290 | Target.setBeforeBit(AllocBefore); | ||||
| 291 | else | ||||
| 292 | Target.setBeforeBytes(AllocBefore, (BitWidth + 7) / 8); | ||||
| 293 | } | ||||
| 294 | } | ||||
| 295 | |||||
| 296 | void wholeprogramdevirt::setAfterReturnValues( | ||||
| 297 | MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocAfter, | ||||
| 298 | unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) { | ||||
| 299 | if (BitWidth == 1) | ||||
| 300 | OffsetByte = AllocAfter / 8; | ||||
| 301 | else | ||||
| 302 | OffsetByte = (AllocAfter + 7) / 8; | ||||
| 303 | OffsetBit = AllocAfter % 8; | ||||
| 304 | |||||
| 305 | for (VirtualCallTarget &Target : Targets) { | ||||
| 306 | if (BitWidth == 1) | ||||
| 307 | Target.setAfterBit(AllocAfter); | ||||
| 308 | else | ||||
| 309 | Target.setAfterBytes(AllocAfter, (BitWidth + 7) / 8); | ||||
| 310 | } | ||||
| 311 | } | ||||
| 312 | |||||
// A candidate callee Fn paired with the type member (vtable + offset) it was
// found in. Endianness is taken from the defining module's data layout so
// that before/after byte layout can be computed correctly.
VirtualCallTarget::VirtualCallTarget(GlobalValue *Fn, const TypeMemberInfo *TM)
    : Fn(Fn), TM(TM),
      IsBigEndian(Fn->getParent()->getDataLayout().isBigEndian()),
      WasDevirt(false) {}
| 317 | |||||
namespace {

// A slot in a set of virtual tables. The TypeID identifies the set of virtual
// tables, and the ByteOffset is the offset in bytes from the address point to
// the virtual function pointer.
struct VTableSlot {
  Metadata *TypeID;    // !type metadata naming the set of vtables.
  uint64_t ByteOffset; // Byte distance from address point to the entry.
};

} // end anonymous namespace
| 329 | |||||
namespace llvm {

// DenseMap key traits for VTableSlot: empty/tombstone keys are built from the
// component types' sentinels, and hashing/equality combine both fields.
template <> struct DenseMapInfo<VTableSlot> {
  static VTableSlot getEmptyKey() {
    return {DenseMapInfo<Metadata *>::getEmptyKey(),
            DenseMapInfo<uint64_t>::getEmptyKey()};
  }
  static VTableSlot getTombstoneKey() {
    return {DenseMapInfo<Metadata *>::getTombstoneKey(),
            DenseMapInfo<uint64_t>::getTombstoneKey()};
  }
  static unsigned getHashValue(const VTableSlot &I) {
    // XOR-combine the two field hashes; collisions are acceptable for a
    // DenseMap hash.
    return DenseMapInfo<Metadata *>::getHashValue(I.TypeID) ^
           DenseMapInfo<uint64_t>::getHashValue(I.ByteOffset);
  }
  static bool isEqual(const VTableSlot &LHS,
                      const VTableSlot &RHS) {
    return LHS.TypeID == RHS.TypeID && LHS.ByteOffset == RHS.ByteOffset;
  }
};

// Same traits for the summary-based (string TypeID) form of a vtable slot.
template <> struct DenseMapInfo<VTableSlotSummary> {
  static VTableSlotSummary getEmptyKey() {
    return {DenseMapInfo<StringRef>::getEmptyKey(),
            DenseMapInfo<uint64_t>::getEmptyKey()};
  }
  static VTableSlotSummary getTombstoneKey() {
    return {DenseMapInfo<StringRef>::getTombstoneKey(),
            DenseMapInfo<uint64_t>::getTombstoneKey()};
  }
  static unsigned getHashValue(const VTableSlotSummary &I) {
    return DenseMapInfo<StringRef>::getHashValue(I.TypeID) ^
           DenseMapInfo<uint64_t>::getHashValue(I.ByteOffset);
  }
  static bool isEqual(const VTableSlotSummary &LHS,
                      const VTableSlotSummary &RHS) {
    return LHS.TypeID == RHS.TypeID && LHS.ByteOffset == RHS.ByteOffset;
  }
};

} // end namespace llvm
| 371 | |||||
| 372 | namespace { | ||||
| 373 | |||||
| 374 | // Returns true if the function must be unreachable based on ValueInfo. | ||||
| 375 | // | ||||
| 376 | // In particular, identifies a function as unreachable in the following | ||||
| 377 | // conditions | ||||
| 378 | // 1) All summaries are live. | ||||
| 379 | // 2) All function summaries indicate it's unreachable | ||||
| 380 | // 3) There is no non-function with the same GUID (which is rare) | ||||
| 381 | bool mustBeUnreachableFunction(ValueInfo TheFnVI) { | ||||
| 382 | if ((!TheFnVI) || TheFnVI.getSummaryList().empty()) { | ||||
| 383 | // Returns false if ValueInfo is absent, or the summary list is empty | ||||
| 384 | // (e.g., function declarations). | ||||
| 385 | return false; | ||||
| 386 | } | ||||
| 387 | |||||
| 388 | for (const auto &Summary : TheFnVI.getSummaryList()) { | ||||
| 389 | // Conservatively returns false if any non-live functions are seen. | ||||
| 390 | // In general either all summaries should be live or all should be dead. | ||||
| 391 | if (!Summary->isLive()) | ||||
| 392 | return false; | ||||
| 393 | if (auto *FS = dyn_cast<FunctionSummary>(Summary->getBaseObject())) { | ||||
| 394 | if (!FS->fflags().MustBeUnreachable) | ||||
| 395 | return false; | ||||
| 396 | } | ||||
| 397 | // Be conservative if a non-function has the same GUID (which is rare). | ||||
| 398 | else | ||||
| 399 | return false; | ||||
| 400 | } | ||||
| 401 | // All function summaries are live and all of them agree that the function is | ||||
| 402 | // unreachble. | ||||
| 403 | return true; | ||||
| 404 | } | ||||
| 405 | |||||
// A virtual call site. VTable is the loaded virtual table pointer, and CS is
// the indirect virtual call.
struct VirtualCallSite {
  Value *VTable = nullptr; // The loaded vtable pointer feeding this call.
  CallBase &CB;            // The indirect call or invoke itself.

  // If non-null, this field points to the associated unsafe use count stored in
  // the DevirtModule::NumUnsafeUsesForTypeTest map below. See the description
  // of that field for details.
  unsigned *NumUnsafeUses = nullptr;

  // Emit an optimization remark noting that the optimization named OptName
  // devirtualized this call to the function named TargetName.
  void
  emitRemark(const StringRef OptName, const StringRef TargetName,
             function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter) {
    Function *F = CB.getCaller();
    DebugLoc DLoc = CB.getDebugLoc();
    BasicBlock *Block = CB.getParent();

    using namespace ore;
    OREGetter(F).emit(OptimizationRemark(DEBUG_TYPE, OptName, DLoc, Block)
                      << NV("Optimization", OptName)
                      << ": devirtualized a call to "
                      << NV("FunctionName", TargetName));
  }

  // Replace all uses of the call's result with New and erase the call,
  // emitting a remark first when remarks are enabled.
  void replaceAndErase(
      const StringRef OptName, const StringRef TargetName, bool RemarksEnabled,
      function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
      Value *New) {
    if (RemarksEnabled)
      emitRemark(OptName, TargetName, OREGetter);
    CB.replaceAllUsesWith(New);
    // An invoke is a block terminator: substitute an unconditional branch to
    // the normal destination and detach the unwind edge before erasing it.
    if (auto *II = dyn_cast<InvokeInst>(&CB)) {
      BranchInst::Create(II->getNormalDest(), &CB);
      II->getUnwindDest()->removePredecessor(II->getParent());
    }
    CB.eraseFromParent();
    // This use is no longer unsafe.
    if (NumUnsafeUses)
      --*NumUnsafeUses;
  }
};
| 448 | |||||
| 449 | // Call site information collected for a specific VTableSlot and possibly a list | ||||
| 450 | // of constant integer arguments. The grouping by arguments is handled by the | ||||
| 451 | // VTableSlotInfo class. | ||||
| 452 | struct CallSiteInfo { | ||||
| 453 | /// The set of call sites for this slot. Used during regular LTO and the | ||||
| 454 | /// import phase of ThinLTO (as well as the export phase of ThinLTO for any | ||||
| 455 | /// call sites that appear in the merged module itself); in each of these | ||||
| 456 | /// cases we are directly operating on the call sites at the IR level. | ||||
| 457 | std::vector<VirtualCallSite> CallSites; | ||||
| 458 | |||||
| 459 | /// Whether all call sites represented by this CallSiteInfo, including those | ||||
| 460 | /// in summaries, have been devirtualized. This starts off as true because a | ||||
| 461 | /// default constructed CallSiteInfo represents no call sites. | ||||
| 462 | bool AllCallSitesDevirted = true; | ||||
| 463 | |||||
| 464 | // These fields are used during the export phase of ThinLTO and reflect | ||||
| 465 | // information collected from function summaries. | ||||
| 466 | |||||
| 467 | /// Whether any function summary contains an llvm.assume(llvm.type.test) for | ||||
| 468 | /// this slot. | ||||
| 469 | bool SummaryHasTypeTestAssumeUsers = false; | ||||
| 470 | |||||
| 471 | /// CFI-specific: a vector containing the list of function summaries that use | ||||
| 472 | /// the llvm.type.checked.load intrinsic and therefore will require | ||||
| 473 | /// resolutions for llvm.type.test in order to implement CFI checks if | ||||
| 474 | /// devirtualization was unsuccessful. If devirtualization was successful, the | ||||
| 475 | /// pass will clear this vector by calling markDevirt(). If at the end of the | ||||
| 476 | /// pass the vector is non-empty, we will need to add a use of llvm.type.test | ||||
| 477 | /// to each of the function summaries in the vector. | ||||
| 478 | std::vector<FunctionSummary *> SummaryTypeCheckedLoadUsers; | ||||
| 479 | std::vector<FunctionSummary *> SummaryTypeTestAssumeUsers; | ||||
| 480 | |||||
| 481 | bool isExported() const { | ||||
| 482 | return SummaryHasTypeTestAssumeUsers || | ||||
| 483 | !SummaryTypeCheckedLoadUsers.empty(); | ||||
| 484 | } | ||||
| 485 | |||||
| 486 | void addSummaryTypeCheckedLoadUser(FunctionSummary *FS) { | ||||
| 487 | SummaryTypeCheckedLoadUsers.push_back(FS); | ||||
| 488 | AllCallSitesDevirted = false; | ||||
| 489 | } | ||||
| 490 | |||||
| 491 | void addSummaryTypeTestAssumeUser(FunctionSummary *FS) { | ||||
| 492 | SummaryTypeTestAssumeUsers.push_back(FS); | ||||
| 493 | SummaryHasTypeTestAssumeUsers = true; | ||||
| 494 | AllCallSitesDevirted = false; | ||||
| 495 | } | ||||
| 496 | |||||
| 497 | void markDevirt() { | ||||
| 498 | AllCallSitesDevirted = true; | ||||
| 499 | |||||
| 500 | // As explained in the comment for SummaryTypeCheckedLoadUsers. | ||||
| 501 | SummaryTypeCheckedLoadUsers.clear(); | ||||
| 502 | } | ||||
| 503 | }; | ||||
| 504 | |||||
// Call site information collected for a specific VTableSlot.
struct VTableSlotInfo {
  // The set of call sites which do not have all constant integer arguments
  // (excluding "this").
  CallSiteInfo CSInfo;

  // The set of call sites with all constant integer arguments (excluding
  // "this"), grouped by argument list.
  std::map<std::vector<uint64_t>, CallSiteInfo> ConstCSInfo;

  // Record a call through this slot, bucketing it into CSInfo or ConstCSInfo
  // via findCallSiteInfo.
  void addCallSite(Value *VTable, CallBase &CB, unsigned *NumUnsafeUses);

private:
  // Select the CallSiteInfo bucket appropriate for CB's argument list.
  CallSiteInfo &findCallSiteInfo(CallBase &CB);
};
| 520 | |||||
| 521 | CallSiteInfo &VTableSlotInfo::findCallSiteInfo(CallBase &CB) { | ||||
| 522 | std::vector<uint64_t> Args; | ||||
| 523 | auto *CBType = dyn_cast<IntegerType>(CB.getType()); | ||||
| 524 | if (!CBType || CBType->getBitWidth() > 64 || CB.arg_empty()) | ||||
| 525 | return CSInfo; | ||||
| 526 | for (auto &&Arg : drop_begin(CB.args())) { | ||||
| 527 | auto *CI = dyn_cast<ConstantInt>(Arg); | ||||
| 528 | if (!CI || CI->getBitWidth() > 64) | ||||
| 529 | return CSInfo; | ||||
| 530 | Args.push_back(CI->getZExtValue()); | ||||
| 531 | } | ||||
| 532 | return ConstCSInfo[Args]; | ||||
| 533 | } | ||||
| 534 | |||||
| 535 | void VTableSlotInfo::addCallSite(Value *VTable, CallBase &CB, | ||||
| 536 | unsigned *NumUnsafeUses) { | ||||
| 537 | auto &CSI = findCallSiteInfo(CB); | ||||
| 538 | CSI.AllCallSitesDevirted = false; | ||||
| 539 | CSI.CallSites.push_back({VTable, CB, NumUnsafeUses}); | ||||
| 540 | } | ||||
| 541 | |||||
| 542 | struct DevirtModule { | ||||
| 543 | Module &M; | ||||
| 544 | function_ref<AAResults &(Function &)> AARGetter; | ||||
| 545 | function_ref<DominatorTree &(Function &)> LookupDomTree; | ||||

  // Summary to write WPD resolutions to during the export phase of
  // hybrid/regular LTO. Mutually exclusive with ImportSummary (asserted in
  // the constructor).
  ModuleSummaryIndex *ExportSummary;
  // Summary to read previously-computed WPD resolutions from during the
  // import phase. Mutually exclusive with ExportSummary.
  const ModuleSummaryIndex *ImportSummary;

  // Commonly used types, cached once from the module's context/data layout.
  IntegerType *Int8Ty;
  PointerType *Int8PtrTy;
  IntegerType *Int32Ty;
  IntegerType *Int64Ty;
  IntegerType *IntPtrTy;
  /// Sizeless array type, used for imported vtables. This provides a signal
  /// to analyzers that these imports may alias, as they do for example
  /// when multiple unique return values occur in the same vtable.
  ArrayType *Int8Arr0Ty;

  // Cached result of areRemarksEnabled(), computed in the constructor.
  bool RemarksEnabled;
  function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter;

  // Virtual call sites discovered in the module, grouped by vtable slot
  // (type id + offset).
  MapVector<VTableSlot, VTableSlotInfo> CallSlots;

  // Calls that have already been optimized. We may add a call to multiple
  // VTableSlotInfos if vtable loads are coalesced and need to make sure not to
  // optimize a call more than once.
  SmallPtrSet<CallBase *, 8> OptimizedCalls;

  // This map keeps track of the number of "unsafe" uses of a loaded function
  // pointer. The key is the associated llvm.type.test intrinsic call generated
  // by this pass. An unsafe use is one that calls the loaded function pointer
  // directly. Every time we eliminate an unsafe use (for example, by
  // devirtualizing it or by applying virtual constant propagation), we
  // decrement the value stored in this map. If a value reaches zero, we can
  // eliminate the type check by RAUWing the associated llvm.type.test call with
  // true.
  std::map<CallInst *, unsigned> NumUnsafeUsesForTypeTest;
  // Name patterns (from -wholeprogramdevirt-skip) of functions that must not
  // be devirtualized.
  PatternList FunctionsToSkip;

  // Note: a module either exports resolutions or imports them, never both;
  // the assert below enforces that at most one summary pointer is set.
  DevirtModule(Module &M, function_ref<AAResults &(Function &)> AARGetter,
               function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
               function_ref<DominatorTree &(Function &)> LookupDomTree,
               ModuleSummaryIndex *ExportSummary,
               const ModuleSummaryIndex *ImportSummary)
      : M(M), AARGetter(AARGetter), LookupDomTree(LookupDomTree),
        ExportSummary(ExportSummary), ImportSummary(ImportSummary),
        Int8Ty(Type::getInt8Ty(M.getContext())),
        Int8PtrTy(Type::getInt8PtrTy(M.getContext())),
        Int32Ty(Type::getInt32Ty(M.getContext())),
        Int64Ty(Type::getInt64Ty(M.getContext())),
        IntPtrTy(M.getDataLayout().getIntPtrType(M.getContext(), 0)),
        Int8Arr0Ty(ArrayType::get(Type::getInt8Ty(M.getContext()), 0)),
        RemarksEnabled(areRemarksEnabled()), OREGetter(OREGetter) {
    assert(!(ExportSummary && ImportSummary));
    FunctionsToSkip.init(SkipFunctionNames);
  }

  // Returns whether optimization remarks should be emitted for this module;
  // evaluated once in the constructor to initialize RemarksEnabled.
  bool areRemarksEnabled();

  // Scan users of the llvm.type.test / llvm.type.checked.load intrinsics and
  // record the virtual call sites they guard (presumably into CallSlots —
  // bodies not visible in this chunk; confirm against the definitions).
  void
  scanTypeTestUsers(Function *TypeTestFunc,
                    DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap);
  void scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc);

  // Build Bits (one VTableBits per vtable definition) and a map from type
  // identifier metadata to the (vtable, offset) members compatible with it,
  // derived from !type annotations on the module's globals.
  void buildTypeIdentifierMap(
      std::vector<VTableBits> &Bits,
      DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap);

  // Populate TargetsForSlot with the possible implementations reachable from
  // the given type members at ByteOffset. Returns false if any vtable cannot
  // be analyzed (non-constant, public LTO visibility, unresolvable entry) or
  // if no targets were found.
  bool
  tryFindVirtualCallTargets(std::vector<VirtualCallTarget> &TargetsForSlot,
                            const std::set<TypeMemberInfo> &TypeMemberInfos,
                            uint64_t ByteOffset,
                            ModuleSummaryIndex *ExportSummary);

  // try* functions decide whether an optimization applies (and record it in
  // the resolution); apply* functions rewrite the call sites accordingly.
  void applySingleImplDevirt(VTableSlotInfo &SlotInfo, Constant *TheFn,
                             bool &IsExported);
  bool trySingleImplDevirt(ModuleSummaryIndex *ExportSummary,
                           MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           VTableSlotInfo &SlotInfo,
                           WholeProgramDevirtResolution *Res);

  void applyICallBranchFunnel(VTableSlotInfo &SlotInfo, Constant *JT,
                              bool &IsExported);
  void tryICallBranchFunnel(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                            VTableSlotInfo &SlotInfo,
                            WholeProgramDevirtResolution *Res, VTableSlot Slot);

  // Evaluate each target with the given constant arguments (used by the
  // virtual-constant-propagation family of optimizations).
  bool tryEvaluateFunctionsWithArgs(
      MutableArrayRef<VirtualCallTarget> TargetsForSlot,
      ArrayRef<uint64_t> Args);

  void applyUniformRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
                             uint64_t TheRetVal);
  bool tryUniformRetValOpt(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           CallSiteInfo &CSInfo,
                           WholeProgramDevirtResolution::ByArg *Res);

  // Returns the global symbol name that is used to export information about the
  // given vtable slot and list of arguments.
  std::string getGlobalName(VTableSlot Slot, ArrayRef<uint64_t> Args,
                            StringRef Name);

  bool shouldExportConstantsAsAbsoluteSymbols();

  // This function is called during the export phase to create a symbol
  // definition containing information about the given vtable slot and list of
  // arguments.
  void exportGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args, StringRef Name,
                    Constant *C);
  void exportConstant(VTableSlot Slot, ArrayRef<uint64_t> Args, StringRef Name,
                      uint32_t Const, uint32_t &Storage);

  // This function is called during the import phase to create a reference to
  // the symbol definition created during the export phase.
  Constant *importGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args,
                         StringRef Name);
  Constant *importConstant(VTableSlot Slot, ArrayRef<uint64_t> Args,
                           StringRef Name, IntegerType *IntTy,
                           uint32_t Storage);

  // Returns the address of type member M's vtable symbol as a Constant.
  Constant *getMemberAddr(const TypeMemberInfo *M);

  void applyUniqueRetValOpt(CallSiteInfo &CSInfo, StringRef FnName, bool IsOne,
                            Constant *UniqueMemberAddr);
  bool tryUniqueRetValOpt(unsigned BitWidth,
                          MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                          CallSiteInfo &CSInfo,
                          WholeProgramDevirtResolution::ByArg *Res,
                          VTableSlot Slot, ArrayRef<uint64_t> Args);

  void applyVirtualConstProp(CallSiteInfo &CSInfo, StringRef FnName,
                             Constant *Byte, Constant *Bit);
  bool tryVirtualConstProp(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           VTableSlotInfo &SlotInfo,
                           WholeProgramDevirtResolution *Res, VTableSlot Slot);

  // Re-emit vtable global B after constants have been attached around it
  // (body not visible here — confirm against the definition).
  void rebuildGlobal(VTableBits &B);

  // Apply the summary resolution for Slot to all virtual calls in SlotInfo.
  void importResolution(VTableSlot Slot, VTableSlotInfo &SlotInfo);

  // If we were able to eliminate all unsafe uses for a type checked load,
  // eliminate the associated type tests by replacing them with true.
  void removeRedundantTypeTests();

  // Main entry point; returns true if the module was changed.
  bool run();

  // Look up the corresponding ValueInfo entry of `TheFn` in `ExportSummary`.
  //
  // Caller guarantees that `ExportSummary` is not nullptr.
  static ValueInfo lookUpFunctionValueInfo(Function *TheFn,
                                           ModuleSummaryIndex *ExportSummary);

  // Returns true if the function definition must be unreachable.
  //
  // Note if this helper function returns true, `F` is guaranteed
  // to be unreachable; if it returns false, `F` might still
  // be unreachable but not covered by this helper function.
  //
  // Implementation-wise, if function definition is present, IR is analyzed; if
  // not, look up function flags from ExportSummary as a fallback.
  static bool mustBeUnreachableFunction(Function *const F,
                                        ModuleSummaryIndex *ExportSummary);

  // Lower the module using the action and summary passed as command line
  // arguments. For testing purposes only.
  static bool
  runForTesting(Module &M, function_ref<AAResults &(Function &)> AARGetter,
                function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
                function_ref<DominatorTree &(Function &)> LookupDomTree);
| 712 | }; | ||||
| 713 | |||||
// Summary-index-based devirtualization driver (used when operating on a
// combined index, e.g. ThinLTO — see checkCombinedSummaryForTesting's note
// that this kind of index goes to DevirtIndex::run).
struct DevirtIndex {
  ModuleSummaryIndex &ExportSummary;
  // The set in which to record GUIDs exported from their module by
  // devirtualization, used by client to ensure they are not internalized.
  std::set<GlobalValue::GUID> &ExportedGUIDs;
  // A map in which to record the information necessary to locate the WPD
  // resolution for local targets in case they are exported by cross module
  // importing.
  std::map<ValueInfo, std::vector<VTableSlotSummary>> &LocalWPDTargetsMap;

  // Virtual call sites recorded in the summaries, grouped by vtable slot.
  MapVector<VTableSlotSummary, VTableSlotInfo> CallSlots;

  // Name patterns (from -wholeprogramdevirt-skip) of functions that must not
  // be devirtualized.
  PatternList FunctionsToSkip;

  DevirtIndex(
      ModuleSummaryIndex &ExportSummary,
      std::set<GlobalValue::GUID> &ExportedGUIDs,
      std::map<ValueInfo, std::vector<VTableSlotSummary>> &LocalWPDTargetsMap)
      : ExportSummary(ExportSummary), ExportedGUIDs(ExportedGUIDs),
        LocalWPDTargetsMap(LocalWPDTargetsMap) {
    FunctionsToSkip.init(SkipFunctionNames);
  }

  // Collect the live, reachable implementations for the virtual call at
  // ByteOffset in the vtables compatible with the type id; returns false if
  // analysis is unsafe or no targets were found.
  bool tryFindVirtualCallTargets(std::vector<ValueInfo> &TargetsForSlot,
                                 const TypeIdCompatibleVtableInfo TIdInfo,
                                 uint64_t ByteOffset);

  // Record a single-implementation resolution in the summary if all targets
  // agree (definition not visible in this chunk).
  bool trySingleImplDevirt(MutableArrayRef<ValueInfo> TargetsForSlot,
                           VTableSlotSummary &SlotSummary,
                           VTableSlotInfo &SlotInfo,
                           WholeProgramDevirtResolution *Res,
                           std::set<ValueInfo> &DevirtTargets);

  // Main entry point for index-based WPD.
  void run();
};
| 749 | } // end anonymous namespace | ||||
| 750 | |||||
| 751 | PreservedAnalyses WholeProgramDevirtPass::run(Module &M, | ||||
| 752 | ModuleAnalysisManager &AM) { | ||||
| 753 | auto &FAM = AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager(); | ||||
| 754 | auto AARGetter = [&](Function &F) -> AAResults & { | ||||
| 755 | return FAM.getResult<AAManager>(F); | ||||
| 756 | }; | ||||
| 757 | auto OREGetter = [&](Function *F) -> OptimizationRemarkEmitter & { | ||||
| 758 | return FAM.getResult<OptimizationRemarkEmitterAnalysis>(*F); | ||||
| 759 | }; | ||||
| 760 | auto LookupDomTree = [&FAM](Function &F) -> DominatorTree & { | ||||
| 761 | return FAM.getResult<DominatorTreeAnalysis>(F); | ||||
| 762 | }; | ||||
| 763 | if (UseCommandLine) { | ||||
| 764 | if (!DevirtModule::runForTesting(M, AARGetter, OREGetter, LookupDomTree)) | ||||
| 765 | return PreservedAnalyses::all(); | ||||
| 766 | return PreservedAnalyses::none(); | ||||
| 767 | } | ||||
| 768 | if (!DevirtModule(M, AARGetter, OREGetter, LookupDomTree, ExportSummary, | ||||
| 769 | ImportSummary) | ||||
| 770 | .run()) | ||||
| 771 | return PreservedAnalyses::all(); | ||||
| 772 | return PreservedAnalyses::none(); | ||||
| 773 | } | ||||
| 774 | |||||
| 775 | namespace llvm { | ||||
| 776 | // Enable whole program visibility if enabled by client (e.g. linker) or | ||||
| 777 | // internal option, and not force disabled. | ||||
| 778 | bool hasWholeProgramVisibility(bool WholeProgramVisibilityEnabledInLTO) { | ||||
| 779 | return (WholeProgramVisibilityEnabledInLTO || WholeProgramVisibility) && | ||||
| 780 | !DisableWholeProgramVisibility; | ||||
| 781 | } | ||||
| 782 | |||||
| 783 | /// If whole program visibility asserted, then upgrade all public vcall | ||||
| 784 | /// visibility metadata on vtable definitions to linkage unit visibility in | ||||
| 785 | /// Module IR (for regular or hybrid LTO). | ||||
| 786 | void updateVCallVisibilityInModule( | ||||
| 787 | Module &M, bool WholeProgramVisibilityEnabledInLTO, | ||||
| 788 | const DenseSet<GlobalValue::GUID> &DynamicExportSymbols) { | ||||
| 789 | if (!hasWholeProgramVisibility(WholeProgramVisibilityEnabledInLTO)) | ||||
| 790 | return; | ||||
| 791 | for (GlobalVariable &GV : M.globals()) { | ||||
| 792 | // Add linkage unit visibility to any variable with type metadata, which are | ||||
| 793 | // the vtable definitions. We won't have an existing vcall_visibility | ||||
| 794 | // metadata on vtable definitions with public visibility. | ||||
| 795 | if (GV.hasMetadata(LLVMContext::MD_type) && | ||||
| 796 | GV.getVCallVisibility() == GlobalObject::VCallVisibilityPublic && | ||||
| 797 | // Don't upgrade the visibility for symbols exported to the dynamic | ||||
| 798 | // linker, as we have no information on their eventual use. | ||||
| 799 | !DynamicExportSymbols.count(GV.getGUID())) | ||||
| 800 | GV.setVCallVisibilityMetadata(GlobalObject::VCallVisibilityLinkageUnit); | ||||
| 801 | } | ||||
| 802 | } | ||||
| 803 | |||||
| 804 | void updatePublicTypeTestCalls(Module &M, | ||||
| 805 | bool WholeProgramVisibilityEnabledInLTO) { | ||||
| 806 | Function *PublicTypeTestFunc = | ||||
| 807 | M.getFunction(Intrinsic::getName(Intrinsic::public_type_test)); | ||||
| 808 | if (!PublicTypeTestFunc) | ||||
| 809 | return; | ||||
| 810 | if (hasWholeProgramVisibility(WholeProgramVisibilityEnabledInLTO)) { | ||||
| 811 | Function *TypeTestFunc = | ||||
| 812 | Intrinsic::getDeclaration(&M, Intrinsic::type_test); | ||||
| 813 | for (Use &U : make_early_inc_range(PublicTypeTestFunc->uses())) { | ||||
| 814 | auto *CI = cast<CallInst>(U.getUser()); | ||||
| 815 | auto *NewCI = CallInst::Create( | ||||
| 816 | TypeTestFunc, {CI->getArgOperand(0), CI->getArgOperand(1)}, | ||||
| 817 | std::nullopt, "", CI); | ||||
| 818 | CI->replaceAllUsesWith(NewCI); | ||||
| 819 | CI->eraseFromParent(); | ||||
| 820 | } | ||||
| 821 | } else { | ||||
| 822 | auto *True = ConstantInt::getTrue(M.getContext()); | ||||
| 823 | for (Use &U : make_early_inc_range(PublicTypeTestFunc->uses())) { | ||||
| 824 | auto *CI = cast<CallInst>(U.getUser()); | ||||
| 825 | CI->replaceAllUsesWith(True); | ||||
| 826 | CI->eraseFromParent(); | ||||
| 827 | } | ||||
| 828 | } | ||||
| 829 | } | ||||
| 830 | |||||
| 831 | /// If whole program visibility asserted, then upgrade all public vcall | ||||
| 832 | /// visibility metadata on vtable definition summaries to linkage unit | ||||
| 833 | /// visibility in Module summary index (for ThinLTO). | ||||
| 834 | void updateVCallVisibilityInIndex( | ||||
| 835 | ModuleSummaryIndex &Index, bool WholeProgramVisibilityEnabledInLTO, | ||||
| 836 | const DenseSet<GlobalValue::GUID> &DynamicExportSymbols) { | ||||
| 837 | if (!hasWholeProgramVisibility(WholeProgramVisibilityEnabledInLTO)) | ||||
| 838 | return; | ||||
| 839 | for (auto &P : Index) { | ||||
| 840 | // Don't upgrade the visibility for symbols exported to the dynamic | ||||
| 841 | // linker, as we have no information on their eventual use. | ||||
| 842 | if (DynamicExportSymbols.count(P.first)) | ||||
| 843 | continue; | ||||
| 844 | for (auto &S : P.second.SummaryList) { | ||||
| 845 | auto *GVar = dyn_cast<GlobalVarSummary>(S.get()); | ||||
| 846 | if (!GVar || | ||||
| 847 | GVar->getVCallVisibility() != GlobalObject::VCallVisibilityPublic) | ||||
| 848 | continue; | ||||
| 849 | GVar->setVCallVisibility(GlobalObject::VCallVisibilityLinkageUnit); | ||||
| 850 | } | ||||
| 851 | } | ||||
| 852 | } | ||||
| 853 | |||||
| 854 | void runWholeProgramDevirtOnIndex( | ||||
| 855 | ModuleSummaryIndex &Summary, std::set<GlobalValue::GUID> &ExportedGUIDs, | ||||
| 856 | std::map<ValueInfo, std::vector<VTableSlotSummary>> &LocalWPDTargetsMap) { | ||||
| 857 | DevirtIndex(Summary, ExportedGUIDs, LocalWPDTargetsMap).run(); | ||||
| 858 | } | ||||
| 859 | |||||
// For each local single-impl devirt target that ended up exported by cross
// module importing, promote its recorded SingleImplName to the global
// (module-hash-suffixed) name so importers can resolve it.
void updateIndexWPDForExports(
    ModuleSummaryIndex &Summary,
    function_ref<bool(StringRef, ValueInfo)> isExported,
    std::map<ValueInfo, std::vector<VTableSlotSummary>> &LocalWPDTargetsMap) {
  for (auto &T : LocalWPDTargetsMap) {
    auto &VI = T.first;
    // This was enforced earlier during trySingleImplDevirt.
    assert(VI.getSummaryList().size() == 1 &&
           "Devirt of local target has more than one copy");
    auto &S = VI.getSummaryList()[0];
    if (!isExported(S->modulePath(), VI))
      continue;

    // It's been exported by a cross module import.
    for (auto &SlotSummary : T.second) {
      auto *TIdSum = Summary.getTypeIdSummary(SlotSummary.TypeID);
      assert(TIdSum);
      auto WPDRes = TIdSum->WPDRes.find(SlotSummary.ByteOffset);
      assert(WPDRes != TIdSum->WPDRes.end());
      // Rewrite the local name to its promoted form, derived from the
      // defining module's hash.
      WPDRes->second.SingleImplName = ModuleSummaryIndex::getGlobalNameForLocal(
          WPDRes->second.SingleImplName,
          Summary.getModuleHash(S->modulePath()));
    }
  }
}
| 885 | |||||
| 886 | } // end namespace llvm | ||||
| 887 | |||||
| 888 | static Error checkCombinedSummaryForTesting(ModuleSummaryIndex *Summary) { | ||||
| 889 | // Check that summary index contains regular LTO module when performing | ||||
| 890 | // export to prevent occasional use of index from pure ThinLTO compilation | ||||
| 891 | // (-fno-split-lto-module). This kind of summary index is passed to | ||||
| 892 | // DevirtIndex::run, not to DevirtModule::run used by opt/runForTesting. | ||||
| 893 | const auto &ModPaths = Summary->modulePaths(); | ||||
| 894 | if (ClSummaryAction != PassSummaryAction::Import && | ||||
| 895 | !ModPaths.contains(ModuleSummaryIndex::getRegularLTOModuleName())) | ||||
| 896 | return createStringError( | ||||
| 897 | errc::invalid_argument, | ||||
| 898 | "combined summary should contain Regular LTO module"); | ||||
| 899 | return ErrorSuccess(); | ||||
| 900 | } | ||||
| 901 | |||||
// Testing-only driver: optionally reads a summary (bitcode, falling back to
// YAML) named by -wholeprogramdevirt-read-summary, runs DevirtModule with it
// as either the export or import summary per -wholeprogramdevirt-summary-action,
// and optionally writes the summary back out. Errors exit the process.
bool DevirtModule::runForTesting(
    Module &M, function_ref<AAResults &(Function &)> AARGetter,
    function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
    function_ref<DominatorTree &(Function &)> LookupDomTree) {
  // Start with an empty summary so the write path below always has one.
  std::unique_ptr<ModuleSummaryIndex> Summary =
      std::make_unique<ModuleSummaryIndex>(/*HaveGVs=*/false);

  // Handle the command-line summary arguments. This code is for testing
  // purposes only, so we handle errors directly.
  if (!ClReadSummary.empty()) {
    ExitOnError ExitOnErr("-wholeprogramdevirt-read-summary: " + ClReadSummary +
                          ": ");
    auto ReadSummaryFile =
        ExitOnErr(errorOrToExpected(MemoryBuffer::getFile(ClReadSummary)));
    if (Expected<std::unique_ptr<ModuleSummaryIndex>> SummaryOrErr =
            getModuleSummaryIndex(*ReadSummaryFile)) {
      Summary = std::move(*SummaryOrErr);
      ExitOnErr(checkCombinedSummaryForTesting(Summary.get()));
    } else {
      // Try YAML if we've failed with bitcode.
      consumeError(SummaryOrErr.takeError());
      yaml::Input In(ReadSummaryFile->getBuffer());
      In >> *Summary;
      ExitOnErr(errorCodeToError(In.error()));
    }
  }

  // Export and Import are mutually exclusive here: exactly one (or neither)
  // of the two summary slots is populated, matching the ctor's assertion.
  bool Changed =
      DevirtModule(M, AARGetter, OREGetter, LookupDomTree,
                   ClSummaryAction == PassSummaryAction::Export ? Summary.get()
                                                                : nullptr,
                   ClSummaryAction == PassSummaryAction::Import ? Summary.get()
                                                                : nullptr)
          .run();

  if (!ClWriteSummary.empty()) {
    ExitOnError ExitOnErr(
        "-wholeprogramdevirt-write-summary: " + ClWriteSummary + ": ");
    std::error_code EC;
    // Choose the output format by extension: .bc gets bitcode, anything
    // else gets YAML text.
    if (StringRef(ClWriteSummary).endswith(".bc")) {
      raw_fd_ostream OS(ClWriteSummary, EC, sys::fs::OF_None);
      ExitOnErr(errorCodeToError(EC));
      writeIndexToFile(*Summary, OS);
    } else {
      raw_fd_ostream OS(ClWriteSummary, EC, sys::fs::OF_TextWithCRLF);
      ExitOnErr(errorCodeToError(EC));
      yaml::Output Out(OS);
      Out << *Summary;
    }
  }

  return Changed;
}
| 955 | |||||
| 956 | void DevirtModule::buildTypeIdentifierMap( | ||||
| 957 | std::vector<VTableBits> &Bits, | ||||
| 958 | DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap) { | ||||
| 959 | DenseMap<GlobalVariable *, VTableBits *> GVToBits; | ||||
| 960 | Bits.reserve(M.global_size()); | ||||
| 961 | SmallVector<MDNode *, 2> Types; | ||||
| 962 | for (GlobalVariable &GV : M.globals()) { | ||||
| 963 | Types.clear(); | ||||
| 964 | GV.getMetadata(LLVMContext::MD_type, Types); | ||||
| 965 | if (GV.isDeclaration() || Types.empty()) | ||||
| 966 | continue; | ||||
| 967 | |||||
| 968 | VTableBits *&BitsPtr = GVToBits[&GV]; | ||||
| 969 | if (!BitsPtr) { | ||||
| 970 | Bits.emplace_back(); | ||||
| 971 | Bits.back().GV = &GV; | ||||
| 972 | Bits.back().ObjectSize = | ||||
| 973 | M.getDataLayout().getTypeAllocSize(GV.getInitializer()->getType()); | ||||
| 974 | BitsPtr = &Bits.back(); | ||||
| 975 | } | ||||
| 976 | |||||
| 977 | for (MDNode *Type : Types) { | ||||
| 978 | auto TypeID = Type->getOperand(1).get(); | ||||
| 979 | |||||
| 980 | uint64_t Offset = | ||||
| 981 | cast<ConstantInt>( | ||||
| 982 | cast<ConstantAsMetadata>(Type->getOperand(0))->getValue()) | ||||
| 983 | ->getZExtValue(); | ||||
| 984 | |||||
| 985 | TypeIdMap[TypeID].insert({BitsPtr, Offset}); | ||||
| 986 | } | ||||
| 987 | } | ||||
| 988 | } | ||||
| 989 | |||||
| 990 | bool DevirtModule::tryFindVirtualCallTargets( | ||||
| 991 | std::vector<VirtualCallTarget> &TargetsForSlot, | ||||
| 992 | const std::set<TypeMemberInfo> &TypeMemberInfos, uint64_t ByteOffset, | ||||
| 993 | ModuleSummaryIndex *ExportSummary) { | ||||
| 994 | for (const TypeMemberInfo &TM : TypeMemberInfos) { | ||||
| 995 | if (!TM.Bits->GV->isConstant()) | ||||
| 996 | return false; | ||||
| 997 | |||||
| 998 | // We cannot perform whole program devirtualization analysis on a vtable | ||||
| 999 | // with public LTO visibility. | ||||
| 1000 | if (TM.Bits->GV->getVCallVisibility() == | ||||
| 1001 | GlobalObject::VCallVisibilityPublic) | ||||
| 1002 | return false; | ||||
| 1003 | |||||
| 1004 | Constant *Ptr = getPointerAtOffset(TM.Bits->GV->getInitializer(), | ||||
| 1005 | TM.Offset + ByteOffset, M); | ||||
| 1006 | if (!Ptr) | ||||
| 1007 | return false; | ||||
| 1008 | |||||
| 1009 | auto C = Ptr->stripPointerCasts(); | ||||
| 1010 | // Make sure this is a function or alias to a function. | ||||
| 1011 | auto Fn = dyn_cast<Function>(C); | ||||
| 1012 | auto A = dyn_cast<GlobalAlias>(C); | ||||
| 1013 | if (!Fn && A) | ||||
| 1014 | Fn = dyn_cast<Function>(A->getAliasee()); | ||||
| 1015 | |||||
| 1016 | if (!Fn) | ||||
| 1017 | return false; | ||||
| 1018 | |||||
| 1019 | if (FunctionsToSkip.match(Fn->getName())) | ||||
| 1020 | return false; | ||||
| 1021 | |||||
| 1022 | // We can disregard __cxa_pure_virtual as a possible call target, as | ||||
| 1023 | // calls to pure virtuals are UB. | ||||
| 1024 | if (Fn->getName() == "__cxa_pure_virtual") | ||||
| 1025 | continue; | ||||
| 1026 | |||||
| 1027 | // We can disregard unreachable functions as possible call targets, as | ||||
| 1028 | // unreachable functions shouldn't be called. | ||||
| 1029 | if (mustBeUnreachableFunction(Fn, ExportSummary)) | ||||
| 1030 | continue; | ||||
| 1031 | |||||
| 1032 | // Save the symbol used in the vtable to use as the devirtualization | ||||
| 1033 | // target. | ||||
| 1034 | auto GV = dyn_cast<GlobalValue>(C); | ||||
| 1035 | assert(GV)(static_cast <bool> (GV) ? void (0) : __assert_fail ("GV" , "llvm/lib/Transforms/IPO/WholeProgramDevirt.cpp", 1035, __extension__ __PRETTY_FUNCTION__)); | ||||
| 1036 | TargetsForSlot.push_back({GV, &TM}); | ||||
| 1037 | } | ||||
| 1038 | |||||
| 1039 | // Give up if we couldn't find any targets. | ||||
| 1040 | return !TargetsForSlot.empty(); | ||||
| 1041 | } | ||||
| 1042 | |||||
// Collect, from the combined summary, the possible implementations of the
// virtual call at ByteOffset for every vtable compatible with the type id.
// Returns false (conservatively give up) when a safe analysis is impossible.
bool DevirtIndex::tryFindVirtualCallTargets(
    std::vector<ValueInfo> &TargetsForSlot, const TypeIdCompatibleVtableInfo TIdInfo,
    uint64_t ByteOffset) {
  for (const TypeIdOffsetVtableInfo &P : TIdInfo) {
    // Find a representative copy of the vtable initializer.
    // We can have multiple available_externally, linkonce_odr and weak_odr
    // vtable initializers. We can also have multiple external vtable
    // initializers in the case of comdats, which we cannot check here.
    // The linker should give an error in this case.
    //
    // Also, handle the case of same-named local Vtables with the same path
    // and therefore the same GUID. This can happen if there isn't enough
    // distinguishing path when compiling the source file. In that case we
    // conservatively return false early.
    const GlobalVarSummary *VS = nullptr;
    bool LocalFound = false;
    for (const auto &S : P.VTableVI.getSummaryList()) {
      if (GlobalValue::isLocalLinkage(S->linkage())) {
        // A second local copy under the same GUID is ambiguous — bail out.
        if (LocalFound)
          return false;
        LocalFound = true;
      }
      auto *CurVS = cast<GlobalVarSummary>(S->getBaseObject());
      if (!CurVS->vTableFuncs().empty() ||
          // Previously clang did not attach the necessary type metadata to
          // available_externally vtables, in which case there would not
          // be any vtable functions listed in the summary and we need
          // to treat this case conservatively (in case the bitcode is old).
          // However, we will also not have any vtable functions in the
          // case of a pure virtual base class. In that case we do want
          // to set VS to avoid treating it conservatively.
          !GlobalValue::isAvailableExternallyLinkage(S->linkage())) {
        VS = CurVS;
        // We cannot perform whole program devirtualization analysis on a vtable
        // with public LTO visibility.
        if (VS->getVCallVisibility() == GlobalObject::VCallVisibilityPublic)
          return false;
      }
    }
    // There will be no VS if all copies are available_externally having no
    // type metadata. In that case we can't safely perform WPD.
    if (!VS)
      return false;
    // Dead vtables contribute no call targets.
    if (!VS->isLive())
      continue;
    for (auto VTP : VS->vTableFuncs()) {
      // Only the entry at the exact byte offset being called through counts.
      if (VTP.VTableOffset != P.AddressPointOffset + ByteOffset)
        continue;

      // Provably-unreachable implementations can never actually be called.
      if (mustBeUnreachableFunction(VTP.FuncVI))
        continue;

      TargetsForSlot.push_back(VTP.FuncVI);
    }
  }

  // Give up if we couldn't find any targets.
  return !TargetsForSlot.empty();
}
| 1102 | |||||
// Rewrite every virtual call site in SlotInfo to call TheFn directly,
// honoring the -wholeprogramdevirt-check mode (trap / fallback / none).
// Sets IsExported if any rewritten call-site group is exported in the
// summary, meaning TheFn must remain visible across modules.
void DevirtModule::applySingleImplDevirt(VTableSlotInfo &SlotInfo,
                                         Constant *TheFn, bool &IsExported) {
  // Don't devirtualize function if we're told to skip it
  // in -wholeprogramdevirt-skip.
  if (FunctionsToSkip.match(TheFn->stripPointerCasts()->getName()))
    return;
  auto Apply = [&](CallSiteInfo &CSInfo) {
    for (auto &&VCallSite : CSInfo.CallSites) {
      // A call may appear under multiple slots when vtable loads are
      // coalesced; only optimize it once (see OptimizedCalls).
      if (!OptimizedCalls.insert(&VCallSite.CB).second)
        continue;

      if (RemarksEnabled)
        VCallSite.emitRemark("single-impl",
                             TheFn->stripPointerCasts()->getName(), OREGetter);
      NumSingleImpl++;
      auto &CB = VCallSite.CB;
      assert(!CB.getCalledFunction() && "devirtualizing direct call?");
      IRBuilder<> Builder(&CB);
      // Cast the target to the call's expected function pointer type.
      Value *Callee =
          Builder.CreateBitCast(TheFn, CB.getCalledOperand()->getType());

      // If trap checking is enabled, add support to compare the virtual
      // function pointer to the devirtualized target. In case of a mismatch,
      // perform a debug trap.
      if (DevirtCheckMode == WPDCheckMode::Trap) {
        auto *Cond = Builder.CreateICmpNE(CB.getCalledOperand(), Callee);
        Instruction *ThenTerm =
            SplitBlockAndInsertIfThen(Cond, &CB, /*Unreachable=*/false);
        Builder.SetInsertPoint(ThenTerm);
        Function *TrapFn = Intrinsic::getDeclaration(&M, Intrinsic::debugtrap);
        auto *CallTrap = Builder.CreateCall(TrapFn);
        CallTrap->setDebugLoc(CB.getDebugLoc());
      }

      // If fallback checking is enabled, add support to compare the virtual
      // function pointer to the devirtualized target. In case of a mismatch,
      // fall back to indirect call.
      if (DevirtCheckMode == WPDCheckMode::Fallback) {
        // Heavily weight the direct path as the expected one.
        MDNode *Weights =
            MDBuilder(M.getContext()).createBranchWeights((1U << 20) - 1, 1);
        // Version the indirect call site. If the called value is equal to the
        // given callee, 'NewInst' will be executed, otherwise the original call
        // site will be executed.
        CallBase &NewInst = versionCallSite(CB, Callee, Weights);
        NewInst.setCalledOperand(Callee);
        // Since the new call site is direct, we must clear metadata that
        // is only appropriate for indirect calls. This includes !prof and
        // !callees metadata.
        NewInst.setMetadata(LLVMContext::MD_prof, nullptr);
        NewInst.setMetadata(LLVMContext::MD_callees, nullptr);
        // Additionally, we should remove them from the fallback indirect call,
        // so that we don't attempt to perform indirect call promotion later.
        CB.setMetadata(LLVMContext::MD_prof, nullptr);
        CB.setMetadata(LLVMContext::MD_callees, nullptr);
      }

      // In either trapping or non-checking mode, devirtualize original call.
      else {
        // Devirtualize unconditionally.
        CB.setCalledOperand(Callee);
        // Since the call site is now direct, we must clear metadata that
        // is only appropriate for indirect calls. This includes !prof and
        // !callees metadata.
        CB.setMetadata(LLVMContext::MD_prof, nullptr);
        CB.setMetadata(LLVMContext::MD_callees, nullptr);
      }

      // This use is no longer unsafe.
      if (VCallSite.NumUnsafeUses)
        --*VCallSite.NumUnsafeUses;
    }
    if (CSInfo.isExported())
      IsExported = true;
    CSInfo.markDevirt();
  };
  // Apply to the unconditional call-site group and to every
  // constant-arguments group.
  Apply(SlotInfo.CSInfo);
  for (auto &P : SlotInfo.ConstCSInfo)
    Apply(P.second);
}
| 1182 | |||||
| 1183 | static bool AddCalls(VTableSlotInfo &SlotInfo, const ValueInfo &Callee) { | ||||
| 1184 | // We can't add calls if we haven't seen a definition | ||||
| 1185 | if (Callee.getSummaryList().empty()) | ||||
| 1186 | return false; | ||||
| 1187 | |||||
| 1188 | // Insert calls into the summary index so that the devirtualized targets | ||||
| 1189 | // are eligible for import. | ||||
| 1190 | // FIXME: Annotate type tests with hotness. For now, mark these as hot | ||||
| 1191 | // to better ensure we have the opportunity to inline them. | ||||
| 1192 | bool IsExported = false; | ||||
| 1193 | auto &S = Callee.getSummaryList()[0]; | ||||
| 1194 | CalleeInfo CI(CalleeInfo::HotnessType::Hot, /* RelBF = */ 0); | ||||
| 1195 | auto AddCalls = [&](CallSiteInfo &CSInfo) { | ||||
| 1196 | for (auto *FS : CSInfo.SummaryTypeCheckedLoadUsers) { | ||||
| 1197 | FS->addCall({Callee, CI}); | ||||
| 1198 | IsExported |= S->modulePath() != FS->modulePath(); | ||||
| 1199 | } | ||||
| 1200 | for (auto *FS : CSInfo.SummaryTypeTestAssumeUsers) { | ||||
| 1201 | FS->addCall({Callee, CI}); | ||||
| 1202 | IsExported |= S->modulePath() != FS->modulePath(); | ||||
| 1203 | } | ||||
| 1204 | }; | ||||
| 1205 | AddCalls(SlotInfo.CSInfo); | ||||
| 1206 | for (auto &P : SlotInfo.ConstCSInfo) | ||||
| 1207 | AddCalls(P.second); | ||||
| 1208 | return IsExported; | ||||
| 1209 | } | ||||
| 1210 | |||||
// Attempt single-implementation devirtualization for one vtable slot: if
// every target in TargetsForSlot resolves to the same function, rewrite all
// recorded call sites to call it directly. Returns true iff a resolution was
// exported (recorded in *Res) for use by other modules during ThinLTO.
bool DevirtModule::trySingleImplDevirt(
    ModuleSummaryIndex *ExportSummary,
    MutableArrayRef<VirtualCallTarget> TargetsForSlot, VTableSlotInfo &SlotInfo,
    WholeProgramDevirtResolution *Res) {
  // See if the program contains a single implementation of this virtual
  // function.
  auto *TheFn = TargetsForSlot[0].Fn;
  for (auto &&Target : TargetsForSlot)
    if (TheFn != Target.Fn)
      return false;

  // If so, update each call site to call that implementation directly.
  // WasDevirt is tracked only for remarks/statistics.
  if (RemarksEnabled || AreStatisticsEnabled())
    TargetsForSlot[0].WasDevirt = true;

  bool IsExported = false;
  applySingleImplDevirt(SlotInfo, TheFn, IsExported);
  if (!IsExported)
    return false;

  // If the only implementation has local linkage, we must promote to external
  // to make it visible to thin LTO objects. We can only get here during the
  // ThinLTO export phase.
  if (TheFn->hasLocalLinkage()) {
    std::string NewName = (TheFn->getName() + ".llvm.merged").str();

    // Since we are renaming the function, any comdats with the same name must
    // also be renamed. This is required when targeting COFF, as the comdat name
    // must match one of the names of the symbols in the comdat.
    if (Comdat *C = TheFn->getComdat()) {
      if (C->getName() == TheFn->getName()) {
        Comdat *NewC = M.getOrInsertComdat(NewName);
        NewC->setSelectionKind(C->getSelectionKind());
        for (GlobalObject &GO : M.global_objects())
          if (GO.getComdat() == C)
            GO.setComdat(NewC);
      }
    }

    TheFn->setLinkage(GlobalValue::ExternalLinkage);
    TheFn->setVisibility(GlobalValue::HiddenVisibility);
    TheFn->setName(NewName);
  }
  if (ValueInfo TheFnVI = ExportSummary->getValueInfo(TheFn->getGUID()))
    // Any needed promotion of 'TheFn' has already been done during
    // LTO unit split, so we can ignore return value of AddCalls.
    AddCalls(SlotInfo, TheFnVI);

  // NOTE(review): ExportSummary and Res are dereferenced unconditionally past
  // this point. That relies on the invariant that IsExported only becomes
  // true during the summary-export phase, where both are non-null — confirm
  // against the caller in DevirtModule::run.
  Res->TheKind = WholeProgramDevirtResolution::SingleImpl;
  Res->SingleImplName = std::string(TheFn->getName());

  return true;
}
| 1264 | |||||
// Summary-index (thin-link) variant of single-implementation
// devirtualization: decide purely from summaries whether this slot has a
// unique target, record the resolution in *Res for the ThinLTO import step,
// and track exported GUIDs / local targets for later phases.
bool DevirtIndex::trySingleImplDevirt(MutableArrayRef<ValueInfo> TargetsForSlot,
                                      VTableSlotSummary &SlotSummary,
                                      VTableSlotInfo &SlotInfo,
                                      WholeProgramDevirtResolution *Res,
                                      std::set<ValueInfo> &DevirtTargets) {
  // See if the program contains a single implementation of this virtual
  // function.
  auto TheFn = TargetsForSlot[0];
  for (auto &&Target : TargetsForSlot)
    if (TheFn != Target)
      return false;

  // Don't devirtualize if we don't have target definition.
  auto Size = TheFn.getSummaryList().size();
  if (!Size)
    return false;

  // Don't devirtualize function if we're told to skip it
  // in -wholeprogramdevirt-skip.
  if (FunctionsToSkip.match(TheFn.name()))
    return false;

  // If the summary list contains multiple summaries where at least one is
  // a local, give up, as we won't know which (possibly promoted) name to use.
  for (const auto &S : TheFn.getSummaryList())
    if (GlobalValue::isLocalLinkage(S->linkage()) && Size > 1)
      return false;

  // Collect functions devirtualized at least for one call site for stats.
  if (PrintSummaryDevirt || AreStatisticsEnabled())
    DevirtTargets.insert(TheFn);

  auto &S = TheFn.getSummaryList()[0];
  bool IsExported = AddCalls(SlotInfo, TheFn);
  if (IsExported)
    ExportedGUIDs.insert(TheFn.getGUID());

  // Record in summary for use in devirtualization during the ThinLTO import
  // step.
  Res->TheKind = WholeProgramDevirtResolution::SingleImpl;
  if (GlobalValue::isLocalLinkage(S->linkage())) {
    if (IsExported)
      // If target is a local function and we are exporting it by
      // devirtualizing a call in another module, we need to record the
      // promoted name.
      Res->SingleImplName = ModuleSummaryIndex::getGlobalNameForLocal(
          TheFn.name(), ExportSummary.getModuleHash(S->modulePath()));
    else {
      // Remember non-exported locals so the import phase can still resolve
      // them within their own module.
      LocalWPDTargetsMap[TheFn].push_back(SlotSummary);
      Res->SingleImplName = std::string(TheFn.name());
    }
  } else
    Res->SingleImplName = std::string(TheFn.name());

  // Name will be empty if this thin link driven off of serialized combined
  // index (e.g. llvm-lto). However, WPD is not supported/invoked for the
  // legacy LTO API anyway.
  assert(!Res->SingleImplName.empty());

  return true;
}
| 1326 | |||||
| 1327 | void DevirtModule::tryICallBranchFunnel( | ||||
| 1328 | MutableArrayRef<VirtualCallTarget> TargetsForSlot, VTableSlotInfo &SlotInfo, | ||||
| 1329 | WholeProgramDevirtResolution *Res, VTableSlot Slot) { | ||||
| 1330 | Triple T(M.getTargetTriple()); | ||||
| 1331 | if (T.getArch() != Triple::x86_64) | ||||
| 1332 | return; | ||||
| 1333 | |||||
| 1334 | if (TargetsForSlot.size() > ClThreshold) | ||||
| 1335 | return; | ||||
| 1336 | |||||
| 1337 | bool HasNonDevirt = !SlotInfo.CSInfo.AllCallSitesDevirted; | ||||
| 1338 | if (!HasNonDevirt
| ||||
| 1339 | for (auto &P : SlotInfo.ConstCSInfo) | ||||
| 1340 | if (!P.second.AllCallSitesDevirted) { | ||||
| 1341 | HasNonDevirt = true; | ||||
| 1342 | break; | ||||
| 1343 | } | ||||
| 1344 | |||||
| 1345 | if (!HasNonDevirt
| ||||
| 1346 | return; | ||||
| 1347 | |||||
| 1348 | FunctionType *FT = | ||||
| 1349 | FunctionType::get(Type::getVoidTy(M.getContext()), {Int8PtrTy}, true); | ||||
| 1350 | Function *JT; | ||||
| 1351 | if (isa<MDString>(Slot.TypeID)) { | ||||
| 1352 | JT = Function::Create(FT, Function::ExternalLinkage, | ||||
| 1353 | M.getDataLayout().getProgramAddressSpace(), | ||||
| 1354 | getGlobalName(Slot, {}, "branch_funnel"), &M); | ||||
| 1355 | JT->setVisibility(GlobalValue::HiddenVisibility); | ||||
| 1356 | } else { | ||||
| 1357 | JT = Function::Create(FT, Function::InternalLinkage, | ||||
| 1358 | M.getDataLayout().getProgramAddressSpace(), | ||||
| 1359 | "branch_funnel", &M); | ||||
| 1360 | } | ||||
| 1361 | JT->addParamAttr(0, Attribute::Nest); | ||||
| 1362 | |||||
| 1363 | std::vector<Value *> JTArgs; | ||||
| 1364 | JTArgs.push_back(JT->arg_begin()); | ||||
| 1365 | for (auto &T : TargetsForSlot) { | ||||
| 1366 | JTArgs.push_back(getMemberAddr(T.TM)); | ||||
| 1367 | JTArgs.push_back(T.Fn); | ||||
| 1368 | } | ||||
| 1369 | |||||
| 1370 | BasicBlock *BB = BasicBlock::Create(M.getContext(), "", JT, nullptr); | ||||
| 1371 | Function *Intr = | ||||
| 1372 | Intrinsic::getDeclaration(&M, llvm::Intrinsic::icall_branch_funnel, {}); | ||||
| 1373 | |||||
| 1374 | auto *CI = CallInst::Create(Intr, JTArgs, "", BB); | ||||
| 1375 | CI->setTailCallKind(CallInst::TCK_MustTail); | ||||
| 1376 | ReturnInst::Create(M.getContext(), nullptr, BB); | ||||
| 1377 | |||||
| 1378 | bool IsExported = false; | ||||
| 1379 | applyICallBranchFunnel(SlotInfo, JT, IsExported); | ||||
| 1380 | if (IsExported
| ||||
| 1381 | Res->TheKind = WholeProgramDevirtResolution::BranchFunnel; | ||||
| |||||
| 1382 | } | ||||
| 1383 | |||||
// Rewrite the recorded virtual call sites for this slot to route through the
// branch funnel \p JT: each eligible call becomes a call to JT with the
// vtable address prepended as a 'nest' argument. Sets IsExported if any
// rewritten call-site group is visible to other modules.
void DevirtModule::applyICallBranchFunnel(VTableSlotInfo &SlotInfo,
                                          Constant *JT, bool &IsExported) {
  auto Apply = [&](CallSiteInfo &CSInfo) {
    if (CSInfo.isExported())
      IsExported = true;
    // Nothing to rewrite if every call was already devirtualized.
    if (CSInfo.AllCallSitesDevirted)
      return;

    // Map from original call to its funnel replacement; replacement/erasure
    // is deferred until after the loop so duplicate records stay valid.
    std::map<CallBase *, CallBase *> CallBases;
    for (auto &&VCallSite : CSInfo.CallSites) {
      CallBase &CB = VCallSite.CB;

      if (CallBases.find(&CB) != CallBases.end()) {
        // When finding devirtualizable calls, it's possible to find the same
        // vtable passed to multiple llvm.type.test or llvm.type.checked.load
        // calls, which can cause duplicate call sites to be recorded in
        // [Const]CallSites. If we've already found one of these
        // call instances, just ignore it. It will be replaced later.
        continue;
      }

      // Jump tables are only profitable if the retpoline mitigation is enabled.
      Attribute FSAttr = CB.getCaller()->getFnAttribute("target-features");
      if (!FSAttr.isValid() ||
          !FSAttr.getValueAsString().contains("+retpoline"))
        continue;

      NumBranchFunnel++;
      if (RemarksEnabled)
        VCallSite.emitRemark("branch-funnel",
                             JT->stripPointerCasts()->getName(), OREGetter);

      // Pass the address of the vtable in the nest register, which is r10 on
      // x86_64.
      std::vector<Type *> NewArgs;
      NewArgs.push_back(Int8PtrTy);
      append_range(NewArgs, CB.getFunctionType()->params());
      FunctionType *NewFT =
          FunctionType::get(CB.getFunctionType()->getReturnType(), NewArgs,
                            CB.getFunctionType()->isVarArg());
      PointerType *NewFTPtr = PointerType::getUnqual(NewFT);

      // Build the replacement call: funnel(vtable, original args...).
      IRBuilder<> IRB(&CB);
      std::vector<Value *> Args;
      Args.push_back(IRB.CreateBitCast(VCallSite.VTable, Int8PtrTy));
      llvm::append_range(Args, CB.args());

      // Preserve the call/invoke distinction (invokes keep their unwind
      // edges) and the calling convention.
      CallBase *NewCS = nullptr;
      if (isa<CallInst>(CB))
        NewCS = IRB.CreateCall(NewFT, IRB.CreateBitCast(JT, NewFTPtr), Args);
      else
        NewCS = IRB.CreateInvoke(NewFT, IRB.CreateBitCast(JT, NewFTPtr),
                                 cast<InvokeInst>(CB).getNormalDest(),
                                 cast<InvokeInst>(CB).getUnwindDest(), Args);
      NewCS->setCallingConv(CB.getCallingConv());

      // Re-apply the original attributes, shifted one parameter right to
      // make room for the new leading 'nest' argument.
      AttributeList Attrs = CB.getAttributes();
      std::vector<AttributeSet> NewArgAttrs;
      NewArgAttrs.push_back(AttributeSet::get(
          M.getContext(), ArrayRef<Attribute>{Attribute::get(
                              M.getContext(), Attribute::Nest)}));
      for (unsigned I = 0; I + 2 < Attrs.getNumAttrSets(); ++I)
        NewArgAttrs.push_back(Attrs.getParamAttrs(I));
      NewCS->setAttributes(
          AttributeList::get(M.getContext(), Attrs.getFnAttrs(),
                             Attrs.getRetAttrs(), NewArgAttrs));

      CallBases[&CB] = NewCS;

      // This use is no longer unsafe.
      if (VCallSite.NumUnsafeUses)
        --*VCallSite.NumUnsafeUses;
    }
    // Don't mark as devirtualized because there may be callers compiled without
    // retpoline mitigation, which would mean that they are lowered to
    // llvm.type.test and therefore require an llvm.type.test resolution for the
    // type identifier.

    std::for_each(CallBases.begin(), CallBases.end(), [](auto &CBs) {
      CBs.first->replaceAllUsesWith(CBs.second);
      CBs.first->eraseFromParent();
    });
  };
  Apply(SlotInfo.CSInfo);
  for (auto &P : SlotInfo.ConstCSInfo)
    Apply(P.second);
}
| 1471 | |||||
| 1472 | bool DevirtModule::tryEvaluateFunctionsWithArgs( | ||||
| 1473 | MutableArrayRef<VirtualCallTarget> TargetsForSlot, | ||||
| 1474 | ArrayRef<uint64_t> Args) { | ||||
| 1475 | // Evaluate each function and store the result in each target's RetVal | ||||
| 1476 | // field. | ||||
| 1477 | for (VirtualCallTarget &Target : TargetsForSlot) { | ||||
| 1478 | // TODO: Skip for now if the vtable symbol was an alias to a function, | ||||
| 1479 | // need to evaluate whether it would be correct to analyze the aliasee | ||||
| 1480 | // function for this optimization. | ||||
| 1481 | auto Fn = dyn_cast<Function>(Target.Fn); | ||||
| 1482 | if (!Fn) | ||||
| 1483 | return false; | ||||
| 1484 | |||||
| 1485 | if (Fn->arg_size() != Args.size() + 1) | ||||
| 1486 | return false; | ||||
| 1487 | |||||
| 1488 | Evaluator Eval(M.getDataLayout(), nullptr); | ||||
| 1489 | SmallVector<Constant *, 2> EvalArgs; | ||||
| 1490 | EvalArgs.push_back( | ||||
| 1491 | Constant::getNullValue(Fn->getFunctionType()->getParamType(0))); | ||||
| 1492 | for (unsigned I = 0; I != Args.size(); ++I) { | ||||
| 1493 | auto *ArgTy = | ||||
| 1494 | dyn_cast<IntegerType>(Fn->getFunctionType()->getParamType(I + 1)); | ||||
| 1495 | if (!ArgTy) | ||||
| 1496 | return false; | ||||
| 1497 | EvalArgs.push_back(ConstantInt::get(ArgTy, Args[I])); | ||||
| 1498 | } | ||||
| 1499 | |||||
| 1500 | Constant *RetVal; | ||||
| 1501 | if (!Eval.EvaluateFunction(Fn, RetVal, EvalArgs) || | ||||
| 1502 | !isa<ConstantInt>(RetVal)) | ||||
| 1503 | return false; | ||||
| 1504 | Target.RetVal = cast<ConstantInt>(RetVal)->getZExtValue(); | ||||
| 1505 | } | ||||
| 1506 | return true; | ||||
| 1507 | } | ||||
| 1508 | |||||
| 1509 | void DevirtModule::applyUniformRetValOpt(CallSiteInfo &CSInfo, StringRef FnName, | ||||
| 1510 | uint64_t TheRetVal) { | ||||
| 1511 | for (auto Call : CSInfo.CallSites) { | ||||
| 1512 | if (!OptimizedCalls.insert(&Call.CB).second) | ||||
| 1513 | continue; | ||||
| 1514 | NumUniformRetVal++; | ||||
| 1515 | Call.replaceAndErase( | ||||
| 1516 | "uniform-ret-val", FnName, RemarksEnabled, OREGetter, | ||||
| 1517 | ConstantInt::get(cast<IntegerType>(Call.CB.getType()), TheRetVal)); | ||||
| 1518 | } | ||||
| 1519 | CSInfo.markDevirt(); | ||||
| 1520 | } | ||||
| 1521 | |||||
// Uniform return value optimization: if every target evaluated to the same
// constant for this argument list, replace all calls in CSInfo with that
// constant. Returns true on success.
bool DevirtModule::tryUniformRetValOpt(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot, CallSiteInfo &CSInfo,
    WholeProgramDevirtResolution::ByArg *Res) {
  // Uniform return value optimization. If all functions return the same
  // constant, replace all calls with that constant.
  uint64_t TheRetVal = TargetsForSlot[0].RetVal;
  for (const VirtualCallTarget &Target : TargetsForSlot)
    if (Target.RetVal != TheRetVal)
      return false;

  // NOTE(review): Res is dereferenced only when the call-site info is
  // exported; this assumes an exported group always comes with a non-null
  // ByArg resolution — confirm against the caller (tryVirtualConstProp).
  if (CSInfo.isExported()) {
    Res->TheKind = WholeProgramDevirtResolution::ByArg::UniformRetVal;
    Res->Info = TheRetVal;
  }

  applyUniformRetValOpt(CSInfo, TargetsForSlot[0].Fn->getName(), TheRetVal);
  // WasDevirt is tracked only for remarks/statistics.
  if (RemarksEnabled || AreStatisticsEnabled())
    for (auto &&Target : TargetsForSlot)
      Target.WasDevirt = true;
  return true;
}
| 1543 | |||||
| 1544 | std::string DevirtModule::getGlobalName(VTableSlot Slot, | ||||
| 1545 | ArrayRef<uint64_t> Args, | ||||
| 1546 | StringRef Name) { | ||||
| 1547 | std::string FullName = "__typeid_"; | ||||
| 1548 | raw_string_ostream OS(FullName); | ||||
| 1549 | OS << cast<MDString>(Slot.TypeID)->getString() << '_' << Slot.ByteOffset; | ||||
| 1550 | for (uint64_t Arg : Args) | ||||
| 1551 | OS << '_' << Arg; | ||||
| 1552 | OS << '_' << Name; | ||||
| 1553 | return OS.str(); | ||||
| 1554 | } | ||||
| 1555 | |||||
| 1556 | bool DevirtModule::shouldExportConstantsAsAbsoluteSymbols() { | ||||
| 1557 | Triple T(M.getTargetTriple()); | ||||
| 1558 | return T.isX86() && T.getObjectFormat() == Triple::ELF; | ||||
| 1559 | } | ||||
| 1560 | |||||
| 1561 | void DevirtModule::exportGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args, | ||||
| 1562 | StringRef Name, Constant *C) { | ||||
| 1563 | GlobalAlias *GA = GlobalAlias::create(Int8Ty, 0, GlobalValue::ExternalLinkage, | ||||
| 1564 | getGlobalName(Slot, Args, Name), C, &M); | ||||
| 1565 | GA->setVisibility(GlobalValue::HiddenVisibility); | ||||
| 1566 | } | ||||
| 1567 | |||||
| 1568 | void DevirtModule::exportConstant(VTableSlot Slot, ArrayRef<uint64_t> Args, | ||||
| 1569 | StringRef Name, uint32_t Const, | ||||
| 1570 | uint32_t &Storage) { | ||||
| 1571 | if (shouldExportConstantsAsAbsoluteSymbols()) { | ||||
| 1572 | exportGlobal( | ||||
| 1573 | Slot, Args, Name, | ||||
| 1574 | ConstantExpr::getIntToPtr(ConstantInt::get(Int32Ty, Const), Int8PtrTy)); | ||||
| 1575 | return; | ||||
| 1576 | } | ||||
| 1577 | |||||
| 1578 | Storage = Const; | ||||
| 1579 | } | ||||
| 1580 | |||||
| 1581 | Constant *DevirtModule::importGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args, | ||||
| 1582 | StringRef Name) { | ||||
| 1583 | Constant *C = | ||||
| 1584 | M.getOrInsertGlobal(getGlobalName(Slot, Args, Name), Int8Arr0Ty); | ||||
| 1585 | auto *GV = dyn_cast<GlobalVariable>(C); | ||||
| 1586 | if (GV) | ||||
| 1587 | GV->setVisibility(GlobalValue::HiddenVisibility); | ||||
| 1588 | return C; | ||||
| 1589 | } | ||||
| 1590 | |||||
// Import a constant previously published by exportConstant(): either a plain
// ConstantInt read from the summary's Storage field, or a ptrtoint of the
// absolute symbol declared by the exporting module.
Constant *DevirtModule::importConstant(VTableSlot Slot, ArrayRef<uint64_t> Args,
                                       StringRef Name, IntegerType *IntTy,
                                       uint32_t Storage) {
  if (!shouldExportConstantsAsAbsoluteSymbols())
    return ConstantInt::get(IntTy, Storage);

  Constant *C = importGlobal(Slot, Args, Name);
  auto *GV = cast<GlobalVariable>(C->stripPointerCasts());
  C = ConstantExpr::getPtrToInt(C, IntTy);

  // We only need to set metadata if the global is newly created, in which
  // case it would not have hidden visibility.
  if (GV->hasMetadata(LLVMContext::MD_absolute_symbol))
    return C;

  // Attach !absolute_symbol metadata describing the symbol's value range so
  // later passes can reason about the imported constant.
  auto SetAbsRange = [&](uint64_t Min, uint64_t Max) {
    auto *MinC = ConstantAsMetadata::get(ConstantInt::get(IntPtrTy, Min));
    auto *MaxC = ConstantAsMetadata::get(ConstantInt::get(IntPtrTy, Max));
    GV->setMetadata(LLVMContext::MD_absolute_symbol,
                    MDNode::get(M.getContext(), {MinC, MaxC}));
  };
  // A pointer-width constant can take any value (full set); narrower ones
  // are bounded by their bit width.
  unsigned AbsWidth = IntTy->getBitWidth();
  if (AbsWidth == IntPtrTy->getBitWidth())
    SetAbsRange(~0ull, ~0ull); // Full set.
  else
    SetAbsRange(0, 1ull << AbsWidth);
  return C;
}
| 1619 | |||||
// Unique return value optimization for i1 results: exactly one vtable's
// implementation returns the value IsOne, so each call can be rewritten as a
// comparison of the call's vtable pointer against that unique member's
// address.
void DevirtModule::applyUniqueRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
                                        bool IsOne,
                                        Constant *UniqueMemberAddr) {
  for (auto &&Call : CSInfo.CallSites) {
    // The same call can be recorded under several slots; rewrite it once.
    if (!OptimizedCalls.insert(&Call.CB).second)
      continue;
    IRBuilder<> B(&Call.CB);
    // vtable == unique member iff the result is the sought value: compare EQ
    // when looking for a 1 result, NE when looking for a 0 result.
    Value *Cmp =
        B.CreateICmp(IsOne ? ICmpInst::ICMP_EQ : ICmpInst::ICMP_NE, Call.VTable,
                     B.CreateBitCast(UniqueMemberAddr, Call.VTable->getType()));
    Cmp = B.CreateZExt(Cmp, Call.CB.getType());
    NumUniqueRetVal++;
    Call.replaceAndErase("unique-ret-val", FnName, RemarksEnabled, OREGetter,
                         Cmp);
  }
  CSInfo.markDevirt();
}
| 1637 | |||||
| 1638 | Constant *DevirtModule::getMemberAddr(const TypeMemberInfo *M) { | ||||
| 1639 | Constant *C = ConstantExpr::getBitCast(M->Bits->GV, Int8PtrTy); | ||||
| 1640 | return ConstantExpr::getGetElementPtr(Int8Ty, C, | ||||
| 1641 | ConstantInt::get(Int64Ty, M->Offset)); | ||||
| 1642 | } | ||||
| 1643 | |||||
// Unique return value optimization: for i1 returns, if exactly one vtable's
// implementation returns 0 (or exactly one returns 1), each call can be
// replaced by a vtable-address comparison. Returns true on success.
bool DevirtModule::tryUniqueRetValOpt(
    unsigned BitWidth, MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    CallSiteInfo &CSInfo, WholeProgramDevirtResolution::ByArg *Res,
    VTableSlot Slot, ArrayRef<uint64_t> Args) {
  // IsOne controls whether we look for a 0 or a 1.
  auto tryUniqueRetValOptFor = [&](bool IsOne) {
    const TypeMemberInfo *UniqueMember = nullptr;
    for (const VirtualCallTarget &Target : TargetsForSlot) {
      if (Target.RetVal == (IsOne ? 1 : 0)) {
        // More than one target returns the sought value: not unique.
        if (UniqueMember)
          return false;
        UniqueMember = Target.TM;
      }
    }

    // We should have found a unique member or bailed out by now. We already
    // checked for a uniform return value in tryUniformRetValOpt.
    assert(UniqueMember);

    Constant *UniqueMemberAddr = getMemberAddr(UniqueMember);
    // NOTE(review): Res is dereferenced only when the call-site info is
    // exported; this assumes an exported group always comes with a non-null
    // ByArg resolution — confirm against the caller (tryVirtualConstProp).
    if (CSInfo.isExported()) {
      Res->TheKind = WholeProgramDevirtResolution::ByArg::UniqueRetVal;
      Res->Info = IsOne;

      // Publish the unique member's address for importing modules.
      exportGlobal(Slot, Args, "unique_member", UniqueMemberAddr);
    }

    // Replace each call with the comparison.
    applyUniqueRetValOpt(CSInfo, TargetsForSlot[0].Fn->getName(), IsOne,
                         UniqueMemberAddr);

    // Update devirtualization statistics for targets.
    if (RemarksEnabled || AreStatisticsEnabled())
      for (auto &&Target : TargetsForSlot)
        Target.WasDevirt = true;

    return true;
  };

  // Only applicable to i1 returns; try the "unique 1" case, then "unique 0".
  if (BitWidth == 1) {
    if (tryUniqueRetValOptFor(true))
      return true;
    if (tryUniqueRetValOptFor(false))
      return true;
  }
  return false;
}
| 1691 | |||||
| 1692 | void DevirtModule::applyVirtualConstProp(CallSiteInfo &CSInfo, StringRef FnName, | ||||
| 1693 | Constant *Byte, Constant *Bit) { | ||||
| 1694 | for (auto Call : CSInfo.CallSites) { | ||||
| 1695 | if (!OptimizedCalls.insert(&Call.CB).second) | ||||
| 1696 | continue; | ||||
| 1697 | auto *RetType = cast<IntegerType>(Call.CB.getType()); | ||||
| 1698 | IRBuilder<> B(&Call.CB); | ||||
| 1699 | Value *Addr = | ||||
| 1700 | B.CreateGEP(Int8Ty, B.CreateBitCast(Call.VTable, Int8PtrTy), Byte); | ||||
| 1701 | if (RetType->getBitWidth() == 1) { | ||||
| 1702 | Value *Bits = B.CreateLoad(Int8Ty, Addr); | ||||
| 1703 | Value *BitsAndBit = B.CreateAnd(Bits, Bit); | ||||
| 1704 | auto IsBitSet = B.CreateICmpNE(BitsAndBit, ConstantInt::get(Int8Ty, 0)); | ||||
| 1705 | NumVirtConstProp1Bit++; | ||||
| 1706 | Call.replaceAndErase("virtual-const-prop-1-bit", FnName, RemarksEnabled, | ||||
| 1707 | OREGetter, IsBitSet); | ||||
| 1708 | } else { | ||||
| 1709 | Value *ValAddr = B.CreateBitCast(Addr, RetType->getPointerTo()); | ||||
| 1710 | Value *Val = B.CreateLoad(RetType, ValAddr); | ||||
| 1711 | NumVirtConstProp++; | ||||
| 1712 | Call.replaceAndErase("virtual-const-prop", FnName, RemarksEnabled, | ||||
| 1713 | OREGetter, Val); | ||||
| 1714 | } | ||||
| 1715 | } | ||||
| 1716 | CSInfo.markDevirt(); | ||||
| 1717 | } | ||||
| 1718 | |||||
// Virtual constant propagation: for each constant-argument call-site group,
// evaluate every possible callee at compile time and either (a) fold all
// calls to a uniform constant, (b) fold i1 results to a vtable-address
// comparison, or (c) store each callee's result in padding next to its
// vtable and rewrite calls as fixed-offset loads.
bool DevirtModule::tryVirtualConstProp(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot, VTableSlotInfo &SlotInfo,
    WholeProgramDevirtResolution *Res, VTableSlot Slot) {
  // TODO: Skip for now if the vtable symbol was an alias to a function,
  // need to evaluate whether it would be correct to analyze the aliasee
  // function for this optimization.
  auto Fn = dyn_cast<Function>(TargetsForSlot[0].Fn);
  if (!Fn)
    return false;
  // This only works if the function returns an integer.
  auto RetType = dyn_cast<IntegerType>(Fn->getReturnType());
  if (!RetType)
    return false;
  // Results wider than 64 bits cannot be stored alongside the vtable.
  unsigned BitWidth = RetType->getBitWidth();
  if (BitWidth > 64)
    return false;

  // Make sure that each function is defined, does not access memory, takes at
  // least one argument, does not use its first argument (which we assume is
  // 'this'), and has the same return type.
  //
  // Note that we test whether this copy of the function is readnone, rather
  // than testing function attributes, which must hold for any copy of the
  // function, even a less optimized version substituted at link time. This is
  // sound because the virtual constant propagation optimizations effectively
  // inline all implementations of the virtual function into each call site,
  // rather than using function attributes to perform local optimization.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    // TODO: Skip for now if the vtable symbol was an alias to a function,
    // need to evaluate whether it would be correct to analyze the aliasee
    // function for this optimization.
    auto Fn = dyn_cast<Function>(Target.Fn);
    if (!Fn)
      return false;

    if (Fn->isDeclaration() ||
        !computeFunctionBodyMemoryAccess(*Fn, AARGetter(*Fn))
             .doesNotAccessMemory() ||
        Fn->arg_empty() || !Fn->arg_begin()->use_empty() ||
        Fn->getReturnType() != RetType)
      return false;
  }

  for (auto &&CSByConstantArg : SlotInfo.ConstCSInfo) {
    // Skip argument lists we cannot constant-evaluate for every target.
    if (!tryEvaluateFunctionsWithArgs(TargetsForSlot, CSByConstantArg.first))
      continue;

    WholeProgramDevirtResolution::ByArg *ResByArg = nullptr;
    if (Res)
      ResByArg = &Res->ResByArg[CSByConstantArg.first];

    // Prefer the cheaper foldings before materializing stored constants.
    if (tryUniformRetValOpt(TargetsForSlot, CSByConstantArg.second, ResByArg))
      continue;

    if (tryUniqueRetValOpt(BitWidth, TargetsForSlot, CSByConstantArg.second,
                           ResByArg, Slot, CSByConstantArg.first))
      continue;

    // Find an allocation offset in bits in all vtables associated with the
    // type.
    uint64_t AllocBefore =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/false, BitWidth);
    uint64_t AllocAfter =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/true, BitWidth);

    // Calculate the total amount of padding needed to store a value at both
    // ends of the object.
    uint64_t TotalPaddingBefore = 0, TotalPaddingAfter = 0;
    for (auto &&Target : TargetsForSlot) {
      TotalPaddingBefore += std::max<int64_t>(
          (AllocBefore + 7) / 8 - Target.allocatedBeforeBytes() - 1, 0);
      TotalPaddingAfter += std::max<int64_t>(
          (AllocAfter + 7) / 8 - Target.allocatedAfterBytes() - 1, 0);
    }

    // If the amount of padding is too large, give up.
    // FIXME: do something smarter here.
    if (std::min(TotalPaddingBefore, TotalPaddingAfter) > 128)
      continue;

    // Calculate the offset to the value as a (possibly negative) byte offset
    // and (if applicable) a bit offset, and store the values in the targets.
    int64_t OffsetByte;
    uint64_t OffsetBit;
    if (TotalPaddingBefore <= TotalPaddingAfter)
      setBeforeReturnValues(TargetsForSlot, AllocBefore, BitWidth, OffsetByte,
                            OffsetBit);
    else
      setAfterReturnValues(TargetsForSlot, AllocAfter, BitWidth, OffsetByte,
                           OffsetBit);

    // WasDevirt is tracked only for remarks/statistics.
    if (RemarksEnabled || AreStatisticsEnabled())
      for (auto &&Target : TargetsForSlot)
        Target.WasDevirt = true;


    // NOTE(review): ResByArg is null whenever Res is null; the dereference
    // below assumes an exported group implies a non-null Res — same pattern
    // the analyzer flags in tryICallBranchFunnel; confirm the invariant.
    if (CSByConstantArg.second.isExported()) {
      ResByArg->TheKind = WholeProgramDevirtResolution::ByArg::VirtualConstProp;
      exportConstant(Slot, CSByConstantArg.first, "byte", OffsetByte,
                     ResByArg->Byte);
      exportConstant(Slot, CSByConstantArg.first, "bit", 1ULL << OffsetBit,
                     ResByArg->Bit);
    }

    // Rewrite each call to a load from OffsetByte/OffsetBit.
    Constant *ByteConst = ConstantInt::get(Int32Ty, OffsetByte);
    Constant *BitConst = ConstantInt::get(Int8Ty, 1ULL << OffsetBit);
    applyVirtualConstProp(CSByConstantArg.second,
                          TargetsForSlot[0].Fn->getName(), ByteConst, BitConst);
  }
  return true;
}
| 1831 | |||||
// Rebuild vtable global B so that the bytes virtual constant propagation
// allocated before and after the vtable become part of the global, while an
// alias with the original name keeps all existing references valid.
void DevirtModule::rebuildGlobal(VTableBits &B) {
  // Nothing was allocated around this vtable; leave the global untouched.
  if (B.Before.Bytes.empty() && B.After.Bytes.empty())
    return;

  // Align the before byte array to the global's minimum alignment so that we
  // don't break any alignment requirements on the global.
  Align Alignment = M.getDataLayout().getValueOrABITypeAlignment(
      B.GV->getAlign(), B.GV->getValueType());
  B.Before.Bytes.resize(alignTo(B.Before.Bytes.size(), Alignment));

  // Before was stored in reverse order; flip it now.
  for (size_t I = 0, Size = B.Before.Bytes.size(); I != Size / 2; ++I)
    std::swap(B.Before.Bytes[I], B.Before.Bytes[Size - 1 - I]);

  // Build an anonymous global containing the before bytes, followed by the
  // original initializer, followed by the after bytes.
  auto NewInit = ConstantStruct::getAnon(
      {ConstantDataArray::get(M.getContext(), B.Before.Bytes),
       B.GV->getInitializer(),
       ConstantDataArray::get(M.getContext(), B.After.Bytes)});
  auto NewGV =
      new GlobalVariable(M, NewInit->getType(), B.GV->isConstant(),
                         GlobalVariable::PrivateLinkage, NewInit, "", B.GV);
  NewGV->setSection(B.GV->getSection());
  NewGV->setComdat(B.GV->getComdat());
  NewGV->setAlignment(B.GV->getAlign());

  // Copy the original vtable's metadata to the anonymous global, adjusting
  // offsets as required.
  NewGV->copyMetadata(B.GV, B.Before.Bytes.size());

  // Build an alias named after the original global, pointing at the second
  // element (the original initializer).
  auto Alias = GlobalAlias::create(
      B.GV->getInitializer()->getType(), 0, B.GV->getLinkage(), "",
      ConstantExpr::getGetElementPtr(
          NewInit->getType(), NewGV,
          ArrayRef<Constant *>{ConstantInt::get(Int32Ty, 0),
                               ConstantInt::get(Int32Ty, 1)}),
      &M);
  Alias->setVisibility(B.GV->getVisibility());
  // takeName must precede the RAUW/erase below so the alias ends up with the
  // original vtable's name.
  Alias->takeName(B.GV);

  B.GV->replaceAllUsesWith(Alias);
  B.GV->eraseFromParent();
}
| 1878 | |||||
| 1879 | bool DevirtModule::areRemarksEnabled() { | ||||
| 1880 | const auto &FL = M.getFunctionList(); | ||||
| 1881 | for (const Function &Fn : FL) { | ||||
| 1882 | if (Fn.empty()) | ||||
| 1883 | continue; | ||||
| 1884 | auto DI = OptimizationRemark(DEBUG_TYPE"wholeprogramdevirt", "", DebugLoc(), &Fn.front()); | ||||
| 1885 | return DI.isEnabled(); | ||||
| 1886 | } | ||||
| 1887 | return false; | ||||
| 1888 | } | ||||
| 1889 | |||||
// Scan all users of llvm.type.test, recording devirtualizable call sites in
// CallSlots and erasing type test assume sequences that a later
// LowerTypeTests invocation would otherwise lower to False.
void DevirtModule::scanTypeTestUsers(
    Function *TypeTestFunc,
    DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap) {
  // Find all virtual calls via a virtual table pointer %p under an assumption
  // of the form llvm.assume(llvm.type.test(%p, %md)). This indicates that %p
  // points to a member of the type identifier %md. Group calls by (type ID,
  // offset) pair (effectively the identity of the virtual function) and store
  // to CallSlots.
  // early_inc_range because RemoveTypeTestAssumes below may erase the current
  // use's call instruction.
  for (Use &U : llvm::make_early_inc_range(TypeTestFunc->uses())) {
    auto *CI = dyn_cast<CallInst>(U.getUser());
    if (!CI)
      continue;

    // Search for virtual calls based on %p and add them to DevirtCalls.
    SmallVector<DevirtCallSite, 1> DevirtCalls;
    SmallVector<CallInst *, 1> Assumes;
    auto &DT = LookupDomTree(*CI->getFunction());
    findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, CI, DT);

    Metadata *TypeId =
        cast<MetadataAsValue>(CI->getArgOperand(1))->getMetadata();
    // If we found any, add them to CallSlots.
    if (!Assumes.empty()) {
      Value *Ptr = CI->getArgOperand(0)->stripPointerCasts();
      for (DevirtCallSite Call : DevirtCalls)
        CallSlots[{TypeId, Call.Offset}].addCallSite(Ptr, Call.CB, nullptr);
    }

    auto RemoveTypeTestAssumes = [&]() {
      // We no longer need the assumes or the type test.
      for (auto *Assume : Assumes)
        Assume->eraseFromParent();
      // We can't use RecursivelyDeleteTriviallyDeadInstructions here because we
      // may use the vtable argument later.
      if (CI->use_empty())
        CI->eraseFromParent();
    };

    // At this point we could remove all type test assume sequences, as they
    // were originally inserted for WPD. However, we can keep these in the
    // code stream for later analysis (e.g. to help drive more efficient ICP
    // sequences). They will eventually be removed by a second LowerTypeTests
    // invocation that cleans them up. In order to do this correctly, the first
    // LowerTypeTests invocation needs to know that they have "Unknown" type
    // test resolution, so that they aren't treated as Unsat and lowered to
    // False, which will break any uses on assumes. Below we remove any type
    // test assumes that will not be treated as Unknown by LTT.

    // The type test assumes will be treated by LTT as Unsat if the type id is
    // not used on a global (in which case it has no entry in the TypeIdMap).
    if (!TypeIdMap.count(TypeId))
      RemoveTypeTestAssumes();

    // For ThinLTO importing, we need to remove the type test assumes if this is
    // an MDString type id without a corresponding TypeIdSummary. Any
    // non-MDString type ids are ignored and treated as Unknown by LTT, so their
    // type test assumes can be kept. If the MDString type id is missing a
    // TypeIdSummary (e.g. because there was no use on a vcall, preventing the
    // exporting phase of WPD from analyzing it), then it would be treated as
    // Unsat by LTT and we need to remove its type test assumes here. If not
    // used on a vcall we don't need them for later optimization use in any
    // case.
    else if (ImportSummary && isa<MDString>(TypeId)) {
      const TypeIdSummary *TidSummary =
          ImportSummary->getTypeIdSummary(cast<MDString>(TypeId)->getString());
      if (!TidSummary)
        RemoveTypeTestAssumes();
      else
        // If one was created it should not be Unsat, because if we reached here
        // the type id was used on a global.
        assert(TidSummary->TTRes.TheKind != TypeTestResolution::Unsat);
    }
  }
}
| 1964 | |||||
// Lower each llvm.type.checked.load to an explicit vtable load plus a
// llvm.type.test, recording the devirtualizable call sites in CallSlots so
// the load/test can be eliminated later if devirtualization succeeds.
void DevirtModule::scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc) {
  Function *TypeTestFunc = Intrinsic::getDeclaration(&M, Intrinsic::type_test);

  // early_inc_range because the intrinsic call is erased at the bottom of the
  // loop body.
  for (Use &U : llvm::make_early_inc_range(TypeCheckedLoadFunc->uses())) {
    auto *CI = dyn_cast<CallInst>(U.getUser());
    if (!CI)
      continue;

    // Operands: vtable pointer, byte offset of the slot, and the type id.
    Value *Ptr = CI->getArgOperand(0);
    Value *Offset = CI->getArgOperand(1);
    Value *TypeIdValue = CI->getArgOperand(2);
    Metadata *TypeId = cast<MetadataAsValue>(TypeIdValue)->getMetadata();

    SmallVector<DevirtCallSite, 1> DevirtCalls;
    SmallVector<Instruction *, 1> LoadedPtrs;
    SmallVector<Instruction *, 1> Preds;
    bool HasNonCallUses = false;
    auto &DT = LookupDomTree(*CI->getFunction());
    findDevirtualizableCallsForTypeCheckedLoad(DevirtCalls, LoadedPtrs, Preds,
                                               HasNonCallUses, CI, DT);

    // Start by generating "pessimistic" code that explicitly loads the function
    // pointer from the vtable and performs the type check. If possible, we will
    // eliminate the load and the type check later.

    // If possible, only generate the load at the point where it is used.
    // This helps avoid unnecessary spills.
    IRBuilder<> LoadB(
        (LoadedPtrs.size() == 1 && !HasNonCallUses) ? LoadedPtrs[0] : CI);
    Value *GEP = LoadB.CreateGEP(Int8Ty, Ptr, Offset);
    Value *GEPPtr = LoadB.CreateBitCast(GEP, PointerType::getUnqual(Int8PtrTy));
    Value *LoadedValue = LoadB.CreateLoad(Int8PtrTy, GEPPtr);

    // Replace every extractvalue {0} (the loaded pointer) with the new load.
    for (Instruction *LoadedPtr : LoadedPtrs) {
      LoadedPtr->replaceAllUsesWith(LoadedValue);
      LoadedPtr->eraseFromParent();
    }

    // Likewise for the type test.
    IRBuilder<> CallB((Preds.size() == 1 && !HasNonCallUses) ? Preds[0] : CI);
    CallInst *TypeTestCall = CallB.CreateCall(TypeTestFunc, {Ptr, TypeIdValue});

    // Replace every extractvalue {1} (the check result) with the new test.
    for (Instruction *Pred : Preds) {
      Pred->replaceAllUsesWith(TypeTestCall);
      Pred->eraseFromParent();
    }

    // We have already erased any extractvalue instructions that refer to the
    // intrinsic call, but the intrinsic may have other non-extractvalue uses
    // (although this is unlikely). In that case, explicitly build a pair and
    // RAUW it.
    if (!CI->use_empty()) {
      Value *Pair = PoisonValue::get(CI->getType());
      IRBuilder<> B(CI);
      Pair = B.CreateInsertValue(Pair, LoadedValue, {0});
      Pair = B.CreateInsertValue(Pair, TypeTestCall, {1});
      CI->replaceAllUsesWith(Pair);
    }

    // The number of unsafe uses is initially the number of uses.
    auto &NumUnsafeUses = NumUnsafeUsesForTypeTest[TypeTestCall];
    NumUnsafeUses = DevirtCalls.size();

    // If the function pointer has a non-call user, we cannot eliminate the type
    // check, as one of those users may eventually call the pointer. Increment
    // the unsafe use count to make sure it cannot reach zero.
    if (HasNonCallUses)
      ++NumUnsafeUses;
    for (DevirtCallSite Call : DevirtCalls) {
      CallSlots[{TypeId, Call.Offset}].addCallSite(Ptr, Call.CB,
                                                   &NumUnsafeUses);
    }

    CI->eraseFromParent();
  }
}
| 2041 | |||||
// ThinLTO import phase: apply the devirtualization resolutions that the
// exporting (thin link) phase recorded in the summary index for this vtable
// slot to the call sites collected in SlotInfo.
void DevirtModule::importResolution(VTableSlot Slot, VTableSlotInfo &SlotInfo) {
  // Only MDString type ids carry summary resolutions.
  auto *TypeId = dyn_cast<MDString>(Slot.TypeID);
  if (!TypeId)
    return;
  const TypeIdSummary *TidSummary =
      ImportSummary->getTypeIdSummary(TypeId->getString());
  if (!TidSummary)
    return;
  auto ResI = TidSummary->WPDRes.find(Slot.ByteOffset);
  if (ResI == TidSummary->WPDRes.end())
    return;
  const WholeProgramDevirtResolution &Res = ResI->second;

  if (Res.TheKind == WholeProgramDevirtResolution::SingleImpl) {
    assert(!Res.SingleImplName.empty());
    // The type of the function in the declaration is irrelevant because every
    // call site will cast it to the correct type.
    Constant *SingleImpl =
        cast<Constant>(M.getOrInsertFunction(Res.SingleImplName,
                                             Type::getVoidTy(M.getContext()))
                           .getCallee());

    // This is the import phase so we should not be exporting anything.
    bool IsExported = false;
    applySingleImplDevirt(SlotInfo, SingleImpl, IsExported);
    assert(!IsExported);
  }

  // Apply any per-constant-argument resolutions (virtual constant propagation
  // family) to the matching grouped call sites.
  for (auto &CSByConstantArg : SlotInfo.ConstCSInfo) {
    auto I = Res.ResByArg.find(CSByConstantArg.first);
    if (I == Res.ResByArg.end())
      continue;
    auto &ResByArg = I->second;
    // FIXME: We should figure out what to do about the "function name" argument
    // to the apply* functions, as the function names are unavailable during the
    // importing phase. For now we just pass the empty string. This does not
    // impact correctness because the function names are just used for remarks.
    switch (ResByArg.TheKind) {
    case WholeProgramDevirtResolution::ByArg::UniformRetVal:
      applyUniformRetValOpt(CSByConstantArg.second, "", ResByArg.Info);
      break;
    case WholeProgramDevirtResolution::ByArg::UniqueRetVal: {
      Constant *UniqueMemberAddr =
          importGlobal(Slot, CSByConstantArg.first, "unique_member");
      applyUniqueRetValOpt(CSByConstantArg.second, "", ResByArg.Info,
                           UniqueMemberAddr);
      break;
    }
    case WholeProgramDevirtResolution::ByArg::VirtualConstProp: {
      Constant *Byte = importConstant(Slot, CSByConstantArg.first, "byte",
                                      Int32Ty, ResByArg.Byte);
      Constant *Bit = importConstant(Slot, CSByConstantArg.first, "bit", Int8Ty,
                                     ResByArg.Bit);
      applyVirtualConstProp(CSByConstantArg.second, "", Byte, Bit);
      break;
    }
    default:
      // Indirect (no resolution): nothing to apply.
      break;
    }
  }

  if (Res.TheKind == WholeProgramDevirtResolution::BranchFunnel) {
    // The type of the function is irrelevant, because it's bitcast at calls
    // anyhow.
    Constant *JT = cast<Constant>(
        M.getOrInsertFunction(getGlobalName(Slot, {}, "branch_funnel"),
                              Type::getVoidTy(M.getContext()))
            .getCallee());
    bool IsExported = false;
    applyICallBranchFunnel(SlotInfo, JT, IsExported);
    assert(!IsExported);
  }
}
| 2115 | |||||
| 2116 | void DevirtModule::removeRedundantTypeTests() { | ||||
| 2117 | auto True = ConstantInt::getTrue(M.getContext()); | ||||
| 2118 | for (auto &&U : NumUnsafeUsesForTypeTest) { | ||||
| 2119 | if (U.second == 0) { | ||||
| 2120 | U.first->replaceAllUsesWith(True); | ||||
| 2121 | U.first->eraseFromParent(); | ||||
| 2122 | } | ||||
| 2123 | } | ||||
| 2124 | } | ||||
| 2125 | |||||
| 2126 | ValueInfo | ||||
| 2127 | DevirtModule::lookUpFunctionValueInfo(Function *TheFn, | ||||
| 2128 | ModuleSummaryIndex *ExportSummary) { | ||||
| 2129 | assert((ExportSummary != nullptr) &&(static_cast <bool> ((ExportSummary != nullptr) && "Caller guarantees ExportSummary is not nullptr") ? void (0) : __assert_fail ("(ExportSummary != nullptr) && \"Caller guarantees ExportSummary is not nullptr\"" , "llvm/lib/Transforms/IPO/WholeProgramDevirt.cpp", 2130, __extension__ __PRETTY_FUNCTION__)) | ||||
| 2130 | "Caller guarantees ExportSummary is not nullptr")(static_cast <bool> ((ExportSummary != nullptr) && "Caller guarantees ExportSummary is not nullptr") ? void (0) : __assert_fail ("(ExportSummary != nullptr) && \"Caller guarantees ExportSummary is not nullptr\"" , "llvm/lib/Transforms/IPO/WholeProgramDevirt.cpp", 2130, __extension__ __PRETTY_FUNCTION__)); | ||||
| 2131 | |||||
| 2132 | const auto TheFnGUID = TheFn->getGUID(); | ||||
| 2133 | const auto TheFnGUIDWithExportedName = GlobalValue::getGUID(TheFn->getName()); | ||||
| 2134 | // Look up ValueInfo with the GUID in the current linkage. | ||||
| 2135 | ValueInfo TheFnVI = ExportSummary->getValueInfo(TheFnGUID); | ||||
| 2136 | // If no entry is found and GUID is different from GUID computed using | ||||
| 2137 | // exported name, look up ValueInfo with the exported name unconditionally. | ||||
| 2138 | // This is a fallback. | ||||
| 2139 | // | ||||
| 2140 | // The reason to have a fallback: | ||||
| 2141 | // 1. LTO could enable global value internalization via | ||||
| 2142 | // `enable-lto-internalization`. | ||||
| 2143 | // 2. The GUID in ExportedSummary is computed using exported name. | ||||
| 2144 | if ((!TheFnVI) && (TheFnGUID != TheFnGUIDWithExportedName)) { | ||||
| 2145 | TheFnVI = ExportSummary->getValueInfo(TheFnGUIDWithExportedName); | ||||
| 2146 | } | ||||
| 2147 | return TheFnVI; | ||||
| 2148 | } | ||||
| 2149 | |||||
| 2150 | bool DevirtModule::mustBeUnreachableFunction( | ||||
| 2151 | Function *const F, ModuleSummaryIndex *ExportSummary) { | ||||
| 2152 | // First, learn unreachability by analyzing function IR. | ||||
| 2153 | if (!F->isDeclaration()) { | ||||
| 2154 | // A function must be unreachable if its entry block ends with an | ||||
| 2155 | // 'unreachable'. | ||||
| 2156 | return isa<UnreachableInst>(F->getEntryBlock().getTerminator()); | ||||
| 2157 | } | ||||
| 2158 | // Learn unreachability from ExportSummary if ExportSummary is present. | ||||
| 2159 | return ExportSummary && | ||||
| 2160 | ::mustBeUnreachableFunction( | ||||
| 2161 | DevirtModule::lookUpFunctionValueInfo(F, ExportSummary)); | ||||
| 2162 | } | ||||
| 2163 | |||||
// Pass driver for in-module WPD. Collects call slots from the IR (and, when
// exporting, from function summaries), attempts the devirtualization
// optimizations per slot, emits remarks/stats, and cleans up the type
// intrinsics and vcall_visibility metadata.
bool DevirtModule::run() {
  // If only some of the modules were split, we cannot correctly perform
  // this transformation. We already checked for the presense of type tests
  // with partially split modules during the thin link, and would have emitted
  // an error if any were found, so here we can simply return.
  if ((ExportSummary && ExportSummary->partiallySplitLTOUnits()) ||
      (ImportSummary && ImportSummary->partiallySplitLTOUnits()))
    return false;

  Function *TypeTestFunc =
      M.getFunction(Intrinsic::getName(Intrinsic::type_test));
  Function *TypeCheckedLoadFunc =
      M.getFunction(Intrinsic::getName(Intrinsic::type_checked_load));
  Function *AssumeFunc = M.getFunction(Intrinsic::getName(Intrinsic::assume));

  // Normally if there are no users of the devirtualization intrinsics in the
  // module, this pass has nothing to do. But if we are exporting, we also need
  // to handle any users that appear only in the function summaries.
  if (!ExportSummary &&
      (!TypeTestFunc || TypeTestFunc->use_empty() || !AssumeFunc ||
       AssumeFunc->use_empty()) &&
      (!TypeCheckedLoadFunc || TypeCheckedLoadFunc->use_empty()))
    return false;

  // Rebuild type metadata into a map for easy lookup.
  std::vector<VTableBits> Bits;
  DenseMap<Metadata *, std::set<TypeMemberInfo>> TypeIdMap;
  buildTypeIdentifierMap(Bits, TypeIdMap);

  if (TypeTestFunc && AssumeFunc)
    scanTypeTestUsers(TypeTestFunc, TypeIdMap);

  if (TypeCheckedLoadFunc)
    scanTypeCheckedLoadUsers(TypeCheckedLoadFunc);

  if (ImportSummary) {
    // Import phase: apply resolutions computed during the thin link, then
    // clean up. None of the export-side analysis below is needed.
    for (auto &S : CallSlots)
      importResolution(S.first, S.second);

    removeRedundantTypeTests();

    // We have lowered or deleted the type intrinsics, so we will no longer have
    // enough information to reason about the liveness of virtual function
    // pointers in GlobalDCE.
    for (GlobalVariable &GV : M.globals())
      GV.eraseMetadata(LLVMContext::MD_vcall_visibility);

    // The rest of the code is only necessary when exporting or during regular
    // LTO, so we are done.
    return true;
  }

  if (TypeIdMap.empty())
    return true;

  // Collect information from summary about which calls to try to devirtualize.
  if (ExportSummary) {
    DenseMap<GlobalValue::GUID, TinyPtrVector<Metadata *>> MetadataByGUID;
    for (auto &P : TypeIdMap) {
      if (auto *TypeId = dyn_cast<MDString>(P.first))
        MetadataByGUID[GlobalValue::getGUID(TypeId->getString())].push_back(
            TypeId);
    }

    for (auto &P : *ExportSummary) {
      for (auto &S : P.second.SummaryList) {
        auto *FS = dyn_cast<FunctionSummary>(S.get());
        if (!FS)
          continue;
        // FIXME: Only add live functions.
        for (FunctionSummary::VFuncId VF : FS->type_test_assume_vcalls()) {
          for (Metadata *MD : MetadataByGUID[VF.GUID]) {
            CallSlots[{MD, VF.Offset}].CSInfo.addSummaryTypeTestAssumeUser(FS);
          }
        }
        for (FunctionSummary::VFuncId VF : FS->type_checked_load_vcalls()) {
          for (Metadata *MD : MetadataByGUID[VF.GUID]) {
            CallSlots[{MD, VF.Offset}].CSInfo.addSummaryTypeCheckedLoadUser(FS);
          }
        }
        for (const FunctionSummary::ConstVCall &VC :
             FS->type_test_assume_const_vcalls()) {
          for (Metadata *MD : MetadataByGUID[VC.VFunc.GUID]) {
            CallSlots[{MD, VC.VFunc.Offset}]
                .ConstCSInfo[VC.Args]
                .addSummaryTypeTestAssumeUser(FS);
          }
        }
        for (const FunctionSummary::ConstVCall &VC :
             FS->type_checked_load_const_vcalls()) {
          for (Metadata *MD : MetadataByGUID[VC.VFunc.GUID]) {
            CallSlots[{MD, VC.VFunc.Offset}]
                .ConstCSInfo[VC.Args]
                .addSummaryTypeCheckedLoadUser(FS);
          }
        }
      }
    }
  }

  // For each (type, offset) pair:
  bool DidVirtualConstProp = false;
  std::map<std::string, GlobalValue *> DevirtTargets;
  for (auto &S : CallSlots) {
    // Search each of the members of the type identifier for the virtual
    // function implementation at offset S.first.ByteOffset, and add to
    // TargetsForSlot.
    std::vector<VirtualCallTarget> TargetsForSlot;
    WholeProgramDevirtResolution *Res = nullptr;
    const std::set<TypeMemberInfo> &TypeMemberInfos = TypeIdMap[S.first.TypeID];
    if (ExportSummary && isa<MDString>(S.first.TypeID) &&
        TypeMemberInfos.size())
      // For any type id used on a global's type metadata, create the type id
      // summary resolution regardless of whether we can devirtualize, so that
      // lower type tests knows the type id is not Unsat. If it was not used on
      // a global's type metadata, the TypeIdMap entry set will be empty, and
      // we don't want to create an entry (with the default Unknown type
      // resolution), which can prevent detection of the Unsat.
      Res = &ExportSummary
                 ->getOrInsertTypeIdSummary(
                     cast<MDString>(S.first.TypeID)->getString())
                 .WPDRes[S.first.ByteOffset];
    // NOTE(review): Res stays null when not exporting or the type id is not
    // an MDString; the try* callees below must tolerate a null Res. The
    // static analyzer reports a path where Res is dereferenced null in
    // tryVirtualConstProp (Res->TheKind, line ~1381) — verify that callee's
    // null guards.
    if (tryFindVirtualCallTargets(TargetsForSlot, TypeMemberInfos,
                                  S.first.ByteOffset, ExportSummary)) {

      if (!trySingleImplDevirt(ExportSummary, TargetsForSlot, S.second, Res)) {
        DidVirtualConstProp |=
            tryVirtualConstProp(TargetsForSlot, S.second, Res, S.first);

        tryICallBranchFunnel(TargetsForSlot, S.second, Res, S.first);
      }

      // Collect functions devirtualized at least for one call site for stats.
      if (RemarksEnabled || AreStatisticsEnabled())
        for (const auto &T : TargetsForSlot)
          if (T.WasDevirt)
            DevirtTargets[std::string(T.Fn->getName())] = T.Fn;
    }

    // CFI-specific: if we are exporting and any llvm.type.checked.load
    // intrinsics were *not* devirtualized, we need to add the resulting
    // llvm.type.test intrinsics to the function summaries so that the
    // LowerTypeTests pass will export them.
    if (ExportSummary && isa<MDString>(S.first.TypeID)) {
      auto GUID =
          GlobalValue::getGUID(cast<MDString>(S.first.TypeID)->getString());
      for (auto *FS : S.second.CSInfo.SummaryTypeCheckedLoadUsers)
        FS->addTypeTest(GUID);
      for (auto &CCS : S.second.ConstCSInfo)
        for (auto *FS : CCS.second.SummaryTypeCheckedLoadUsers)
          FS->addTypeTest(GUID);
    }
  }

  if (RemarksEnabled) {
    // Generate remarks for each devirtualized function.
    for (const auto &DT : DevirtTargets) {
      GlobalValue *GV = DT.second;
      auto F = dyn_cast<Function>(GV);
      if (!F) {
        // The target may be an alias to a function; remark on the aliasee.
        auto A = dyn_cast<GlobalAlias>(GV);
        assert(A && isa<Function>(A->getAliasee()));
        F = dyn_cast<Function>(A->getAliasee());
        assert(F);
      }

      using namespace ore;
      OREGetter(F).emit(OptimizationRemark(DEBUG_TYPE, "Devirtualized", F)
                        << "devirtualized "
                        << NV("FunctionName", DT.first));
    }
  }

  NumDevirtTargets += DevirtTargets.size();

  removeRedundantTypeTests();

  // Rebuild each global we touched as part of virtual constant propagation to
  // include the before and after bytes.
  if (DidVirtualConstProp)
    for (VTableBits &B : Bits)
      rebuildGlobal(B);

  // We have lowered or deleted the type intrinsics, so we will no longer have
  // enough information to reason about the liveness of virtual function
  // pointers in GlobalDCE.
  for (GlobalVariable &GV : M.globals())
    GV.eraseMetadata(LLVMContext::MD_vcall_visibility);

  return true;
}
| 2355 | |||||
// Thin-link (index-only) WPD driver: collects call slots purely from function
// summaries, attempts single-implementation devirtualization per slot, and
// records the resolutions in the combined summary for importing modules.
void DevirtIndex::run() {
  if (ExportSummary.typeIdCompatibleVtableMap().empty())
    return;

  DenseMap<GlobalValue::GUID, std::vector<StringRef>> NameByGUID;
  for (const auto &P : ExportSummary.typeIdCompatibleVtableMap()) {
    NameByGUID[GlobalValue::getGUID(P.first)].push_back(P.first);
    // Create the type id summary resolution regardlness of whether we can
    // devirtualize, so that lower type tests knows the type id is used on
    // a global and not Unsat. We do this here rather than in the loop over the
    // CallSlots, since that handling will only see type tests that directly
    // feed assumes, and we would miss any that aren't currently handled by WPD
    // (such as type tests that feed assumes via phis).
    ExportSummary.getOrInsertTypeIdSummary(P.first);
  }

  // Collect information from summary about which calls to try to devirtualize.
  for (auto &P : ExportSummary) {
    for (auto &S : P.second.SummaryList) {
      auto *FS = dyn_cast<FunctionSummary>(S.get());
      if (!FS)
        continue;
      // FIXME: Only add live functions.
      for (FunctionSummary::VFuncId VF : FS->type_test_assume_vcalls()) {
        for (StringRef Name : NameByGUID[VF.GUID]) {
          CallSlots[{Name, VF.Offset}].CSInfo.addSummaryTypeTestAssumeUser(FS);
        }
      }
      for (FunctionSummary::VFuncId VF : FS->type_checked_load_vcalls()) {
        for (StringRef Name : NameByGUID[VF.GUID]) {
          CallSlots[{Name, VF.Offset}].CSInfo.addSummaryTypeCheckedLoadUser(FS);
        }
      }
      for (const FunctionSummary::ConstVCall &VC :
           FS->type_test_assume_const_vcalls()) {
        for (StringRef Name : NameByGUID[VC.VFunc.GUID]) {
          CallSlots[{Name, VC.VFunc.Offset}]
              .ConstCSInfo[VC.Args]
              .addSummaryTypeTestAssumeUser(FS);
        }
      }
      for (const FunctionSummary::ConstVCall &VC :
           FS->type_checked_load_const_vcalls()) {
        for (StringRef Name : NameByGUID[VC.VFunc.GUID]) {
          CallSlots[{Name, VC.VFunc.Offset}]
              .ConstCSInfo[VC.Args]
              .addSummaryTypeCheckedLoadUser(FS);
        }
      }
    }
  }

  std::set<ValueInfo> DevirtTargets;
  // For each (type, offset) pair:
  for (auto &S : CallSlots) {
    // Search each of the members of the type identifier for the virtual
    // function implementation at offset S.first.ByteOffset, and add to
    // TargetsForSlot.
    std::vector<ValueInfo> TargetsForSlot;
    auto TidSummary = ExportSummary.getTypeIdCompatibleVtableSummary(S.first.TypeID);
    assert(TidSummary);
    // The type id summary would have been created while building the NameByGUID
    // map earlier.
    WholeProgramDevirtResolution *Res =
        &ExportSummary.getTypeIdSummary(S.first.TypeID)
             ->WPDRes[S.first.ByteOffset];
    if (tryFindVirtualCallTargets(TargetsForSlot, *TidSummary,
                                  S.first.ByteOffset)) {

      if (!trySingleImplDevirt(TargetsForSlot, S.first, S.second, Res,
                               DevirtTargets))
        continue;
    }
  }

  // Optionally have the thin link print message for each devirtualized
  // function.
  if (PrintSummaryDevirt)
    for (const auto &DT : DevirtTargets)
      errs() << "Devirtualized call to " << DT << "\n";

  NumDevirtTargets += DevirtTargets.size();
}