LLVM 4.0.0
AddressSanitizer.cpp
1 //===-- AddressSanitizer.cpp - memory error detector ------------*- C++ -*-===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file is a part of AddressSanitizer, an address sanity checker.
11 // Details of the algorithm:
12 // http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
13 //
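// In short, for each instrumented N-byte load or store at address Addr the
// pass emits an inline check of the corresponding shadow byte, conceptually:
//   Shadow = (Addr >> Mapping.Scale) + Mapping.Offset;  // see getShadowMapping()
//   if (*Shadow != 0 && SlowPathCheck(*Shadow, Addr, N)) __asan_report_*(Addr);
// Globals and stack variables additionally get poisoned redzones around them,
// so out-of-bounds accesses land on non-zero shadow.
//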
14 //===----------------------------------------------------------------------===//
15 
16 #include "llvm/ADT/ArrayRef.h"
17 #include "llvm/ADT/DenseMap.h"
18 #include "llvm/ADT/DepthFirstIterator.h"
19 #include "llvm/ADT/SetVector.h"
20 #include "llvm/ADT/SmallSet.h"
21 #include "llvm/ADT/SmallVector.h"
22 #include "llvm/ADT/Statistic.h"
23 #include "llvm/ADT/StringExtras.h"
24 #include "llvm/ADT/Triple.h"
25 #include "llvm/Analysis/MemoryBuiltins.h"
26 #include "llvm/Analysis/TargetLibraryInfo.h"
27 #include "llvm/Analysis/ValueTracking.h"
28 #include "llvm/IR/CallSite.h"
29 #include "llvm/IR/DIBuilder.h"
30 #include "llvm/IR/DataLayout.h"
31 #include "llvm/IR/Dominators.h"
32 #include "llvm/IR/Function.h"
33 #include "llvm/IR/IRBuilder.h"
34 #include "llvm/IR/InlineAsm.h"
35 #include "llvm/IR/InstVisitor.h"
36 #include "llvm/IR/IntrinsicInst.h"
37 #include "llvm/IR/LLVMContext.h"
38 #include "llvm/IR/MDBuilder.h"
39 #include "llvm/IR/Module.h"
40 #include "llvm/IR/Type.h"
41 #include "llvm/MC/MCSectionMachO.h"
42 #include "llvm/Support/CommandLine.h"
43 #include "llvm/Support/DataTypes.h"
44 #include "llvm/Support/Debug.h"
45 #include "llvm/Support/Endian.h"
49 #include "llvm/Transforms/Scalar.h"
50 #include "llvm/Transforms/Utils/ASanStackFrameLayout.h"
51 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
52 #include "llvm/Transforms/Utils/Cloning.h"
53 #include "llvm/Transforms/Utils/Local.h"
54 #include "llvm/Transforms/Utils/ModuleUtils.h"
55 #include "llvm/Transforms/Utils/PromoteMemToReg.h"
56 #include <algorithm>
57 #include <iomanip>
58 #include <limits>
59 #include <sstream>
60 #include <string>
61 #include <system_error>
62 
63 using namespace llvm;
64 
65 #define DEBUG_TYPE "asan"
66 
67 static const uint64_t kDefaultShadowScale = 3;
68 static const uint64_t kDefaultShadowOffset32 = 1ULL << 29;
69 static const uint64_t kDefaultShadowOffset64 = 1ULL << 44;
70 static const uint64_t kDynamicShadowSentinel = ~(uint64_t)0;
71 static const uint64_t kIOSShadowOffset32 = 1ULL << 30;
72 static const uint64_t kIOSSimShadowOffset32 = 1ULL << 30;
73 static const uint64_t kIOSSimShadowOffset64 = kDefaultShadowOffset64;
74 static const uint64_t kSmallX86_64ShadowOffset = 0x7FFF8000; // < 2G.
75 static const uint64_t kLinuxKasan_ShadowOffset64 = 0xdffffc0000000000;
76 static const uint64_t kPPC64_ShadowOffset64 = 1ULL << 41;
77 static const uint64_t kSystemZ_ShadowOffset64 = 1ULL << 52;
78 static const uint64_t kMIPS32_ShadowOffset32 = 0x0aaa0000;
79 static const uint64_t kMIPS64_ShadowOffset64 = 1ULL << 37;
80 static const uint64_t kAArch64_ShadowOffset64 = 1ULL << 36;
81 static const uint64_t kFreeBSD_ShadowOffset32 = 1ULL << 30;
82 static const uint64_t kFreeBSD_ShadowOffset64 = 1ULL << 46;
83 static const uint64_t kWindowsShadowOffset32 = 3ULL << 28;
84 // The shadow memory space is dynamically allocated.
85 static const uint64_t kWindowsShadowOffset64 = kDynamicShadowSentinel;
86 
87 static const size_t kMinStackMallocSize = 1 << 6; // 64B
88 static const size_t kMaxStackMallocSize = 1 << 16; // 64K
89 static const uintptr_t kCurrentStackFrameMagic = 0x41B58AB3;
90 static const uintptr_t kRetiredStackFrameMagic = 0x45E0360E;
91 
92 static const char *const kAsanModuleCtorName = "asan.module_ctor";
93 static const char *const kAsanModuleDtorName = "asan.module_dtor";
94 static const uint64_t kAsanCtorAndDtorPriority = 1;
95 static const char *const kAsanReportErrorTemplate = "__asan_report_";
96 static const char *const kAsanRegisterGlobalsName = "__asan_register_globals";
97 static const char *const kAsanUnregisterGlobalsName =
98  "__asan_unregister_globals";
99 static const char *const kAsanRegisterImageGlobalsName =
100  "__asan_register_image_globals";
101 static const char *const kAsanUnregisterImageGlobalsName =
102  "__asan_unregister_image_globals";
103 static const char *const kAsanPoisonGlobalsName = "__asan_before_dynamic_init";
104 static const char *const kAsanUnpoisonGlobalsName = "__asan_after_dynamic_init";
105 static const char *const kAsanInitName = "__asan_init";
106 static const char *const kAsanVersionCheckName =
107  "__asan_version_mismatch_check_v8";
108 static const char *const kAsanPtrCmp = "__sanitizer_ptr_cmp";
109 static const char *const kAsanPtrSub = "__sanitizer_ptr_sub";
110 static const char *const kAsanHandleNoReturnName = "__asan_handle_no_return";
111 static const int kMaxAsanStackMallocSizeClass = 10;
112 static const char *const kAsanStackMallocNameTemplate = "__asan_stack_malloc_";
113 static const char *const kAsanStackFreeNameTemplate = "__asan_stack_free_";
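// Size class N covers fake stack frames of up to kMinStackMallocSize << N
// bytes, so the runtime entry points used here range from
// __asan_stack_malloc_0 / __asan_stack_free_0 up to size class 10
// (which matches kMaxStackMallocSize).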
114 static const char *const kAsanGenPrefix = "__asan_gen_";
115 static const char *const kODRGenPrefix = "__odr_asan_gen_";
116 static const char *const kSanCovGenPrefix = "__sancov_gen_";
117 static const char *const kAsanSetShadowPrefix = "__asan_set_shadow_";
118 static const char *const kAsanPoisonStackMemoryName =
119  "__asan_poison_stack_memory";
120 static const char *const kAsanUnpoisonStackMemoryName =
121  "__asan_unpoison_stack_memory";
122 static const char *const kAsanGlobalsRegisteredFlagName =
123  "__asan_globals_registered";
124 
125 static const char *const kAsanOptionDetectUseAfterReturn =
126  "__asan_option_detect_stack_use_after_return";
127 
128 static const char *const kAsanShadowMemoryDynamicAddress =
129  "__asan_shadow_memory_dynamic_address";
130 
131 static const char *const kAsanAllocaPoison = "__asan_alloca_poison";
132 static const char *const kAsanAllocasUnpoison = "__asan_allocas_unpoison";
133 
134 // Accesses sizes are powers of two: 1, 2, 4, 8, 16.
135 static const size_t kNumberOfAccessSizes = 5;
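// Together with kAsanReportErrorTemplate this yields the fixed-size report
// callbacks __asan_report_load1 .. __asan_report_store16.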
136 
137 static const unsigned kAllocaRzSize = 32;
138 
139 // Command-line flags.
140 static cl::opt<bool> ClEnableKasan(
141  "asan-kernel", cl::desc("Enable KernelAddressSanitizer instrumentation"),
142  cl::Hidden, cl::init(false));
143 static cl::opt<bool> ClRecover(
144  "asan-recover",
145  cl::desc("Enable recovery mode (continue-after-error)."),
146  cl::Hidden, cl::init(false));
147 
148 // This flag may need to be replaced with -f[no-]asan-reads.
149 static cl::opt<bool> ClInstrumentReads("asan-instrument-reads",
150  cl::desc("instrument read instructions"),
151  cl::Hidden, cl::init(true));
152 static cl::opt<bool> ClInstrumentWrites(
153  "asan-instrument-writes", cl::desc("instrument write instructions"),
154  cl::Hidden, cl::init(true));
155 static cl::opt<bool> ClInstrumentAtomics(
156  "asan-instrument-atomics",
157  cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden,
158  cl::init(true));
159 static cl::opt<bool> ClAlwaysSlowPath(
160  "asan-always-slow-path",
161  cl::desc("use instrumentation with slow path for all accesses"), cl::Hidden,
162  cl::init(false));
163 static cl::opt<bool> ClForceDynamicShadow(
164  "asan-force-dynamic-shadow",
165  cl::desc("Load shadow address into a local variable for each function"),
166  cl::Hidden, cl::init(false));
167 
168 // This flag limits the number of instructions to be instrumented
169 // in any given BB. Normally, this should be set to unlimited (INT_MAX),
170 // but due to http://llvm.org/bugs/show_bug.cgi?id=12652 we temporarily
171 // set it to 10000.
172 static cl::opt<int> ClMaxInsnsToInstrumentPerBB(
173  "asan-max-ins-per-bb", cl::init(10000),
174  cl::desc("maximal number of instructions to instrument in any given BB"),
175  cl::Hidden);
176 // This flag may need to be replaced with -f[no]asan-stack.
177 static cl::opt<bool> ClStack("asan-stack", cl::desc("Handle stack memory"),
178  cl::Hidden, cl::init(true));
179 static cl::opt<int> ClMaxInlinePoisoningSize(
180  "asan-max-inline-poisoning-size",
181  cl::desc(
182  "Inline shadow poisoning for blocks up to the given size in bytes."),
183  cl::Hidden, cl::init(64));
184 static cl::opt<bool> ClUseAfterReturn("asan-use-after-return",
185  cl::desc("Check stack-use-after-return"),
186  cl::Hidden, cl::init(true));
187 static cl::opt<bool> ClUseAfterScope("asan-use-after-scope",
188  cl::desc("Check stack-use-after-scope"),
189  cl::Hidden, cl::init(false));
190 // This flag may need to be replaced with -f[no]asan-globals.
191 static cl::opt<bool> ClGlobals("asan-globals",
192  cl::desc("Handle global objects"), cl::Hidden,
193  cl::init(true));
194 static cl::opt<bool> ClInitializers("asan-initialization-order",
195  cl::desc("Handle C++ initializer order"),
196  cl::Hidden, cl::init(true));
197 static cl::opt<bool> ClInvalidPointerPairs(
198  "asan-detect-invalid-pointer-pair",
199  cl::desc("Instrument <, <=, >, >=, - with pointer operands"), cl::Hidden,
200  cl::init(false));
201 static cl::opt<unsigned> ClRealignStack(
202  "asan-realign-stack",
203  cl::desc("Realign stack to the value of this flag (power of two)"),
204  cl::Hidden, cl::init(32));
205 static cl::opt<int> ClInstrumentationWithCallsThreshold(
206  "asan-instrumentation-with-call-threshold",
207  cl::desc(
208  "If the function being instrumented contains more than "
209  "this number of memory accesses, use callbacks instead of "
210  "inline checks (-1 means never use callbacks)."),
211  cl::Hidden, cl::init(7000));
212 static cl::opt<std::string> ClMemoryAccessCallbackPrefix(
213  "asan-memory-access-callback-prefix",
214  cl::desc("Prefix for memory access callbacks"), cl::Hidden,
215  cl::init("__asan_"));
216 static cl::opt<bool>
217  ClInstrumentDynamicAllocas("asan-instrument-dynamic-allocas",
218  cl::desc("instrument dynamic allocas"),
219  cl::Hidden, cl::init(true));
220 static cl::opt<bool> ClSkipPromotableAllocas(
221  "asan-skip-promotable-allocas",
222  cl::desc("Do not instrument promotable allocas"), cl::Hidden,
223  cl::init(true));
224 
225 // These flags allow changing the shadow mapping.
226 // The shadow mapping looks like
227 // Shadow = (Mem >> scale) + offset
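// For example, with the defaults on Linux/x86-64 (Scale = 3, Offset =
// kSmallX86_64ShadowOffset) this becomes Shadow = (Mem >> 3) + 0x7fff8000.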
228 static cl::opt<int> ClMappingScale("asan-mapping-scale",
229  cl::desc("scale of asan shadow mapping"),
230  cl::Hidden, cl::init(0));
231 static cl::opt<unsigned long long> ClMappingOffset(
232  "asan-mapping-offset",
233  cl::desc("offset of asan shadow mapping [EXPERIMENTAL]"), cl::Hidden,
234  cl::init(0));
235 
236 // Optimization flags. Not user visible, used mostly for testing
237 // and benchmarking the tool.
238 static cl::opt<bool> ClOpt("asan-opt", cl::desc("Optimize instrumentation"),
239  cl::Hidden, cl::init(true));
240 static cl::opt<bool> ClOptSameTemp(
241  "asan-opt-same-temp", cl::desc("Instrument the same temp just once"),
242  cl::Hidden, cl::init(true));
243 static cl::opt<bool> ClOptGlobals("asan-opt-globals",
244  cl::desc("Don't instrument scalar globals"),
245  cl::Hidden, cl::init(true));
246 static cl::opt<bool> ClOptStack(
247  "asan-opt-stack", cl::desc("Don't instrument scalar stack variables"),
248  cl::Hidden, cl::init(false));
249 
250 static cl::opt<bool> ClDynamicAllocaStack(
251  "asan-stack-dynamic-alloca",
252  cl::desc("Use dynamic alloca to represent stack variables"), cl::Hidden,
253  cl::init(true));
254 
255 static cl::opt<uint32_t> ClForceExperiment(
256  "asan-force-experiment",
257  cl::desc("Force optimization experiment (for testing)"), cl::Hidden,
258  cl::init(0));
259 
260 static cl::opt<bool>
261  ClUsePrivateAliasForGlobals("asan-use-private-alias",
262  cl::desc("Use private aliases for global"
263  " variables"),
264  cl::Hidden, cl::init(false));
265 
266 static cl::opt<bool>
267  ClUseMachOGlobalsSection("asan-globals-live-support",
268  cl::desc("Use linker features to support dead "
269  "code stripping of globals "
270  "(Mach-O only)"),
271  cl::Hidden, cl::init(true));
272 
273 // Debug flags.
274 static cl::opt<int> ClDebug("asan-debug", cl::desc("debug"), cl::Hidden,
275  cl::init(0));
276 static cl::opt<int> ClDebugStack("asan-debug-stack", cl::desc("debug stack"),
277  cl::Hidden, cl::init(0));
278 static cl::opt<std::string> ClDebugFunc("asan-debug-func", cl::Hidden,
279  cl::desc("Debug func"));
280 static cl::opt<int> ClDebugMin("asan-debug-min", cl::desc("Debug min inst"),
281  cl::Hidden, cl::init(-1));
282 static cl::opt<int> ClDebugMax("asan-debug-max", cl::desc("Debug max inst"),
283  cl::Hidden, cl::init(-1));
284 
285 STATISTIC(NumInstrumentedReads, "Number of instrumented reads");
286 STATISTIC(NumInstrumentedWrites, "Number of instrumented writes");
287 STATISTIC(NumOptimizedAccessesToGlobalVar,
288  "Number of optimized accesses to global vars");
289 STATISTIC(NumOptimizedAccessesToStackVar,
290  "Number of optimized accesses to stack vars");
291 
292 namespace {
293 /// Frontend-provided metadata for source location.
294 struct LocationMetadata {
295  StringRef Filename;
296  int LineNo;
297  int ColumnNo;
298 
299  LocationMetadata() : Filename(), LineNo(0), ColumnNo(0) {}
300 
301  bool empty() const { return Filename.empty(); }
302 
303  void parse(MDNode *MDN) {
304  assert(MDN->getNumOperands() == 3);
305  MDString *DIFilename = cast<MDString>(MDN->getOperand(0));
306  Filename = DIFilename->getString();
307  LineNo =
308  mdconst::extract<ConstantInt>(MDN->getOperand(1))->getLimitedValue();
309  ColumnNo =
310  mdconst::extract<ConstantInt>(MDN->getOperand(2))->getLimitedValue();
311  }
312 };
313 
314 /// Frontend-provided metadata for global variables.
315 class GlobalsMetadata {
316  public:
317  struct Entry {
318  Entry() : SourceLoc(), Name(), IsDynInit(false), IsBlacklisted(false) {}
319  LocationMetadata SourceLoc;
320  StringRef Name;
321  bool IsDynInit;
322  bool IsBlacklisted;
323  };
324 
325  GlobalsMetadata() : inited_(false) {}
326 
327  void reset() {
328  inited_ = false;
329  Entries.clear();
330  }
331 
332  void init(Module &M) {
333  assert(!inited_);
334  inited_ = true;
335  NamedMDNode *Globals = M.getNamedMetadata("llvm.asan.globals");
336  if (!Globals) return;
337  for (auto MDN : Globals->operands()) {
338  // Metadata node contains the global and the fields of "Entry".
339  assert(MDN->getNumOperands() == 5);
340  auto *GV = mdconst::extract_or_null<GlobalVariable>(MDN->getOperand(0));
341  // The optimizer may optimize away a global entirely.
342  if (!GV) continue;
343  // We can already have an entry for GV if it was merged with another
344  // global.
345  Entry &E = Entries[GV];
346  if (auto *Loc = cast_or_null<MDNode>(MDN->getOperand(1)))
347  E.SourceLoc.parse(Loc);
348  if (auto *Name = cast_or_null<MDString>(MDN->getOperand(2)))
349  E.Name = Name->getString();
350  ConstantInt *IsDynInit =
351  mdconst::extract<ConstantInt>(MDN->getOperand(3));
352  E.IsDynInit |= IsDynInit->isOne();
353  ConstantInt *IsBlacklisted =
354  mdconst::extract<ConstantInt>(MDN->getOperand(4));
355  E.IsBlacklisted |= IsBlacklisted->isOne();
356  }
357  }
358 
359  /// Returns metadata entry for a given global.
360  Entry get(GlobalVariable *G) const {
361  auto Pos = Entries.find(G);
362  return (Pos != Entries.end()) ? Pos->second : Entry();
363  }
364 
365  private:
366  bool inited_;
367  DenseMap<GlobalVariable *, Entry> Entries;
368 };
369 
370 /// This struct defines the shadow mapping using the rule:
371 /// shadow = (mem >> Scale) ADD-or-OR Offset.
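/// With the default Scale of 3, each shadow byte describes an 8-byte granule
/// of application memory.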
372 struct ShadowMapping {
373  int Scale;
374  uint64_t Offset;
375  bool OrShadowOffset;
376 };
377 
378 static ShadowMapping getShadowMapping(Triple &TargetTriple, int LongSize,
379  bool IsKasan) {
380  bool IsAndroid = TargetTriple.isAndroid();
381  bool IsIOS = TargetTriple.isiOS() || TargetTriple.isWatchOS();
382  bool IsFreeBSD = TargetTriple.isOSFreeBSD();
383  bool IsLinux = TargetTriple.isOSLinux();
384  bool IsPPC64 = TargetTriple.getArch() == llvm::Triple::ppc64 ||
385  TargetTriple.getArch() == llvm::Triple::ppc64le;
386  bool IsSystemZ = TargetTriple.getArch() == llvm::Triple::systemz;
387  bool IsX86 = TargetTriple.getArch() == llvm::Triple::x86;
388  bool IsX86_64 = TargetTriple.getArch() == llvm::Triple::x86_64;
389  bool IsMIPS32 = TargetTriple.getArch() == llvm::Triple::mips ||
390  TargetTriple.getArch() == llvm::Triple::mipsel;
391  bool IsMIPS64 = TargetTriple.getArch() == llvm::Triple::mips64 ||
392  TargetTriple.getArch() == llvm::Triple::mips64el;
393  bool IsAArch64 = TargetTriple.getArch() == llvm::Triple::aarch64;
394  bool IsWindows = TargetTriple.isOSWindows();
395 
396  ShadowMapping Mapping;
397 
398  if (LongSize == 32) {
399  // Android is always PIE, which means that the beginning of the address
400  // space is always available.
401  if (IsAndroid)
402  Mapping.Offset = 0;
403  else if (IsMIPS32)
404  Mapping.Offset = kMIPS32_ShadowOffset32;
405  else if (IsFreeBSD)
406  Mapping.Offset = kFreeBSD_ShadowOffset32;
407  else if (IsIOS)
408  // If we're targeting iOS and x86, the binary is built for iOS simulator.
409  Mapping.Offset = IsX86 ? kIOSSimShadowOffset32 : kIOSShadowOffset32;
410  else if (IsWindows)
411  Mapping.Offset = kWindowsShadowOffset32;
412  else
413  Mapping.Offset = kDefaultShadowOffset32;
414  } else { // LongSize == 64
415  if (IsPPC64)
416  Mapping.Offset = kPPC64_ShadowOffset64;
417  else if (IsSystemZ)
418  Mapping.Offset = kSystemZ_ShadowOffset64;
419  else if (IsFreeBSD)
420  Mapping.Offset = kFreeBSD_ShadowOffset64;
421  else if (IsLinux && IsX86_64) {
422  if (IsKasan)
423  Mapping.Offset = kLinuxKasan_ShadowOffset64;
424  else
425  Mapping.Offset = kSmallX86_64ShadowOffset;
426  } else if (IsWindows && IsX86_64) {
427  Mapping.Offset = kWindowsShadowOffset64;
428  } else if (IsMIPS64)
429  Mapping.Offset = kMIPS64_ShadowOffset64;
430  else if (IsIOS)
431  // If we're targeting iOS and x86, the binary is built for iOS simulator.
432  // We are using dynamic shadow offset on the 64-bit devices.
433  Mapping.Offset =
434      IsX86_64 ? kIOSSimShadowOffset64 : kDynamicShadowSentinel;
435  else if (IsAArch64)
436  Mapping.Offset = kAArch64_ShadowOffset64;
437  else
438  Mapping.Offset = kDefaultShadowOffset64;
439  }
440 
441  if (ClForceDynamicShadow) {
442  Mapping.Offset = kDynamicShadowSentinel;
443  }
444 
445  Mapping.Scale = kDefaultShadowScale;
446  if (ClMappingScale.getNumOccurrences() > 0) {
447  Mapping.Scale = ClMappingScale;
448  }
449 
450  if (ClMappingOffset.getNumOccurrences() > 0) {
451  Mapping.Offset = ClMappingOffset;
452  }
453 
454  // OR-ing shadow offset is more efficient (at least on x86) if the offset
455  // is a power of two, but on ppc64 we have to use add since the shadow
456  // offset is not necessarily 1/8-th of the address space. On SystemZ,
457  // we could OR the constant in a single instruction, but it's more
458  // efficient to load it once and use indexed addressing.
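// For example, kDefaultShadowOffset32 (1 << 29) is a power of two, so targets
// using it get the OR form, while Linux/x86-64's 0x7fff8000 is not and keeps
// using ADD.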
459  Mapping.OrShadowOffset = !IsAArch64 && !IsPPC64 && !IsSystemZ
460  && !(Mapping.Offset & (Mapping.Offset - 1))
461  && Mapping.Offset != kDynamicShadowSentinel;
462 
463  return Mapping;
464 }
465 
466 static size_t RedzoneSizeForScale(int MappingScale) {
467  // Redzone used for stack and globals is at least 32 bytes.
468  // For scales 6 and 7, the redzone has to be 64 and 128 bytes respectively.
469  return std::max(32U, 1U << MappingScale);
470 }
471 
472 /// AddressSanitizer: instrument the code in module to find memory bugs.
473 struct AddressSanitizer : public FunctionPass {
474  explicit AddressSanitizer(bool CompileKernel = false, bool Recover = false,
475  bool UseAfterScope = false)
476  : FunctionPass(ID), CompileKernel(CompileKernel || ClEnableKasan),
477  Recover(Recover || ClRecover),
478  UseAfterScope(UseAfterScope || ClUseAfterScope),
479  LocalDynamicShadow(nullptr) {
480    initializeAddressSanitizerPass(*PassRegistry::getPassRegistry());
481  }
482  StringRef getPassName() const override {
483  return "AddressSanitizerFunctionPass";
484  }
485  void getAnalysisUsage(AnalysisUsage &AU) const override {
486    AU.addRequired<DominatorTreeWrapperPass>();
487    AU.addRequired<TargetLibraryInfoWrapperPass>();
488  }
489  uint64_t getAllocaSizeInBytes(const AllocaInst &AI) const {
490  uint64_t ArraySize = 1;
491  if (AI.isArrayAllocation()) {
492  const ConstantInt *CI = dyn_cast<ConstantInt>(AI.getArraySize());
493  assert(CI && "non-constant array size");
494  ArraySize = CI->getZExtValue();
495  }
496  Type *Ty = AI.getAllocatedType();
497  uint64_t SizeInBytes =
498      AI.getModule()->getDataLayout().getTypeAllocSize(Ty);
499  return SizeInBytes * ArraySize;
500  }
501  /// Check if we want (and can) handle this alloca.
502  bool isInterestingAlloca(const AllocaInst &AI);
503 
504  /// If it is an interesting memory access, return the PointerOperand
505  /// and set IsWrite/Alignment. Otherwise return nullptr.
506  /// MaybeMask is an output parameter for the mask Value, if we're looking at a
507  /// masked load/store.
508  Value *isInterestingMemoryAccess(Instruction *I, bool *IsWrite,
509  uint64_t *TypeSize, unsigned *Alignment,
510  Value **MaybeMask = nullptr);
511  void instrumentMop(ObjectSizeOffsetVisitor &ObjSizeVis, Instruction *I,
512  bool UseCalls, const DataLayout &DL);
513  void instrumentPointerComparisonOrSubtraction(Instruction *I);
514  void instrumentAddress(Instruction *OrigIns, Instruction *InsertBefore,
515  Value *Addr, uint32_t TypeSize, bool IsWrite,
516  Value *SizeArgument, bool UseCalls, uint32_t Exp);
517  void instrumentUnusualSizeOrAlignment(Instruction *I,
518  Instruction *InsertBefore, Value *Addr,
519  uint32_t TypeSize, bool IsWrite,
520  Value *SizeArgument, bool UseCalls,
521  uint32_t Exp);
522  Value *createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong,
523  Value *ShadowValue, uint32_t TypeSize);
524  Instruction *generateCrashCode(Instruction *InsertBefore, Value *Addr,
525  bool IsWrite, size_t AccessSizeIndex,
526  Value *SizeArgument, uint32_t Exp);
527  void instrumentMemIntrinsic(MemIntrinsic *MI);
528  Value *memToShadow(Value *Shadow, IRBuilder<> &IRB);
529  bool runOnFunction(Function &F) override;
530  bool maybeInsertAsanInitAtFunctionEntry(Function &F);
531  void maybeInsertDynamicShadowAtFunctionEntry(Function &F);
532  void markEscapedLocalAllocas(Function &F);
533  bool doInitialization(Module &M) override;
534  bool doFinalization(Module &M) override;
535  static char ID; // Pass identification, replacement for typeid
536 
537  DominatorTree &getDominatorTree() const { return *DT; }
538 
539  private:
540  void initializeCallbacks(Module &M);
541 
542  bool LooksLikeCodeInBug11395(Instruction *I);
543  bool GlobalIsLinkerInitialized(GlobalVariable *G);
544  bool isSafeAccess(ObjectSizeOffsetVisitor &ObjSizeVis, Value *Addr,
545  uint64_t TypeSize) const;
546 
547  /// Helper to cleanup per-function state.
548  struct FunctionStateRAII {
549  AddressSanitizer *Pass;
550  FunctionStateRAII(AddressSanitizer *Pass) : Pass(Pass) {
551  assert(Pass->ProcessedAllocas.empty() &&
552  "last pass forgot to clear cache");
553  assert(!Pass->LocalDynamicShadow);
554  }
555  ~FunctionStateRAII() {
556  Pass->LocalDynamicShadow = nullptr;
557  Pass->ProcessedAllocas.clear();
558  }
559  };
560 
561  LLVMContext *C;
562  Triple TargetTriple;
563  int LongSize;
564  bool CompileKernel;
565  bool Recover;
566  bool UseAfterScope;
567  Type *IntptrTy;
568  ShadowMapping Mapping;
569  DominatorTree *DT;
570  Function *AsanCtorFunction = nullptr;
571  Function *AsanInitFunction = nullptr;
572  Function *AsanHandleNoReturnFunc;
573  Function *AsanPtrCmpFunction, *AsanPtrSubFunction;
574  // This array is indexed by AccessIsWrite, Experiment and log2(AccessSize).
575  Function *AsanErrorCallback[2][2][kNumberOfAccessSizes];
576  Function *AsanMemoryAccessCallback[2][2][kNumberOfAccessSizes];
577  // This array is indexed by AccessIsWrite and Experiment.
578  Function *AsanErrorCallbackSized[2][2];
579  Function *AsanMemoryAccessCallbackSized[2][2];
580  Function *AsanMemmove, *AsanMemcpy, *AsanMemset;
581  InlineAsm *EmptyAsm;
582  Value *LocalDynamicShadow;
583  GlobalsMetadata GlobalsMD;
584  DenseMap<const AllocaInst *, bool> ProcessedAllocas;
585 
586  friend struct FunctionStackPoisoner;
587 };
588 
589 class AddressSanitizerModule : public ModulePass {
590  public:
591  explicit AddressSanitizerModule(bool CompileKernel = false,
592  bool Recover = false)
593  : ModulePass(ID), CompileKernel(CompileKernel || ClEnableKasan),
594  Recover(Recover || ClRecover) {}
595  bool runOnModule(Module &M) override;
596  static char ID; // Pass identification, replacement for typeid
597  StringRef getPassName() const override { return "AddressSanitizerModule"; }
598 
599 private:
600  void initializeCallbacks(Module &M);
601 
602  bool InstrumentGlobals(IRBuilder<> &IRB, Module &M);
603  void InstrumentGlobalsCOFF(IRBuilder<> &IRB, Module &M,
604  ArrayRef<GlobalVariable *> ExtendedGlobals,
605  ArrayRef<Constant *> MetadataInitializers);
606  void InstrumentGlobalsMachO(IRBuilder<> &IRB, Module &M,
607  ArrayRef<GlobalVariable *> ExtendedGlobals,
608  ArrayRef<Constant *> MetadataInitializers);
609  void
610  InstrumentGlobalsWithMetadataArray(IRBuilder<> &IRB, Module &M,
611  ArrayRef<GlobalVariable *> ExtendedGlobals,
612  ArrayRef<Constant *> MetadataInitializers);
613 
614  GlobalVariable *CreateMetadataGlobal(Module &M, Constant *Initializer,
615  StringRef OriginalName);
616  void SetComdatForGlobalMetadata(GlobalVariable *G, GlobalVariable *Metadata);
617  IRBuilder<> CreateAsanModuleDtor(Module &M);
618 
619  bool ShouldInstrumentGlobal(GlobalVariable *G);
620  bool ShouldUseMachOGlobalsSection() const;
621  StringRef getGlobalMetadataSection() const;
622  void poisonOneInitializer(Function &GlobalInit, GlobalValue *ModuleName);
623  void createInitializerPoisonCalls(Module &M, GlobalValue *ModuleName);
624  size_t MinRedzoneSizeForGlobal() const {
625  return RedzoneSizeForScale(Mapping.Scale);
626  }
627 
628  GlobalsMetadata GlobalsMD;
629  bool CompileKernel;
630  bool Recover;
631  Type *IntptrTy;
632  LLVMContext *C;
633  Triple TargetTriple;
634  ShadowMapping Mapping;
635  Function *AsanPoisonGlobals;
636  Function *AsanUnpoisonGlobals;
637  Function *AsanRegisterGlobals;
638  Function *AsanUnregisterGlobals;
639  Function *AsanRegisterImageGlobals;
640  Function *AsanUnregisterImageGlobals;
641 };
642 
643 // Stack poisoning does not play well with exception handling.
644 // When an exception is thrown, we essentially bypass the code
645 // that unpoisons the stack. This is why the run-time library has
646 // to intercept __cxa_throw (as well as longjmp, etc) and unpoison the entire
647 // stack in the interceptor. This however does not work inside the
648 // actual function which catches the exception. Most likely because the
649 // compiler hoists the load of the shadow value somewhere too high.
650 // This causes asan to report a non-existing bug on 453.povray.
651 // It sounds like an LLVM bug.
652 struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
653  Function &F;
654  AddressSanitizer &ASan;
655  DIBuilder DIB;
656  LLVMContext *C;
657  Type *IntptrTy;
658  Type *IntptrPtrTy;
659  ShadowMapping Mapping;
660 
661  SmallVector<AllocaInst *, 16> AllocaVec;
662  SmallVector<AllocaInst *, 16> StaticAllocasToMoveUp;
663  SmallVector<Instruction *, 8> RetVec;
664  unsigned StackAlignment;
665 
666  Function *AsanStackMallocFunc[kMaxAsanStackMallocSizeClass + 1],
667  *AsanStackFreeFunc[kMaxAsanStackMallocSizeClass + 1];
668  Function *AsanSetShadowFunc[0x100] = {};
669  Function *AsanPoisonStackMemoryFunc, *AsanUnpoisonStackMemoryFunc;
670  Function *AsanAllocaPoisonFunc, *AsanAllocasUnpoisonFunc;
671 
672  // Stores a place and arguments of poisoning/unpoisoning call for alloca.
673  struct AllocaPoisonCall {
674  IntrinsicInst *InsBefore;
675  AllocaInst *AI;
676  uint64_t Size;
677  bool DoPoison;
678  };
679  SmallVector<AllocaPoisonCall, 8> DynamicAllocaPoisonCallVec;
680  SmallVector<AllocaPoisonCall, 8> StaticAllocaPoisonCallVec;
681 
682  SmallVector<AllocaInst *, 1> DynamicAllocaVec;
683  SmallVector<IntrinsicInst *, 1> StackRestoreVec;
684  AllocaInst *DynamicAllocaLayout = nullptr;
685  IntrinsicInst *LocalEscapeCall = nullptr;
686 
687  // Maps Value to an AllocaInst from which the Value is originated.
688  typedef DenseMap<Value *, AllocaInst *> AllocaForValueMapTy;
689  AllocaForValueMapTy AllocaForValue;
690 
691  bool HasNonEmptyInlineAsm = false;
692  bool HasReturnsTwiceCall = false;
693  std::unique_ptr<CallInst> EmptyInlineAsm;
694 
695  FunctionStackPoisoner(Function &F, AddressSanitizer &ASan)
696  : F(F),
697  ASan(ASan),
698  DIB(*F.getParent(), /*AllowUnresolved*/ false),
699  C(ASan.C),
700  IntptrTy(ASan.IntptrTy),
701  IntptrPtrTy(PointerType::get(IntptrTy, 0)),
702  Mapping(ASan.Mapping),
703  StackAlignment(1 << Mapping.Scale),
704  EmptyInlineAsm(CallInst::Create(ASan.EmptyAsm)) {}
705 
706  bool runOnFunction() {
707  if (!ClStack) return false;
708  // Collect alloca, ret, lifetime instructions etc.
709  for (BasicBlock *BB : depth_first(&F.getEntryBlock())) visit(*BB);
710 
711  if (AllocaVec.empty() && DynamicAllocaVec.empty()) return false;
712 
713  initializeCallbacks(*F.getParent());
714 
715  processDynamicAllocas();
716  processStaticAllocas();
717 
718  if (ClDebugStack) {
719  DEBUG(dbgs() << F);
720  }
721  return true;
722  }
723 
724  // Finds all Alloca instructions and puts
725  // poisoned red zones around all of them.
726  // Then unpoisons everything back before the function returns.
727  void processStaticAllocas();
728  void processDynamicAllocas();
729 
730  void createDynamicAllocasInitStorage();
731 
732  // ----------------------- Visitors.
733  /// \brief Collect all Ret instructions.
734  void visitReturnInst(ReturnInst &RI) { RetVec.push_back(&RI); }
735 
736  /// \brief Collect all Resume instructions.
737  void visitResumeInst(ResumeInst &RI) { RetVec.push_back(&RI); }
738 
739  /// \brief Collect all CleanupReturnInst instructions.
740  void visitCleanupReturnInst(CleanupReturnInst &CRI) { RetVec.push_back(&CRI); }
741 
742  void unpoisonDynamicAllocasBeforeInst(Instruction *InstBefore,
743  Value *SavedStack) {
744  IRBuilder<> IRB(InstBefore);
745  Value *DynamicAreaPtr = IRB.CreatePtrToInt(SavedStack, IntptrTy);
746  // When we insert _asan_allocas_unpoison before @llvm.stackrestore, we
747  // need to adjust extracted SP to compute the address of the most recent
748  // alloca. We have a special @llvm.get.dynamic.area.offset intrinsic for
749  // this purpose.
750  if (!isa<ReturnInst>(InstBefore)) {
751  Function *DynamicAreaOffsetFunc = Intrinsic::getDeclaration(
752  InstBefore->getModule(), Intrinsic::get_dynamic_area_offset,
753  {IntptrTy});
754 
755  Value *DynamicAreaOffset = IRB.CreateCall(DynamicAreaOffsetFunc, {});
756 
757  DynamicAreaPtr = IRB.CreateAdd(IRB.CreatePtrToInt(SavedStack, IntptrTy),
758  DynamicAreaOffset);
759  }
760 
761  IRB.CreateCall(AsanAllocasUnpoisonFunc,
762  {IRB.CreateLoad(DynamicAllocaLayout), DynamicAreaPtr});
763  }
764 
765  // Unpoison dynamic allocas redzones.
766  void unpoisonDynamicAllocas() {
767  for (auto &Ret : RetVec)
768  unpoisonDynamicAllocasBeforeInst(Ret, DynamicAllocaLayout);
769 
770  for (auto &StackRestoreInst : StackRestoreVec)
771  unpoisonDynamicAllocasBeforeInst(StackRestoreInst,
772  StackRestoreInst->getOperand(0));
773  }
774 
775  // Deploy and poison redzones around dynamic alloca call. To do this, we
776  // should replace this call with another one with changed parameters and
777  // replace all its uses with new address, so
778  // addr = alloca type, old_size, align
779  // is replaced by
780  // new_size = (old_size + additional_size) * sizeof(type)
781  // tmp = alloca i8, new_size, max(align, 32)
782  // addr = tmp + 32 (first 32 bytes are for the left redzone).
783  // Additional_size is added to make new memory allocation contain not only
784  // requested memory, but also left, partial and right redzones.
785  void handleDynamicAllocaCall(AllocaInst *AI);
786 
787  /// \brief Collect Alloca instructions we want (and can) handle.
788  void visitAllocaInst(AllocaInst &AI) {
789  if (!ASan.isInterestingAlloca(AI)) {
790  if (AI.isStaticAlloca()) {
791  // Skip over allocas that are present *before* the first instrumented
792  // alloca, we don't want to move those around.
793  if (AllocaVec.empty())
794  return;
795 
796  StaticAllocasToMoveUp.push_back(&AI);
797  }
798  return;
799  }
800 
801  StackAlignment = std::max(StackAlignment, AI.getAlignment());
802  if (!AI.isStaticAlloca())
803  DynamicAllocaVec.push_back(&AI);
804  else
805  AllocaVec.push_back(&AI);
806  }
807 
808  /// \brief Collect lifetime intrinsic calls to check for use-after-scope
809  /// errors.
810  void visitIntrinsicInst(IntrinsicInst &II) {
811    Intrinsic::ID ID = II.getIntrinsicID();
812  if (ID == Intrinsic::stackrestore) StackRestoreVec.push_back(&II);
813  if (ID == Intrinsic::localescape) LocalEscapeCall = &II;
814  if (!ASan.UseAfterScope)
815  return;
816  if (ID != Intrinsic::lifetime_start && ID != Intrinsic::lifetime_end)
817  return;
818  // Found lifetime intrinsic, add ASan instrumentation if necessary.
819  ConstantInt *Size = cast<ConstantInt>(II.getArgOperand(0));
820  // If size argument is undefined, don't do anything.
821  if (Size->isMinusOne()) return;
822  // Check that size doesn't saturate uint64_t and can
823  // be stored in IntptrTy.
824  const uint64_t SizeValue = Size->getValue().getLimitedValue();
825  if (SizeValue == ~0ULL ||
826  !ConstantInt::isValueValidForType(IntptrTy, SizeValue))
827  return;
828  // Find alloca instruction that corresponds to llvm.lifetime argument.
829  AllocaInst *AI = findAllocaForValue(II.getArgOperand(1));
830  if (!AI || !ASan.isInterestingAlloca(*AI))
831  return;
832  bool DoPoison = (ID == Intrinsic::lifetime_end);
833  AllocaPoisonCall APC = {&II, AI, SizeValue, DoPoison};
834  if (AI->isStaticAlloca())
835  StaticAllocaPoisonCallVec.push_back(APC);
836  else
837  DynamicAllocaPoisonCallVec.push_back(APC);
838  }
839 
840  void visitCallSite(CallSite CS) {
841  Instruction *I = CS.getInstruction();
842  if (CallInst *CI = dyn_cast<CallInst>(I)) {
843  HasNonEmptyInlineAsm |=
844  CI->isInlineAsm() && !CI->isIdenticalTo(EmptyInlineAsm.get());
845  HasReturnsTwiceCall |= CI->canReturnTwice();
846  }
847  }
848 
849  // ---------------------- Helpers.
850  void initializeCallbacks(Module &M);
851 
852  bool doesDominateAllExits(const Instruction *I) const {
853  for (auto Ret : RetVec) {
854  if (!ASan.getDominatorTree().dominates(I, Ret)) return false;
855  }
856  return true;
857  }
858 
859  /// Finds alloca where the value comes from.
860  AllocaInst *findAllocaForValue(Value *V);
861 
862  // Copies bytes from ShadowBytes into shadow memory for indexes where
863  // ShadowMask is not zero. If ShadowMask[i] is zero, we assume that
864  // ShadowBytes[i] is constantly zero and doesn't need to be overwritten.
865  void copyToShadow(ArrayRef<uint8_t> ShadowMask, ArrayRef<uint8_t> ShadowBytes,
866  IRBuilder<> &IRB, Value *ShadowBase);
867  void copyToShadow(ArrayRef<uint8_t> ShadowMask, ArrayRef<uint8_t> ShadowBytes,
868  size_t Begin, size_t End, IRBuilder<> &IRB,
869  Value *ShadowBase);
870  void copyToShadowInline(ArrayRef<uint8_t> ShadowMask,
871  ArrayRef<uint8_t> ShadowBytes, size_t Begin,
872  size_t End, IRBuilder<> &IRB, Value *ShadowBase);
873 
874  void poisonAlloca(Value *V, uint64_t Size, IRBuilder<> &IRB, bool DoPoison);
875 
876  Value *createAllocaForLayout(IRBuilder<> &IRB, const ASanStackFrameLayout &L,
877  bool Dynamic);
878  PHINode *createPHI(IRBuilder<> &IRB, Value *Cond, Value *ValueIfTrue,
879  Instruction *ThenTerm, Value *ValueIfFalse);
880 };
881 
882 } // anonymous namespace
883 
884 char AddressSanitizer::ID = 0;
885 INITIALIZE_PASS_BEGIN(
886  AddressSanitizer, "asan",
887  "AddressSanitizer: detects use-after-free and out-of-bounds bugs.", false,
888  false)
889 INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
890 INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
891 INITIALIZE_PASS_END(
892  AddressSanitizer, "asan",
893  "AddressSanitizer: detects use-after-free and out-of-bounds bugs.", false,
894  false)
895 FunctionPass *llvm::createAddressSanitizerFunctionPass(bool CompileKernel,
896  bool Recover,
897  bool UseAfterScope) {
898  assert(!CompileKernel || Recover);
899  return new AddressSanitizer(CompileKernel, Recover, UseAfterScope);
900 }
901 
902 char AddressSanitizerModule::ID = 0;
903 INITIALIZE_PASS(
904  AddressSanitizerModule, "asan-module",
905  "AddressSanitizer: detects use-after-free and out-of-bounds bugs."
906  "ModulePass",
907  false, false)
908 ModulePass *llvm::createAddressSanitizerModulePass(bool CompileKernel,
909  bool Recover) {
910  assert(!CompileKernel || Recover);
911  return new AddressSanitizerModule(CompileKernel, Recover);
912 }
913 
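// Maps an access size in bits to an index into the callback arrays:
// 8 -> 0, 16 -> 1, 32 -> 2, 64 -> 3, 128 -> 4.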
914 static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
915  size_t Res = countTrailingZeros(TypeSize / 8);
916  assert(Res < kNumberOfAccessSizes);
917  return Res;
918 }
919 
920 // \brief Create a constant for Str so that we can pass it to the run-time lib.
921 static GlobalVariable *createPrivateGlobalForString(Module &M, StringRef Str,
922  bool AllowMerging) {
923  Constant *StrConst = ConstantDataArray::getString(M.getContext(), Str);
924  // We use private linkage for module-local strings. If they can be merged
925  // with another one, we set the unnamed_addr attribute.
926  GlobalVariable *GV =
927      new GlobalVariable(M, StrConst->getType(), true,
928                         GlobalValue::PrivateLinkage, StrConst, kAsanGenPrefix);
929  if (AllowMerging) GV->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
930  GV->setAlignment(1); // Strings may not be merged w/o setting align 1.
931  return GV;
932 }
933 
934 /// \brief Create a global describing a source location.
935 static GlobalVariable *createPrivateGlobalForSourceLoc(Module &M,
936  LocationMetadata MD) {
937  Constant *LocData[] = {
938      createPrivateGlobalForString(M, MD.Filename, true),
939      ConstantInt::get(Type::getInt32Ty(M.getContext()), MD.LineNo),
940  ConstantInt::get(Type::getInt32Ty(M.getContext()), MD.ColumnNo),
941  };
942  auto LocStruct = ConstantStruct::getAnon(LocData);
943  auto GV = new GlobalVariable(M, LocStruct->getType(), true,
944                               GlobalValue::PrivateLinkage, LocStruct,
945                               kAsanGenPrefix);
946  GV->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
947  return GV;
948 }
949 
950 /// \brief Check if \p G has been created by a trusted compiler pass.
951 static bool GlobalWasGeneratedByCompiler(GlobalVariable *G) {
952  // Do not instrument asan globals.
953  if (G->getName().startswith(kAsanGenPrefix) ||
954      G->getName().startswith(kSanCovGenPrefix) ||
955      G->getName().startswith(kODRGenPrefix))
956  return true;
957 
958  // Do not instrument gcov counter arrays.
959  if (G->getName() == "__llvm_gcov_ctr")
960  return true;
961 
962  return false;
963 }
964 
965 Value *AddressSanitizer::memToShadow(Value *Shadow, IRBuilder<> &IRB) {
966  // Shadow >> scale
967  Shadow = IRB.CreateLShr(Shadow, Mapping.Scale);
968  if (Mapping.Offset == 0) return Shadow;
969  // (Shadow >> scale) | offset
970  Value *ShadowBase;
971  if (LocalDynamicShadow)
972  ShadowBase = LocalDynamicShadow;
973  else
974  ShadowBase = ConstantInt::get(IntptrTy, Mapping.Offset);
975  if (Mapping.OrShadowOffset)
976  return IRB.CreateOr(Shadow, ShadowBase);
977  else
978  return IRB.CreateAdd(Shadow, ShadowBase);
979 }
980 
981 // Instrument memset/memmove/memcpy
982 void AddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI) {
983  IRBuilder<> IRB(MI);
984  if (isa<MemTransferInst>(MI)) {
985  IRB.CreateCall(
986  isa<MemMoveInst>(MI) ? AsanMemmove : AsanMemcpy,
987  {IRB.CreatePointerCast(MI->getOperand(0), IRB.getInt8PtrTy()),
988  IRB.CreatePointerCast(MI->getOperand(1), IRB.getInt8PtrTy()),
989  IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
990  } else if (isa<MemSetInst>(MI)) {
991  IRB.CreateCall(
992  AsanMemset,
993  {IRB.CreatePointerCast(MI->getOperand(0), IRB.getInt8PtrTy()),
994  IRB.CreateIntCast(MI->getOperand(1), IRB.getInt32Ty(), false),
995  IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
996  }
997  MI->eraseFromParent();
998 }
999 
1000 /// Check if we want (and can) handle this alloca.
1001 bool AddressSanitizer::isInterestingAlloca(const AllocaInst &AI) {
1002  auto PreviouslySeenAllocaInfo = ProcessedAllocas.find(&AI);
1003 
1004  if (PreviouslySeenAllocaInfo != ProcessedAllocas.end())
1005  return PreviouslySeenAllocaInfo->getSecond();
1006 
1007  bool IsInteresting =
1008  (AI.getAllocatedType()->isSized() &&
1009  // alloca() may be called with 0 size, ignore it.
1010  ((!AI.isStaticAlloca()) || getAllocaSizeInBytes(AI) > 0) &&
1011  // We are only interested in allocas not promotable to registers.
1012      // Promotable allocas are common under -O0.
1013      (!ClSkipPromotableAllocas || !isAllocaPromotable(&AI)) &&
1014  // inalloca allocas are not treated as static, and we don't want
1015  // dynamic alloca instrumentation for them as well.
1016  !AI.isUsedWithInAlloca() &&
1017  // swifterror allocas are register promoted by ISel
1018  !AI.isSwiftError());
1019 
1020  ProcessedAllocas[&AI] = IsInteresting;
1021  return IsInteresting;
1022 }
1023 
1024 Value *AddressSanitizer::isInterestingMemoryAccess(Instruction *I,
1025  bool *IsWrite,
1026  uint64_t *TypeSize,
1027  unsigned *Alignment,
1028  Value **MaybeMask) {
1029  // Skip memory accesses inserted by another instrumentation.
1030  if (I->getMetadata("nosanitize")) return nullptr;
1031 
1032  // Do not instrument the load fetching the dynamic shadow address.
1033  if (LocalDynamicShadow == I)
1034  return nullptr;
1035 
1036  Value *PtrOperand = nullptr;
1037  const DataLayout &DL = I->getModule()->getDataLayout();
1038  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
1039  if (!ClInstrumentReads) return nullptr;
1040  *IsWrite = false;
1041  *TypeSize = DL.getTypeStoreSizeInBits(LI->getType());
1042  *Alignment = LI->getAlignment();
1043  PtrOperand = LI->getPointerOperand();
1044  } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
1045  if (!ClInstrumentWrites) return nullptr;
1046  *IsWrite = true;
1047  *TypeSize = DL.getTypeStoreSizeInBits(SI->getValueOperand()->getType());
1048  *Alignment = SI->getAlignment();
1049  PtrOperand = SI->getPointerOperand();
1050  } else if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
1051  if (!ClInstrumentAtomics) return nullptr;
1052  *IsWrite = true;
1053  *TypeSize = DL.getTypeStoreSizeInBits(RMW->getValOperand()->getType());
1054  *Alignment = 0;
1055  PtrOperand = RMW->getPointerOperand();
1056  } else if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
1057  if (!ClInstrumentAtomics) return nullptr;
1058  *IsWrite = true;
1059  *TypeSize = DL.getTypeStoreSizeInBits(XCHG->getCompareOperand()->getType());
1060  *Alignment = 0;
1061  PtrOperand = XCHG->getPointerOperand();
1062  } else if (auto CI = dyn_cast<CallInst>(I)) {
1063  auto *F = dyn_cast<Function>(CI->getCalledValue());
1064  if (F && (F->getName().startswith("llvm.masked.load.") ||
1065  F->getName().startswith("llvm.masked.store."))) {
1066  unsigned OpOffset = 0;
1067  if (F->getName().startswith("llvm.masked.store.")) {
1068  if (!ClInstrumentWrites)
1069  return nullptr;
1070  // Masked store has an initial operand for the value.
1071  OpOffset = 1;
1072  *IsWrite = true;
1073  } else {
1074  if (!ClInstrumentReads)
1075  return nullptr;
1076  *IsWrite = false;
1077  }
1078 
1079  auto BasePtr = CI->getOperand(0 + OpOffset);
1080  auto Ty = cast<PointerType>(BasePtr->getType())->getElementType();
1081  *TypeSize = DL.getTypeStoreSizeInBits(Ty);
1082  if (auto AlignmentConstant =
1083  dyn_cast<ConstantInt>(CI->getOperand(1 + OpOffset)))
1084  *Alignment = (unsigned)AlignmentConstant->getZExtValue();
1085  else
1086  *Alignment = 1; // No alignment guarantees. We probably got Undef
1087  if (MaybeMask)
1088  *MaybeMask = CI->getOperand(2 + OpOffset);
1089  PtrOperand = BasePtr;
1090  }
1091  }
1092 
1093  if (PtrOperand) {
1094  // Do not instrument accesses from different address spaces; we cannot deal
1095  // with them.
1096  Type *PtrTy = cast<PointerType>(PtrOperand->getType()->getScalarType());
1097  if (PtrTy->getPointerAddressSpace() != 0)
1098  return nullptr;
1099 
1100  // Ignore swifterror addresses.
1101  // swifterror memory addresses are mem2reg promoted by instruction
1102  // selection. As such they cannot have regular uses like an instrumentation
1103  // function and it makes no sense to track them as memory.
1104  if (PtrOperand->isSwiftError())
1105  return nullptr;
1106  }
1107 
1108  // Treat memory accesses to promotable allocas as non-interesting since they
1109  // will not cause memory violations. This greatly speeds up the instrumented
1110  // executable at -O0.
1111  if (ClSkipPromotableAllocas)
1112  if (auto AI = dyn_cast_or_null<AllocaInst>(PtrOperand))
1113  return isInterestingAlloca(*AI) ? AI : nullptr;
1114 
1115  return PtrOperand;
1116 }
1117 
1118 static bool isPointerOperand(Value *V) {
1119  return V->getType()->isPointerTy() || isa<PtrToIntInst>(V);
1120 }
1121 
1122 // This is a rough heuristic; it may cause both false positives and
1123 // false negatives. The proper implementation requires cooperation with
1124 // the frontend.
1125 static bool isInterestingPointerComparisonOrSubtraction(Instruction *I) {
1126  if (ICmpInst *Cmp = dyn_cast<ICmpInst>(I)) {
1127  if (!Cmp->isRelational()) return false;
1128  } else if (BinaryOperator *BO = dyn_cast<BinaryOperator>(I)) {
1129  if (BO->getOpcode() != Instruction::Sub) return false;
1130  } else {
1131  return false;
1132  }
1133  return isPointerOperand(I->getOperand(0)) &&
1134         isPointerOperand(I->getOperand(1));
1135 }
1136 
1137 bool AddressSanitizer::GlobalIsLinkerInitialized(GlobalVariable *G) {
1138  // If a global variable does not have dynamic initialization we don't
1139  // have to instrument it. However, if a global does not have initializer
1140  // at all, we assume it has dynamic initializer (in other TU).
1141  return G->hasInitializer() && !GlobalsMD.get(G).IsDynInit;
1142 }
1143 
1144 void AddressSanitizer::instrumentPointerComparisonOrSubtraction(
1145  Instruction *I) {
1146  IRBuilder<> IRB(I);
1147  Function *F = isa<ICmpInst>(I) ? AsanPtrCmpFunction : AsanPtrSubFunction;
1148  Value *Param[2] = {I->getOperand(0), I->getOperand(1)};
1149  for (Value *&i : Param) {
1150  if (i->getType()->isPointerTy())
1151  i = IRB.CreatePointerCast(i, IntptrTy);
1152  }
1153  IRB.CreateCall(F, Param);
1154 }
1155 
1156 static void doInstrumentAddress(AddressSanitizer *Pass, Instruction *I,
1157  Instruction *InsertBefore, Value *Addr,
1158  unsigned Alignment, unsigned Granularity,
1159  uint32_t TypeSize, bool IsWrite,
1160  Value *SizeArgument, bool UseCalls,
1161  uint32_t Exp) {
1162  // Instrument a 1-, 2-, 4-, 8-, or 16- byte access with one check
1163  // if the data is properly aligned.
1164  if ((TypeSize == 8 || TypeSize == 16 || TypeSize == 32 || TypeSize == 64 ||
1165  TypeSize == 128) &&
1166  (Alignment >= Granularity || Alignment == 0 || Alignment >= TypeSize / 8))
1167  return Pass->instrumentAddress(I, InsertBefore, Addr, TypeSize, IsWrite,
1168  nullptr, UseCalls, Exp);
1169  Pass->instrumentUnusualSizeOrAlignment(I, InsertBefore, Addr, TypeSize,
1170  IsWrite, nullptr, UseCalls, Exp);
1171 }
1172 
1173 static void instrumentMaskedLoadOrStore(AddressSanitizer *Pass,
1174  const DataLayout &DL, Type *IntptrTy,
1175  Value *Mask, Instruction *I,
1176  Value *Addr, unsigned Alignment,
1177  unsigned Granularity, uint32_t TypeSize,
1178  bool IsWrite, Value *SizeArgument,
1179  bool UseCalls, uint32_t Exp) {
1180  auto *VTy = cast<PointerType>(Addr->getType())->getElementType();
1181  uint64_t ElemTypeSize = DL.getTypeStoreSizeInBits(VTy->getScalarType());
1182  unsigned Num = VTy->getVectorNumElements();
1183  auto Zero = ConstantInt::get(IntptrTy, 0);
1184  for (unsigned Idx = 0; Idx < Num; ++Idx) {
1185  Value *InstrumentedAddress = nullptr;
1186  Instruction *InsertBefore = I;
1187  if (auto *Vector = dyn_cast<ConstantVector>(Mask)) {
1188  // dyn_cast as we might get UndefValue
1189  if (auto *Masked = dyn_cast<ConstantInt>(Vector->getOperand(Idx))) {
1190  if (Masked->isNullValue())
1191  // Mask is constant false, so no instrumentation needed.
1192  continue;
1193  // If we have a true or undef value, fall through to doInstrumentAddress
1194  // with InsertBefore == I
1195  }
1196  } else {
1197  IRBuilder<> IRB(I);
1198  Value *MaskElem = IRB.CreateExtractElement(Mask, Idx);
1199  TerminatorInst *ThenTerm = SplitBlockAndInsertIfThen(MaskElem, I, false);
1200  InsertBefore = ThenTerm;
1201  }
1202 
1203  IRBuilder<> IRB(InsertBefore);
1204  InstrumentedAddress =
1205  IRB.CreateGEP(Addr, {Zero, ConstantInt::get(IntptrTy, Idx)});
1206  doInstrumentAddress(Pass, I, InsertBefore, InstrumentedAddress, Alignment,
1207  Granularity, ElemTypeSize, IsWrite, SizeArgument,
1208  UseCalls, Exp);
1209  }
1210 }
1211 
1212 void AddressSanitizer::instrumentMop(ObjectSizeOffsetVisitor &ObjSizeVis,
1213  Instruction *I, bool UseCalls,
1214  const DataLayout &DL) {
1215  bool IsWrite = false;
1216  unsigned Alignment = 0;
1217  uint64_t TypeSize = 0;
1218  Value *MaybeMask = nullptr;
1219  Value *Addr =
1220  isInterestingMemoryAccess(I, &IsWrite, &TypeSize, &Alignment, &MaybeMask);
1221  assert(Addr);
1222 
1223  // Optimization experiments.
1224  // The experiments can be used to evaluate potential optimizations that remove
1225  // instrumentation (assess false negatives). Instead of completely removing
1226  // some instrumentation, you set Exp to a non-zero value (mask of optimization
1227  // experiments that want to remove instrumentation of this instruction).
1228  // If Exp is non-zero, this pass will emit special calls into runtime
1229  // (e.g. __asan_report_exp_load1 instead of __asan_report_load1). These calls
1230  // make runtime terminate the program in a special way (with a different
1231  // exit status). Then you run the new compiler on a buggy corpus, collect
1232  // the special terminations (ideally, you don't see them at all -- no false
1233  // negatives) and make the decision on the optimization.
1234  uint32_t Exp = ClForceExperiment;
1235 
1236  if (ClOpt && ClOptGlobals) {
1237  // If initialization order checking is disabled, a simple access to a
1238    // dynamically initialized global is always valid.
1239    GlobalVariable *G = dyn_cast<GlobalVariable>(GetUnderlyingObject(Addr, DL));
1240  if (G && (!ClInitializers || GlobalIsLinkerInitialized(G)) &&
1241  isSafeAccess(ObjSizeVis, Addr, TypeSize)) {
1242  NumOptimizedAccessesToGlobalVar++;
1243  return;
1244  }
1245  }
1246 
1247  if (ClOpt && ClOptStack) {
1248  // A direct inbounds access to a stack variable is always valid.
1249  if (isa<AllocaInst>(GetUnderlyingObject(Addr, DL)) &&
1250  isSafeAccess(ObjSizeVis, Addr, TypeSize)) {
1251  NumOptimizedAccessesToStackVar++;
1252  return;
1253  }
1254  }
1255 
1256  if (IsWrite)
1257  NumInstrumentedWrites++;
1258  else
1259  NumInstrumentedReads++;
1260 
1261  unsigned Granularity = 1 << Mapping.Scale;
1262  if (MaybeMask) {
1263  instrumentMaskedLoadOrStore(this, DL, IntptrTy, MaybeMask, I, Addr,
1264  Alignment, Granularity, TypeSize, IsWrite,
1265  nullptr, UseCalls, Exp);
1266  } else {
1267  doInstrumentAddress(this, I, I, Addr, Alignment, Granularity, TypeSize,
1268  IsWrite, nullptr, UseCalls, Exp);
1269  }
1270 }
1271 
1272 Instruction *AddressSanitizer::generateCrashCode(Instruction *InsertBefore,
1273  Value *Addr, bool IsWrite,
1274  size_t AccessSizeIndex,
1275  Value *SizeArgument,
1276  uint32_t Exp) {
1277  IRBuilder<> IRB(InsertBefore);
1278  Value *ExpVal = Exp == 0 ? nullptr : ConstantInt::get(IRB.getInt32Ty(), Exp);
1279  CallInst *Call = nullptr;
1280  if (SizeArgument) {
1281  if (Exp == 0)
1282  Call = IRB.CreateCall(AsanErrorCallbackSized[IsWrite][0],
1283  {Addr, SizeArgument});
1284  else
1285  Call = IRB.CreateCall(AsanErrorCallbackSized[IsWrite][1],
1286  {Addr, SizeArgument, ExpVal});
1287  } else {
1288  if (Exp == 0)
1289  Call =
1290  IRB.CreateCall(AsanErrorCallback[IsWrite][0][AccessSizeIndex], Addr);
1291  else
1292  Call = IRB.CreateCall(AsanErrorCallback[IsWrite][1][AccessSizeIndex],
1293  {Addr, ExpVal});
1294  }
1295 
1296  // We don't do Call->setDoesNotReturn() because the BB already has
1297  // UnreachableInst at the end.
1298  // This EmptyAsm is required to avoid callback merge.
1299  IRB.CreateCall(EmptyAsm, {});
1300  return Call;
1301 }
1302 
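// A shadow byte value k with 0 < k < Granularity means that only the first k
// bytes of the corresponding granule are addressable; the slow path reports
// the access if its last byte falls at or beyond k. Negative (poison) shadow
// values always fail this signed comparison as well.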
1303 Value *AddressSanitizer::createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong,
1304  Value *ShadowValue,
1305  uint32_t TypeSize) {
1306  size_t Granularity = static_cast<size_t>(1) << Mapping.Scale;
1307  // Addr & (Granularity - 1)
1308  Value *LastAccessedByte =
1309  IRB.CreateAnd(AddrLong, ConstantInt::get(IntptrTy, Granularity - 1));
1310  // (Addr & (Granularity - 1)) + size - 1
1311  if (TypeSize / 8 > 1)
1312  LastAccessedByte = IRB.CreateAdd(
1313  LastAccessedByte, ConstantInt::get(IntptrTy, TypeSize / 8 - 1));
1314  // (uint8_t) ((Addr & (Granularity-1)) + size - 1)
1315  LastAccessedByte =
1316  IRB.CreateIntCast(LastAccessedByte, ShadowValue->getType(), false);
1317  // ((uint8_t) ((Addr & (Granularity-1)) + size - 1)) >= ShadowValue
1318  return IRB.CreateICmpSGE(LastAccessedByte, ShadowValue);
1319 }
1320 
1321 void AddressSanitizer::instrumentAddress(Instruction *OrigIns,
1322  Instruction *InsertBefore, Value *Addr,
1323  uint32_t TypeSize, bool IsWrite,
1324  Value *SizeArgument, bool UseCalls,
1325  uint32_t Exp) {
1326  IRBuilder<> IRB(InsertBefore);
1327  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
1328  size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize);
1329 
1330  if (UseCalls) {
1331  if (Exp == 0)
1332  IRB.CreateCall(AsanMemoryAccessCallback[IsWrite][0][AccessSizeIndex],
1333  AddrLong);
1334  else
1335  IRB.CreateCall(AsanMemoryAccessCallback[IsWrite][1][AccessSizeIndex],
1336  {AddrLong, ConstantInt::get(IRB.getInt32Ty(), Exp)});
1337  return;
1338  }
1339 
1340  Type *ShadowTy =
1341  IntegerType::get(*C, std::max(8U, TypeSize >> Mapping.Scale));
1342  Type *ShadowPtrTy = PointerType::get(ShadowTy, 0);
1343  Value *ShadowPtr = memToShadow(AddrLong, IRB);
1344  Value *CmpVal = Constant::getNullValue(ShadowTy);
1345  Value *ShadowValue =
1346  IRB.CreateLoad(IRB.CreateIntToPtr(ShadowPtr, ShadowPtrTy));
1347 
1348  Value *Cmp = IRB.CreateICmpNE(ShadowValue, CmpVal);
1349  size_t Granularity = 1ULL << Mapping.Scale;
1350  TerminatorInst *CrashTerm = nullptr;
1351 
1352  if (ClAlwaysSlowPath || (TypeSize < 8 * Granularity)) {
1353  // We use branch weights for the slow path check, to indicate that the slow
1354  // path is rarely taken. This seems to be the case for SPEC benchmarks.
1355  TerminatorInst *CheckTerm = SplitBlockAndInsertIfThen(
1356  Cmp, InsertBefore, false, MDBuilder(*C).createBranchWeights(1, 100000));
1357  assert(cast<BranchInst>(CheckTerm)->isUnconditional());
1358  BasicBlock *NextBB = CheckTerm->getSuccessor(0);
1359  IRB.SetInsertPoint(CheckTerm);
1360  Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeSize);
1361  if (Recover) {
1362  CrashTerm = SplitBlockAndInsertIfThen(Cmp2, CheckTerm, false);
1363  } else {
1364  BasicBlock *CrashBlock =
1365  BasicBlock::Create(*C, "", NextBB->getParent(), NextBB);
1366  CrashTerm = new UnreachableInst(*C, CrashBlock);
1367  BranchInst *NewTerm = BranchInst::Create(CrashBlock, NextBB, Cmp2);
1368  ReplaceInstWithInst(CheckTerm, NewTerm);
1369  }
1370  } else {
1371  CrashTerm = SplitBlockAndInsertIfThen(Cmp, InsertBefore, !Recover);
1372  }
1373 
1374  Instruction *Crash = generateCrashCode(CrashTerm, AddrLong, IsWrite,
1375  AccessSizeIndex, SizeArgument, Exp);
1376  Crash->setDebugLoc(OrigIns->getDebugLoc());
1377 }
1378 
1379 // Instrument unusual size or unusual alignment.
1380 // We cannot do it with a single check, so we do a 1-byte check for the first
1381 // and the last bytes. We call __asan_report_*_n(addr, real_size) to be able
1382 // to report the actual access size.
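// E.g. a 10-byte access at Addr is checked with 1-byte checks at Addr and
// Addr + 9, and a failure is reported via __asan_report_*_n(Addr, 10).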
1383 void AddressSanitizer::instrumentUnusualSizeOrAlignment(
1384  Instruction *I, Instruction *InsertBefore, Value *Addr, uint32_t TypeSize,
1385  bool IsWrite, Value *SizeArgument, bool UseCalls, uint32_t Exp) {
1386  IRBuilder<> IRB(InsertBefore);
1387  Value *Size = ConstantInt::get(IntptrTy, TypeSize / 8);
1388  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
1389  if (UseCalls) {
1390  if (Exp == 0)
1391  IRB.CreateCall(AsanMemoryAccessCallbackSized[IsWrite][0],
1392  {AddrLong, Size});
1393  else
1394  IRB.CreateCall(AsanMemoryAccessCallbackSized[IsWrite][1],
1395  {AddrLong, Size, ConstantInt::get(IRB.getInt32Ty(), Exp)});
1396  } else {
1397  Value *LastByte = IRB.CreateIntToPtr(
1398  IRB.CreateAdd(AddrLong, ConstantInt::get(IntptrTy, TypeSize / 8 - 1)),
1399  Addr->getType());
1400  instrumentAddress(I, InsertBefore, Addr, 8, IsWrite, Size, false, Exp);
1401  instrumentAddress(I, InsertBefore, LastByte, 8, IsWrite, Size, false, Exp);
1402  }
1403 }
1404 
1405 void AddressSanitizerModule::poisonOneInitializer(Function &GlobalInit,
1406  GlobalValue *ModuleName) {
1407  // Set up the arguments to our poison/unpoison functions.
1408  IRBuilder<> IRB(&GlobalInit.front(),
1409  GlobalInit.front().getFirstInsertionPt());
1410 
1411  // Add a call to poison all external globals before the given function starts.
1412  Value *ModuleNameAddr = ConstantExpr::getPointerCast(ModuleName, IntptrTy);
1413  IRB.CreateCall(AsanPoisonGlobals, ModuleNameAddr);
1414 
1415  // Add calls to unpoison all globals before each return instruction.
1416  for (auto &BB : GlobalInit.getBasicBlockList())
1417  if (ReturnInst *RI = dyn_cast<ReturnInst>(BB.getTerminator()))
1418  CallInst::Create(AsanUnpoisonGlobals, "", RI);
1419 }
1420 
1421 void AddressSanitizerModule::createInitializerPoisonCalls(
1422  Module &M, GlobalValue *ModuleName) {
1423  GlobalVariable *GV = M.getGlobalVariable("llvm.global_ctors");
1424 
1425  ConstantArray *CA = cast<ConstantArray>(GV->getInitializer());
1426  for (Use &OP : CA->operands()) {
1427  if (isa<ConstantAggregateZero>(OP)) continue;
1428  ConstantStruct *CS = cast<ConstantStruct>(OP);
1429 
1430  // Must have a function or null ptr.
1431  if (Function *F = dyn_cast<Function>(CS->getOperand(1))) {
1432  if (F->getName() == kAsanModuleCtorName) continue;
1433  ConstantInt *Priority = dyn_cast<ConstantInt>(CS->getOperand(0));
1434  // Don't instrument CTORs that will run before asan.module_ctor.
1435  if (Priority->getLimitedValue() <= kAsanCtorAndDtorPriority) continue;
1436  poisonOneInitializer(*F, ModuleName);
1437  }
1438  }
1439 }
1440 
1441 bool AddressSanitizerModule::ShouldInstrumentGlobal(GlobalVariable *G) {
1442  Type *Ty = G->getValueType();
1443  DEBUG(dbgs() << "GLOBAL: " << *G << "\n");
1444 
1445  if (GlobalsMD.get(G).IsBlacklisted) return false;
1446  if (!Ty->isSized()) return false;
1447  if (!G->hasInitializer()) return false;
1448  if (GlobalWasGeneratedByCompiler(G)) return false; // Our own globals.
1449  // Touch only those globals that will not be defined in other modules.
1450  // Don't handle ODR linkage types and COMDATs since other modules may be built
1451  // without ASan.
1452  if (G->getLinkage() != GlobalVariable::ExternalLinkage &&
1453      G->getLinkage() != GlobalVariable::PrivateLinkage &&
1454      G->getLinkage() != GlobalVariable::InternalLinkage)
1455  return false;
1456  if (G->hasComdat()) return false;
1457  // Two problems with thread-locals:
1458  // - The address of the main thread's copy can't be computed at link-time.
1459  // - Need to poison all copies, not just the main thread's one.
1460  if (G->isThreadLocal()) return false;
1461  // For now, just ignore this Global if the alignment is large.
1462  if (G->getAlignment() > MinRedzoneSizeForGlobal()) return false;
1463 
1464  if (G->hasSection()) {
1465  StringRef Section = G->getSection();
1466 
1467  // Globals from llvm.metadata aren't emitted; do not instrument them.
1468  if (Section == "llvm.metadata") return false;
1469  // Do not instrument globals from special LLVM sections.
1470  if (Section.find("__llvm") != StringRef::npos || Section.find("__LLVM") != StringRef::npos) return false;
1471 
1472  // Do not instrument function pointers to initialization and termination
1473  // routines: dynamic linker will not properly handle redzones.
1474  if (Section.startswith(".preinit_array") ||
1475  Section.startswith(".init_array") ||
1476  Section.startswith(".fini_array")) {
1477  return false;
1478  }
1479 
1480  // Callbacks put into the CRT initializer/terminator sections
1481  // should not be instrumented.
1482  // See https://code.google.com/p/address-sanitizer/issues/detail?id=305
1483  // and http://msdn.microsoft.com/en-US/en-en/library/bb918180(v=vs.120).aspx
1484  if (Section.startswith(".CRT")) {
1485  DEBUG(dbgs() << "Ignoring a global initializer callback: " << *G << "\n");
1486  return false;
1487  }
1488 
1489  if (TargetTriple.isOSBinFormatMachO()) {
1490  StringRef ParsedSegment, ParsedSection;
1491  unsigned TAA = 0, StubSize = 0;
1492  bool TAAParsed;
1493  std::string ErrorCode = MCSectionMachO::ParseSectionSpecifier(
1494  Section, ParsedSegment, ParsedSection, TAA, TAAParsed, StubSize);
1495  assert(ErrorCode.empty() && "Invalid section specifier.");
1496 
1497  // Ignore the globals from the __OBJC section. The ObjC runtime assumes
1498  // those conform to /usr/lib/objc/runtime.h, so we can't add redzones to
1499  // them.
1500  if (ParsedSegment == "__OBJC" ||
1501  (ParsedSegment == "__DATA" && ParsedSection.startswith("__objc_"))) {
1502  DEBUG(dbgs() << "Ignoring ObjC runtime global: " << *G << "\n");
1503  return false;
1504  }
1505  // See http://code.google.com/p/address-sanitizer/issues/detail?id=32
1506  // Constant CFString instances are compiled in the following way:
1507  // -- the string buffer is emitted into
1508  // __TEXT,__cstring,cstring_literals
1509  // -- the constant NSConstantString structure referencing that buffer
1510  // is placed into __DATA,__cfstring
1511  // Therefore there's no point in placing redzones into __DATA,__cfstring.
1512  // Moreover, it causes the linker to crash on OS X 10.7
1513  if (ParsedSegment == "__DATA" && ParsedSection == "__cfstring") {
1514  DEBUG(dbgs() << "Ignoring CFString: " << *G << "\n");
1515  return false;
1516  }
1517  // The linker merges the contents of cstring_literals and removes the
1518  // trailing zeroes.
1519  if (ParsedSegment == "__TEXT" && (TAA & MachO::S_CSTRING_LITERALS)) {
1520  DEBUG(dbgs() << "Ignoring a cstring literal: " << *G << "\n");
1521  return false;
1522  }
1523  }
1524  }
1525 
1526  return true;
1527 }
1528 
1529 // On Mach-O platforms, we emit global metadata in a separate section of the
1530 // binary in order to allow the linker to properly dead strip. This is only
1531 // supported on recent versions of ld64.
1532 bool AddressSanitizerModule::ShouldUseMachOGlobalsSection() const {
1533  if (!ClUseMachOGlobalsSection)
1534  return false;
1535 
1536  if (!TargetTriple.isOSBinFormatMachO())
1537  return false;
1538 
1539  if (TargetTriple.isMacOSX() && !TargetTriple.isMacOSXVersionLT(10, 11))
1540  return true;
1541  if (TargetTriple.isiOS() /* or tvOS */ && !TargetTriple.isOSVersionLT(9))
1542  return true;
1543  if (TargetTriple.isWatchOS() && !TargetTriple.isOSVersionLT(2))
1544  return true;
1545 
1546  return false;
1547 }
1548 
1549 StringRef AddressSanitizerModule::getGlobalMetadataSection() const {
1550  switch (TargetTriple.getObjectFormat()) {
1551  case Triple::COFF: return ".ASAN$GL";
1552  case Triple::ELF: return "asan_globals";
1553  case Triple::MachO: return "__DATA,__asan_globals,regular";
1554  default: break;
1555  }
1556  llvm_unreachable("unsupported object format");
1557 }
1558 
1559 void AddressSanitizerModule::initializeCallbacks(Module &M) {
1560  IRBuilder<> IRB(*C);
1561 
1562  // Declare our poisoning and unpoisoning functions.
1563  AsanPoisonGlobals = checkSanitizerInterfaceFunction(M.getOrInsertFunction(
1564  kAsanPoisonGlobalsName, IRB.getVoidTy(), IntptrTy, nullptr));
1565  AsanPoisonGlobals->setLinkage(Function::ExternalLinkage);
1566  AsanUnpoisonGlobals = checkSanitizerInterfaceFunction(M.getOrInsertFunction(
1567  kAsanUnpoisonGlobalsName, IRB.getVoidTy(), nullptr));
1568  AsanUnpoisonGlobals->setLinkage(Function::ExternalLinkage);
1569 
1570  // Declare functions that register/unregister globals.
1571  AsanRegisterGlobals = checkSanitizerInterfaceFunction(M.getOrInsertFunction(
1572  kAsanRegisterGlobalsName, IRB.getVoidTy(), IntptrTy, IntptrTy, nullptr));
1573  AsanRegisterGlobals->setLinkage(Function::ExternalLinkage);
1574  AsanUnregisterGlobals = checkSanitizerInterfaceFunction(
1575  M.getOrInsertFunction(kAsanUnregisterGlobalsName, IRB.getVoidTy(),
1576  IntptrTy, IntptrTy, nullptr));
1577  AsanUnregisterGlobals->setLinkage(Function::ExternalLinkage);
1578 
1579  // Declare the functions that find globals in a shared object and then invoke
1580  // the (un)register function on them.
1581  AsanRegisterImageGlobals =
1582  checkSanitizerInterfaceFunction(M.getOrInsertFunction(
1583  kAsanRegisterImageGlobalsName, IRB.getVoidTy(), IntptrTy, nullptr));
1584  AsanRegisterImageGlobals->setLinkage(Function::ExternalLinkage);
1585 
1586  AsanUnregisterImageGlobals =
1587  checkSanitizerInterfaceFunction(M.getOrInsertFunction(
1588  kAsanUnregisterImageGlobalsName, IRB.getVoidTy(), IntptrTy, nullptr));
1589  AsanUnregisterImageGlobals->setLinkage(Function::ExternalLinkage);
1590 }
1591 
1592 // Put the metadata and the instrumented global in the same group. This ensures
1593 // that the metadata is discarded if the instrumented global is discarded.
1594 void AddressSanitizerModule::SetComdatForGlobalMetadata(
1596  Module &M = *G->getParent();
1597  Comdat *C = G->getComdat();
1598  if (!C) {
1599  if (!G->hasName()) {
1600  // If G is unnamed, it must be internal. Give it an artificial name
1601  // so we can put it in a comdat.
1602  assert(G->hasLocalLinkage());
1603  G->setName(Twine(kAsanGenPrefix) + "_anon_global");
1604  }
1605  C = M.getOrInsertComdat(G->getName());
1606  // Make this IMAGE_COMDAT_SELECT_NODUPLICATES on COFF.
1607  if (TargetTriple.isOSBinFormatCOFF())
1608  C->setSelectionKind(Comdat::NoDuplicates);
1609  G->setComdat(C);
1610  }
1611 
1612  assert(G->hasComdat());
1613  Metadata->setComdat(G->getComdat());
1614 }
1615 
1616 // Create a separate metadata global and put it in the appropriate ASan
1617 // global registration section.
1618 GlobalVariable *
1619 AddressSanitizerModule::CreateMetadataGlobal(Module &M, Constant *Initializer,
1620  StringRef OriginalName) {
1621  GlobalVariable *Metadata =
1622  new GlobalVariable(M, Initializer->getType(), false,
1623  GlobalVariable::InternalLinkage, Initializer,
1624  Twine("__asan_global_") +
1625  GlobalValue::getRealLinkageName(OriginalName));
1626  Metadata->setSection(getGlobalMetadataSection());
1627  return Metadata;
1628 }
1629 
1630 IRBuilder<> AddressSanitizerModule::CreateAsanModuleDtor(Module &M) {
1631  Function *AsanDtorFunction =
1632  Function::Create(FunctionType::get(Type::getVoidTy(*C), false),
1633  GlobalValue::InternalLinkage, kAsanModuleDtorName, &M);
1634  BasicBlock *AsanDtorBB = BasicBlock::Create(*C, "", AsanDtorFunction);
1635  appendToGlobalDtors(M, AsanDtorFunction, kAsanCtorAndDtorPriority);
1636 
1637  return IRBuilder<>(ReturnInst::Create(*C, AsanDtorBB));
1638 }
1639 
1640 void AddressSanitizerModule::InstrumentGlobalsCOFF(
1641  IRBuilder<> &IRB, Module &M, ArrayRef<GlobalVariable *> ExtendedGlobals,
1642  ArrayRef<Constant *> MetadataInitializers) {
1643  assert(ExtendedGlobals.size() == MetadataInitializers.size());
1644  auto &DL = M.getDataLayout();
1645 
1646  for (size_t i = 0; i < ExtendedGlobals.size(); i++) {
1647  Constant *Initializer = MetadataInitializers[i];
1648  GlobalVariable *G = ExtendedGlobals[i];
1649  GlobalVariable *Metadata =
1650  CreateMetadataGlobal(M, Initializer, G->getName());
1651 
1652  // The MSVC linker always inserts padding when linking incrementally. We
1653  // cope with that by aligning each struct to its size, which must be a power
1654  // of two.
1655  unsigned SizeOfGlobalStruct = DL.getTypeAllocSize(Initializer->getType());
1656  assert(isPowerOf2_32(SizeOfGlobalStruct) &&
1657  "global metadata will not be padded appropriately");
1658  Metadata->setAlignment(SizeOfGlobalStruct);
1659 
1660  SetComdatForGlobalMetadata(G, Metadata);
1661  }
1662 }
1663 
1664 void AddressSanitizerModule::InstrumentGlobalsMachO(
1665  IRBuilder<> &IRB, Module &M, ArrayRef<GlobalVariable *> ExtendedGlobals,
1666  ArrayRef<Constant *> MetadataInitializers) {
1667  assert(ExtendedGlobals.size() == MetadataInitializers.size());
1668 
1669  // On recent Mach-O platforms, use a structure which binds the liveness of
1670  // the global variable to the metadata struct. Keep the list of "Liveness" GV
1671  // created to be added to llvm.compiler.used
1672  StructType *LivenessTy = StructType::get(IntptrTy, IntptrTy, nullptr);
1673  SmallVector<GlobalValue *, 16> LivenessGlobals(ExtendedGlobals.size());
1674 
1675  for (size_t i = 0; i < ExtendedGlobals.size(); i++) {
1676  Constant *Initializer = MetadataInitializers[i];
1677  GlobalVariable *G = ExtendedGlobals[i];
1678  GlobalVariable *Metadata =
1679  CreateMetadataGlobal(M, Initializer, G->getName());
1680 
1681  // On recent Mach-O platforms, we emit the global metadata in a way that
1682  // allows the linker to properly strip dead globals.
1683  auto LivenessBinder = ConstantStruct::get(
1684  LivenessTy, Initializer->getAggregateElement(0u),
1685  ConstantExpr::getPointerCast(Metadata, IntptrTy), nullptr);
1686  GlobalVariable *Liveness = new GlobalVariable(
1687  M, LivenessTy, false, GlobalVariable::InternalLinkage, LivenessBinder,
1688  Twine("__asan_binder_") + G->getName());
1689  Liveness->setSection("__DATA,__asan_liveness,regular,live_support");
1690  LivenessGlobals[i] = Liveness;
1691  }
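  // Editor's illustrative aside (not part of the original source): each
  // liveness binder created above is a two-pointer struct, roughly
  //   { <address of the instrumented global>, <address of its ASan metadata> }
  // placed in __DATA,__asan_liveness with the live_support attribute, so ld64
  // keeps the metadata alive exactly as long as the global it describes
  // survives dead stripping.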
1692 
1693  // Update llvm.compiler.used, adding the new liveness globals. This is
1694  // needed so that these variables stay alive during LTO. The alternative
1695  // would be to have the linker handle the LTO symbols, but libLTO's
1696  // current API does not expose access to the section for each symbol.
1697  if (!LivenessGlobals.empty())
1698  appendToCompilerUsed(M, LivenessGlobals);
1699 
1700  // RegisteredFlag serves two purposes. First, we can pass it to dladdr()
1701  // to look up the loaded image that contains it. Second, we can store in it
1702  // whether registration has already occurred, to prevent duplicate
1703  // registration.
1704  //
1705  // Common linkage ensures that there is only one global per shared library.
1706  GlobalVariable *RegisteredFlag = new GlobalVariable(
1707  M, IntptrTy, false, GlobalVariable::CommonLinkage,
1710 
1711  IRB.CreateCall(AsanRegisterImageGlobals,
1712  {IRB.CreatePointerCast(RegisteredFlag, IntptrTy)});
1713 
1714  // We also need to unregister globals at the end, e.g., when a shared library
1715  // gets closed.
1716  IRBuilder<> IRB_Dtor = CreateAsanModuleDtor(M);
1717  IRB_Dtor.CreateCall(AsanUnregisterImageGlobals,
1718  {IRB.CreatePointerCast(RegisteredFlag, IntptrTy)});
1719 }
1720 
1721 void AddressSanitizerModule::InstrumentGlobalsWithMetadataArray(
1722  IRBuilder<> &IRB, Module &M, ArrayRef<GlobalVariable *> ExtendedGlobals,
1723  ArrayRef<Constant *> MetadataInitializers) {
1724  assert(ExtendedGlobals.size() == MetadataInitializers.size());
1725  unsigned N = ExtendedGlobals.size();
1726  assert(N > 0);
1727 
1728  // On platforms that don't have a custom metadata section, we emit an array
1729  // of global metadata structures.
1730  ArrayType *ArrayOfGlobalStructTy =
1731  ArrayType::get(MetadataInitializers[0]->getType(), N);
1732  auto AllGlobals = new GlobalVariable(
1733  M, ArrayOfGlobalStructTy, false, GlobalVariable::InternalLinkage,
1734  ConstantArray::get(ArrayOfGlobalStructTy, MetadataInitializers), "");
1735 
1736  IRB.CreateCall(AsanRegisterGlobals,
1737  {IRB.CreatePointerCast(AllGlobals, IntptrTy),
1738  ConstantInt::get(IntptrTy, N)});
1739 
1740  // We also need to unregister globals at the end, e.g., when a shared library
1741  // gets closed.
1742  IRBuilder<> IRB_Dtor = CreateAsanModuleDtor(M);
1743  IRB_Dtor.CreateCall(AsanUnregisterGlobals,
1744  {IRB.CreatePointerCast(AllGlobals, IntptrTy),
1745  ConstantInt::get(IntptrTy, N)});
1746 }
1747 
1748  // This function replaces all instrumented global variables with new variables
1749  // that have trailing redzones. It also creates a function that poisons
1750  // redzones and inserts this function into llvm.global_ctors.
1751 bool AddressSanitizerModule::InstrumentGlobals(IRBuilder<> &IRB, Module &M) {
1752  GlobalsMD.init(M);
1753 
1754  SmallVector<GlobalVariable *, 16> GlobalsToChange;
1755 
1756  for (auto &G : M.globals()) {
1757  if (ShouldInstrumentGlobal(&G)) GlobalsToChange.push_back(&G);
1758  }
1759 
1760  size_t n = GlobalsToChange.size();
1761  if (n == 0) return false;
1762 
1763  auto &DL = M.getDataLayout();
1764 
1765  // A global is described by a structure
1766  // size_t beg;
1767  // size_t size;
1768  // size_t size_with_redzone;
1769  // const char *name;
1770  // const char *module_name;
1771  // size_t has_dynamic_init;
1772  // void *source_location;
1773  // size_t odr_indicator;
1774  // We initialize an array of such structures and pass it to a run-time call.
1775  StructType *GlobalStructTy =
1776  StructType::get(IntptrTy, IntptrTy, IntptrTy, IntptrTy, IntptrTy,
1777  IntptrTy, IntptrTy, IntptrTy, nullptr);
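  // Editor's illustrative aside (not part of the original source): on a 64-bit
  // target each descriptor built from GlobalStructTy corresponds roughly to
  // the following C-style view (field names taken from the comment above; the
  // struct name is hypothetical):
  //
  //   struct __asan_global_descriptor {
  //     uintptr_t beg;                // address of the instrumented global
  //     uintptr_t size;               // payload size in bytes
  //     uintptr_t size_with_redzone;  // payload plus right redzone
  //     uintptr_t name;               // pointer to the global's name string
  //     uintptr_t module_name;        // pointer to the module name string
  //     uintptr_t has_dynamic_init;   // non-zero if dynamically initialized
  //     uintptr_t source_location;    // pointer to location info, or 0
  //     uintptr_t odr_indicator;      // address of the ODR indicator, or 0
  //   };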
1778  SmallVector<GlobalVariable *, 16> NewGlobals(n);
1779  SmallVector<Constant *, 16> Initializers(n);
1780 
1781  bool HasDynamicallyInitializedGlobals = false;
1782 
1783  // We shouldn't merge module names, as this string serves as a unique
1784  // module ID at runtime.
1785  GlobalVariable *ModuleName = createPrivateGlobalForString(
1786  M, M.getModuleIdentifier(), /*AllowMerging*/ false);
1787 
1788  for (size_t i = 0; i < n; i++) {
1789  static const uint64_t kMaxGlobalRedzone = 1 << 18;
1790  GlobalVariable *G = GlobalsToChange[i];
1791 
1792  auto MD = GlobalsMD.get(G);
1793  StringRef NameForGlobal = G->getName();
1794  // Create string holding the global name (use global name from metadata
1795  // if it's available, otherwise just write the name of global variable).
1796  GlobalVariable *Name = createPrivateGlobalForString(
1797  M, MD.Name.empty() ? NameForGlobal : MD.Name,
1798  /*AllowMerging*/ true);
1799 
1800  Type *Ty = G->getValueType();
1801  uint64_t SizeInBytes = DL.getTypeAllocSize(Ty);
1802  uint64_t MinRZ = MinRedzoneSizeForGlobal();
1803  // MinRZ <= RZ <= kMaxGlobalRedzone
1804  // and trying to make RZ to be ~ 1/4 of SizeInBytes.
1805  uint64_t RZ = std::max(
1806  MinRZ, std::min(kMaxGlobalRedzone, (SizeInBytes / MinRZ / 4) * MinRZ));
1807  uint64_t RightRedzoneSize = RZ;
1808  // Round up to MinRZ
1809  if (SizeInBytes % MinRZ) RightRedzoneSize += MinRZ - (SizeInBytes % MinRZ);
1810  assert(((RightRedzoneSize + SizeInBytes) % MinRZ) == 0);
1811  Type *RightRedZoneTy = ArrayType::get(IRB.getInt8Ty(), RightRedzoneSize);
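  // Editor's illustrative aside (not part of the original source): a worked
  // example of the redzone sizing above, assuming MinRZ == 32 and
  // kMaxGlobalRedzone == 1 << 18:
  //
  //   SizeInBytes = 17:    RZ = max(32, min(1 << 18, (17 / 32 / 4) * 32))  = 32
  //                        17 % 32 != 0, so RightRedzoneSize = 32 + (32 - 17) = 47
  //                        total = 17 + 47 = 64, a multiple of MinRZ
  //   SizeInBytes = 1024:  RZ = max(32, min(1 << 18, (1024 / 32 / 4) * 32)) = 256
  //                        1024 % 32 == 0, so RightRedzoneSize stays 256
  //                        total = 1024 + 256 = 1280, again a multiple of MinRZ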
1812 
1813  StructType *NewTy = StructType::get(Ty, RightRedZoneTy, nullptr);
1814  Constant *NewInitializer =
1815  ConstantStruct::get(NewTy, G->getInitializer(),
1816  Constant::getNullValue(RightRedZoneTy), nullptr);
1817 
1818  // Create a new global variable with enough space for a redzone.
1819  GlobalValue::LinkageTypes Linkage = G->getLinkage();
1820  if (G->isConstant() && Linkage == GlobalValue::PrivateLinkage)
1821  Linkage = GlobalValue::InternalLinkage;
1822  GlobalVariable *NewGlobal =
1823  new GlobalVariable(M, NewTy, G->isConstant(), Linkage, NewInitializer,
1824  "", G, G->getThreadLocalMode());
1825  NewGlobal->copyAttributesFrom(G);
1826  NewGlobal->setAlignment(MinRZ);
1827 
1828  // Move null-terminated C strings to "__asan_cstring" section on Darwin.
1829  if (TargetTriple.isOSBinFormatMachO() && !G->hasSection() &&
1830  G->isConstant()) {
1831  auto Seq = dyn_cast<ConstantDataSequential>(G->getInitializer());
1832  if (Seq && Seq->isCString())
1833  NewGlobal->setSection("__TEXT,__asan_cstring,regular");
1834  }
1835 
1836  // Transfer the debug info. The payload starts at offset zero so we can
1837  // copy the debug info over as is.
1839  G->getDebugInfo(GVs);
1840  for (auto *GV : GVs)
1841  NewGlobal->addDebugInfo(GV);
1842 
1843  Value *Indices2[2];
1844  Indices2[0] = IRB.getInt32(0);
1845  Indices2[1] = IRB.getInt32(0);
1846 
1847  G->replaceAllUsesWith(
1848  ConstantExpr::getGetElementPtr(NewTy, NewGlobal, Indices2, true));
1849  NewGlobal->takeName(G);
1850  G->eraseFromParent();
1851  NewGlobals[i] = NewGlobal;
1852 
1853  Constant *SourceLoc;
1854  if (!MD.SourceLoc.empty()) {
1855  auto SourceLocGlobal = createPrivateGlobalForSourceLoc(M, MD.SourceLoc);
1856  SourceLoc = ConstantExpr::getPointerCast(SourceLocGlobal, IntptrTy);
1857  } else {
1858  SourceLoc = ConstantInt::get(IntptrTy, 0);
1859  }
1860 
1861  Constant *ODRIndicator = ConstantExpr::getNullValue(IRB.getInt8PtrTy());
1862  GlobalValue *InstrumentedGlobal = NewGlobal;
1863 
1864  bool CanUsePrivateAliases =
1865  TargetTriple.isOSBinFormatELF() || TargetTriple.isOSBinFormatMachO();
1866  if (CanUsePrivateAliases && ClUsePrivateAliasForGlobals) {
1867  // Create local alias for NewGlobal to avoid crash on ODR between
1868  // instrumented and non-instrumented libraries.
1870  NameForGlobal + M.getName(), NewGlobal);
1871 
1872  // With local aliases, we need to provide another externally visible
1873  // symbol __odr_asan_XXX to detect ODR violation.
1874  auto *ODRIndicatorSym =
1875  new GlobalVariable(M, IRB.getInt8Ty(), false, Linkage,
1877  kODRGenPrefix + NameForGlobal, nullptr,
1878  NewGlobal->getThreadLocalMode());
1879 
1880  // Set meaningful attributes for indicator symbol.
1881  ODRIndicatorSym->setVisibility(NewGlobal->getVisibility());
1882  ODRIndicatorSym->setDLLStorageClass(NewGlobal->getDLLStorageClass());
1883  ODRIndicatorSym->setAlignment(1);
1884  ODRIndicator = ODRIndicatorSym;
1885  InstrumentedGlobal = GA;
1886  }
1887 
1888  Constant *Initializer = ConstantStruct::get(
1889  GlobalStructTy,
1890  ConstantExpr::getPointerCast(InstrumentedGlobal, IntptrTy),
1891  ConstantInt::get(IntptrTy, SizeInBytes),
1892  ConstantInt::get(IntptrTy, SizeInBytes + RightRedzoneSize),
1893  ConstantExpr::getPointerCast(Name, IntptrTy),
1894  ConstantExpr::getPointerCast(ModuleName, IntptrTy),
1895  ConstantInt::get(IntptrTy, MD.IsDynInit), SourceLoc,
1896  ConstantExpr::getPointerCast(ODRIndicator, IntptrTy), nullptr);
1897 
1898  if (ClInitializers && MD.IsDynInit) HasDynamicallyInitializedGlobals = true;
1899 
1900  DEBUG(dbgs() << "NEW GLOBAL: " << *NewGlobal << "\n");
1901 
1902  Initializers[i] = Initializer;
1903  }
1904 
1905  if (TargetTriple.isOSBinFormatCOFF()) {
1906  InstrumentGlobalsCOFF(IRB, M, NewGlobals, Initializers);
1907  } else if (ShouldUseMachOGlobalsSection()) {
1908  InstrumentGlobalsMachO(IRB, M, NewGlobals, Initializers);
1909  } else {
1910  InstrumentGlobalsWithMetadataArray(IRB, M, NewGlobals, Initializers);
1911  }
1912 
1913  // Create calls for poisoning before initializers run and unpoisoning after.
1914  if (HasDynamicallyInitializedGlobals)
1915  createInitializerPoisonCalls(M, ModuleName);
1916 
1917  DEBUG(dbgs() << M);
1918  return true;
1919 }
1920 
1921 bool AddressSanitizerModule::runOnModule(Module &M) {
1922  C = &(M.getContext());
1923  int LongSize = M.getDataLayout().getPointerSizeInBits();
1924  IntptrTy = Type::getIntNTy(*C, LongSize);
1925  TargetTriple = Triple(M.getTargetTriple());
1926  Mapping = getShadowMapping(TargetTriple, LongSize, CompileKernel);
1927  initializeCallbacks(M);
1928 
1929  bool Changed = false;
1930 
1931  // TODO(glider): temporarily disabled globals instrumentation for KASan.
1932  if (ClGlobals && !CompileKernel) {
1933  Function *CtorFunc = M.getFunction(kAsanModuleCtorName);
1934  assert(CtorFunc);
1935  IRBuilder<> IRB(CtorFunc->getEntryBlock().getTerminator());
1936  Changed |= InstrumentGlobals(IRB, M);
1937  }
1938 
1939  return Changed;
1940 }
1941 
1942 void AddressSanitizer::initializeCallbacks(Module &M) {
1943  IRBuilder<> IRB(*C);
1944  // Create __asan_report* callbacks.
1945  // IsWrite, TypeSize and Exp are encoded in the function name.
1946  for (int Exp = 0; Exp < 2; Exp++) {
1947  for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
1948  const std::string TypeStr = AccessIsWrite ? "store" : "load";
1949  const std::string ExpStr = Exp ? "exp_" : "";
1950  const std::string SuffixStr = CompileKernel ? "N" : "_n";
1951  const std::string EndingStr = Recover ? "_noabort" : "";
1952  Type *ExpType = Exp ? Type::getInt32Ty(*C) : nullptr;
1953  AsanErrorCallbackSized[AccessIsWrite][Exp] =
1954  checkSanitizerInterfaceFunction(M.getOrInsertFunction(
1955  kAsanReportErrorTemplate + ExpStr + TypeStr + SuffixStr + EndingStr,
1956  IRB.getVoidTy(), IntptrTy, IntptrTy, ExpType, nullptr));
1957  AsanMemoryAccessCallbackSized[AccessIsWrite][Exp] =
1958  checkSanitizerInterfaceFunction(M.getOrInsertFunction(
1959  ClMemoryAccessCallbackPrefix + ExpStr + TypeStr + "N" + EndingStr,
1960  IRB.getVoidTy(), IntptrTy, IntptrTy, ExpType, nullptr));
1961  for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes;
1962  AccessSizeIndex++) {
1963  const std::string Suffix = TypeStr + itostr(1ULL << AccessSizeIndex);
1964  AsanErrorCallback[AccessIsWrite][Exp][AccessSizeIndex] =
1965  checkSanitizerInterfaceFunction(M.getOrInsertFunction(
1966  kAsanReportErrorTemplate + ExpStr + Suffix + EndingStr,
1967  IRB.getVoidTy(), IntptrTy, ExpType, nullptr));
1968  AsanMemoryAccessCallback[AccessIsWrite][Exp][AccessSizeIndex] =
1969  checkSanitizerInterfaceFunction(M.getOrInsertFunction(
1970  ClMemoryAccessCallbackPrefix + ExpStr + Suffix + EndingStr,
1971  IRB.getVoidTy(), IntptrTy, ExpType, nullptr));
1972  }
1973  }
1974  }
1975 
1976  const std::string MemIntrinCallbackPrefix =
1977  CompileKernel ? std::string("") : ClMemoryAccessCallbackPrefix;
1979  MemIntrinCallbackPrefix + "memmove", IRB.getInt8PtrTy(),
1980  IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IntptrTy, nullptr));
1982  MemIntrinCallbackPrefix + "memcpy", IRB.getInt8PtrTy(),
1983  IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IntptrTy, nullptr));
1985  MemIntrinCallbackPrefix + "memset", IRB.getInt8PtrTy(),
1986  IRB.getInt8PtrTy(), IRB.getInt32Ty(), IntptrTy, nullptr));
1987 
1988  AsanHandleNoReturnFunc = checkSanitizerInterfaceFunction(
1990 
1992  kAsanPtrCmp, IRB.getVoidTy(), IntptrTy, IntptrTy, nullptr));
1994  kAsanPtrSub, IRB.getVoidTy(), IntptrTy, IntptrTy, nullptr));
1995  // We insert an empty inline asm after __asan_report* to avoid callback merge.
1996  EmptyAsm = InlineAsm::get(FunctionType::get(IRB.getVoidTy(), false),
1997  StringRef(""), StringRef(""),
1998  /*hasSideEffects=*/true);
1999 }
2000 
2001 // virtual
2002 bool AddressSanitizer::doInitialization(Module &M) {
2003  // Initialize the private fields. No one has accessed them before.
2004 
2005  GlobalsMD.init(M);
2006 
2007  C = &(M.getContext());
2008  LongSize = M.getDataLayout().getPointerSizeInBits();
2009  IntptrTy = Type::getIntNTy(*C, LongSize);
2010  TargetTriple = Triple(M.getTargetTriple());
2011 
2012  if (!CompileKernel) {
2013  std::tie(AsanCtorFunction, AsanInitFunction) =
2016  /*InitArgTypes=*/{}, /*InitArgs=*/{}, kAsanVersionCheckName);
2017  appendToGlobalCtors(M, AsanCtorFunction, kAsanCtorAndDtorPriority);
2018  }
2019  Mapping = getShadowMapping(TargetTriple, LongSize, CompileKernel);
2020  return true;
2021 }
2022 
2023 bool AddressSanitizer::doFinalization(Module &M) {
2024  GlobalsMD.reset();
2025  return false;
2026 }
2027 
2028 bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(Function &F) {
2029  // For each NSObject descendant having a +load method, this method is invoked
2030  // by the ObjC runtime before any of the static constructors is called.
2031  // Therefore we need to instrument such methods with a call to __asan_init
2032  // at the beginning in order to initialize our runtime before any access to
2033  // the shadow memory.
2034  // We cannot just ignore these methods, because they may call other
2035  // instrumented functions.
2036  if (F.getName().find(" load]") != std::string::npos) {
2037  IRBuilder<> IRB(&F.front(), F.front().begin());
2038  IRB.CreateCall(AsanInitFunction, {});
2039  return true;
2040  }
2041  return false;
2042 }
2043 
2044 void AddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(Function &F) {
2045  // Generate code only when dynamic addressing is needed.
2046  if (Mapping.Offset != kDynamicShadowSentinel)
2047  return;
2048 
2049  IRBuilder<> IRB(&F.front().front());
2050  Value *GlobalDynamicAddress = F.getParent()->getOrInsertGlobal(
2051  kAsanShadowMemoryDynamicAddress, IntptrTy);
2052  LocalDynamicShadow = IRB.CreateLoad(GlobalDynamicAddress);
2053 }
2054 
2055 void AddressSanitizer::markEscapedLocalAllocas(Function &F) {
2056  // Find the one possible call to llvm.localescape and pre-mark allocas passed
2057  // to it as uninteresting. This assumes we haven't started processing allocas
2058  // yet. This check is done up front because iterating the use list in
2059  // isInterestingAlloca would be algorithmically slower.
2060  assert(ProcessedAllocas.empty() && "must process localescape before allocas");
2061 
2062  // Try to get the declaration of llvm.localescape. If it's not in the module,
2063  // we can exit early.
2064  if (!F.getParent()->getFunction("llvm.localescape")) return;
2065 
2066  // Look for a call to llvm.localescape call in the entry block. It can't be in
2067  // any other block.
2068  for (Instruction &I : F.getEntryBlock()) {
2069  IntrinsicInst *II = dyn_cast<IntrinsicInst>(&I);
2070  if (II && II->getIntrinsicID() == Intrinsic::localescape) {
2071  // We found a call. Mark all the allocas passed in as uninteresting.
2072  for (Value *Arg : II->arg_operands()) {
2073  AllocaInst *AI = dyn_cast<AllocaInst>(Arg->stripPointerCasts());
2074  assert(AI && AI->isStaticAlloca() &&
2075  "non-static alloca arg to localescape");
2076  ProcessedAllocas[AI] = false;
2077  }
2078  break;
2079  }
2080  }
2081 }
2082 
2083 bool AddressSanitizer::runOnFunction(Function &F) {
2084  if (&F == AsanCtorFunction) return false;
2085  if (F.getLinkage() == GlobalValue::AvailableExternallyLinkage) return false;
2086  if (!ClDebugFunc.empty() && ClDebugFunc == F.getName()) return false;
2087  if (F.getName().startswith("__asan_")) return false;
2088 
2089  bool FunctionModified = false;
2090 
2091  // If needed, insert __asan_init before checking for SanitizeAddress attr.
2092  // This function needs to be called even if the function body is not
2093  // instrumented.
2094  if (maybeInsertAsanInitAtFunctionEntry(F))
2095  FunctionModified = true;
2096 
2097  // Leave if the function doesn't need instrumentation.
2098  if (!F.hasFnAttribute(Attribute::SanitizeAddress)) return FunctionModified;
2099 
2100  DEBUG(dbgs() << "ASAN instrumenting:\n" << F << "\n");
2101 
2102  initializeCallbacks(*F.getParent());
2103  DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
2104 
2105  FunctionStateRAII CleanupObj(this);
2106 
2107  maybeInsertDynamicShadowAtFunctionEntry(F);
2108 
2109  // We can't instrument allocas used with llvm.localescape. Only static allocas
2110  // can be passed to that intrinsic.
2111  markEscapedLocalAllocas(F);
2112 
2113  // We want to instrument every address only once per basic block (unless there
2114  // are calls between uses).
2115  SmallSet<Value *, 16> TempsToInstrument;
2116  SmallVector<Instruction *, 16> ToInstrument;
2117  SmallVector<Instruction *, 8> NoReturnCalls;
2118  SmallVector<BasicBlock *, 16> AllBlocks;
2119  SmallVector<Instruction *, 16> PointerComparisonsOrSubtracts;
2120  int NumAllocas = 0;
2121  bool IsWrite;
2122  unsigned Alignment;
2123  uint64_t TypeSize;
2124  const TargetLibraryInfo *TLI =
2125  &getAnalysis<TargetLibraryInfoWrapperPass>().getTLI();
2126 
2127  // Fill the set of memory operations to instrument.
2128  for (auto &BB : F) {
2129  AllBlocks.push_back(&BB);
2130  TempsToInstrument.clear();
2131  int NumInsnsPerBB = 0;
2132  for (auto &Inst : BB) {
2133  if (LooksLikeCodeInBug11395(&Inst)) return false;
2134  Value *MaybeMask = nullptr;
2135  if (Value *Addr = isInterestingMemoryAccess(&Inst, &IsWrite, &TypeSize,
2136  &Alignment, &MaybeMask)) {
2137  if (ClOpt && ClOptSameTemp) {
2138  // If we have a mask, skip instrumentation if we've already
2139  // instrumented the full object. But don't add to TempsToInstrument
2140  // because we might get another load/store with a different mask.
2141  if (MaybeMask) {
2142  if (TempsToInstrument.count(Addr))
2143  continue; // We've seen this (whole) temp in the current BB.
2144  } else {
2145  if (!TempsToInstrument.insert(Addr).second)
2146  continue; // We've seen this temp in the current BB.
2147  }
2148  }
2149  } else if (ClInvalidPointerPairs &&
2150  isInterestingPointerComparisonOrSubtraction(&Inst)) {
2151  PointerComparisonsOrSubtracts.push_back(&Inst);
2152  continue;
2153  } else if (isa<MemIntrinsic>(Inst)) {
2154  // ok, take it.
2155  } else {
2156  if (isa<AllocaInst>(Inst)) NumAllocas++;
2157  CallSite CS(&Inst);
2158  if (CS) {
2159  // A call inside BB.
2160  TempsToInstrument.clear();
2161  if (CS.doesNotReturn()) NoReturnCalls.push_back(CS.getInstruction());
2162  }
2163  if (CallInst *CI = dyn_cast<CallInst>(&Inst))
2164  maybeMarkSanitizerLibraryCallNoBuiltin(CI, TLI);
2165  continue;
2166  }
2167  ToInstrument.push_back(&Inst);
2168  NumInsnsPerBB++;
2169  if (NumInsnsPerBB >= ClMaxInsnsToInstrumentPerBB) break;
2170  }
2171  }
2172 
2173  bool UseCalls =
2174  CompileKernel ||
2177  const DataLayout &DL = F.getParent()->getDataLayout();
2178  ObjectSizeOffsetVisitor ObjSizeVis(DL, TLI, F.getContext(),
2179  /*RoundToAlign=*/true);
2180 
2181  // Instrument.
2182  int NumInstrumented = 0;
2183  for (auto Inst : ToInstrument) {
2184  if (ClDebugMin < 0 || ClDebugMax < 0 ||
2185  (NumInstrumented >= ClDebugMin && NumInstrumented <= ClDebugMax)) {
2186  if (isInterestingMemoryAccess(Inst, &IsWrite, &TypeSize, &Alignment))
2187  instrumentMop(ObjSizeVis, Inst, UseCalls,
2188  F.getParent()->getDataLayout());
2189  else
2190  instrumentMemIntrinsic(cast<MemIntrinsic>(Inst));
2191  }
2192  NumInstrumented++;
2193  }
2194 
2195  FunctionStackPoisoner FSP(F, *this);
2196  bool ChangedStack = FSP.runOnFunction();
2197 
2198  // We must unpoison the stack before every NoReturn call (throw, _exit, etc).
2199  // See e.g. http://code.google.com/p/address-sanitizer/issues/detail?id=37
2200  for (auto CI : NoReturnCalls) {
2201  IRBuilder<> IRB(CI);
2202  IRB.CreateCall(AsanHandleNoReturnFunc, {});
2203  }
2204 
2205  for (auto Inst : PointerComparisonsOrSubtracts) {
2206  instrumentPointerComparisonOrSubtraction(Inst);
2207  NumInstrumented++;
2208  }
2209 
2210  if (NumInstrumented > 0 || ChangedStack || !NoReturnCalls.empty())
2211  FunctionModified = true;
2212 
2213  DEBUG(dbgs() << "ASAN done instrumenting: " << FunctionModified << " "
2214  << F << "\n");
2215 
2216  return FunctionModified;
2217 }
2218 
2219 // Workaround for bug 11395: we don't want to instrument stack in functions
2220 // with large assembly blobs (32-bit only), otherwise reg alloc may crash.
2221 // FIXME: remove once the bug 11395 is fixed.
2222 bool AddressSanitizer::LooksLikeCodeInBug11395(Instruction *I) {
2223  if (LongSize != 32) return false;
2224  CallInst *CI = dyn_cast<CallInst>(I);
2225  if (!CI || !CI->isInlineAsm()) return false;
2226  if (CI->getNumArgOperands() <= 5) return false;
2227  // We have inline assembly with quite a few arguments.
2228  return true;
2229 }
2230 
2231 void FunctionStackPoisoner::initializeCallbacks(Module &M) {
2232  IRBuilder<> IRB(*C);
2233  for (int i = 0; i <= kMaxAsanStackMallocSizeClass; i++) {
2234  std::string Suffix = itostr(i);
2235  AsanStackMallocFunc[i] = checkSanitizerInterfaceFunction(
2237  IntptrTy, nullptr));
2238  AsanStackFreeFunc[i] = checkSanitizerInterfaceFunction(
2240  IRB.getVoidTy(), IntptrTy, IntptrTy, nullptr));
2241  }
2242  if (ASan.UseAfterScope) {
2243  AsanPoisonStackMemoryFunc = checkSanitizerInterfaceFunction(
2245  IntptrTy, IntptrTy, nullptr));
2246  AsanUnpoisonStackMemoryFunc = checkSanitizerInterfaceFunction(
2248  IntptrTy, IntptrTy, nullptr));
2249  }
2250 
2251  for (size_t Val : {0x00, 0xf1, 0xf2, 0xf3, 0xf5, 0xf8}) {
2252  std::ostringstream Name;
2253  Name << kAsanSetShadowPrefix;
2254  Name << std::setw(2) << std::setfill('0') << std::hex << Val;
2255  AsanSetShadowFunc[Val] =
2257  Name.str(), IRB.getVoidTy(), IntptrTy, IntptrTy, nullptr));
2258  }
2259 
2260  AsanAllocaPoisonFunc = checkSanitizerInterfaceFunction(M.getOrInsertFunction(
2261  kAsanAllocaPoison, IRB.getVoidTy(), IntptrTy, IntptrTy, nullptr));
2262  AsanAllocasUnpoisonFunc =
2264  kAsanAllocasUnpoison, IRB.getVoidTy(), IntptrTy, IntptrTy, nullptr));
2265 }
2266 
2267 void FunctionStackPoisoner::copyToShadowInline(ArrayRef<uint8_t> ShadowMask,
2268  ArrayRef<uint8_t> ShadowBytes,
2269  size_t Begin, size_t End,
2270  IRBuilder<> &IRB,
2271  Value *ShadowBase) {
2272  if (Begin >= End)
2273  return;
2274 
2275  const size_t LargestStoreSizeInBytes =
2276  std::min<size_t>(sizeof(uint64_t), ASan.LongSize / 8);
2277 
2278  const bool IsLittleEndian = F.getParent()->getDataLayout().isLittleEndian();
2279 
2280  // Poison the given range in shadow using the largest store size, skipping
2281  // leading and trailing zeros in ShadowMask. Zeros never change, so they need
2282  // neither poisoning nor unpoisoning. Still, we don't mind if some of them
2283  // end up in the middle of a store.
2284  for (size_t i = Begin; i < End;) {
2285  if (!ShadowMask[i]) {
2286  assert(!ShadowBytes[i]);
2287  ++i;
2288  continue;
2289  }
2290 
2291  size_t StoreSizeInBytes = LargestStoreSizeInBytes;
2292  // Fit store size into the range.
2293  while (StoreSizeInBytes > End - i)
2294  StoreSizeInBytes /= 2;
2295 
2296  // Minimize store size by trimming trailing zeros.
2297  for (size_t j = StoreSizeInBytes - 1; j && !ShadowMask[i + j]; --j) {
2298  while (j <= StoreSizeInBytes / 2)
2299  StoreSizeInBytes /= 2;
2300  }
2301 
2302  uint64_t Val = 0;
2303  for (size_t j = 0; j < StoreSizeInBytes; j++) {
2304  if (IsLittleEndian)
2305  Val |= (uint64_t)ShadowBytes[i + j] << (8 * j);
2306  else
2307  Val = (Val << 8) | ShadowBytes[i + j];
2308  }
2309 
2310  Value *Ptr = IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i));
2311  Value *Poison = IRB.getIntN(StoreSizeInBytes * 8, Val);
2312  IRB.CreateAlignedStore(
2313  Poison, IRB.CreateIntToPtr(Ptr, Poison->getType()->getPointerTo()), 1);
2314 
2315  i += StoreSizeInBytes;
2316  }
2317 }
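// Editor's illustrative aside (not part of the original source): a worked
// example of the store coalescing above, assuming a little-endian 64-bit
// target (LargestStoreSizeInBytes == 8) and
//
//   ShadowMask  = {1, 1, 1, 1, 0, 0, 0, 0}
//   ShadowBytes = {0xf1, 0xf1, 0xf1, 0xf1, 0, 0, 0, 0}
//
// At i == 0 the trailing-zero trimming halves the store size from 8 to 4, so
// a single 4-byte store of 0xf1f1f1f1 is emitted at ShadowBase + 0 and the
// four trailing zero bytes are never written.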
2318 
2319 void FunctionStackPoisoner::copyToShadow(ArrayRef<uint8_t> ShadowMask,
2320  ArrayRef<uint8_t> ShadowBytes,
2321  IRBuilder<> &IRB, Value *ShadowBase) {
2322  copyToShadow(ShadowMask, ShadowBytes, 0, ShadowMask.size(), IRB, ShadowBase);
2323 }
2324 
2325 void FunctionStackPoisoner::copyToShadow(ArrayRef<uint8_t> ShadowMask,
2326  ArrayRef<uint8_t> ShadowBytes,
2327  size_t Begin, size_t End,
2328  IRBuilder<> &IRB, Value *ShadowBase) {
2329  assert(ShadowMask.size() == ShadowBytes.size());
2330  size_t Done = Begin;
2331  for (size_t i = Begin, j = Begin + 1; i < End; i = j++) {
2332  if (!ShadowMask[i]) {
2333  assert(!ShadowBytes[i]);
2334  continue;
2335  }
2336  uint8_t Val = ShadowBytes[i];
2337  if (!AsanSetShadowFunc[Val])
2338  continue;
2339 
2340  // Skip same values.
2341  for (; j < End && ShadowMask[j] && Val == ShadowBytes[j]; ++j) {
2342  }
2343 
2344  if (j - i >= ClMaxInlinePoisoningSize) {
2345  copyToShadowInline(ShadowMask, ShadowBytes, Done, i, IRB, ShadowBase);
2346  IRB.CreateCall(AsanSetShadowFunc[Val],
2347  {IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i)),
2348  ConstantInt::get(IntptrTy, j - i)});
2349  Done = j;
2350  }
2351  }
2352 
2353  copyToShadowInline(ShadowMask, ShadowBytes, Done, End, IRB, ShadowBase);
2354 }
2355 
2356 // Fake stack allocator (asan_fake_stack.h) has 11 size classes
2357 // for every power of 2 from kMinStackMallocSize to kMaxAsanStackMallocSizeClass
2358 static int StackMallocSizeClass(uint64_t LocalStackSize) {
2359  assert(LocalStackSize <= kMaxStackMallocSize);
2360  uint64_t MaxSize = kMinStackMallocSize;
2361  for (int i = 0;; i++, MaxSize *= 2)
2362  if (LocalStackSize <= MaxSize) return i;
2363  llvm_unreachable("impossible LocalStackSize");
2364 }
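// Editor's illustrative aside (not part of the original source): with
// kMinStackMallocSize == 64, the loop above maps frame sizes to size classes
// as 1..64 -> 0, 65..128 -> 1, 129..256 -> 2, ..., up to 64K -> 10; e.g. a
// 96-byte frame falls into class 1 (the 128-byte fake-stack class).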
2365 
2366 PHINode *FunctionStackPoisoner::createPHI(IRBuilder<> &IRB, Value *Cond,
2367  Value *ValueIfTrue,
2368  Instruction *ThenTerm,
2369  Value *ValueIfFalse) {
2370  PHINode *PHI = IRB.CreatePHI(IntptrTy, 2);
2371  BasicBlock *CondBlock = cast<Instruction>(Cond)->getParent();
2372  PHI->addIncoming(ValueIfFalse, CondBlock);
2373  BasicBlock *ThenBlock = ThenTerm->getParent();
2374  PHI->addIncoming(ValueIfTrue, ThenBlock);
2375  return PHI;
2376 }
2377 
2378 Value *FunctionStackPoisoner::createAllocaForLayout(
2379  IRBuilder<> &IRB, const ASanStackFrameLayout &L, bool Dynamic) {
2380  AllocaInst *Alloca;
2381  if (Dynamic) {
2382  Alloca = IRB.CreateAlloca(IRB.getInt8Ty(),
2384  "MyAlloca");
2385  } else {
2386  Alloca = IRB.CreateAlloca(ArrayType::get(IRB.getInt8Ty(), L.FrameSize),
2387  nullptr, "MyAlloca");
2388  assert(Alloca->isStaticAlloca());
2389  }
2390  assert((ClRealignStack & (ClRealignStack - 1)) == 0);
2391  size_t FrameAlignment = std::max(L.FrameAlignment, (size_t)ClRealignStack);
2392  Alloca->setAlignment(FrameAlignment);
2393  return IRB.CreatePointerCast(Alloca, IntptrTy);
2394 }
2395 
2396 void FunctionStackPoisoner::createDynamicAllocasInitStorage() {
2397  BasicBlock &FirstBB = *F.begin();
2398  IRBuilder<> IRB(dyn_cast<Instruction>(FirstBB.begin()));
2399  DynamicAllocaLayout = IRB.CreateAlloca(IntptrTy, nullptr);
2400  IRB.CreateStore(Constant::getNullValue(IntptrTy), DynamicAllocaLayout);
2401  DynamicAllocaLayout->setAlignment(32);
2402 }
2403 
2404 void FunctionStackPoisoner::processDynamicAllocas() {
2405  if (!ClInstrumentDynamicAllocas || DynamicAllocaVec.empty()) {
2406  assert(DynamicAllocaPoisonCallVec.empty());
2407  return;
2408  }
2409 
2410  // Insert poison calls for lifetime intrinsics for dynamic allocas.
2411  for (const auto &APC : DynamicAllocaPoisonCallVec) {
2412  assert(APC.InsBefore);
2413  assert(APC.AI);
2414  assert(ASan.isInterestingAlloca(*APC.AI));
2415  assert(!APC.AI->isStaticAlloca());
2416 
2417  IRBuilder<> IRB(APC.InsBefore);
2418  poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
2419  // Dynamic allocas will be unpoisoned unconditionally below in
2420  // unpoisonDynamicAllocas.
2421  // (Static allocas are flagged and handled separately in processStaticAllocas.)
2422  }
2423 
2424  // Handle dynamic allocas.
2425  createDynamicAllocasInitStorage();
2426  for (auto &AI : DynamicAllocaVec)
2427  handleDynamicAllocaCall(AI);
2428  unpoisonDynamicAllocas();
2429 }
2430 
2431 void FunctionStackPoisoner::processStaticAllocas() {
2432  if (AllocaVec.empty()) {
2433  assert(StaticAllocaPoisonCallVec.empty());
2434  return;
2435  }
2436 
2437  int StackMallocIdx = -1;
2438  DebugLoc EntryDebugLocation;
2439  if (auto SP = F.getSubprogram())
2440  EntryDebugLocation = DebugLoc::get(SP->getScopeLine(), 0, SP);
2441 
2442  Instruction *InsBefore = AllocaVec[0];
2443  IRBuilder<> IRB(InsBefore);
2444  IRB.SetCurrentDebugLocation(EntryDebugLocation);
2445 
2446  // Make sure non-instrumented allocas stay in the entry block. Otherwise,
2447  // debug info is broken, because only entry-block allocas are treated as
2448  // regular stack slots.
2449  auto InsBeforeB = InsBefore->getParent();
2450  assert(InsBeforeB == &F.getEntryBlock());
2451  for (auto *AI : StaticAllocasToMoveUp)
2452  if (AI->getParent() == InsBeforeB)
2453  AI->moveBefore(InsBefore);
2454 
2455  // If we have a call to llvm.localescape, keep it in the entry block.
2456  if (LocalEscapeCall) LocalEscapeCall->moveBefore(InsBefore);
2457 
2458  SmallVector<ASanStackVariableDescription, 16> SVD;
2459  SVD.reserve(AllocaVec.size());
2460  for (AllocaInst *AI : AllocaVec) {
2461  ASanStackVariableDescription D = {AI->getName().data(),
2462  ASan.getAllocaSizeInBytes(*AI),
2463  0,
2464  AI->getAlignment(),
2465  AI,
2466  0,
2467  0};
2468  SVD.push_back(D);
2469  }
2470 
2471  // Minimal header size (left redzone) is 4 pointers,
2472  // i.e. 32 bytes on 64-bit platforms and 16 bytes on 32-bit platforms.
2473  size_t MinHeaderSize = ASan.LongSize / 2;
2474  const ASanStackFrameLayout &L =
2475  ComputeASanStackFrameLayout(SVD, 1ULL << Mapping.Scale, MinHeaderSize);
2476 
2477  // Build AllocaToSVDMap for ASanStackVariableDescription lookup.
2478  DenseMap<const AllocaInst *, ASanStackVariableDescription *> AllocaToSVDMap;
2479  for (auto &Desc : SVD)
2480  AllocaToSVDMap[Desc.AI] = &Desc;
2481 
2482  // Update SVD with information from lifetime intrinsics.
2483  for (const auto &APC : StaticAllocaPoisonCallVec) {
2484  assert(APC.InsBefore);
2485  assert(APC.AI);
2486  assert(ASan.isInterestingAlloca(*APC.AI));
2487  assert(APC.AI->isStaticAlloca());
2488 
2489  ASanStackVariableDescription &Desc = *AllocaToSVDMap[APC.AI];
2490  Desc.LifetimeSize = Desc.Size;
2491  if (const DILocation *FnLoc = EntryDebugLocation.get()) {
2492  if (const DILocation *LifetimeLoc = APC.InsBefore->getDebugLoc().get()) {
2493  if (LifetimeLoc->getFile() == FnLoc->getFile())
2494  if (unsigned Line = LifetimeLoc->getLine())
2495  Desc.Line = std::min(Desc.Line ? Desc.Line : Line, Line);
2496  }
2497  }
2498  }
2499 
2500  auto DescriptionString = ComputeASanStackFrameDescription(SVD);
2501  DEBUG(dbgs() << DescriptionString << " --- " << L.FrameSize << "\n");
2502  uint64_t LocalStackSize = L.FrameSize;
2503  bool DoStackMalloc = ClUseAfterReturn && !ASan.CompileKernel &&
2504  LocalStackSize <= kMaxStackMallocSize;
2505  bool DoDynamicAlloca = ClDynamicAllocaStack;
2506  // Don't do dynamic alloca or stack malloc if:
2507  // 1) There is inline asm: too often it makes assumptions on which registers
2508  // are available.
2509  // 2) There is a returns_twice call (typically setjmp), which is
2510  // optimization-hostile, and doesn't play well with introduced indirect
2511  // register-relative calculation of local variable addresses.
2512  DoDynamicAlloca &= !HasNonEmptyInlineAsm && !HasReturnsTwiceCall;
2513  DoStackMalloc &= !HasNonEmptyInlineAsm && !HasReturnsTwiceCall;
2514 
2515  Value *StaticAlloca =
2516  DoDynamicAlloca ? nullptr : createAllocaForLayout(IRB, L, false);
2517 
2518  Value *FakeStack;
2519  Value *LocalStackBase;
2520 
2521  if (DoStackMalloc) {
2522  // void *FakeStack = __asan_option_detect_stack_use_after_return
2523  // ? __asan_stack_malloc_N(LocalStackSize)
2524  // : nullptr;
2525  // void *LocalStackBase = (FakeStack) ? FakeStack : alloca(LocalStackSize);
2526  Constant *OptionDetectUseAfterReturn = F.getParent()->getOrInsertGlobal(
2528  Value *UseAfterReturnIsEnabled =
2529  IRB.CreateICmpNE(IRB.CreateLoad(OptionDetectUseAfterReturn),
2531  Instruction *Term =
2532  SplitBlockAndInsertIfThen(UseAfterReturnIsEnabled, InsBefore, false);
2533  IRBuilder<> IRBIf(Term);
2534  IRBIf.SetCurrentDebugLocation(EntryDebugLocation);
2535  StackMallocIdx = StackMallocSizeClass(LocalStackSize);
2536  assert(StackMallocIdx <= kMaxAsanStackMallocSizeClass);
2537  Value *FakeStackValue =
2538  IRBIf.CreateCall(AsanStackMallocFunc[StackMallocIdx],
2539  ConstantInt::get(IntptrTy, LocalStackSize));
2540  IRB.SetInsertPoint(InsBefore);
2541  IRB.SetCurrentDebugLocation(EntryDebugLocation);
2542  FakeStack = createPHI(IRB, UseAfterReturnIsEnabled, FakeStackValue, Term,
2543  ConstantInt::get(IntptrTy, 0));
2544 
2545  Value *NoFakeStack =
2546  IRB.CreateICmpEQ(FakeStack, Constant::getNullValue(IntptrTy));
2547  Term = SplitBlockAndInsertIfThen(NoFakeStack, InsBefore, false);
2548  IRBIf.SetInsertPoint(Term);
2549  IRBIf.SetCurrentDebugLocation(EntryDebugLocation);
2550  Value *AllocaValue =
2551  DoDynamicAlloca ? createAllocaForLayout(IRBIf, L, true) : StaticAlloca;
2552  IRB.SetInsertPoint(InsBefore);
2553  IRB.SetCurrentDebugLocation(EntryDebugLocation);
2554  LocalStackBase = createPHI(IRB, NoFakeStack, AllocaValue, Term, FakeStack);
2555  } else {
2556  // void *FakeStack = nullptr;
2557  // void *LocalStackBase = alloca(LocalStackSize);
2558  FakeStack = ConstantInt::get(IntptrTy, 0);
2559  LocalStackBase =
2560  DoDynamicAlloca ? createAllocaForLayout(IRB, L, true) : StaticAlloca;
2561  }
2562 
2563  // Replace Alloca instructions with base+offset.
2564  for (const auto &Desc : SVD) {
2565  AllocaInst *AI = Desc.AI;
2566  Value *NewAllocaPtr = IRB.CreateIntToPtr(
2567  IRB.CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy, Desc.Offset)),
2568  AI->getType());
2569  replaceDbgDeclareForAlloca(AI, NewAllocaPtr, DIB, /*Deref=*/true);
2570  AI->replaceAllUsesWith(NewAllocaPtr);
2571  }
2572 
2573  // The left-most redzone has enough space for at least 4 pointers.
2574  // Write the Magic value to redzone[0].
2575  Value *BasePlus0 = IRB.CreateIntToPtr(LocalStackBase, IntptrPtrTy);
2576  IRB.CreateStore(ConstantInt::get(IntptrTy, kCurrentStackFrameMagic),
2577  BasePlus0);
2578  // Write the frame description constant to redzone[1].
2579  Value *BasePlus1 = IRB.CreateIntToPtr(
2580  IRB.CreateAdd(LocalStackBase,
2581  ConstantInt::get(IntptrTy, ASan.LongSize / 8)),
2582  IntptrPtrTy);
2583  GlobalVariable *StackDescriptionGlobal =
2584  createPrivateGlobalForString(*F.getParent(), DescriptionString,
2585  /*AllowMerging*/ true);
2586  Value *Description = IRB.CreatePointerCast(StackDescriptionGlobal, IntptrTy);
2587  IRB.CreateStore(Description, BasePlus1);
2588  // Write the PC to redzone[2].
2589  Value *BasePlus2 = IRB.CreateIntToPtr(
2590  IRB.CreateAdd(LocalStackBase,
2591  ConstantInt::get(IntptrTy, 2 * ASan.LongSize / 8)),
2592  IntptrPtrTy);
2593  IRB.CreateStore(IRB.CreatePointerCast(&F, IntptrTy), BasePlus2);
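  // Editor's illustrative aside (not part of the original source): after the
  // three stores above, the header of the left redzone on a 64-bit target is
  //
  //   [LocalStackBase +  0]  0x41B58AB3            (kCurrentStackFrameMagic)
  //   [LocalStackBase +  8]  &<frame description>  (names/offsets of locals)
  //   [LocalStackBase + 16]  &F                    (identifies the function)
  //
  // which the runtime uses to symbolize stack-related reports; the minimal
  // 4-pointer header leaves the remaining slot unwritten here.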
2594 
2595  const auto &ShadowAfterScope = GetShadowBytesAfterScope(SVD, L);
2596 
2597  // Poison the stack red zones at the entry.
2598  Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB);
2599  // As mask we must use most poisoned case: red zones and after scope.
2600  // As bytes we can use either the same or just red zones only.
2601  copyToShadow(ShadowAfterScope, ShadowAfterScope, IRB, ShadowBase);
2602 
2603  if (!StaticAllocaPoisonCallVec.empty()) {
2604  const auto &ShadowInScope = GetShadowBytes(SVD, L);
2605 
2606  // Poison static allocas near lifetime intrinsics.
2607  for (const auto &APC : StaticAllocaPoisonCallVec) {
2608  const ASanStackVariableDescription &Desc = *AllocaToSVDMap[APC.AI];
2609  assert(Desc.Offset % L.Granularity == 0);
2610  size_t Begin = Desc.Offset / L.Granularity;
2611  size_t End = Begin + (APC.Size + L.Granularity - 1) / L.Granularity;
2612 
2613  IRBuilder<> IRB(APC.InsBefore);
2614  copyToShadow(ShadowAfterScope,
2615  APC.DoPoison ? ShadowAfterScope : ShadowInScope, Begin, End,
2616  IRB, ShadowBase);
2617  }
2618  }
2619 
2620  SmallVector<uint8_t, 64> ShadowClean(ShadowAfterScope.size(), 0);
2621  SmallVector<uint8_t, 64> ShadowAfterReturn;
2622 
2623  // (Un)poison the stack before all ret instructions.
2624  for (auto Ret : RetVec) {
2625  IRBuilder<> IRBRet(Ret);
2626  // Mark the current frame as retired.
2627  IRBRet.CreateStore(ConstantInt::get(IntptrTy, kRetiredStackFrameMagic),
2628  BasePlus0);
2629  if (DoStackMalloc) {
2630  assert(StackMallocIdx >= 0);
2631  // if FakeStack != 0 // LocalStackBase == FakeStack
2632  // // In use-after-return mode, poison the whole stack frame.
2633  // if StackMallocIdx <= 4
2634  // // For small sizes inline the whole thing:
2635  // memset(ShadowBase, kAsanStackAfterReturnMagic, ShadowSize);
2636  // **SavedFlagPtr(FakeStack) = 0
2637  // else
2638  // __asan_stack_free_N(FakeStack, LocalStackSize)
2639  // else
2640  // <This is not a fake stack; unpoison the redzones>
2641  Value *Cmp =
2642  IRBRet.CreateICmpNE(FakeStack, Constant::getNullValue(IntptrTy));
2643  TerminatorInst *ThenTerm, *ElseTerm;
2644  SplitBlockAndInsertIfThenElse(Cmp, Ret, &ThenTerm, &ElseTerm);
2645 
2646  IRBuilder<> IRBPoison(ThenTerm);
2647  if (StackMallocIdx <= 4) {
2648  int ClassSize = kMinStackMallocSize << StackMallocIdx;
2649  ShadowAfterReturn.resize(ClassSize / L.Granularity,
2651  copyToShadow(ShadowAfterReturn, ShadowAfterReturn, IRBPoison,
2652  ShadowBase);
2653  Value *SavedFlagPtrPtr = IRBPoison.CreateAdd(
2654  FakeStack,
2655  ConstantInt::get(IntptrTy, ClassSize - ASan.LongSize / 8));
2656  Value *SavedFlagPtr = IRBPoison.CreateLoad(
2657  IRBPoison.CreateIntToPtr(SavedFlagPtrPtr, IntptrPtrTy));
2658  IRBPoison.CreateStore(
2659  Constant::getNullValue(IRBPoison.getInt8Ty()),
2660  IRBPoison.CreateIntToPtr(SavedFlagPtr, IRBPoison.getInt8PtrTy()));
2661  } else {
2662  // For larger frames call __asan_stack_free_*.
2663  IRBPoison.CreateCall(
2664  AsanStackFreeFunc[StackMallocIdx],
2665  {FakeStack, ConstantInt::get(IntptrTy, LocalStackSize)});
2666  }
2667 
2668  IRBuilder<> IRBElse(ElseTerm);
2669  copyToShadow(ShadowAfterScope, ShadowClean, IRBElse, ShadowBase);
2670  } else {
2671  copyToShadow(ShadowAfterScope, ShadowClean, IRBRet, ShadowBase);
2672  }
2673  }
2674 
2675  // We are done. Remove the old unused alloca instructions.
2676  for (auto AI : AllocaVec) AI->eraseFromParent();
2677 }
2678 
2679 void FunctionStackPoisoner::poisonAlloca(Value *V, uint64_t Size,
2680  IRBuilder<> &IRB, bool DoPoison) {
2681  // For now just insert the call to ASan runtime.
2682  Value *AddrArg = IRB.CreatePointerCast(V, IntptrTy);
2683  Value *SizeArg = ConstantInt::get(IntptrTy, Size);
2684  IRB.CreateCall(
2685  DoPoison ? AsanPoisonStackMemoryFunc : AsanUnpoisonStackMemoryFunc,
2686  {AddrArg, SizeArg});
2687 }
2688 
2689 // Handling llvm.lifetime intrinsics for a given %alloca:
2690 // (1) collect all llvm.lifetime.xxx(%size, %value) describing the alloca.
2691 // (2) if %size is constant, poison memory for llvm.lifetime.end (to detect
2692 // invalid accesses) and unpoison it for llvm.lifetime.start (the memory
2693 // could be poisoned by previous llvm.lifetime.end instruction, as the
2694 // variable may go in and out of scope several times, e.g. in loops).
2695 // (3) if we poisoned at least one %alloca in a function,
2696 // unpoison the whole stack frame at function exit.
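// Editor's illustrative aside (not part of the original source): a typical
// pattern handled here, sketched in LLVM IR (names are hypothetical):
//
//   %x = alloca [32 x i8], align 8
//   %p = bitcast [32 x i8]* %x to i8*
//   call void @llvm.lifetime.start(i64 32, i8* %p)  ; unpoison %x's shadow
//   ...                                             ; %x is in scope here
//   call void @llvm.lifetime.end(i64 32, i8* %p)    ; poison %x's shadow
//
// findAllocaForValue() below walks casts, GEPs and phis (such as %p) back to
// the underlying alloca so the poison calls can be attributed to it.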
2697 
2698 AllocaInst *FunctionStackPoisoner::findAllocaForValue(Value *V) {
2699  if (AllocaInst *AI = dyn_cast<AllocaInst>(V))
2700  // We're interested only in allocas we can handle.
2701  return ASan.isInterestingAlloca(*AI) ? AI : nullptr;
2702  // See if we've already calculated (or started to calculate) alloca for a
2703  // given value.
2704  AllocaForValueMapTy::iterator I = AllocaForValue.find(V);
2705  if (I != AllocaForValue.end()) return I->second;
2706  // Store 0 while we're calculating alloca for value V to avoid
2707  // infinite recursion if the value references itself.
2708  AllocaForValue[V] = nullptr;
2709  AllocaInst *Res = nullptr;
2710  if (CastInst *CI = dyn_cast<CastInst>(V))
2711  Res = findAllocaForValue(CI->getOperand(0));
2712  else if (PHINode *PN = dyn_cast<PHINode>(V)) {
2713  for (Value *IncValue : PN->incoming_values()) {
2714  // Allow self-referencing phi-nodes.
2715  if (IncValue == PN) continue;
2716  AllocaInst *IncValueAI = findAllocaForValue(IncValue);
2717  // AI for incoming values should exist and should all be equal.
2718  if (IncValueAI == nullptr || (Res != nullptr && IncValueAI != Res))
2719  return nullptr;
2720  Res = IncValueAI;
2721  }
2722  } else if (GetElementPtrInst *EP = dyn_cast<GetElementPtrInst>(V)) {
2723  Res = findAllocaForValue(EP->getPointerOperand());
2724  } else {
2725  DEBUG(dbgs() << "Alloca search canceled on unknown instruction: " << *V << "\n");
2726  }
2727  if (Res) AllocaForValue[V] = Res;
2728  return Res;
2729 }
2730 
2731 void FunctionStackPoisoner::handleDynamicAllocaCall(AllocaInst *AI) {
2732  IRBuilder<> IRB(AI);
2733 
2734  const unsigned Align = std::max(kAllocaRzSize, AI->getAlignment());
2735  const uint64_t AllocaRedzoneMask = kAllocaRzSize - 1;
2736 
2737  Value *Zero = Constant::getNullValue(IntptrTy);
2738  Value *AllocaRzSize = ConstantInt::get(IntptrTy, kAllocaRzSize);
2739  Value *AllocaRzMask = ConstantInt::get(IntptrTy, AllocaRedzoneMask);
2740 
2741  // Since we need to extend the alloca with additional memory to place the
2742  // redzones, and OldSize is the number of allocated elements, each of
2743  // ElementSize bytes, compute the allocated memory size in bytes as
2744  // OldSize * ElementSize.
2745  const unsigned ElementSize =
2746  F.getParent()->getDataLayout().getTypeAllocSize(AI->getAllocatedType());
2747  Value *OldSize =
2748  IRB.CreateMul(IRB.CreateIntCast(AI->getArraySize(), IntptrTy, false),
2749  ConstantInt::get(IntptrTy, ElementSize));
2750 
2751  // PartialSize = OldSize % 32
2752  Value *PartialSize = IRB.CreateAnd(OldSize, AllocaRzMask);
2753 
2754  // Misalign = kAllocaRzSize - PartialSize;
2755  Value *Misalign = IRB.CreateSub(AllocaRzSize, PartialSize);
2756 
2757  // PartialPadding = Misalign != kAllocaRzSize ? Misalign : 0;
2758  Value *Cond = IRB.CreateICmpNE(Misalign, AllocaRzSize);
2759  Value *PartialPadding = IRB.CreateSelect(Cond, Misalign, Zero);
2760 
2761  // AdditionalChunkSize = Align + PartialPadding + kAllocaRzSize
2762  // Align is added to locate left redzone, PartialPadding for possible
2763  // partial redzone and kAllocaRzSize for right redzone respectively.
2764  Value *AdditionalChunkSize = IRB.CreateAdd(
2765  ConstantInt::get(IntptrTy, Align + kAllocaRzSize), PartialPadding);
2766 
2767  Value *NewSize = IRB.CreateAdd(OldSize, AdditionalChunkSize);
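  // Editor's illustrative aside (not part of the original source): a worked
  // example of the padding arithmetic above, assuming kAllocaRzSize == 32,
  // Align == 32 and OldSize == 100 bytes:
  //
  //   PartialSize         = 100 & 31     = 4
  //   Misalign            = 32 - 4       = 28
  //   PartialPadding      = 28           (since Misalign != 32)
  //   AdditionalChunkSize = 32 + 32 + 28 = 92
  //   NewSize             = 100 + 92     = 192
  //
  // i.e. 32 bytes of left redzone, 100 bytes of payload, 28 bytes of partial
  // padding and 32 bytes of right redzone, all 32-byte aligned.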
2768 
2769  // Insert new alloca with new NewSize and Align params.
2770  AllocaInst *NewAlloca = IRB.CreateAlloca(IRB.getInt8Ty(), NewSize);
2771  NewAlloca->setAlignment(Align);
2772 
2773  // NewAddress = Address + Align
2774  Value *NewAddress = IRB.CreateAdd(IRB.CreatePtrToInt(NewAlloca, IntptrTy),
2775  ConstantInt::get(IntptrTy, Align));
2776 
2777  // Insert an __asan_alloca_poison call for the newly created alloca.
2778  IRB.CreateCall(AsanAllocaPoisonFunc, {NewAddress, OldSize});
2779 
2780  // Store the last alloca's address to DynamicAllocaLayout. We'll need this
2781  // for unpoisoning it later.
2782  IRB.CreateStore(IRB.CreatePtrToInt(NewAlloca, IntptrTy), DynamicAllocaLayout);
2783 
2784  Value *NewAddressPtr = IRB.CreateIntToPtr(NewAddress, AI->getType());
2785 
2786  // Replace all uses of the address returned by the old alloca with NewAddressPtr.
2787  AI->replaceAllUsesWith(NewAddressPtr);
2788 
2789  // We are done. Erase old alloca from parent.
2790  AI->eraseFromParent();
2791 }
2792 
2793 // isSafeAccess returns true if Addr is always inbounds with respect to its
2794 // base object. For example, it is a field access or an array access with
2795 // constant inbounds index.
2796 bool AddressSanitizer::isSafeAccess(ObjectSizeOffsetVisitor &ObjSizeVis,
2797  Value *Addr, uint64_t TypeSize) const {
2798  SizeOffsetType SizeOffset = ObjSizeVis.compute(Addr);
2799  if (!ObjSizeVis.bothKnown(SizeOffset)) return false;
2800  uint64_t Size = SizeOffset.first.getZExtValue();
2801  int64_t Offset = SizeOffset.second.getSExtValue();
2802  // Three checks are required to ensure safety:
2803  // . Offset >= 0 (since the offset is given from the base ptr)
2804  // . Size >= Offset (unsigned)
2805  // . Size - Offset >= NeededSize (unsigned)
2806  return Offset >= 0 && Size >= uint64_t(Offset) &&
2807  Size - uint64_t(Offset) >= TypeSize / 8;
2808 }
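// Editor's illustrative aside (not part of the original source): a worked
// example of the three checks above for a hypothetical 16-byte object:
//
//   8-byte load at Offset 8:   8 >= 0, 16 >= 8, 16 - 8 = 8 >= 8   -> safe,
//                              so the access check can be elided
//   8-byte load at Offset 12:  12 >= 0, 16 >= 12, 16 - 12 = 4 < 8 -> not
//                              provably safe, so it stays instrumented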
If this value is smaller than the specified limit, return it, otherwise return the limit value...
Definition: APInt.h:409
static cl::opt< int > ClDebug("asan-debug", cl::desc("debug"), cl::Hidden, cl::init(0))
static cl::opt< bool > ClRecover("asan-recover", cl::desc("Enable recovery mode (continue-after-error)."), cl::Hidden, cl::init(false))
bool empty() const
Definition: Module.h:544
const Function * getParent() const
Return the enclosing method, or null if none.
Definition: BasicBlock.h:100
SmallString< 64 > ComputeASanStackFrameDescription(const SmallVectorImpl< ASanStackVariableDescription > &Vars)
A debug info location.
Definition: DebugLoc.h:34
const Instruction & front() const
Definition: BasicBlock.h:240
Metadata node.
Definition: Metadata.h:830
static cl::opt< bool > ClUseAfterScope("asan-use-after-scope", cl::desc("Check stack-use-after-scope"), cl::Hidden, cl::init(false))
An instruction for reading from memory.
Definition: Instructions.h:164
FunctionType * getType(LLVMContext &Context, ID id, ArrayRef< Type * > Tys=None)
Return the function type for an intrinsic.
Definition: Function.cpp:905
an instruction that atomically reads a memory location, combines it with another value, and then stores the result back.
Definition: Instructions.h:669
static cl::opt< bool > ClUsePrivateAliasForGlobals("asan-use-private-alias", cl::desc("Use private aliases for global"" variables"), cl::Hidden, cl::init(false))
void reserve(size_type N)
Definition: SmallVector.h:377
void setAlignment(unsigned Align)
Definition: Globals.cpp:86
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
static GlobalVariable * createPrivateGlobalForString(Module &M, StringRef Str, bool AllowMerging)
static cl::opt< unsigned long long > ClMappingOffset("asan-mapping-offset", cl::desc("offset of asan shadow mapping [EXPERIMENTAL]"), cl::Hidden, cl::init(0))
const std::string & getTargetTriple() const
Get the target triple which is a string describing the target host.
Definition: Module.h:218
bool isMinusOne() const
This function will return true iff every bit in this constant is set to true.
Definition: Constants.h:214
Tentative definitions.
Definition: GlobalValue.h:59
bool isAllocaPromotable(const AllocaInst *AI)
Return true if this alloca is legal for promotion.
bool isAndroid() const
Tests whether the target is Android.
Definition: Triple.h:593
bool isOSWindows() const
Tests whether the OS is Windows.
Definition: Triple.h:540
static Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
Definition: Constants.cpp:195
StringRef getName() const
Return a constant reference to the value's name.
Definition: Value.cpp:191
StoreInst * CreateAlignedStore(Value *Val, Value *Ptr, unsigned Align, bool isVolatile=false)
Definition: IRBuilder.h:1117
iterator begin()
Instruction iterator methods.
Definition: BasicBlock.h:228
static void doInstrumentAddress(AddressSanitizer *Pass, Instruction *I, Instruction *InsertBefore, Value *Addr, unsigned Alignment, unsigned Granularity, uint32_t TypeSize, bool IsWrite, Value *SizeArgument, bool UseCalls, uint32_t Exp)
static ReturnInst * Create(LLVMContext &C, Value *retVal=nullptr, Instruction *InsertBefore=nullptr)
IntegerType * getInt32Ty()
Fetch the type representing a 32-bit integer.
Definition: IRBuilder.h:347
bool isArrayAllocation() const
Return true if there is an allocation size parameter to the allocation instruction that is not 1...
static const uint64_t kDefaultShadowScale
bool isMacOSX() const
isMacOSX - Is this a Mac OS X triple.
Definition: Triple.h:427
static const char *const kAsanShadowMemoryDynamicAddress
AllocaInst * CreateAlloca(Type *Ty, Value *ArraySize=nullptr, const Twine &Name="")
Definition: IRBuilder.h:1076
static const int kAsanStackUseAfterReturnMagic
AnalysisUsage & addRequired()
#define INITIALIZE_PASS_DEPENDENCY(depName)
Definition: PassSupport.h:53
A tuple of MDNodes.
Definition: Metadata.h:1282
DILocation * get() const
Get the underlying DILocation.
Definition: DebugLoc.cpp:21
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
Definition: Twine.h:81
StringRef getName() const
Get a short "name" for the module.
Definition: Module.h:205
IntegerType * getInt64Ty()
Fetch the type representing a 64-bit integer.
Definition: IRBuilder.h:352
This is the base class for all instructions that perform data casts.
Definition: InstrTypes.h:578
const APInt & getValue() const
Return the constant as an APInt value reference.
Definition: Constants.h:143
Class to represent struct types.
Definition: DerivedTypes.h:199
static const uint64_t kLinuxKasan_ShadowOffset64
A Use represents the edge between a Value definition and its users.
Definition: Use.h:56
static GCRegistry::Add< StatepointGC > D("statepoint-example","an example strategy for statepoint")
unsigned getNumArgOperands() const
Return the number of call arguments.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
Definition: IRBuilder.h:588
static const char *const kAsanAllocasUnpoison
ObjectFormatType getObjectFormat() const
getFormat - Get the object format for this triple.
Definition: Triple.h:300
Value * CreateAdd(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
Definition: IRBuilder.h:813
void setName(const Twine &Name)
Change the name of the value.
Definition: Value.cpp:257
static const char *const kAsanModuleCtorName
const std::string & getModuleIdentifier() const
Get the module identifier which is, essentially, the name of the module.
Definition: Module.h:193
static const uint64_t kIOSSimShadowOffset64
Definition: regcomp.c:64
AddressSanitizer false
Type * getVoidTy()
Fetch the type representing void.
Definition: IRBuilder.h:380
uint64_t getZExtValue() const
Return the constant as a 64-bit unsigned integer value after it has been zero extended as appropriate...
Definition: Constants.h:154
StoreInst * CreateStore(Value *Val, Value *Ptr, bool isVolatile=false)
Definition: IRBuilder.h:1094
Value * CreateIntToPtr(Value *V, Type *DestTy, const Twine &Name="")
Definition: IRBuilder.h:1358
bool isOSLinux() const
Tests whether the OS is Linux.
Definition: Triple.h:550
static const char *const kAsanPoisonStackMemoryName
static const uint64_t kSmallX86_64ShadowOffset
#define F(x, y, z)
Definition: MD5.cpp:51
LLVM_NODISCARD LLVM_ATTRIBUTE_ALWAYS_INLINE bool startswith(StringRef Prefix) const
Check if this string starts with the given Prefix.
Definition: StringRef.h:264
void clear()
Definition: SmallSet.h:118
SmallVector< uint8_t, 64 > GetShadowBytesAfterScope(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
ConstantDataSequential - A vector or array constant whose element type is a simple 1/2/4/8-byte integ...
Definition: Constants.h:564
static const char *const kSanCovGenPrefix
unsigned getAlignment() const
Definition: GlobalObject.h:59
Class to represent array types.
Definition: DerivedTypes.h:345
static const int kMaxAsanStackMallocSizeClass
static const uint64_t kIOSSimShadowOffset32
static cl::opt< bool > ClOptSameTemp("asan-opt-same-temp", cl::desc("Instrument the same temp just once"), cl::Hidden, cl::init(true))
uint64_t getLimitedValue(uint64_t Limit=~0ULL) const
getLimitedValue - If the value is smaller than the specified limit, return it, otherwise return the l...
Definition: Constants.h:256
static cl::opt< bool > ClEnableKasan("asan-kernel", cl::desc("Enable KernelAddressSanitizer instrumentation"), cl::Hidden, cl::init(false))
static const uint64_t kAsanCtorAndDtorPriority
void setComdat(Comdat *C)
Definition: GlobalObject.h:94
StringRef getSection() const
Get the custom section of this global if it has one.
Definition: GlobalObject.h:81
static FunctionType * get(Type *Result, ArrayRef< Type * > Params, bool isVarArg)
This static method is the primary way of constructing a FunctionType.
Definition: Type.cpp:291
static cl::opt< bool > ClOpt("asan-opt", cl::desc("Optimize instrumentation"), cl::Hidden, cl::init(true))
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory)...
Definition: APInt.h:33
static const uint64_t kWindowsShadowOffset64
Function * getDeclaration(Module *M, ID id, ArrayRef< Type * > Tys=None)
Create or insert an LLVM Function declaration for an intrinsic, and return it.
Definition: Function.cpp:949
Value * CreateSub(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
Definition: IRBuilder.h:835
ArchType getArch() const
getArch - Get the parsed architecture type of this triple.
Definition: Triple.h:270
An instruction for storing to memory.
Definition: Instructions.h:300
void SetCurrentDebugLocation(DebugLoc L)
Set location information used by debugging information.
Definition: IRBuilder.h:151
std::pair< APInt, APInt > SizeOffsetType
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
Definition: Value.cpp:401
Debug location.
void takeName(Value *V)
Transfer the name from V to this value.
Definition: Value.cpp:263
static cl::opt< bool > ClAlwaysSlowPath("asan-always-slow-path", cl::desc("use instrumentation with slow path for all accesses"), cl::Hidden, cl::init(false))
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree...
Definition: Dominators.h:96
iterator_range< op_iterator > operands()
Definition: Metadata.h:1372
Type * getScalarType() const LLVM_READONLY
If this is a vector type, return the element type, otherwise return 'this'.
Definition: Type.cpp:44
size_t size() const
size - Get the array size.
Definition: ArrayRef.h:141
void SetInsertPoint(BasicBlock *TheBB)
This specifies that created instructions should be appended to the end of the specified block...
Definition: IRBuilder.h:127
static const uint64_t kDefaultShadowOffset32
Maximum length of the test input libFuzzer tries to guess a good value based on the corpus and reports it always prefer smaller inputs during the corpus shuffle When libFuzzer itself reports a bug this exit code will be used If indicates the maximal total time in seconds to run the fuzzer minimizes the provided crash input Use with etc Experimental Use value profile to guide fuzzing Number of simultaneous worker processes to run the jobs If min(jobs, NumberOfCpuCores()/2)\" is used.") FUZZER_FLAG_INT(reload
ConstantInt * getIntN(unsigned N, uint64_t C)
Get a constant N-bit value, zero extended or truncated from a 64-bit value.
Definition: IRBuilder.h:318
Class to represent pointers.
Definition: DerivedTypes.h:443
static void instrumentMaskedLoadOrStore(AddressSanitizer *Pass, const DataLayout &DL, Type *IntptrTy, Value *Mask, Instruction *I, Value *Addr, unsigned Alignment, unsigned Granularity, uint32_t TypeSize, bool IsWrite, Value *SizeArgument, bool UseCalls, uint32_t Exp)
static GCRegistry::Add< CoreCLRGC > E("coreclr","CoreCLR-compatible GC")
ModulePass * createAddressSanitizerModulePass(bool CompileKernel=false, bool Recover=false)
Value * CreateOr(Value *LHS, Value *RHS, const Twine &Name="")
Definition: IRBuilder.h:1003
Function * getFunction(StringRef Name) const
Look up the specified function in the module symbol table.
Definition: Module.cpp:196
static cl::opt< bool > ClInstrumentWrites("asan-instrument-writes", cl::desc("instrument write instructions"), cl::Hidden, cl::init(true))
bool hasSection() const
Check if this global has a custom object file section.
Definition: GlobalObject.h:73
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
Definition: Type.h:254
an instruction for type-safe pointer arithmetic to access elements of arrays and structs ...
Definition: Instructions.h:830
bool isiOS() const
Is this an iOS triple.
Definition: Triple.h:436
LoadInst * CreateLoad(Value *Ptr, const char *Name)
Definition: IRBuilder.h:1082
static const size_t kNumberOfAccessSizes
static const char *const kAsanGlobalsRegisteredFlagName
initializer< Ty > init(const Ty &Val)
Definition: CommandLine.h:395
No other Module may specify this COMDAT.
Definition: Comdat.h:35
static const char *const kAsanPtrCmp
static std::string ParseSectionSpecifier(StringRef Spec, StringRef &Segment, StringRef &Section, unsigned &TAA, bool &TAAParsed, unsigned &StubSize)
Parse the section specifier indicated by "Spec".
Subclasses of this class are all able to terminate a basic block.
Definition: InstrTypes.h:52
* if(!EatIfPresent(lltok::kw_thread_local)) return false
ParseOptionalThreadLocal := /*empty.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
Definition: Instruction.h:256
std::size_t countTrailingZeros(T Val, ZeroBehavior ZB=ZB_Width)
Count number of 0's from the least significant bit to the most stopping at the first 1...
Definition: MathExtras.h:111
LLVM_NODISCARD LLVM_ATTRIBUTE_ALWAYS_INLINE size_t find(char C, size_t From=0) const
Search for the first character C in the string.
Definition: StringRef.h:295
constexpr bool isPowerOf2_32(uint32_t Value)
isPowerOf2_32 - This function returns true if the argument is a power of two > 0. ...
Definition: MathExtras.h:399
Constant * getOrInsertFunction(StringRef Name, FunctionType *T, AttributeSet AttributeList)
Look up the specified function in the module symbol table.
Definition: Module.cpp:123
LLVM Basic Block Representation.
Definition: BasicBlock.h:51
The instances of the Type class are immutable: once they are created, they are never changed...
Definition: Type.h:45
BasicBlock * getSuccessor(unsigned idx) const
Return the specified successor.
Definition: InstrTypes.h:79
This is an important class for using LLVM in a threaded context.
Definition: LLVMContext.h:48
uint64_t getTypeStoreSizeInBits(Type *Ty) const
Returns the maximum number of bits that may be overwritten by storing the specified type; always a mu...
Definition: DataLayout.h:399
Constant * getOrInsertGlobal(StringRef Name, Type *Ty)
Look up the specified global in the module symbol table.
Definition: Module.cpp:225
Conditional or Unconditional Branch instruction.
static const size_t kMinStackMallocSize
This function has undefined behavior.
const Comdat * getComdat() const
Definition: GlobalObject.h:92
This is an important base class in LLVM.
Definition: Constant.h:42
PointerType * getType() const
Overload to return most specific pointer type.
Definition: Instructions.h:97
Resume the propagation of an exception.
static const char *const kAsanInitName
static const char *const kAsanStackFreeNameTemplate
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less...
Definition: SmallSet.h:36
Value * CreateSelect(Value *C, Value *True, Value *False, const Twine &Name="", Instruction *MDFrom=nullptr)
Definition: IRBuilder.h:1609
static const uint64_t kMIPS32_ShadowOffset32
static const char *const kAsanOptionDetectUseAfterReturn
static Constant * get(ArrayType *T, ArrayRef< Constant * > V)
Definition: Constants.cpp:888
unsigned getAlignment() const
Return the alignment of the memory that is being allocated by the instruction.
Definition: Instructions.h:109
static const uint64_t kFreeBSD_ShadowOffset32
static const uint64_t kAArch64_ShadowOffset64
const DebugLoc & getDebugLoc() const
Return the debug location for this node as a DebugLoc.
Definition: Instruction.h:259
static cl::opt< bool > ClInstrumentReads("asan-instrument-reads", cl::desc("instrument read instructions"), cl::Hidden, cl::init(true))
Represent the analysis usage information of a pass.
static Type * getVoidTy(LLVMContext &C)
Definition: Type.cpp:154
bool isWatchOS() const
Is this an Apple watchOS triple.
Definition: Triple.h:446
uint32_t Offset
bool isUsedWithInAlloca() const
Return true if this alloca is used as an inalloca argument to a call.
Definition: Instructions.h:121
This instruction compares its operands according to the predicate given to the constructor.
INITIALIZE_PASS_END(RegBankSelect, DEBUG_TYPE,"Assign register bank of generic virtual registers", false, false) RegBankSelect
static const unsigned End
Value * CreateICmpSGE(Value *LHS, Value *RHS, const Twine &Name="")
Definition: IRBuilder.h:1487
FunctionPass class - This class is used to implement most global optimizations.
Definition: Pass.h:298
Value * getOperand(unsigned i) const
Definition: User.h:145
op_range operands()
Definition: User.h:213
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
Definition: BasicBlock.h:93
Value * CreateICmpEQ(Value *LHS, Value *RHS, const Twine &Name="")
Definition: IRBuilder.h:1466
ASanStackFrameLayout ComputeASanStackFrameLayout(SmallVectorImpl< ASanStackVariableDescription > &Vars, size_t Granularity, size_t MinHeaderSize)
static cl::opt< bool > ClOptGlobals("asan-opt-globals", cl::desc("Don't instrument scalar globals"), cl::Hidden, cl::init(true))
std::pair< NoneType, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
Definition: SmallSet.h:80
static const uintptr_t kRetiredStackFrameMagic
static Constant * get(StructType *T, ArrayRef< Constant * > V)
Definition: Constants.cpp:949
Value * CreateExtractElement(Value *Vec, Value *Idx, const Twine &Name="")
Definition: IRBuilder.h:1629
bool isOSBinFormatCOFF() const
Tests whether the OS uses the COFF binary format.
Definition: Triple.h:570
Constant * getAggregateElement(unsigned Elt) const
For aggregates (struct/array/vector) return the constant that corresponds to the specified element if...
Definition: Constants.cpp:265
static const char *const kAsanGenPrefix
bool isThreadLocal() const
If the value is "Thread Local", its value isn't shared by the threads.
Definition: GlobalValue.h:232
static const char *const kAsanPtrSub
static cl::opt< int > ClMaxInsnsToInstrumentPerBB("asan-max-ins-per-bb", cl::init(10000), cl::desc("maximal number of instructions to instrument in any given BB"), cl::Hidden)
S_CSTRING_LITERALS - Section with literal C strings.
bool isPointerTy() const
True if this is an instance of PointerType.
Definition: Type.h:213
Comdat * getOrInsertComdat(StringRef Name)
Return the Comdat in the module with the specified name.
Definition: Module.cpp:482
static std::string itostr(int64_t X)
Definition: StringExtras.h:95
static cl::opt< bool > ClGlobals("asan-globals", cl::desc("Handle global objects"), cl::Hidden, cl::init(true))
PointerType * getInt8PtrTy(unsigned AddrSpace=0)
Fetch the type representing a pointer to an 8-bit integer value.
Definition: IRBuilder.h:385
MDNode * createBranchWeights(uint32_t TrueWeight, uint32_t FalseWeight)
Return metadata containing two branch weights.
Definition: MDBuilder.cpp:37
static const uint64_t kWindowsShadowOffset32
Value * GetUnderlyingObject(Value *V, const DataLayout &DL, unsigned MaxLookup=6)
This method strips off any GEP address adjustments and pointer casts from the specified value...
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
void initializeAddressSanitizerPass(PassRegistry &)
Value * CreateMul(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
Definition: IRBuilder.h:857
Triple - Helper class for working with autoconf configuration names.
Definition: Triple.h:44
static const char *const kAsanRegisterGlobalsName
size_type count(const T &V) const
count - Return 1 if the element is in the set, 0 otherwise.
Definition: SmallSet.h:64
PHINode * CreatePHI(Type *Ty, unsigned NumReservedValues, const Twine &Name="")
Definition: IRBuilder.h:1574
static Constant * getPointerCast(Constant *C, Type *Ty)
Create a BitCast, AddrSpaceCast, or a PtrToInt cast constant expression.
Definition: Constants.cpp:1509
std::pair< Function *, Function * > createSanitizerCtorAndInitFunctions(Module &M, StringRef CtorName, StringRef InitName, ArrayRef< Type * > InitArgTypes, ArrayRef< Value * > InitArgs, StringRef VersionCheckName=StringRef())
Creates sanitizer constructor function, and calls sanitizer's init function from it.
StringRef getString() const
Definition: Metadata.cpp:424
static const uint64_t kDynamicShadowSentinel
static IntegerType * get(LLVMContext &C, unsigned NumBits)
This static method is the primary way of constructing an IntegerType.
Definition: Type.cpp:234
const MDOperand & getOperand(unsigned I) const
Definition: Metadata.h:1034
static cl::opt< int > ClDebugMax("asan-debug-max", cl::desc("Debug max inst"), cl::Hidden, cl::init(-1))
This is the common base class for memset/memcpy/memmove.
const BasicBlockListType & getBasicBlockList() const
Definition: Function.h:512
bool isSwiftError() const
Return true if this alloca is used as a swifterror argument to a call.
Definition: Instructions.h:132
This is the shared class of boolean and integer constants.
Definition: Constants.h:88
void setSelectionKind(SelectionKind Val)
Definition: Comdat.h:43
static const char *const kAsanStackMallocNameTemplate
InstrTy * getInstruction() const
Definition: CallSite.h:93
static const char *const kAsanUnpoisonStackMemoryName
uint64_t getTypeAllocSize(Type *Ty) const
Returns the offset in bytes between successive objects of the specified type, including alignment pad...
Definition: DataLayout.h:408
INITIALIZE_PASS(AddressSanitizerModule,"asan-module","AddressSanitizer: detects use-after-free and out-of-bounds bugs.""ModulePass", false, false) ModulePass *llvm
static StructType * get(LLVMContext &Context, ArrayRef< Type * > Elements, bool isPacked=false)
This static method is the primary way to create a literal StructType.
Definition: Type.cpp:330
Evaluate the size and offset of an object pointed to by a Value* statically.
const Module * getModule() const
Return the module owning the function this instruction belongs to or nullptr it the function does not...
Definition: Instruction.cpp:58
Value * CreateIntCast(Value *V, Type *DestTy, bool isSigned, const Twine &Name="")
Definition: IRBuilder.h:1425
static CallInst * Create(Value *Func, ArrayRef< Value * > Args, ArrayRef< OperandBundleDef > Bundles=None, const Twine &NameStr="", Instruction *InsertBefore=nullptr)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small...
Definition: SmallVector.h:843
static size_t TypeSizeToSizeIndex(uint32_t TypeSize)
bool isStaticAlloca() const
Return true if this alloca is in the entry block of the function and is a constant size...
Module.h This file contains the declarations for the Module class.
Type * getType() const
All values are typed, get the type of this value.
Definition: Value.h:230
Provides information about what library functions are available for the current target.
MDNode * getMetadata(unsigned KindID) const
Get the metadata of given kind attached to this Instruction.
Definition: Instruction.h:175
const DataFlowGraph & G
Definition: RDFGraph.cpp:206
INITIALIZE_PASS_BEGIN(AddressSanitizer,"asan","AddressSanitizer: detects use-after-free and out-of-bounds bugs.", false, false) INITIALIZE_PASS_END(AddressSanitizer
TerminatorInst * SplitBlockAndInsertIfThen(Value *Cond, Instruction *SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DominatorTree *DT=nullptr, LoopInfo *LI=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
static const uint64_t kDefaultShadowOffset64
bool isSwiftError() const
Return true if this value is a swifterror value.
Definition: Value.cpp:675
ConstantInt * getInt32(uint32_t C)
Get a constant 32-bit value.
Definition: IRBuilder.h:307
static IntegerType * getIntNTy(LLVMContext &C, unsigned N)
Definition: Type.cpp:173
static cl::opt< int > ClDebugStack("asan-debug-stack", cl::desc("debug stack"), cl::Hidden, cl::init(0))
static Constant * get(Type *Ty, uint64_t V, bool isSigned=false)
If Ty is a vector type, return a Constant with a splat of the given value.
Definition: Constants.cpp:558
static BranchInst * Create(BasicBlock *IfTrue, Instruction *InsertBefore=nullptr)
void appendToGlobalCtors(Module &M, Function *F, int Priority, Constant *Data=nullptr)
Append F to the list of global ctors of module M with the given Priority.
Definition: ModuleUtils.cpp:84
const BasicBlock & getEntryBlock() const
Definition: Function.h:519
static cl::opt< bool > ClInvalidPointerPairs("asan-detect-invalid-pointer-pair", cl::desc("Instrument <, <=, >, >=, - with pointer operands"), cl::Hidden, cl::init(false))
static bool isPointerOperand(Value *V)
static GCRegistry::Add< ShadowStackGC > C("shadow-stack","Very portable GC for uncooperative code generators")
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
Definition: Debug.cpp:132
SizeOffsetType compute(Value *V)
Value * getArgOperand(unsigned i) const
getArgOperand/setArgOperand - Return/set the i-th call argument.
Value * CreateGEP(Value *Ptr, ArrayRef< Value * > IdxList, const Twine &Name="")
Definition: IRBuilder.h:1141
ConstantArray - Constant Array Declarations.
Definition: Constants.h:411
bool hasInitializer() const
Definitions have initializers, declarations don't.
static const uint64_t kSystemZ_ShadowOffset64
static GlobalVariable * createPrivateGlobalForSourceLoc(Module &M, LocationMetadata MD)
Create a global describing a source location.
LinkageTypes
An enumeration for the kinds of linkage for global values.
Definition: GlobalValue.h:48
bool isConstant() const
If the value is a global constant, its value is immutable throughout the runtime execution of the pro...
IntegerType * getInt8Ty()
Fetch the type representing an 8-bit integer.
Definition: IRBuilder.h:337
bool isInlineAsm() const
Check if this call is an inline asm statement.
Value * CreatePointerCast(Value *V, Type *DestTy, const Twine &Name="")
Definition: IRBuilder.h:1402
bool isOSVersionLT(unsigned Major, unsigned Minor=0, unsigned Micro=0) const
isOSVersionLT - Helper function for doing comparisons against version numbers included in the target ...
Definition: Triple.h:388
static cl::opt< bool > ClSkipPromotableAllocas("asan-skip-promotable-allocas", cl::desc("Do not instrument promotable allocas"), cl::Hidden, cl::init(true))
ThreadLocalMode getThreadLocalMode() const
Definition: GlobalValue.h:240
static StringRef getRealLinkageName(StringRef Name)
If special LLVM prefix that is used to inform the asm printer to not emit usual symbol prefix before ...
Definition: GlobalValue.h:444
const DataLayout & getDataLayout() const
Get the data layout for the module's target platform.
Definition: Module.cpp:384
static cl::opt< int > ClDebugMin("asan-debug-min", cl::desc("Debug min inst"), cl::Hidden, cl::init(-1))
NamedMDNode * getNamedMetadata(const Twine &Name) const
Return the first NamedMDNode in the module with the specified name.
Definition: Module.cpp:265
static cl::opt< std::string > ClMemoryAccessCallbackPrefix("asan-memory-access-callback-prefix", cl::desc("Prefix for memory access callbacks"), cl::Hidden, cl::init("__asan_"))
bool isOSBinFormatELF() const
Tests whether the OS uses the ELF binary format.
Definition: Triple.h:565
static const uint64_t kMIPS64_ShadowOffset64
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
Definition: Function.h:226
void setUnnamedAddr(UnnamedAddr Val)
Definition: GlobalValue.h:203
static const size_t npos
Definition: StringRef.h:51
static IntegerType * getInt32Ty(LLVMContext &C)
Definition: Type.cpp:169
static int StackMallocSizeClass(uint64_t LocalStackSize)
static cl::opt< uint32_t > ClForceExperiment("asan-force-experiment", cl::desc("Force optimization experiment (for testing)"), cl::Hidden, cl::init(0))
static cl::opt< bool > ClDynamicAllocaStack("asan-stack-dynamic-alloca", cl::desc("Use dynamic alloca to represent stack variables"), cl::Hidden, cl::init(true))
#define I(x, y, z)
Definition: MD5.cpp:54
#define N
TerminatorInst * getTerminator()
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
Definition: BasicBlock.cpp:124
LLVM_ATTRIBUTE_ALWAYS_INLINE size_type size() const
Definition: SmallVector.h:135
ModulePass class - This class is used to implement unstructured interprocedural optimizations and ana...
Definition: Pass.h:235
unsigned getPointerSizeInBits(unsigned AS=0) const
Layout pointer size, in bits FIXME: The defaults need to be removed once all of the backends/clients ...
Definition: DataLayout.h:349
CallInst * CreateCall(Value *Callee, ArrayRef< Value * > Args=None, const Twine &Name="", MDNode *FPMathTag=nullptr)
Definition: IRBuilder.h:1579
static ArrayType * get(Type *ElementType, uint64_t NumElements)
This static method is the primary way to construct an ArrayType.
Definition: Type.cpp:606
LLVM_NODISCARD std::enable_if<!is_simple_type< Y >::value, typename cast_retty< X, const Y >::ret_type >::type dyn_cast(const Y &Val)
Definition: Casting.h:287
Rename collisions when linking (static functions).
Definition: GlobalValue.h:56
void maybeMarkSanitizerLibraryCallNoBuiltin(CallInst *CI, const TargetLibraryInfo *TLI)
Given a CallInst, check if it calls a string function known to CodeGen, and mark it with NoBuiltin if...
Definition: Local.cpp:2068
static volatile int Zero
static InlineAsm * get(FunctionType *Ty, StringRef AsmString, StringRef Constraints, bool hasSideEffects, bool isAlignStack=false, AsmDialect asmDialect=AD_ATT)
InlineAsm::get - Return the specified uniqued inline asm string.
Definition: InlineAsm.cpp:27
pgo instr use
bool isOSFreeBSD() const
Definition: Triple.h:467
static const char *const kAsanModuleDtorName
Value * CreateAnd(Value *LHS, Value *RHS, const Twine &Name="")
Definition: IRBuilder.h:987
Value * CreatePtrToInt(Value *V, Type *DestTy, const Twine &Name="")
Definition: IRBuilder.h:1354
bool hasLocalLinkage() const
Definition: GlobalValue.h:415
static const char *const kAsanUnregisterGlobalsName
iterator_range< df_iterator< T > > depth_first(const T &G)
static const char *const kAsanUnpoisonGlobalsName
void copyAttributesFrom(const GlobalValue *Src) override
copyAttributesFrom - copy all additional attributes (those not needed to create a GlobalVariable) fro...
Definition: Globals.cpp:346
iterator_range< op_iterator > arg_operands()
Iteration adapter for range-for loops.
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
static bool bothKnown(const SizeOffsetType &SizeOffset)
const BasicBlock & front() const
Definition: Function.h:542
static const char *const kAsanPoisonGlobalsName
Module * getParent()
Get the module that this global value is contained inside of...
Definition: GlobalValue.h:537
LLVM Value Representation.
Definition: Value.h:71
void setAlignment(unsigned Align)
static const char *const kAsanVersionCheckName
static const uint64_t kPPC64_ShadowOffset64
static const unsigned kAllocaRzSize
static cl::opt< bool > ClInstrumentAtomics("asan-instrument-atomics", cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden, cl::init(true))
const Value * getArraySize() const
Get the number of elements allocated.
Definition: Instructions.h:93
static const char *const kAsanUnregisterImageGlobalsName
std::underlying_type< E >::type Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
Definition: BitmaskEnum.h:81
static const Function * getParent(const Value *V)
void moveBefore(Instruction *MovePos)
Unlink this instruction from its current basic block and insert it into the basic block that MovePos ...
Definition: Instruction.cpp:95
static const char *const kAsanRegisterImageGlobalsName
DLLStorageClassTypes getDLLStorageClass() const
Definition: GlobalValue.h:244
Value * CreateLShr(Value *LHS, Value *RHS, const Twine &Name="", bool isExact=false)
Definition: IRBuilder.h:951
LLVM_NODISCARD LLVM_ATTRIBUTE_ALWAYS_INLINE const char * data() const
data - Get a pointer to the start of the string (which may not be null terminated).
Definition: StringRef.h:125
#define DEBUG(X)
Definition: Debug.h:100
void addDebugInfo(DIGlobalVariableExpression *GV)
Attach a DIGlobalVariableExpression.
Definition: Metadata.cpp:1462
FunctionPass * createAddressSanitizerFunctionPass(bool CompileKernel=false, bool Recover=false, bool UseAfterScope=false)
static const char *const kAsanAllocaPoison
static const char *const kAsanHandleNoReturnName
PointerType * getPointerTo(unsigned AddrSpace=0) const
Return a pointer to the current type.
Definition: Type.cpp:678
iterator_range< global_iterator > globals()
Definition: Module.h:524
IRTranslator LLVM IR MI
static const size_t kMaxStackMallocSize
StringRef - Represent a constant reference to a string, i.e.
Definition: StringRef.h:47
static cl::opt< uint32_t > ClMaxInlinePoisoningSize("asan-max-inline-poisoning-size", cl::desc("Inline shadow poisoning for blocks up to the given size in bytes."), cl::Hidden, cl::init(64))
A single uniqued string.
Definition: Metadata.h:586
static Function * Create(FunctionType *Ty, LinkageTypes Linkage, const Twine &N="", Module *M=nullptr)
Definition: Function.h:117
Legacy analysis pass which computes a DominatorTree.
Definition: Dominators.h:217
static cl::opt< bool > ClOptStack("asan-opt-stack", cl::desc("Don't instrument scalar stack variables"), cl::Hidden, cl::init(false))
iterator getFirstInsertionPt()
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
Definition: BasicBlock.cpp:209
int * Ptr
static cl::opt< unsigned > ClRealignStack("asan-realign-stack", cl::desc("Realign stack to the value of this flag (power of two)"), cl::Hidden, cl::init(32))
Type * getAllocatedType() const
Return the type that is being allocated by the instruction.
Definition: Instructions.h:102
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
Definition: DerivedTypes.h:479
Root of the metadata hierarchy.
Definition: Metadata.h:55
static cl::opt< bool > ClInstrumentDynamicAllocas("asan-instrument-dynamic-allocas", cl::desc("instrument dynamic allocas"), cl::Hidden, cl::init(true))
void setSection(StringRef S)
Change the section for this global.
Definition: Globals.cpp:173
#define OP(n)
Definition: regex2.h:70
const BasicBlock * getParent() const
Definition: Instruction.h:62
static GlobalAlias * create(Type *Ty, unsigned AddressSpace, LinkageTypes Linkage, const Twine &Name, Constant *Aliasee, Module *Parent)
If a parent module is specified, the alias is automatically inserted into the end of the specified mo...
Definition: Globals.cpp:384
bool isOne() const
This is just a convenience method to make client code smaller for a common case.
Definition: Constants.h:206
GlobalVariable * getGlobalVariable(StringRef Name) const
Look up the specified global variable in the module symbol table.
Definition: Module.h:344
static cl::opt< int > ClInstrumentationWithCallsThreshold("asan-instrumentation-with-call-threshold", cl::desc("If the function being instrumented contains more than ""this number of memory accesses, use callbacks instead of ""inline checks (-1 means never use callbacks)."), cl::Hidden, cl::init(7000))
A wrapper class for inspecting calls to intrinsic functions.
Definition: IntrinsicInst.h:44
LLVMContext & getContext() const
Get the global data context.
Definition: Module.h:222
an instruction to allocate memory on the stack
Definition: Instructions.h:60
SmallVector< uint8_t, 64 > GetShadowBytes(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
static const char *const kAsanReportErrorTemplate
void resize(size_type N)
Definition: SmallVector.h:352