95#define DEBUG_TYPE "asan"
101 std::numeric_limits<uint64_t>::max();
142 "__asan_unregister_image_globals";
155 "__asan_stack_malloc_always_";
169 "__asan_option_detect_stack_use_after_return";
172 "__asan_shadow_memory_dynamic_address";
198 "asan-kernel",
cl::desc(
"Enable KernelAddressSanitizer instrumentation"),
203 cl::desc(
"Enable recovery mode (continue-after-error)."),
207 "asan-guard-against-version-mismatch",
213 cl::desc(
"instrument read instructions"),
217 "asan-instrument-writes",
cl::desc(
"instrument write instructions"),
226 "asan-instrument-atomics",
236 "asan-always-slow-path",
241 "asan-force-dynamic-shadow",
242 cl::desc(
"Load shadow address into a local variable for each function"),
247 cl::desc(
"Access dynamic shadow through an ifunc global on "
248 "platforms that support this"),
252 "asan-with-ifunc-suppress-remat",
253 cl::desc(
"Suppress rematerialization of dynamic shadow address by passing "
254 "it through inline asm in prologue."),
262 "asan-max-ins-per-bb",
cl::init(10000),
263 cl::desc(
"maximal number of instructions to instrument in any given BB"),
270 "asan-max-inline-poisoning-size",
272 "Inline shadow poisoning for blocks up to the given size in bytes."),
276 "asan-use-after-return",
277 cl::desc(
"Sets the mode of detection for stack-use-after-return."),
280 "Never detect stack use after return."),
283 "Detect stack use after return if "
284 "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."),
286 "Always detect stack use after return.")),
290 cl::desc(
"Create redzones for byval "
291 "arguments (extra copy "
296 cl::desc(
"Check stack-use-after-scope"),
305 cl::desc(
"Handle C++ initializer order"),
309 "asan-detect-invalid-pointer-pair",
314 "asan-detect-invalid-pointer-cmp",
319 "asan-detect-invalid-pointer-sub",
324 "asan-realign-stack",
325 cl::desc(
"Realign stack to the value of this flag (power of two)"),
329 "asan-instrumentation-with-call-threshold",
330 cl::desc(
"If the function being instrumented contains more than "
331 "this number of memory accesses, use callbacks instead of "
332 "inline checks (-1 means never use callbacks)."),
336 "asan-memory-access-callback-prefix",
341 "asan-kernel-mem-intrinsic-prefix",
347 cl::desc(
"instrument dynamic allocas"),
351 "asan-skip-promotable-allocas",
356 "asan-constructor-kind",
357 cl::desc(
"Sets the ASan constructor kind"),
360 "Use global constructors")),
367 cl::desc(
"scale of asan shadow mapping"),
372 cl::desc(
"offset of asan shadow mapping [EXPERIMENTAL]"),
386 "asan-opt-same-temp",
cl::desc(
"Instrument the same temp just once"),
390 cl::desc(
"Don't instrument scalar globals"),
394 "asan-opt-stack",
cl::desc(
"Don't instrument scalar stack variables"),
398 "asan-stack-dynamic-alloca",
403 "asan-force-experiment",
409 cl::desc(
"Use private aliases for global variables"),
414 cl::desc(
"Use odr indicators to improve ODR reporting"),
419 cl::desc(
"Use linker features to support dead "
420 "code stripping of globals"),
427 cl::desc(
"Place ASan constructors in comdat sections"),
431 "asan-destructor-kind",
432 cl::desc(
"Sets the ASan destructor kind. The default is to use the value "
433 "provided to the pass constructor"),
436 "Use global destructors")),
456STATISTIC(NumInstrumentedReads,
"Number of instrumented reads");
457STATISTIC(NumInstrumentedWrites,
"Number of instrumented writes");
459 "Number of optimized accesses to global vars");
461 "Number of optimized accesses to stack vars");
470struct ShadowMapping {
481 bool IsAndroid = TargetTriple.
isAndroid();
484 bool IsMacOS = TargetTriple.
isMacOSX();
487 bool IsPS = TargetTriple.
isPS();
493 bool IsMIPSN32ABI = TargetTriple.
isABIN32();
494 bool IsMIPS32 = TargetTriple.
isMIPS32();
495 bool IsMIPS64 = TargetTriple.
isMIPS64();
496 bool IsArmOrThumb = TargetTriple.
isARM() || TargetTriple.
isThumb();
503 bool IsAMDGPU = TargetTriple.
isAMDGPU();
505 bool IsWasm = TargetTriple.
isWasm();
507 ShadowMapping Mapping;
514 if (LongSize == 32) {
517 else if (IsMIPSN32ABI)
542 else if (IsFreeBSD && IsAArch64)
544 else if (IsFreeBSD && !IsMIPS64) {
549 }
else if (IsNetBSD) {
556 else if (IsLinux && IsX86_64) {
562 }
else if (IsWindows && IsX86_64) {
568 else if (IsMacOS && IsAArch64)
572 else if (IsLoongArch64)
579 else if (IsHaiku && IsX86_64)
599 Mapping.OrShadowOffset = !IsAArch64 && !IsPPC64 && !IsSystemZ && !IsPS &&
600 !IsRISCV64 && !IsLoongArch64 &&
601 !(Mapping.Offset & (Mapping.Offset - 1)) &&
603 Mapping.InGlobal =
ClWithIfunc && IsAndroid && IsArmOrThumb;
// Fragment (non-contiguous excerpt): tail of getAddressSanitizerParams.
// Copies the computed ShadowMapping into the caller-provided out-parameters.
// NOTE(review): the opening of the signature and the line computing `Mapping`
// are elided from this excerpt (embedded original numbers jump 610 -> 612);
// do not edit without the full file.
610 int *MappingScale,
bool *OrShadowOffset) {
612 *ShadowBase = Mapping.Offset;
613 *MappingScale = Mapping.Scale;
614 *OrShadowOffset = Mapping.OrShadowOffset;
// Fragment (non-contiguous excerpt) of a helper that strips function/argument
// memory attributes that would be invalidated by ASan instrumentation
// (instrumentation adds loads/stores of shadow memory, so "doesn't write
// memory" style attributes become untrue).
// NOTE(review): many lines are elided (636 -> 643 -> 652); the enclosing
// function's signature is not visible in this excerpt.
633 if (!
F.doesNotAccessMemory()) {
634 bool WritesMemory = !
F.onlyReadsMemory();
635 bool ReadsMemory = !
F.onlyWritesMemory();
// Drop the Memory attribute when it claims write-only or argmem-only access —
// presumably because instrumentation will read/write non-argument (shadow)
// memory. TODO confirm against the full file.
636 if ((WritesMemory && !ReadsMemory) ||
F.onlyAccessesArgMemory()) {
637 F.removeFnAttr(Attribute::Memory);
643 if (
A.hasAttribute(Attribute::WriteOnly)) {
644 A.removeAttr(Attribute::WriteOnly);
// Prevent later passes from re-introducing builtin-based assumptions on an
// instrumented function.
652 F.addFnAttr(Attribute::NoBuiltin);
673 return std::max(32U, 1U << MappingScale);
// Fragment (non-contiguous excerpt) of class RuntimeCallInserter: creates ASan
// runtime calls inside one function and, when that function uses a scoped
// (funclet-based) EH personality, records every inserted call so the
// destructor can re-emit each one with the correct "funclet" operand bundle —
// calls inside EH funclets are invalid without it.
691class RuntimeCallInserter {
 693 bool TrackInsertedCalls =
false;
 697 RuntimeCallInserter(Function &Fn) : OwnerFn(&Fn) {
// Track calls only for scoped EH personalities (e.g. Windows MSVC EH).
 699 auto Personality = classifyEHPersonality(Fn.getPersonalityFn());
 700 if (isScopedEHPersonality(Personality))
 701 TrackInsertedCalls = true;
// Destructor: color the function's blocks by EH funclet and patch up the
// recorded calls. NOTE(review): several lines are elided between the embedded
// original numbers (706 -> 708, 711 -> 713, 715 -> 723, 725 -> 732 -> 736), so
// the early-return, block lookup, and bundle construction are not visible here.
 705 ~RuntimeCallInserter() {
 706 if (InsertedCalls.
empty())
 708 assert(TrackInsertedCalls &&
"Calls were wrongly tracked");
 710 DenseMap<BasicBlock *, ColorVector> BlockColors =
colorEHFunclets(*OwnerFn);
 711 for (CallInst *CI : InsertedCalls) {
 713 assert(BB &&
"Instruction doesn't belong to a BasicBlock");
"Instruction doesn't belong to the expected Function!");
// A block belonging to more than one funclet cannot be patched soundly.
 723 if (Colors.
size() != 1) {
"Instruction's BasicBlock is not monochromatic");
// If the block's funclet color is an EH pad, rebuild the call with a funclet
// operand bundle and swap it in place of the original.
 732 if (EHPadIt != Color->end() && EHPadIt->isEHPad()) {
 736 OB, CI->getIterator());
 737 NewCall->copyMetadata(*CI);
 738 CI->replaceAllUsesWith(NewCall);
 739 CI->eraseFromParent();
// Create a runtime call through IRB and remember it when tracking is on.
 744 CallInst *createRuntimeCall(
IRBuilder<> &IRB, FunctionCallee Callee,
 746 const Twine &
Name =
"") {
 749 CallInst *Inst = IRB.
CreateCall(Callee, Args, Name,
nullptr);
 750 if (TrackInsertedCalls)
 751 InsertedCalls.push_back(Inst);
757struct AddressSanitizer {
758 AddressSanitizer(
Module &M,
const StackSafetyGlobalInfo *SSGI,
759 int InstrumentationWithCallsThreshold,
760 uint32_t MaxInlinePoisoningSize,
bool CompileKernel =
false,
761 bool Recover =
false,
bool UseAfterScope =
false,
763 AsanDetectStackUseAfterReturnMode::Runtime)
772 InstrumentationWithCallsThreshold(
775 : InstrumentationWithCallsThreshold),
778 : MaxInlinePoisoningSize) {
779 C = &(
M.getContext());
780 DL = &
M.getDataLayout();
781 LongSize =
M.getDataLayout().getPointerSizeInBits();
782 IntptrTy = Type::getIntNTy(*
C, LongSize);
783 PtrTy = PointerType::getUnqual(*
C);
785 TargetTriple =
M.getTargetTriple();
789 assert(this->UseAfterReturn != AsanDetectStackUseAfterReturnMode::Invalid);
797 bool isInterestingAlloca(
const AllocaInst &AI);
799 bool ignoreAccess(Instruction *Inst,
Value *
Ptr);
801 Instruction *
I, SmallVectorImpl<InterestingMemoryOperand> &Interesting,
802 const TargetTransformInfo *
TTI);
804 void instrumentMop(ObjectSizeOffsetVisitor &ObjSizeVis,
805 InterestingMemoryOperand &O,
bool UseCalls,
806 const DataLayout &
DL, RuntimeCallInserter &RTCI);
807 void instrumentPointerComparisonOrSubtraction(Instruction *
I,
808 RuntimeCallInserter &RTCI);
810 Value *Addr, MaybeAlign Alignment,
811 uint32_t TypeStoreSize,
bool IsWrite,
812 Value *SizeArgument,
bool UseCalls, uint32_t Exp,
813 RuntimeCallInserter &RTCI);
814 Instruction *instrumentAMDGPUAddress(Instruction *OrigIns,
815 Instruction *InsertBefore,
Value *Addr,
816 uint32_t TypeStoreSize,
bool IsWrite,
817 Value *SizeArgument);
820 void instrumentUnusualSizeOrAlignment(Instruction *
I,
821 Instruction *InsertBefore,
Value *Addr,
822 TypeSize TypeStoreSize,
bool IsWrite,
823 Value *SizeArgument,
bool UseCalls,
825 RuntimeCallInserter &RTCI);
826 void instrumentMaskedLoadOrStore(AddressSanitizer *
Pass,
const DataLayout &
DL,
829 MaybeAlign Alignment,
unsigned Granularity,
830 Type *OpType,
bool IsWrite,
831 Value *SizeArgument,
bool UseCalls,
832 uint32_t Exp, RuntimeCallInserter &RTCI);
834 Value *ShadowValue, uint32_t TypeStoreSize);
836 bool IsWrite,
size_t AccessSizeIndex,
837 Value *SizeArgument, uint32_t Exp,
838 RuntimeCallInserter &RTCI);
839 void instrumentMemIntrinsic(MemIntrinsic *
MI, RuntimeCallInserter &RTCI);
841 bool suppressInstrumentationSiteForDebug(
int &Instrumented);
842 bool instrumentFunction(Function &
F,
const TargetLibraryInfo *TLI,
843 const TargetTransformInfo *
TTI);
844 bool maybeInsertAsanInitAtFunctionEntry(Function &
F);
845 bool maybeInsertDynamicShadowAtFunctionEntry(Function &
F);
846 void markEscapedLocalAllocas(Function &
F);
847 void markCatchParametersAsUninteresting(Function &
F);
850 friend struct FunctionStackPoisoner;
852 void initializeCallbacks(
const TargetLibraryInfo *TLI);
854 bool LooksLikeCodeInBug11395(Instruction *
I);
855 bool GlobalIsLinkerInitialized(GlobalVariable *
G);
856 bool isSafeAccess(ObjectSizeOffsetVisitor &ObjSizeVis,
Value *Addr,
857 TypeSize TypeStoreSize)
const;
860 struct FunctionStateRAII {
861 AddressSanitizer *
Pass;
863 FunctionStateRAII(AddressSanitizer *
Pass) :
Pass(
Pass) {
865 "last pass forgot to clear cache");
869 ~FunctionStateRAII() {
870 Pass->LocalDynamicShadow =
nullptr;
871 Pass->ProcessedAllocas.clear();
877 const DataLayout *
DL;
887 ShadowMapping Mapping;
888 FunctionCallee AsanHandleNoReturnFunc;
889 FunctionCallee AsanPtrCmpFunction, AsanPtrSubFunction;
897 FunctionCallee AsanErrorCallbackSized[2][2];
898 FunctionCallee AsanMemoryAccessCallbackSized[2][2];
900 FunctionCallee AsanMemmove, AsanMemcpy, AsanMemset;
901 Value *LocalDynamicShadow =
nullptr;
902 const StackSafetyGlobalInfo *SSGI;
903 DenseMap<const AllocaInst *, bool> ProcessedAllocas;
905 FunctionCallee AMDGPUAddressShared;
906 FunctionCallee AMDGPUAddressPrivate;
907 int InstrumentationWithCallsThreshold;
908 uint32_t MaxInlinePoisoningSize;
911class ModuleAddressSanitizer {
913 ModuleAddressSanitizer(
Module &M,
bool InsertVersionCheck,
914 bool CompileKernel =
false,
bool Recover =
false,
915 bool UseGlobalsGC =
true,
bool UseOdrIndicator =
true,
923 : InsertVersionCheck),
925 UseGlobalsGC(UseGlobalsGC &&
ClUseGlobalsGC && !this->CompileKernel),
940 UseCtorComdat(UseGlobalsGC &&
ClWithComdat && !this->CompileKernel),
941 DestructorKind(DestructorKind),
945 C = &(
M.getContext());
946 int LongSize =
M.getDataLayout().getPointerSizeInBits();
947 IntptrTy = Type::getIntNTy(*
C, LongSize);
948 PtrTy = PointerType::getUnqual(*
C);
949 TargetTriple =
M.getTargetTriple();
954 assert(this->DestructorKind != AsanDtorKind::Invalid);
957 bool instrumentModule();
960 void initializeCallbacks();
962 void instrumentGlobals(
IRBuilder<> &IRB,
bool *CtorComdat);
969 const std::string &UniqueModuleId);
974 InstrumentGlobalsWithMetadataArray(
IRBuilder<> &IRB,
978 GlobalVariable *CreateMetadataGlobal(Constant *Initializer,
979 StringRef OriginalName);
980 void SetComdatForGlobalMetadata(GlobalVariable *
G, GlobalVariable *
Metadata,
981 StringRef InternalSuffix);
984 const GlobalVariable *getExcludedAliasedGlobal(
const GlobalAlias &GA)
const;
985 bool shouldInstrumentGlobal(GlobalVariable *
G)
const;
986 bool ShouldUseMachOGlobalsSection()
const;
987 StringRef getGlobalMetadataSection()
const;
988 void poisonOneInitializer(Function &GlobalInit);
989 void createInitializerPoisonCalls();
990 uint64_t getMinRedzoneSizeForGlobal()
const {
994 int GetAsanVersion()
const;
995 GlobalVariable *getOrCreateModuleName();
999 bool InsertVersionCheck;
1002 bool UsePrivateAlias;
1003 bool UseOdrIndicator;
1010 Triple TargetTriple;
1011 ShadowMapping Mapping;
1012 FunctionCallee AsanPoisonGlobals;
1013 FunctionCallee AsanUnpoisonGlobals;
1014 FunctionCallee AsanRegisterGlobals;
1015 FunctionCallee AsanUnregisterGlobals;
1016 FunctionCallee AsanRegisterImageGlobals;
1017 FunctionCallee AsanUnregisterImageGlobals;
1018 FunctionCallee AsanRegisterElfGlobals;
1019 FunctionCallee AsanUnregisterElfGlobals;
1021 Function *AsanCtorFunction =
nullptr;
1022 Function *AsanDtorFunction =
nullptr;
1023 GlobalVariable *ModuleName =
nullptr;
1035struct FunctionStackPoisoner :
public InstVisitor<FunctionStackPoisoner> {
1037 AddressSanitizer &ASan;
1038 RuntimeCallInserter &RTCI;
1043 ShadowMapping Mapping;
1047 SmallVector<Instruction *, 8> RetVec;
1051 FunctionCallee AsanSetShadowFunc[0x100] = {};
1052 FunctionCallee AsanPoisonStackMemoryFunc, AsanUnpoisonStackMemoryFunc;
1053 FunctionCallee AsanAllocaPoisonFunc, AsanAllocasUnpoisonFunc;
1056 struct AllocaPoisonCall {
1057 IntrinsicInst *InsBefore;
1067 AllocaInst *DynamicAllocaLayout =
nullptr;
1068 IntrinsicInst *LocalEscapeCall =
nullptr;
1070 bool HasInlineAsm =
false;
1071 bool HasReturnsTwiceCall =
false;
1074 FunctionStackPoisoner(Function &
F, AddressSanitizer &ASan,
1075 RuntimeCallInserter &RTCI)
1076 :
F(
F), ASan(ASan), RTCI(RTCI),
1078 IntptrTy(ASan.IntptrTy),
1080 Mapping(ASan.Mapping),
1088 copyArgsPassedByValToAllocas();
1093 if (AllocaVec.
empty() && DynamicAllocaVec.
empty())
return false;
1095 initializeCallbacks(*
F.getParent());
1097 processDynamicAllocas();
1098 processStaticAllocas();
1109 void copyArgsPassedByValToAllocas();
1114 void processStaticAllocas();
1115 void processDynamicAllocas();
1117 void createDynamicAllocasInitStorage();
1122 void visitReturnInst(ReturnInst &RI) {
1123 if (CallInst *CI = RI.
getParent()->getTerminatingMustTailCall())
1130 void visitResumeInst(ResumeInst &RI) { RetVec.
push_back(&RI); }
1133 void visitCleanupReturnInst(CleanupReturnInst &CRI) { RetVec.
push_back(&CRI); }
1135 void unpoisonDynamicAllocasBeforeInst(Instruction *InstBefore,
1136 Value *SavedStack) {
1145 Intrinsic::get_dynamic_area_offset, {IntptrTy}, {});
1151 RTCI.createRuntimeCall(
1152 IRB, AsanAllocasUnpoisonFunc,
1153 {IRB.
CreateLoad(IntptrTy, DynamicAllocaLayout), DynamicAreaPtr});
1157 void unpoisonDynamicAllocas() {
1158 for (Instruction *Ret : RetVec)
1159 unpoisonDynamicAllocasBeforeInst(Ret, DynamicAllocaLayout);
1161 for (Instruction *StackRestoreInst : StackRestoreVec)
1162 unpoisonDynamicAllocasBeforeInst(StackRestoreInst,
1163 StackRestoreInst->getOperand(0));
1176 void handleDynamicAllocaCall(AllocaInst *AI);
1179 void visitAllocaInst(AllocaInst &AI) {
1184 (STy && STy->containsHomogeneousScalableVectorTypes())) {
1188 if (AllocaVec.
empty())
1204 void visitIntrinsicInst(IntrinsicInst &
II) {
1206 if (
ID == Intrinsic::stackrestore) StackRestoreVec.
push_back(&
II);
1207 if (
ID == Intrinsic::localescape) LocalEscapeCall = &
II;
1208 if (!ASan.UseAfterScope)
1210 if (!
II.isLifetimeStartOrEnd())
1215 if (!AI || !ASan.isInterestingAlloca(*AI))
1225 bool DoPoison = (
ID == Intrinsic::lifetime_end);
1226 AllocaPoisonCall APC = {&
II, AI, *
Size, DoPoison};
1228 StaticAllocaPoisonCallVec.
push_back(APC);
1230 DynamicAllocaPoisonCallVec.
push_back(APC);
1233 void visitCallBase(CallBase &CB) {
1235 HasInlineAsm |= CI->isInlineAsm() && &CB != ASan.LocalDynamicShadow;
1236 HasReturnsTwiceCall |= CI->canReturnTwice();
1241 void initializeCallbacks(
Module &M);
1246 void copyToShadow(ArrayRef<uint8_t> ShadowMask, ArrayRef<uint8_t> ShadowBytes,
1248 void copyToShadow(ArrayRef<uint8_t> ShadowMask, ArrayRef<uint8_t> ShadowBytes,
1251 void copyToShadowInline(ArrayRef<uint8_t> ShadowMask,
1252 ArrayRef<uint8_t> ShadowBytes,
size_t Begin,
1257 Value *createAllocaForLayout(
IRBuilder<> &IRB,
const ASanStackFrameLayout &L,
1260 Instruction *ThenTerm,
Value *ValueIfFalse);
1268 OS, MapClassName2PassName);
1270 if (Options.CompileKernel)
1272 if (Options.UseAfterScope)
1273 OS <<
"use-after-scope";
1281 : Options(Options), UseGlobalGC(UseGlobalGC),
1282 UseOdrIndicator(UseOdrIndicator), DestructorKind(DestructorKind),
1283 ConstructorKind(ConstructorKind) {}
1292 ModuleAddressSanitizer ModuleSanitizer(
1293 M, Options.InsertVersionCheck, Options.CompileKernel, Options.Recover,
1294 UseGlobalGC, UseOdrIndicator, DestructorKind, ConstructorKind);
1306 if (
F.getName().starts_with(
"__asan_"))
1308 if (
F.isPresplitCoroutine())
1310 AddressSanitizer FunctionSanitizer(
1311 M, SSGI, Options.InstrumentationWithCallsThreshold,
1312 Options.MaxInlinePoisoningSize, Options.CompileKernel, Options.Recover,
1313 Options.UseAfterScope, Options.UseAfterReturn);
1316 Modified |= FunctionSanitizer.instrumentFunction(
F, &TLI, &
TTI);
1318 Modified |= ModuleSanitizer.instrumentModule();
1339 if (
G->getName().starts_with(
"llvm.") ||
1341 G->getName().starts_with(
"__llvm_gcov_ctr") ||
1343 G->getName().starts_with(
"__llvm_rtti_proxy"))
1358 if (AddrSpace == 3 || AddrSpace == 5)
// Fragment (non-contiguous excerpt): core of the mem->shadow address
// translation: Shadow = (Mem >> Scale) then combined with the mapping offset,
// using OR when the mapping allows it, otherwise ADD.
// NOTE(review): surrounding lines are elided (1366 -> 1369, 1370 -> 1372), so
// the declaration of ShadowBase and the else-structure are not visible here.
1365 Shadow = IRB.
CreateLShr(Shadow, Mapping.Scale);
// Zero offset: the shifted address is already the shadow address.
1366 if (Mapping.Offset == 0)
return Shadow;
// Prefer the per-function cached shadow base when one was materialized.
1369 if (LocalDynamicShadow)
1370 ShadowBase = LocalDynamicShadow;
1372 ShadowBase = ConstantInt::get(IntptrTy, Mapping.Offset);
1373 if (Mapping.OrShadowOffset)
1374 return IRB.
CreateOr(Shadow, ShadowBase);
1376 return IRB.
CreateAdd(Shadow, ShadowBase);
1381 RuntimeCallInserter &RTCI) {
1384 RTCI.createRuntimeCall(
1390 RTCI.createRuntimeCall(
1396 MI->eraseFromParent();
// Fragment (non-contiguous excerpt): memoized predicate deciding whether an
// alloca should get redzones/poisoning. Results are cached per-alloca in
// ProcessedAllocas.
// NOTE(review): the embedded numbering jumps 1401 -> 1404 and 1406 -> 1419, so
// the `if (!Inserted)` guard before the cached return and almost the entire
// interestingness condition are elided from this excerpt.
1400bool AddressSanitizer::isInterestingAlloca(
const AllocaInst &AI) {
1401 auto [It,
Inserted] = ProcessedAllocas.try_emplace(&AI);
// Cache hit: reuse the previously computed answer.
1404 return It->getSecond();
1406 bool IsInteresting =
// Stack-safety analysis can prove an alloca safe, making instrumentation
// unnecessary.
1419 !(SSGI && SSGI->
isSafe(AI)));
1421 It->second = IsInteresting;
1422 return IsInteresting;
1436 if (
Ptr->isSwiftError())
1453void AddressSanitizer::getInterestingMemoryOperands(
1457 if (LocalDynamicShadow ==
I)
1463 Interesting.
emplace_back(
I, LI->getPointerOperandIndex(),
false,
1464 LI->getType(), LI->getAlign());
1469 SI->getValueOperand()->getType(),
SI->getAlign());
1473 Interesting.
emplace_back(
I, RMW->getPointerOperandIndex(),
true,
1474 RMW->getValOperand()->getType(), std::nullopt);
1478 Interesting.
emplace_back(
I, XCHG->getPointerOperandIndex(),
true,
1479 XCHG->getCompareOperand()->getType(),
1482 switch (CI->getIntrinsicID()) {
1483 case Intrinsic::masked_load:
1484 case Intrinsic::masked_store:
1485 case Intrinsic::masked_gather:
1486 case Intrinsic::masked_scatter: {
1487 bool IsWrite = CI->getType()->isVoidTy();
1489 unsigned OpOffset = IsWrite ? 1 : 0;
1493 auto BasePtr = CI->getOperand(OpOffset);
1494 if (ignoreAccess(
I, BasePtr))
1496 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1498 Value *
Mask = CI->getOperand(1 + OpOffset);
1499 Interesting.
emplace_back(
I, OpOffset, IsWrite, Ty, Alignment, Mask);
1502 case Intrinsic::masked_expandload:
1503 case Intrinsic::masked_compressstore: {
1504 bool IsWrite = CI->getIntrinsicID() == Intrinsic::masked_compressstore;
1505 unsigned OpOffset = IsWrite ? 1 : 0;
1508 auto BasePtr = CI->getOperand(OpOffset);
1509 if (ignoreAccess(
I, BasePtr))
1512 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1515 Value *
Mask = CI->getOperand(1 + OpOffset);
1518 Value *ExtMask =
IB.CreateZExt(Mask, ExtTy);
1519 Value *EVL =
IB.CreateAddReduce(ExtMask);
1520 Value *TrueMask = ConstantInt::get(
Mask->getType(), 1);
1521 Interesting.
emplace_back(
I, OpOffset, IsWrite, Ty, Alignment, TrueMask,
1525 case Intrinsic::vp_load:
1526 case Intrinsic::vp_store:
1527 case Intrinsic::experimental_vp_strided_load:
1528 case Intrinsic::experimental_vp_strided_store: {
1530 unsigned IID = CI->getIntrinsicID();
1531 bool IsWrite = CI->getType()->isVoidTy();
1534 unsigned PtrOpNo = *VPI->getMemoryPointerParamPos(IID);
1535 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1536 MaybeAlign Alignment = VPI->getOperand(PtrOpNo)->getPointerAlignment(*
DL);
1537 Value *Stride =
nullptr;
1538 if (IID == Intrinsic::experimental_vp_strided_store ||
1539 IID == Intrinsic::experimental_vp_strided_load) {
1540 Stride = VPI->getOperand(PtrOpNo + 1);
1547 Alignment =
Align(1);
1549 Interesting.
emplace_back(
I, PtrOpNo, IsWrite, Ty, Alignment,
1550 VPI->getMaskParam(), VPI->getVectorLengthParam(),
1554 case Intrinsic::vp_gather:
1555 case Intrinsic::vp_scatter: {
1557 unsigned IID = CI->getIntrinsicID();
1558 bool IsWrite = IID == Intrinsic::vp_scatter;
1561 unsigned PtrOpNo = *VPI->getMemoryPointerParamPos(IID);
1562 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1563 MaybeAlign Alignment = VPI->getPointerAlignment();
1564 Interesting.
emplace_back(
I, PtrOpNo, IsWrite, Ty, Alignment,
1565 VPI->getMaskParam(),
1566 VPI->getVectorLengthParam());
1572 if (
TTI->getTgtMemIntrinsic(
II, IntrInfo))
1576 for (
unsigned ArgNo = 0; ArgNo < CI->arg_size(); ArgNo++) {
1578 ignoreAccess(
I, CI->getArgOperand(ArgNo)))
1580 Type *Ty = CI->getParamByValType(ArgNo);
1596 if (!Cmp->isRelational())
1610 if (BO->getOpcode() != Instruction::Sub)
1623 if (!
G->hasInitializer())
1626 if (
G->hasSanitizerMetadata() &&
G->getSanitizerMetadata().IsDynInit)
1632void AddressSanitizer::instrumentPointerComparisonOrSubtraction(
1636 Value *
Param[2] = {
I->getOperand(0),
I->getOperand(1)};
1637 for (
Value *&i : Param) {
1638 if (i->getType()->isPointerTy())
1641 RTCI.createRuntimeCall(IRB,
F, Param);
1647 TypeSize TypeStoreSize,
bool IsWrite,
1648 Value *SizeArgument,
bool UseCalls,
1649 uint32_t Exp, RuntimeCallInserter &RTCI) {
1654 switch (FixedSize) {
1660 if (!Alignment || *Alignment >= Granularity ||
1661 *Alignment >= FixedSize / 8)
1662 return Pass->instrumentAddress(
I, InsertBefore, Addr, Alignment,
1663 FixedSize, IsWrite,
nullptr, UseCalls,
1667 Pass->instrumentUnusualSizeOrAlignment(
I, InsertBefore, Addr, TypeStoreSize,
1668 IsWrite,
nullptr, UseCalls, Exp, RTCI);
1671void AddressSanitizer::instrumentMaskedLoadOrStore(
1674 MaybeAlign Alignment,
unsigned Granularity,
Type *OpType,
bool IsWrite,
1675 Value *SizeArgument,
bool UseCalls, uint32_t Exp,
1676 RuntimeCallInserter &RTCI) {
1678 TypeSize ElemTypeSize =
DL.getTypeStoreSizeInBits(VTy->getScalarType());
1679 auto Zero = ConstantInt::get(IntptrTy, 0);
1687 Value *IsEVLZero =
IB.CreateICmpNE(EVL, ConstantInt::get(EVLType, 0));
1689 IB.SetInsertPoint(LoopInsertBefore);
1691 EVL =
IB.CreateZExtOrTrunc(EVL, IntptrTy);
1694 Value *
EC =
IB.CreateElementCount(IntptrTy, VTy->getElementCount());
1695 EVL =
IB.CreateBinaryIntrinsic(Intrinsic::umin, EVL, EC);
1697 EVL =
IB.CreateElementCount(IntptrTy, VTy->getElementCount());
1702 Stride =
IB.CreateZExtOrTrunc(Stride, IntptrTy);
1706 Value *MaskElem = IRB.CreateExtractElement(Mask, Index);
1707 if (auto *MaskElemC = dyn_cast<ConstantInt>(MaskElem)) {
1708 if (MaskElemC->isZero())
1714 Instruction *ThenTerm = SplitBlockAndInsertIfThen(
1715 MaskElem, &*IRB.GetInsertPoint(), false);
1716 IRB.SetInsertPoint(ThenTerm);
1719 Value *InstrumentedAddress;
1722 cast<VectorType>(Addr->getType())->getElementType()->isPointerTy() &&
1723 "Expected vector of pointer.");
1724 InstrumentedAddress = IRB.CreateExtractElement(Addr, Index);
1725 }
else if (Stride) {
1732 Alignment, Granularity, ElemTypeSize, IsWrite,
1733 SizeArgument, UseCalls, Exp, RTCI);
1740 RuntimeCallInserter &RTCI) {
1741 Value *Addr =
O.getPtr();
1761 isSafeAccess(ObjSizeVis, Addr,
O.TypeStoreSize)) {
1762 NumOptimizedAccessesToGlobalVar++;
1770 isSafeAccess(ObjSizeVis, Addr,
O.TypeStoreSize)) {
1771 NumOptimizedAccessesToStackVar++;
1777 NumInstrumentedWrites++;
1779 NumInstrumentedReads++;
1781 if (
O.MaybeByteOffset) {
1786 if (TargetTriple.isRISCV()) {
1791 static_cast<unsigned>(LongSize)) {
1800 unsigned Granularity = 1 << Mapping.Scale;
1802 instrumentMaskedLoadOrStore(
this,
DL, IntptrTy,
O.MaybeMask,
O.MaybeEVL,
1803 O.MaybeStride,
O.getInsn(), Addr,
O.Alignment,
1804 Granularity,
O.OpType,
O.IsWrite,
nullptr,
1805 UseCalls, Exp, RTCI);
1808 Granularity,
O.TypeStoreSize,
O.IsWrite,
nullptr,
1809 UseCalls, Exp, RTCI);
1814 Value *Addr,
bool IsWrite,
1815 size_t AccessSizeIndex,
1816 Value *SizeArgument,
1818 RuntimeCallInserter &RTCI) {
1824 Call = RTCI.createRuntimeCall(IRB, AsanErrorCallbackSized[IsWrite][0],
1825 {Addr, SizeArgument});
1827 Call = RTCI.createRuntimeCall(IRB, AsanErrorCallbackSized[IsWrite][1],
1828 {Addr, SizeArgument, ExpVal});
1831 Call = RTCI.createRuntimeCall(
1832 IRB, AsanErrorCallback[IsWrite][0][AccessSizeIndex], Addr);
1834 Call = RTCI.createRuntimeCall(
1835 IRB, AsanErrorCallback[IsWrite][1][AccessSizeIndex], {Addr, ExpVal});
// Fragment (non-contiguous excerpt): slow-path shadow check. Computes the
// offset of the last byte touched by the access within its shadow granule
// (Granularity = 2^Scale bytes) so it can be compared against the shadow
// value for partially-addressable granules.
// NOTE(review): lines 1846, 1849, 1851 and 1853-1854 are elided, including the
// CreateAdd for multi-byte accesses and the final CreateICmpSGE; the function
// signature's opening is also not visible here.
1844 uint32_t TypeStoreSize) {
1845 size_t Granularity =
static_cast<size_t>(1) << Mapping.Scale;
// last-byte offset = AddrLong & (Granularity - 1) [+ TypeStoreSize/8 - 1].
1847 Value *LastAccessedByte =
1848 IRB.
CreateAnd(AddrLong, ConstantInt::get(IntptrTy, Granularity - 1));
1850 if (TypeStoreSize / 8 > 1)
1852 LastAccessedByte, ConstantInt::get(IntptrTy, TypeStoreSize / 8 - 1));
// Widen/cast to the shadow value's type before the (elided) comparison.
1855 IRB.
CreateIntCast(LastAccessedByte, ShadowValue->getType(),
false);
1860Instruction *AddressSanitizer::instrumentAMDGPUAddress(
1862 uint32_t TypeStoreSize,
bool IsWrite,
Value *SizeArgument) {
1869 return InsertBefore;
1874 Value *IsSharedOrPrivate = IRB.
CreateOr(IsShared, IsPrivate);
1876 Value *AddrSpaceZeroLanding =
1879 return InsertBefore;
1895 Trm->getParent()->setName(
"asan.report");
1906void AddressSanitizer::instrumentAddress(
Instruction *OrigIns,
1909 uint32_t TypeStoreSize,
bool IsWrite,
1910 Value *SizeArgument,
bool UseCalls,
1912 RuntimeCallInserter &RTCI) {
1913 if (TargetTriple.isAMDGPU()) {
1914 InsertBefore = instrumentAMDGPUAddress(OrigIns, InsertBefore, Addr,
1915 TypeStoreSize, IsWrite, SizeArgument);
1924 const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
1927 ConstantInt::get(
Int32Ty, AccessInfo.Packed)});
1934 RTCI.createRuntimeCall(
1935 IRB, AsanMemoryAccessCallback[IsWrite][0][AccessSizeIndex], AddrLong);
1937 RTCI.createRuntimeCall(
1938 IRB, AsanMemoryAccessCallback[IsWrite][1][AccessSizeIndex],
1939 {AddrLong, ConstantInt::get(IRB.
getInt32Ty(), Exp)});
1946 Value *ShadowPtr = memToShadow(AddrLong, IRB);
1947 const uint64_t ShadowAlign =
1948 std::max<uint64_t>(Alignment.
valueOrOne().
value() >> Mapping.Scale, 1);
1953 size_t Granularity = 1ULL << Mapping.Scale;
1956 bool GenSlowPath = (
ClAlwaysSlowPath || (TypeStoreSize < 8 * Granularity));
1958 if (TargetTriple.isAMDGCN()) {
1960 auto *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeStoreSize);
1963 CrashTerm = genAMDGPUReportBlock(IRB, Cmp, Recover);
1964 }
else if (GenSlowPath) {
1972 Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeStoreSize);
1987 CrashTerm, AddrLong, IsWrite, AccessSizeIndex, SizeArgument, Exp, RTCI);
1996void AddressSanitizer::instrumentUnusualSizeOrAlignment(
1998 TypeSize TypeStoreSize,
bool IsWrite,
Value *SizeArgument,
bool UseCalls,
1999 uint32_t Exp, RuntimeCallInserter &RTCI) {
2007 RTCI.createRuntimeCall(IRB, AsanMemoryAccessCallbackSized[IsWrite][0],
2010 RTCI.createRuntimeCall(
2011 IRB, AsanMemoryAccessCallbackSized[IsWrite][1],
2025void ModuleAddressSanitizer::poisonOneInitializer(
Function &GlobalInit) {
2031 Value *ModuleNameAddr =
2033 IRB.
CreateCall(AsanPoisonGlobals, ModuleNameAddr);
2036 for (
auto &BB : GlobalInit)
2041void ModuleAddressSanitizer::createInitializerPoisonCalls() {
2061 poisonOneInitializer(*
F);
2067ModuleAddressSanitizer::getExcludedAliasedGlobal(
const GlobalAlias &GA)
const {
2072 assert(CompileKernel &&
"Only expecting to be called when compiling kernel");
2084bool ModuleAddressSanitizer::shouldInstrumentGlobal(
GlobalVariable *
G)
const {
2085 Type *Ty =
G->getValueType();
2088 if (
G->hasSanitizerMetadata() &&
G->getSanitizerMetadata().NoAddress)
2090 if (!Ty->
isSized())
return false;
2091 if (!
G->hasInitializer())
return false;
2093 if (
G->getAddressSpace() &&
2100 if (
G->isThreadLocal())
return false;
2102 if (
G->getAlign() && *
G->getAlign() > getMinRedzoneSizeForGlobal())
return false;
2108 if (!TargetTriple.isOSBinFormatCOFF()) {
2109 if (!
G->hasExactDefinition() ||
G->hasComdat())
2113 if (
G->isInterposable())
2117 if (
G->hasAvailableExternallyLinkage())
2124 switch (
C->getSelectionKind()) {
2135 if (
G->hasSection()) {
2145 if (Section ==
"llvm.metadata")
return false;
2152 if (
Section.starts_with(
".preinit_array") ||
2153 Section.starts_with(
".init_array") ||
2154 Section.starts_with(
".fini_array")) {
2160 if (TargetTriple.isOSBinFormatELF()) {
2174 if (TargetTriple.isOSBinFormatCOFF() &&
Section.contains(
'$')) {
2175 LLVM_DEBUG(
dbgs() <<
"Ignoring global in sorted section (contains '$'): "
2180 if (TargetTriple.isOSBinFormatMachO()) {
2182 unsigned TAA = 0, StubSize = 0;
2185 Section, ParsedSegment, ParsedSection, TAA, TAAParsed, StubSize));
2190 if (ParsedSegment ==
"__OBJC" ||
2191 (ParsedSegment ==
"__DATA" && ParsedSection.
starts_with(
"__objc_"))) {
2203 if (ParsedSegment ==
"__DATA" && ParsedSection ==
"__cfstring") {
2216 if (CompileKernel) {
2219 if (
G->getName().starts_with(
"__"))
// Fragment (non-contiguous excerpt): whether the Mach-O "__asan_globals"
// section-based registration scheme is supported for the current target —
// gated on object format plus minimum OS versions.
// NOTE(review): every `return` line is elided from this excerpt (numbering
// jumps 2230 -> 2233 -> 2235 ...), so the true/false result of each check is
// not visible here — presumably false for old OS versions; verify in the
// full file.
2229bool ModuleAddressSanitizer::ShouldUseMachOGlobalsSection()
const {
2230 if (!TargetTriple.isOSBinFormatMachO())
2233 if (TargetTriple.isMacOSX() && !TargetTriple.isMacOSXVersionLT(10, 11))
2235 if (TargetTriple.isiOS() && !TargetTriple.isOSVersionLT(9))
2237 if (TargetTriple.isWatchOS() && !TargetTriple.isOSVersionLT(2))
2239 if (TargetTriple.isDriverKit())
2241 if (TargetTriple.isXROS())
2247StringRef ModuleAddressSanitizer::getGlobalMetadataSection()
const {
2248 switch (TargetTriple.getObjectFormat()) {
2258 "ModuleAddressSanitizer not implemented for object file format");
2265void ModuleAddressSanitizer::initializeCallbacks() {
2271 AsanUnpoisonGlobals =
2275 AsanRegisterGlobals =
M.getOrInsertFunction(
2277 AsanUnregisterGlobals =
M.getOrInsertFunction(
2282 AsanRegisterImageGlobals =
M.getOrInsertFunction(
2284 AsanUnregisterImageGlobals =
M.getOrInsertFunction(
2287 AsanRegisterElfGlobals =
2289 IntptrTy, IntptrTy, IntptrTy);
2290 AsanUnregisterElfGlobals =
2292 IntptrTy, IntptrTy, IntptrTy);
2297void ModuleAddressSanitizer::SetComdatForGlobalMetadata(
2302 if (!
G->hasName()) {
2306 G->setName(
genName(
"anon_global"));
2309 if (!InternalSuffix.
empty() &&
G->hasLocalLinkage()) {
2310 std::string
Name = std::string(
G->getName());
2311 Name += InternalSuffix;
2312 C =
M.getOrInsertComdat(Name);
2314 C =
M.getOrInsertComdat(
G->getName());
2320 if (TargetTriple.isOSBinFormatCOFF()) {
2322 if (
G->hasPrivateLinkage())
2335ModuleAddressSanitizer::CreateMetadataGlobal(
Constant *Initializer,
2337 auto Linkage = TargetTriple.isOSBinFormatMachO()
2343 Metadata->setSection(getGlobalMetadataSection());
2350Instruction *ModuleAddressSanitizer::CreateAsanModuleDtor() {
2354 AsanDtorFunction->addFnAttr(Attribute::NoUnwind);
2362void ModuleAddressSanitizer::InstrumentGlobalsCOFF(
2366 auto &
DL =
M.getDataLayout();
2369 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2370 Constant *Initializer = MetadataInitializers[i];
2374 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2380 unsigned SizeOfGlobalStruct =
DL.getTypeAllocSize(Initializer->
getType());
2382 "global metadata will not be padded appropriately");
2385 SetComdatForGlobalMetadata(
G,
Metadata,
"");
2390 if (!MetadataGlobals.empty())
2394void ModuleAddressSanitizer::instrumentGlobalsELF(
2397 const std::string &UniqueModuleId) {
2404 bool UseComdatForGlobalsGC = UseOdrIndicator && !UniqueModuleId.empty();
2407 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2410 CreateMetadataGlobal(MetadataInitializers[i],
G->getName());
2412 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2415 if (UseComdatForGlobalsGC)
2416 SetComdatForGlobalMetadata(
G,
Metadata, UniqueModuleId);
2421 if (!MetadataGlobals.empty())
2438 "__start_" + getGlobalMetadataSection());
2442 "__stop_" + getGlobalMetadataSection());
2456 IrbDtor.CreateCall(AsanUnregisterElfGlobals,
2463void ModuleAddressSanitizer::InstrumentGlobalsMachO(
2474 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2475 Constant *Initializer = MetadataInitializers[i];
2481 auto LivenessBinder =
2486 Twine(
"__asan_binder_") +
G->getName());
2487 Liveness->
setSection(
"__DATA,__asan_liveness,regular,live_support");
2488 LivenessGlobals[i] = Liveness;
2495 if (!LivenessGlobals.empty())
2517 IrbDtor.CreateCall(AsanUnregisterImageGlobals,
2522void ModuleAddressSanitizer::InstrumentGlobalsWithMetadataArray(
2526 unsigned N = ExtendedGlobals.
size();
2536 if (Mapping.Scale > 3)
2537 AllGlobals->setAlignment(
Align(1ULL << Mapping.Scale));
2542 ConstantInt::get(IntptrTy,
N)});
2548 IrbDtor.CreateCall(AsanUnregisterGlobals,
2550 ConstantInt::get(IntptrTy,
N)});
2559void ModuleAddressSanitizer::instrumentGlobals(
IRBuilder<> &IRB,
2564 if (CompileKernel) {
2565 for (
auto &GA :
M.aliases()) {
2567 AliasedGlobalExclusions.
insert(GV);
2572 for (
auto &
G :
M.globals()) {
2573 if (!AliasedGlobalExclusions.
count(&
G) && shouldInstrumentGlobal(&
G))
2577 size_t n = GlobalsToChange.
size();
2578 auto &
DL =
M.getDataLayout();
2592 IntptrTy, IntptrTy, IntptrTy);
2596 for (
size_t i = 0; i < n; i++) {
2600 if (
G->hasSanitizerMetadata())
2601 MD =
G->getSanitizerMetadata();
2606 std::string NameForGlobal =
G->getName().str();
2611 Type *Ty =
G->getValueType();
2612 const uint64_t SizeInBytes =
DL.getTypeAllocSize(Ty);
2625 M, NewTy,
G->isConstant(),
Linkage, NewInitializer,
"",
G,
2626 G->getThreadLocalMode(),
G->getAddressSpace());
2636 if (TargetTriple.isOSBinFormatMachO() && !
G->hasSection() &&
2639 if (Seq && Seq->isCString())
2640 NewGlobal->
setSection(
"__TEXT,__asan_cstring,regular");
2654 G->eraseFromParent();
2655 NewGlobals[i] = NewGlobal;
2660 bool CanUsePrivateAliases =
2661 TargetTriple.isOSBinFormatELF() || TargetTriple.isOSBinFormatMachO() ||
2662 TargetTriple.isOSBinFormatWasm();
2663 if (CanUsePrivateAliases && UsePrivateAlias) {
2666 InstrumentedGlobal =
2672 ODRIndicator = ConstantInt::get(IntptrTy, -1);
2673 }
else if (UseOdrIndicator) {
2676 auto *ODRIndicatorSym =
2685 ODRIndicatorSym->setAlignment(
Align(1));
2692 ConstantInt::get(IntptrTy, SizeInBytes),
2693 ConstantInt::get(IntptrTy, SizeInBytes + RightRedzoneSize),
2696 ConstantInt::get(IntptrTy, MD.
IsDynInit),
2701 Initializers[i] = Initializer;
2707 for (
size_t i = 0; i < n; i++) {
2709 if (
G->getName().empty())
continue;
2714 if (UseGlobalsGC && TargetTriple.isOSBinFormatELF()) {
2721 }
else if (n == 0) {
2724 *CtorComdat = TargetTriple.isOSBinFormatELF();
2726 *CtorComdat =
false;
2727 if (UseGlobalsGC && TargetTriple.isOSBinFormatCOFF()) {
2728 InstrumentGlobalsCOFF(IRB, NewGlobals, Initializers);
2729 }
else if (UseGlobalsGC && ShouldUseMachOGlobalsSection()) {
2730 InstrumentGlobalsMachO(IRB, NewGlobals, Initializers);
2732 InstrumentGlobalsWithMetadataArray(IRB, NewGlobals, Initializers);
2738 createInitializerPoisonCalls();
2744ModuleAddressSanitizer::getRedzoneSizeForGlobal(uint64_t SizeInBytes)
const {
2745 constexpr uint64_t kMaxRZ = 1 << 18;
2746 const uint64_t MinRZ = getMinRedzoneSizeForGlobal();
2749 if (SizeInBytes <= MinRZ / 2) {
2753 RZ = MinRZ - SizeInBytes;
2756 RZ = std::clamp((SizeInBytes / MinRZ / 4) * MinRZ, MinRZ, kMaxRZ);
2759 if (SizeInBytes % MinRZ)
2760 RZ += MinRZ - (SizeInBytes % MinRZ);
2763 assert((RZ + SizeInBytes) % MinRZ == 0);
2768int ModuleAddressSanitizer::GetAsanVersion()
const {
2769 int LongSize =
M.getDataLayout().getPointerSizeInBits();
2770 bool isAndroid =
M.getTargetTriple().isAndroid();
2774 Version += (LongSize == 32 && isAndroid);
2789bool ModuleAddressSanitizer::instrumentModule() {
2790 initializeCallbacks();
2798 if (CompileKernel) {
2803 std::string AsanVersion = std::to_string(GetAsanVersion());
2804 std::string VersionCheckName =
2806 std::tie(AsanCtorFunction, std::ignore) =
2809 {}, VersionCheckName);
2813 bool CtorComdat =
true;
2816 if (AsanCtorFunction) {
2817 IRBuilder<> IRB(AsanCtorFunction->getEntryBlock().getTerminator());
2818 instrumentGlobals(IRB, &CtorComdat);
2821 instrumentGlobals(IRB, &CtorComdat);
2830 if (UseCtorComdat && TargetTriple.isOSBinFormatELF() && CtorComdat) {
2831 if (AsanCtorFunction) {
2835 if (AsanDtorFunction) {
2840 if (AsanCtorFunction)
2842 if (AsanDtorFunction)
2853 for (
int Exp = 0;
Exp < 2;
Exp++) {
2854 for (
size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
2855 const std::string TypeStr = AccessIsWrite ?
"store" :
"load";
2856 const std::string ExpStr =
Exp ?
"exp_" :
"";
2857 const std::string EndingStr = Recover ?
"_noabort" :
"";
2867 if (
auto AK = TLI->getExtAttrForI32Param(
false)) {
2868 AL2 = AL2.addParamAttribute(*
C, 2, AK);
2869 AL1 = AL1.addParamAttribute(*
C, 1, AK);
2872 AsanErrorCallbackSized[AccessIsWrite][
Exp] =
M.getOrInsertFunction(
2876 AsanMemoryAccessCallbackSized[AccessIsWrite][
Exp] =
M.getOrInsertFunction(
2881 AccessSizeIndex++) {
2882 const std::string Suffix = TypeStr +
itostr(1ULL << AccessSizeIndex);
2883 AsanErrorCallback[AccessIsWrite][
Exp][AccessSizeIndex] =
2884 M.getOrInsertFunction(
2888 AsanMemoryAccessCallback[AccessIsWrite][
Exp][AccessSizeIndex] =
2889 M.getOrInsertFunction(
2896 const std::string MemIntrinCallbackPrefix =
2900 AsanMemmove =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memmove",
2901 PtrTy, PtrTy, PtrTy, IntptrTy);
2902 AsanMemcpy =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memcpy", PtrTy,
2903 PtrTy, PtrTy, IntptrTy);
2904 AsanMemset =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memset",
2908 AsanHandleNoReturnFunc =
2911 AsanPtrCmpFunction =
2913 AsanPtrSubFunction =
2915 if (Mapping.InGlobal)
2916 AsanShadowGlobal =
M.getOrInsertGlobal(
"__asan_shadow",
2919 AMDGPUAddressShared =
2921 AMDGPUAddressPrivate =
2925bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(
Function &
F) {
2933 if (
F.getName().contains(
" load]")) {
2943bool AddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(
Function &
F) {
2949 if (Mapping.InGlobal) {
2957 LocalDynamicShadow =
2958 IRB.
CreateCall(Asm, {AsanShadowGlobal},
".asan.shadow");
2960 LocalDynamicShadow =
2964 Value *GlobalDynamicAddress =
F.getParent()->getOrInsertGlobal(
2966 LocalDynamicShadow = IRB.
CreateLoad(IntptrTy, GlobalDynamicAddress);
2971void AddressSanitizer::markEscapedLocalAllocas(
Function &
F) {
2976 assert(ProcessedAllocas.empty() &&
"must process localescape before allocas");
2980 if (!
F.getParent()->getFunction(
"llvm.localescape"))
return;
2986 if (
II &&
II->getIntrinsicID() == Intrinsic::localescape) {
2988 for (
Value *Arg :
II->args()) {
2991 "non-static alloca arg to localescape");
2992 ProcessedAllocas[AI] =
false;
3001void AddressSanitizer::markCatchParametersAsUninteresting(
Function &
F) {
3007 for (
Value *Operand : CatchPad->arg_operands())
3009 ProcessedAllocas[AI] =
false;
3015bool AddressSanitizer::suppressInstrumentationSiteForDebug(
int &Instrumented) {
3016 bool ShouldInstrument =
3020 return !ShouldInstrument;
3023bool AddressSanitizer::instrumentFunction(
Function &
F,
3026 bool FunctionModified =
false;
3029 if (
F.hasFnAttribute(Attribute::Naked))
3030 return FunctionModified;
3035 if (maybeInsertAsanInitAtFunctionEntry(
F))
3036 FunctionModified =
true;
3039 if (!
F.hasFnAttribute(Attribute::SanitizeAddress))
return FunctionModified;
3041 if (
F.hasFnAttribute(Attribute::DisableSanitizerInstrumentation))
3042 return FunctionModified;
3046 initializeCallbacks(TLI);
3048 FunctionStateRAII CleanupObj(
this);
3050 RuntimeCallInserter RTCI(
F);
3052 FunctionModified |= maybeInsertDynamicShadowAtFunctionEntry(
F);
3056 markEscapedLocalAllocas(
F);
3058 if (TargetTriple.isOSWindows())
3059 markCatchParametersAsUninteresting(
F);
3071 for (
auto &BB :
F) {
3073 TempsToInstrument.
clear();
3074 int NumInsnsPerBB = 0;
3075 for (
auto &Inst : BB) {
3076 if (LooksLikeCodeInBug11395(&Inst))
return false;
3083 if (!InterestingOperands.
empty()) {
3084 for (
auto &Operand : InterestingOperands) {
3090 if (Operand.MaybeMask) {
3094 if (!TempsToInstrument.
insert(
Ptr).second)
3098 OperandsToInstrument.
push_back(Operand);
3105 PointerComparisonsOrSubtracts.
push_back(&Inst);
3113 TempsToInstrument.
clear();
3124 bool UseCalls = (InstrumentationWithCallsThreshold >= 0 &&
3125 OperandsToInstrument.
size() + IntrinToInstrument.
size() >
3126 (
unsigned)InstrumentationWithCallsThreshold);
3131 int NumInstrumented = 0;
3132 for (
auto &Operand : OperandsToInstrument) {
3133 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
3134 instrumentMop(ObjSizeVis, Operand, UseCalls,
3135 F.getDataLayout(), RTCI);
3136 FunctionModified =
true;
3138 for (
auto *Inst : IntrinToInstrument) {
3139 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
3140 instrumentMemIntrinsic(Inst, RTCI);
3141 FunctionModified =
true;
3144 FunctionStackPoisoner FSP(
F, *
this, RTCI);
3145 bool ChangedStack = FSP.runOnFunction();
3149 for (
auto *CI : NoReturnCalls) {
3151 RTCI.createRuntimeCall(IRB, AsanHandleNoReturnFunc, {});
3154 for (
auto *Inst : PointerComparisonsOrSubtracts) {
3155 instrumentPointerComparisonOrSubtraction(Inst, RTCI);
3156 FunctionModified =
true;
3159 if (ChangedStack || !NoReturnCalls.empty())
3160 FunctionModified =
true;
3162 LLVM_DEBUG(
dbgs() <<
"ASAN done instrumenting: " << FunctionModified <<
" "
3165 return FunctionModified;
3171bool AddressSanitizer::LooksLikeCodeInBug11395(
Instruction *
I) {
3172 if (LongSize != 32)
return false;
3181void FunctionStackPoisoner::initializeCallbacks(
Module &M) {
3185 const char *MallocNameTemplate =
3190 std::string Suffix =
itostr(Index);
3191 AsanStackMallocFunc[
Index] =
M.getOrInsertFunction(
3192 MallocNameTemplate + Suffix, IntptrTy, IntptrTy);
3193 AsanStackFreeFunc[
Index] =
3198 if (ASan.UseAfterScope) {
3199 AsanPoisonStackMemoryFunc =
M.getOrInsertFunction(
3201 AsanUnpoisonStackMemoryFunc =
M.getOrInsertFunction(
3205 for (
size_t Val : {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0xf1, 0xf2,
3206 0xf3, 0xf5, 0xf8}) {
3207 std::ostringstream
Name;
3209 Name << std::setw(2) << std::setfill(
'0') << std::hex << Val;
3210 AsanSetShadowFunc[Val] =
3211 M.getOrInsertFunction(
Name.str(), IRB.
getVoidTy(), IntptrTy, IntptrTy);
3214 AsanAllocaPoisonFunc =
M.getOrInsertFunction(
3216 AsanAllocasUnpoisonFunc =
M.getOrInsertFunction(
3222 size_t Begin,
size_t End,
3224 Value *ShadowBase) {
3228 const size_t LargestStoreSizeInBytes =
3229 std::min<size_t>(
sizeof(uint64_t), ASan.LongSize / 8);
3231 const bool IsLittleEndian =
F.getDataLayout().isLittleEndian();
3237 for (
size_t i = Begin; i < End;) {
3238 if (!ShadowMask[i]) {
3244 size_t StoreSizeInBytes = LargestStoreSizeInBytes;
3246 while (StoreSizeInBytes > End - i)
3247 StoreSizeInBytes /= 2;
3250 for (
size_t j = StoreSizeInBytes - 1;
j && !ShadowMask[i +
j]; --
j) {
3251 while (j <= StoreSizeInBytes / 2)
3252 StoreSizeInBytes /= 2;
3256 for (
size_t j = 0;
j < StoreSizeInBytes;
j++) {
3258 Val |= (uint64_t)ShadowBytes[i + j] << (8 * j);
3260 Val = (Val << 8) | ShadowBytes[i + j];
3269 i += StoreSizeInBytes;
3276 copyToShadow(ShadowMask, ShadowBytes, 0, ShadowMask.
size(), IRB, ShadowBase);
3281 size_t Begin,
size_t End,
3284 size_t Done = Begin;
3285 for (
size_t i = Begin, j = Begin + 1; i < End; i =
j++) {
3286 if (!ShadowMask[i]) {
3290 uint8_t Val = ShadowBytes[i];
3291 if (!AsanSetShadowFunc[Val])
3295 for (;
j < End && ShadowMask[
j] && Val == ShadowBytes[
j]; ++
j) {
3298 if (j - i >= ASan.MaxInlinePoisoningSize) {
3299 copyToShadowInline(ShadowMask, ShadowBytes,
Done, i, IRB, ShadowBase);
3300 RTCI.createRuntimeCall(
3301 IRB, AsanSetShadowFunc[Val],
3302 {IRB.
CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i)),
3303 ConstantInt::get(IntptrTy, j - i)});
3308 copyToShadowInline(ShadowMask, ShadowBytes,
Done, End, IRB, ShadowBase);
3316 for (
int i = 0;; i++, MaxSize *= 2)
3317 if (LocalStackSize <= MaxSize)
return i;
3321void FunctionStackPoisoner::copyArgsPassedByValToAllocas() {
3323 if (CopyInsertPoint == ASan.LocalDynamicShadow) {
3331 if (Arg.hasByValAttr()) {
3332 Type *Ty = Arg.getParamByValType();
3333 const Align Alignment =
3334 DL.getValueOrABITypeAlignment(Arg.getParamAlign(), Ty);
3338 (Arg.hasName() ? Arg.getName() :
"Arg" +
Twine(Arg.getArgNo())) +
3341 Arg.replaceAllUsesWith(AI);
3343 uint64_t AllocSize =
DL.getTypeAllocSize(Ty);
3344 IRB.
CreateMemCpy(AI, Alignment, &Arg, Alignment, AllocSize);
3352 Value *ValueIfFalse) {
3355 PHI->addIncoming(ValueIfFalse, CondBlock);
3357 PHI->addIncoming(ValueIfTrue, ThenBlock);
3361Value *FunctionStackPoisoner::createAllocaForLayout(
3370 nullptr,
"MyAlloca");
3374 uint64_t FrameAlignment = std::max(
L.FrameAlignment, uint64_t(
ClRealignStack));
3379void FunctionStackPoisoner::createDynamicAllocasInitStorage() {
3382 DynamicAllocaLayout = IRB.
CreateAlloca(IntptrTy,
nullptr);
3387void FunctionStackPoisoner::processDynamicAllocas() {
3394 for (
const auto &APC : DynamicAllocaPoisonCallVec) {
3397 assert(ASan.isInterestingAlloca(*APC.AI));
3398 assert(!APC.AI->isStaticAlloca());
3401 poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
3408 createDynamicAllocasInitStorage();
3409 for (
auto &AI : DynamicAllocaVec)
3410 handleDynamicAllocaCall(AI);
3411 unpoisonDynamicAllocas();
3423 for (
Instruction *It = Start; It; It = It->getNextNode()) {
3440 if (!Alloca || ASan.isInterestingAlloca(*Alloca))
3443 Value *Val = Store->getValueOperand();
3445 bool IsArgInitViaCast =
3450 Val == It->getPrevNode();
3451 bool IsArgInit = IsDirectArgInit || IsArgInitViaCast;
3455 if (IsArgInitViaCast)
3470 if (AI->
hasMetadata(LLVMContext::MD_annotation)) {
3473 for (
auto &Annotation : AllocaAnnotations->
operands()) {
3477 for (
unsigned Index = 0; Index < AnnotationTuple->getNumOperands();
3480 auto MetadataString =
3482 if (MetadataString->getString() ==
"alloca_name_altered")
3491void FunctionStackPoisoner::processStaticAllocas() {
3492 if (AllocaVec.
empty()) {
3497 int StackMallocIdx = -1;
3499 if (
auto SP =
F.getSubprogram())
3500 EntryDebugLocation =
3509 auto InsBeforeB = InsBefore->
getParent();
3510 assert(InsBeforeB == &
F.getEntryBlock());
3511 for (
auto *AI : StaticAllocasToMoveUp)
3522 ArgInitInst->moveBefore(InsBefore->
getIterator());
3525 if (LocalEscapeCall)
3533 ASan.getAllocaSizeInBytes(*AI),
3544 uint64_t Granularity = 1ULL << Mapping.Scale;
3545 uint64_t MinHeaderSize = std::max((uint64_t)ASan.LongSize / 2, Granularity);
3551 for (
auto &
Desc : SVD)
3555 for (
const auto &APC : StaticAllocaPoisonCallVec) {
3558 assert(ASan.isInterestingAlloca(*APC.AI));
3559 assert(APC.AI->isStaticAlloca());
3564 if (
const DILocation *LifetimeLoc = APC.InsBefore->getDebugLoc().get()) {
3565 if (LifetimeLoc->getFile() == FnLoc->getFile())
3566 if (
unsigned Line = LifetimeLoc->getLine())
3567 Desc.Line = std::min(
Desc.Line ?
Desc.Line : Line, Line);
3573 LLVM_DEBUG(
dbgs() << DescriptionString <<
" --- " <<
L.FrameSize <<
"\n");
3574 uint64_t LocalStackSize =
L.FrameSize;
3575 bool DoStackMalloc =
3585 DoDynamicAlloca &= !HasInlineAsm && !HasReturnsTwiceCall;
3586 DoStackMalloc &= !HasInlineAsm && !HasReturnsTwiceCall;
3588 Type *PtrTy =
F.getDataLayout().getAllocaPtrType(
F.getContext());
3589 Value *StaticAlloca =
3590 DoDynamicAlloca ? nullptr : createAllocaForLayout(IRB, L,
false);
3592 Value *FakeStackPtr;
3593 Value *FakeStackInt;
3594 Value *LocalStackBase;
3595 Value *LocalStackBaseAlloca;
3598 if (DoStackMalloc) {
3599 LocalStackBaseAlloca =
3600 IRB.
CreateAlloca(IntptrTy,
nullptr,
"asan_local_stack_base");
3607 Constant *OptionDetectUseAfterReturn =
F.getParent()->getOrInsertGlobal(
3617 Value *FakeStackValue =
3618 RTCI.createRuntimeCall(IRBIf, AsanStackMallocFunc[StackMallocIdx],
3619 ConstantInt::get(IntptrTy, LocalStackSize));
3621 FakeStackInt = createPHI(IRB, UseAfterReturnIsEnabled, FakeStackValue,
3622 Term, ConstantInt::get(IntptrTy, 0));
3630 RTCI.createRuntimeCall(IRB, AsanStackMallocFunc[StackMallocIdx],
3631 ConstantInt::get(IntptrTy, LocalStackSize));
3634 Value *NoFakeStack =
3639 Value *AllocaValue =
3640 DoDynamicAlloca ? createAllocaForLayout(IRBIf, L,
true) : StaticAlloca;
3644 createPHI(IRB, NoFakeStack, AllocaValue, Term, FakeStackPtr);
3645 IRB.
CreateStore(LocalStackBase, LocalStackBaseAlloca);
3653 DoDynamicAlloca ? createAllocaForLayout(IRB, L,
true) : StaticAlloca;
3654 LocalStackBaseAlloca = LocalStackBase;
3659 for (
const auto &
Desc : SVD) {
3663 LocalStackBase, ConstantInt::get(IntptrTy,
Desc.Offset));
3674 LocalStackBase, ConstantInt::get(IntptrTy, ASan.LongSize / 8));
3682 LocalStackBase, ConstantInt::get(IntptrTy, 2 * ASan.LongSize / 8));
3689 ASan.memToShadow(IRB.
CreatePtrToInt(LocalStackBase, IntptrTy), IRB);
3692 copyToShadow(ShadowAfterScope, ShadowAfterScope, IRB, ShadowBase);
3694 if (!StaticAllocaPoisonCallVec.empty()) {
3698 for (
const auto &APC : StaticAllocaPoisonCallVec) {
3701 size_t Begin =
Desc.Offset /
L.Granularity;
3702 size_t End = Begin + (APC.Size +
L.Granularity - 1) /
L.Granularity;
3705 copyToShadow(ShadowAfterScope,
3706 APC.DoPoison ? ShadowAfterScope : ShadowInScope, Begin, End,
3712 for (
Value *NewAllocaPtr : NewAllocaPtrs) {
3715 if (
I->isLifetimeStartOrEnd())
3716 I->eraseFromParent();
3729 if (DoStackMalloc) {
3730 assert(StackMallocIdx >= 0);
3747 if (ASan.MaxInlinePoisoningSize != 0 && StackMallocIdx <= 4) {
3749 ShadowAfterReturn.
resize(ClassSize /
L.Granularity,
3751 copyToShadow(ShadowAfterReturn, ShadowAfterReturn, IRBPoison,
3753 Value *SavedFlagPtrPtr = IRBPoison.CreatePtrAdd(
3755 ConstantInt::get(IntptrTy, ClassSize - ASan.LongSize / 8));
3756 Value *SavedFlagPtr = IRBPoison.CreateLoad(IntptrTy, SavedFlagPtrPtr);
3757 IRBPoison.CreateStore(
3759 IRBPoison.CreateIntToPtr(SavedFlagPtr, IRBPoison.getPtrTy()));
3762 RTCI.createRuntimeCall(
3763 IRBPoison, AsanStackFreeFunc[StackMallocIdx],
3764 {FakeStackInt, ConstantInt::get(IntptrTy, LocalStackSize)});
3768 copyToShadow(ShadowAfterScope, ShadowClean, IRBElse, ShadowBase);
3770 copyToShadow(ShadowAfterScope, ShadowClean, IRBRet, ShadowBase);
3775 for (
auto *AI : AllocaVec)
3779void FunctionStackPoisoner::poisonAlloca(
Value *V, uint64_t
Size,
3783 Value *SizeArg = ConstantInt::get(IntptrTy,
Size);
3784 RTCI.createRuntimeCall(
3785 IRB, DoPoison ? AsanPoisonStackMemoryFunc : AsanUnpoisonStackMemoryFunc,
3786 {AddrArg, SizeArg});
3797void FunctionStackPoisoner::handleDynamicAllocaCall(
AllocaInst *AI) {
3805 Value *AllocaRzMask = ConstantInt::get(IntptrTy, AllocaRedzoneMask);
3811 const unsigned ElementSize =
3815 ConstantInt::get(IntptrTy, ElementSize));
3843 ConstantInt::get(IntptrTy, Alignment.
value()));
3846 RTCI.createRuntimeCall(IRB, AsanAllocaPoisonFunc, {NewAddress, OldSize});
3857 if (
I->isLifetimeStartOrEnd())
3858 I->eraseFromParent();
3890 Size - uint64_t(
Offset) >= TypeStoreSize / 8;
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
static cl::opt< bool > ClUseStackSafety("stack-tagging-use-stack-safety", cl::Hidden, cl::init(true), cl::desc("Use Stack Safety analysis results"))
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
static void findStoresToUninstrumentedArgAllocas(AddressSanitizer &ASan, Instruction &InsBefore, SmallVectorImpl< Instruction * > &InitInsts)
Collect instructions in the entry block after InsBefore which initialize permanent storage for a func...
static void doInstrumentAddress(AddressSanitizer *Pass, Instruction *I, Instruction *InsertBefore, Value *Addr, MaybeAlign Alignment, unsigned Granularity, TypeSize TypeStoreSize, bool IsWrite, Value *SizeArgument, bool UseCalls, uint32_t Exp, RuntimeCallInserter &RTCI)
static const uint64_t kDefaultShadowScale
const char kAMDGPUUnreachableName[]
constexpr size_t kAccessSizeIndexMask
static cl::opt< int > ClDebugMin("asan-debug-min", cl::desc("Debug min inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClUsePrivateAlias("asan-use-private-alias", cl::desc("Use private aliases for global variables"), cl::Hidden, cl::init(true))
static const uint64_t kPS_ShadowOffset64
static const uint64_t kFreeBSD_ShadowOffset32
constexpr size_t kIsWriteShift
static const uint64_t kSmallX86_64ShadowOffsetAlignMask
static bool isInterestingPointerSubtraction(Instruction *I)
const char kAMDGPUAddressSharedName[]
const char kAsanStackFreeNameTemplate[]
constexpr size_t kCompileKernelMask
static cl::opt< bool > ClForceDynamicShadow("asan-force-dynamic-shadow", cl::desc("Load shadow address into a local variable for each function"), cl::Hidden, cl::init(false))
const char kAsanOptionDetectUseAfterReturn[]
static cl::opt< std::string > ClMemoryAccessCallbackPrefix("asan-memory-access-callback-prefix", cl::desc("Prefix for memory access callbacks"), cl::Hidden, cl::init("__asan_"))
static const uint64_t kRISCV64_ShadowOffset64
static cl::opt< bool > ClInsertVersionCheck("asan-guard-against-version-mismatch", cl::desc("Guard against compiler/runtime version mismatch."), cl::Hidden, cl::init(true))
const char kAsanSetShadowPrefix[]
static cl::opt< AsanDtorKind > ClOverrideDestructorKind("asan-destructor-kind", cl::desc("Sets the ASan destructor kind. The default is to use the value " "provided to the pass constructor"), cl::values(clEnumValN(AsanDtorKind::None, "none", "No destructors"), clEnumValN(AsanDtorKind::Global, "global", "Use global destructors")), cl::init(AsanDtorKind::Invalid), cl::Hidden)
static Twine genName(StringRef suffix)
static cl::opt< bool > ClInstrumentWrites("asan-instrument-writes", cl::desc("instrument write instructions"), cl::Hidden, cl::init(true))
static uint64_t GetCtorAndDtorPriority(Triple &TargetTriple)
const char kAsanStackMallocNameTemplate[]
static cl::opt< bool > ClInstrumentByval("asan-instrument-byval", cl::desc("instrument byval call arguments"), cl::Hidden, cl::init(true))
const char kAsanInitName[]
static cl::opt< bool > ClGlobals("asan-globals", cl::desc("Handle global objects"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClRedzoneByvalArgs("asan-redzone-byval-args", cl::desc("Create redzones for byval " "arguments (extra copy " "required)"), cl::Hidden, cl::init(true))
static const uint64_t kWindowsShadowOffset64
const char kAsanGenPrefix[]
constexpr size_t kIsWriteMask
static uint64_t getRedzoneSizeForScale(int MappingScale)
static const uint64_t kDefaultShadowOffset64
static cl::opt< bool > ClOptimizeCallbacks("asan-optimize-callbacks", cl::desc("Optimize callbacks"), cl::Hidden, cl::init(false))
const char kAsanUnregisterGlobalsName[]
static const uint64_t kAsanCtorAndDtorPriority
const char kAsanUnpoisonGlobalsName[]
static cl::opt< bool > ClWithIfuncSuppressRemat("asan-with-ifunc-suppress-remat", cl::desc("Suppress rematerialization of dynamic shadow address by passing " "it through inline asm in prologue."), cl::Hidden, cl::init(true))
static cl::opt< int > ClDebugStack("asan-debug-stack", cl::desc("debug stack"), cl::Hidden, cl::init(0))
const char kAsanUnregisterElfGlobalsName[]
static bool isUnsupportedAMDGPUAddrspace(Value *Addr)
const char kAsanRegisterImageGlobalsName[]
static const uint64_t kWebAssemblyShadowOffset
static cl::opt< bool > ClOpt("asan-opt", cl::desc("Optimize instrumentation"), cl::Hidden, cl::init(true))
static const uint64_t kAllocaRzSize
const char kODRGenPrefix[]
static const uint64_t kSystemZ_ShadowOffset64
static const uint64_t kDefaultShadowOffset32
const char kAsanShadowMemoryDynamicAddress[]
static cl::opt< bool > ClUseOdrIndicator("asan-use-odr-indicator", cl::desc("Use odr indicators to improve ODR reporting"), cl::Hidden, cl::init(true))
static bool GlobalWasGeneratedByCompiler(GlobalVariable *G)
Check if G has been created by a trusted compiler pass.
const char kAsanStackMallocAlwaysNameTemplate[]
static cl::opt< bool > ClInvalidPointerCmp("asan-detect-invalid-pointer-cmp", cl::desc("Instrument <, <=, >, >= with pointer operands"), cl::Hidden, cl::init(false))
static const uint64_t kAsanEmscriptenCtorAndDtorPriority
static cl::opt< int > ClInstrumentationWithCallsThreshold("asan-instrumentation-with-call-threshold", cl::desc("If the function being instrumented contains more than " "this number of memory accesses, use callbacks instead of " "inline checks (-1 means never use callbacks)."), cl::Hidden, cl::init(7000))
static cl::opt< int > ClDebugMax("asan-debug-max", cl::desc("Debug max inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClInvalidPointerSub("asan-detect-invalid-pointer-sub", cl::desc("Instrument - operations with pointer operands"), cl::Hidden, cl::init(false))
static const uint64_t kFreeBSD_ShadowOffset64
static cl::opt< uint32_t > ClForceExperiment("asan-force-experiment", cl::desc("Force optimization experiment (for testing)"), cl::Hidden, cl::init(0))
const char kSanCovGenPrefix[]
static const uint64_t kFreeBSDKasan_ShadowOffset64
const char kAsanModuleDtorName[]
static const uint64_t kDynamicShadowSentinel
static bool isInterestingPointerComparison(Instruction *I)
static cl::opt< bool > ClStack("asan-stack", cl::desc("Handle stack memory"), cl::Hidden, cl::init(true))
static const uint64_t kMIPS64_ShadowOffset64
static const uint64_t kLinuxKasan_ShadowOffset64
static int StackMallocSizeClass(uint64_t LocalStackSize)
static cl::opt< uint32_t > ClMaxInlinePoisoningSize("asan-max-inline-poisoning-size", cl::desc("Inline shadow poisoning for blocks up to the given size in bytes."), cl::Hidden, cl::init(64))
static cl::opt< bool > ClInstrumentAtomics("asan-instrument-atomics", cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClUseAfterScope("asan-use-after-scope", cl::desc("Check stack-use-after-scope"), cl::Hidden, cl::init(false))
constexpr size_t kAccessSizeIndexShift
static cl::opt< int > ClMappingScale("asan-mapping-scale", cl::desc("scale of asan shadow mapping"), cl::Hidden, cl::init(0))
const char kAsanPoisonStackMemoryName[]
static cl::opt< bool > ClEnableKasan("asan-kernel", cl::desc("Enable KernelAddressSanitizer instrumentation"), cl::Hidden, cl::init(false))
static cl::opt< std::string > ClDebugFunc("asan-debug-func", cl::Hidden, cl::desc("Debug func"))
static cl::opt< bool > ClUseGlobalsGC("asan-globals-live-support", cl::desc("Use linker features to support dead " "code stripping of globals"), cl::Hidden, cl::init(true))
static const size_t kNumberOfAccessSizes
const char kAsanUnpoisonStackMemoryName[]
static const uint64_t kLoongArch64_ShadowOffset64
const char kAsanRegisterGlobalsName[]
static cl::opt< bool > ClInstrumentDynamicAllocas("asan-instrument-dynamic-allocas", cl::desc("instrument dynamic allocas"), cl::Hidden, cl::init(true))
const char kAsanModuleCtorName[]
const char kAsanGlobalsRegisteredFlagName[]
static const size_t kMaxStackMallocSize
static cl::opt< bool > ClRecover("asan-recover", cl::desc("Enable recovery mode (continue-after-error)."), cl::Hidden, cl::init(false))
static cl::opt< bool > ClOptSameTemp("asan-opt-same-temp", cl::desc("Instrument the same temp just once"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClDynamicAllocaStack("asan-stack-dynamic-alloca", cl::desc("Use dynamic alloca to represent stack variables"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClOptStack("asan-opt-stack", cl::desc("Don't instrument scalar stack variables"), cl::Hidden, cl::init(false))
static const uint64_t kMIPS_ShadowOffsetN32
const char kAsanUnregisterImageGlobalsName[]
static cl::opt< AsanDetectStackUseAfterReturnMode > ClUseAfterReturn("asan-use-after-return", cl::desc("Sets the mode of detection for stack-use-after-return."), cl::values(clEnumValN(AsanDetectStackUseAfterReturnMode::Never, "never", "Never detect stack use after return."), clEnumValN(AsanDetectStackUseAfterReturnMode::Runtime, "runtime", "Detect stack use after return if " "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."), clEnumValN(AsanDetectStackUseAfterReturnMode::Always, "always", "Always detect stack use after return.")), cl::Hidden, cl::init(AsanDetectStackUseAfterReturnMode::Runtime))
static cl::opt< bool > ClOptGlobals("asan-opt-globals", cl::desc("Don't instrument scalar globals"), cl::Hidden, cl::init(true))
static const uintptr_t kCurrentStackFrameMagic
static ShadowMapping getShadowMapping(const Triple &TargetTriple, int LongSize, bool IsKasan)
static const uint64_t kPPC64_ShadowOffset64
static cl::opt< AsanCtorKind > ClConstructorKind("asan-constructor-kind", cl::desc("Sets the ASan constructor kind"), cl::values(clEnumValN(AsanCtorKind::None, "none", "No constructors"), clEnumValN(AsanCtorKind::Global, "global", "Use global constructors")), cl::init(AsanCtorKind::Global), cl::Hidden)
static const int kMaxAsanStackMallocSizeClass
static const uint64_t kMIPS32_ShadowOffset32
static cl::opt< bool > ClAlwaysSlowPath("asan-always-slow-path", cl::desc("use instrumentation with slow path for all accesses"), cl::Hidden, cl::init(false))
static const uint64_t kNetBSD_ShadowOffset32
static const uint64_t kFreeBSDAArch64_ShadowOffset64
static const uint64_t kSmallX86_64ShadowOffsetBase
static cl::opt< bool > ClInitializers("asan-initialization-order", cl::desc("Handle C++ initializer order"), cl::Hidden, cl::init(true))
static const uint64_t kNetBSD_ShadowOffset64
static cl::opt< unsigned > ClRealignStack("asan-realign-stack", cl::desc("Realign stack to the value of this flag (power of two)"), cl::Hidden, cl::init(32))
static const uint64_t kWindowsShadowOffset32
static cl::opt< bool > ClInstrumentReads("asan-instrument-reads", cl::desc("instrument read instructions"), cl::Hidden, cl::init(true))
static size_t TypeStoreSizeToSizeIndex(uint32_t TypeSize)
const char kAsanAllocaPoison[]
constexpr size_t kCompileKernelShift
static cl::opt< bool > ClWithIfunc("asan-with-ifunc", cl::desc("Access dynamic shadow through an ifunc global on " "platforms that support this"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClKasanMemIntrinCallbackPrefix("asan-kernel-mem-intrinsic-prefix", cl::desc("Use prefix for memory intrinsics in KASAN mode"), cl::Hidden, cl::init(false))
const char kAsanVersionCheckNamePrefix[]
const char kAMDGPUAddressPrivateName[]
static const uint64_t kNetBSDKasan_ShadowOffset64
const char kAMDGPUBallotName[]
const char kAsanRegisterElfGlobalsName[]
static cl::opt< uint64_t > ClMappingOffset("asan-mapping-offset", cl::desc("offset of asan shadow mapping [EXPERIMENTAL]"), cl::Hidden, cl::init(0))
const char kAsanReportErrorTemplate[]
static cl::opt< bool > ClWithComdat("asan-with-comdat", cl::desc("Place ASan constructors in comdat sections"), cl::Hidden, cl::init(true))
static StringRef getAllocaName(AllocaInst *AI)
static cl::opt< bool > ClSkipPromotableAllocas("asan-skip-promotable-allocas", cl::desc("Do not instrument promotable allocas"), cl::Hidden, cl::init(true))
static cl::opt< int > ClMaxInsnsToInstrumentPerBB("asan-max-ins-per-bb", cl::init(10000), cl::desc("maximal number of instructions to instrument in any given BB"), cl::Hidden)
static const uintptr_t kRetiredStackFrameMagic
static cl::opt< bool > ClUseStackSafety("asan-use-stack-safety", cl::Hidden, cl::init(true), cl::Hidden, cl::desc("Use Stack Safety analysis results"), cl::Optional)
const char kAsanPoisonGlobalsName[]
const char kAsanHandleNoReturnName[]
static const size_t kMinStackMallocSize
static cl::opt< int > ClDebug("asan-debug", cl::desc("debug"), cl::Hidden, cl::init(0))
const char kAsanAllocasUnpoison[]
static const uint64_t kAArch64_ShadowOffset64
static cl::opt< bool > ClInvalidPointerPairs("asan-detect-invalid-pointer-pair", cl::desc("Instrument <, <=, >, >=, - with pointer operands"), cl::Hidden, cl::init(false))
Function Alias Analysis false
This file contains the simple types necessary to represent the attributes associated with functions a...
static bool isPointerOperand(Value *I, User *U)
static const Function * getParent(const Value *V)
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< StatepointGC > D("statepoint-example", "an example strategy for statepoint")
#define clEnumValN(ENUMVAL, FLAGNAME, DESC)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file defines the DenseMap class.
This file builds on the ADT/GraphTraits.h file to build generic depth first graph iterator.
static bool runOnFunction(Function &F, bool PostInlining)
This is the interface for a simple mod/ref and alias analysis over globals.
Module.h This file contains the declarations for the Module class.
This defines the Use class.
std::pair< Instruction::BinaryOps, Value * > OffsetOp
Find all possible pairs (BinOp, RHS) that BinOp V, RHS can be simplified.
static bool isZero(Value *V, const DataLayout &DL, DominatorTree *DT, AssumptionCache *AC)
print mir2vec MIR2Vec Vocabulary Printer Pass
Machine Check Debug Module
uint64_t IntrinsicInst * II
FunctionAnalysisManager FAM
ModuleAnalysisManager MAM
const SmallVectorImpl< MachineOperand > & Cond
void visit(MachineFunction &MF, MachineBasicBlock &Start, std::function< void(MachineBasicBlock *)> op)
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
uint64_t getZExtValue() const
Get zero extended value.
int64_t getSExtValue() const
Get sign extended value.
LLVM_ABI AddressSanitizerPass(const AddressSanitizerOptions &Options, bool UseGlobalGC=true, bool UseOdrIndicator=true, AsanDtorKind DestructorKind=AsanDtorKind::Global, AsanCtorKind ConstructorKind=AsanCtorKind::Global)
LLVM_ABI PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM)
LLVM_ABI void printPipeline(raw_ostream &OS, function_ref< StringRef(StringRef)> MapClassName2PassName)
an instruction to allocate memory on the stack
bool isSwiftError() const
Return true if this alloca is used as a swifterror argument to a call.
LLVM_ABI bool isStaticAlloca() const
Return true if this alloca is in the entry block of the function and is a constant size.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
PointerType * getType() const
Overload to return most specific pointer type.
Type * getAllocatedType() const
Return the type that is being allocated by the instruction.
bool isUsedWithInAlloca() const
Return true if this alloca is used as an inalloca argument to a call.
LLVM_ABI std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
void setAlignment(Align Align)
const Value * getArraySize() const
Get the number of elements allocated.
This class represents an incoming formal argument to a Function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
size_t size() const
size - Get the array size.
Class to represent array types.
static LLVM_ABI ArrayType * get(Type *ElementType, uint64_t NumElements)
This static method is the primary way to construct an ArrayType.
An instruction that atomically checks whether a specified value is in a memory location,...
an instruction that atomically reads a memory location, combines it with another value,...
LLVM Basic Block Representation.
iterator begin()
Instruction iterator methods.
LLVM_ABI const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Function * getParent() const
Return the enclosing method, or null if none.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
LLVM_ABI const Module * getModule() const
Return the module owning the function this basic block belongs to, or nullptr if the function does no...
Conditional or Unconditional Branch instruction.
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
bool isInlineAsm() const
Check if this call is an inline asm statement.
static LLVM_ABI CallBase * addOperandBundle(CallBase *CB, uint32_t ID, OperandBundleDef OB, InsertPosition InsertPt=nullptr)
Create a clone of CB with operand bundle OB added.
bool doesNotReturn() const
Determine if the call cannot return.
unsigned arg_size() const
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
@ Largest
The linker will choose the largest COMDAT.
@ SameSize
The data referenced by the COMDAT must be the same size.
@ Any
The linker may choose any COMDAT.
@ NoDeduplicate
No deduplication is performed.
@ ExactMatch
The data referenced by the COMDAT must be the same.
ConstantArray - Constant Array Declarations.
static LLVM_ABI Constant * get(ArrayType *T, ArrayRef< Constant * > V)
static LLVM_ABI Constant * getPointerCast(Constant *C, Type *Ty)
Create a BitCast, AddrSpaceCast, or a PtrToInt cast constant expression.
static LLVM_ABI Constant * getPtrToInt(Constant *C, Type *Ty, bool OnlyIfReduced=false)
static Constant * getGetElementPtr(Type *Ty, Constant *C, ArrayRef< Constant * > IdxList, GEPNoWrapFlags NW=GEPNoWrapFlags::none(), std::optional< ConstantRange > InRange=std::nullopt, Type *OnlyIfReducedTy=nullptr)
Getelementptr form.
static LLVM_ABI bool isValueValidForType(Type *Ty, uint64_t V)
This static method returns true if the type Ty is big enough to represent the value V.
static LLVM_ABI Constant * get(StructType *T, ArrayRef< Constant * > V)
This is an important base class in LLVM.
static LLVM_ABI Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
LLVM_ABI Constant * getAggregateElement(unsigned Elt) const
For aggregates (struct/array/vector) return the constant that corresponds to the specified element if...
A parsed version of the target data layout string in and methods for querying it.
LLVM_ABI DILocation * get() const
Get the underlying DILocation.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
static LLVM_ABI FunctionType * get(Type *Result, ArrayRef< Type * > Params, bool isVarArg)
This static method is the primary way of constructing a FunctionType.
const BasicBlock & front() const
static Function * createWithDefaultAttr(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
Creates a function with some attributes recorded in llvm.module.flags and the LLVMContext applied.
bool hasPersonalityFn() const
Check whether this function has a personality function.
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
const Constant * getAliasee() const
static LLVM_ABI GlobalAlias * create(Type *Ty, unsigned AddressSpace, LinkageTypes Linkage, const Twine &Name, Constant *Aliasee, Module *Parent)
If a parent module is specified, the alias is automatically inserted into the end of the specified mo...
LLVM_ABI void copyMetadata(const GlobalObject *Src, unsigned Offset)
Copy metadata from Src, adjusting offsets by Offset.
LLVM_ABI void setComdat(Comdat *C)
LLVM_ABI void setSection(StringRef S)
Change the section for this global.
VisibilityTypes getVisibility() const
void setUnnamedAddr(UnnamedAddr Val)
bool hasLocalLinkage() const
static StringRef dropLLVMManglingEscape(StringRef Name)
If the given string begins with the GlobalValue name mangling escape character '\1',...
ThreadLocalMode getThreadLocalMode() const
@ HiddenVisibility
The GV is hidden.
void setVisibility(VisibilityTypes V)
LinkageTypes
An enumeration for the kinds of linkage for global values.
@ PrivateLinkage
Like Internal, but omit from symbol table.
@ CommonLinkage
Tentative definitions.
@ InternalLinkage
Rename collisions when linking (static functions).
@ AvailableExternallyLinkage
Available for inspection, not emission.
@ ExternalWeakLinkage
ExternalWeak linkage description.
DLLStorageClassTypes getDLLStorageClass() const
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
LLVM_ABI void copyAttributesFrom(const GlobalVariable *Src)
copyAttributesFrom - copy all additional attributes (those not needed to create a GlobalVariable) fro...
void setAlignment(Align Align)
Sets the alignment attribute of the GlobalVariable.
Analysis pass providing a never-invalidated alias analysis result.
This instruction compares its operands according to the predicate given to the constructor.
Common base class shared among various IRBuilders.
AllocaInst * CreateAlloca(Type *Ty, unsigned AddrSpace, Value *ArraySize=nullptr, const Twine &Name="")
IntegerType * getInt1Ty()
Fetch the type representing a single bit.
LoadInst * CreateAlignedLoad(Type *Ty, Value *Ptr, MaybeAlign Align, const char *Name)
CallInst * CreateMemCpy(Value *Dst, MaybeAlign DstAlign, Value *Src, MaybeAlign SrcAlign, uint64_t Size, bool isVolatile=false, const AAMDNodes &AAInfo=AAMDNodes())
Create and insert a memcpy between the specified pointers.
Value * CreatePointerCast(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateICmpSGE(Value *LHS, Value *RHS, const Twine &Name="")
LLVM_ABI Value * CreateSelect(Value *C, Value *True, Value *False, const Twine &Name="", Instruction *MDFrom=nullptr)
BasicBlock::iterator GetInsertPoint() const
Value * CreateIntToPtr(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateLShr(Value *LHS, Value *RHS, const Twine &Name="", bool isExact=false)
IntegerType * getInt32Ty()
Fetch the type representing a 32-bit integer.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
BasicBlock * GetInsertBlock() const
IntegerType * getInt64Ty()
Fetch the type representing a 64-bit integer.
Value * CreateICmpNE(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateGEP(Type *Ty, Value *Ptr, ArrayRef< Value * > IdxList, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
LLVM_ABI CallInst * CreateIntrinsic(Intrinsic::ID ID, ArrayRef< Type * > Types, ArrayRef< Value * > Args, FMFSource FMFSource={}, const Twine &Name="")
Create a call to intrinsic ID with Args, mangled using Types.
ConstantInt * getInt32(uint32_t C)
Get a constant 32-bit value.
PHINode * CreatePHI(Type *Ty, unsigned NumReservedValues, const Twine &Name="")
Value * CreateNot(Value *V, const Twine &Name="")
Value * CreateICmpEQ(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateSub(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
ConstantInt * getIntN(unsigned N, uint64_t C)
Get a constant N-bit value, zero extended or truncated from a 64-bit value.
LoadInst * CreateLoad(Type *Ty, Value *Ptr, const char *Name)
Provided to resolve 'CreateLoad(Ty, Ptr, "...")' correctly, instead of converting the string to 'bool...
Value * CreateAnd(Value *LHS, Value *RHS, const Twine &Name="")
StoreInst * CreateStore(Value *Val, Value *Ptr, bool isVolatile=false)
Value * CreateAdd(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
Value * CreatePtrToInt(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateIsNotNull(Value *Arg, const Twine &Name="")
Return a boolean value testing if Arg != 0.
CallInst * CreateCall(FunctionType *FTy, Value *Callee, ArrayRef< Value * > Args={}, const Twine &Name="", MDNode *FPMathTag=nullptr)
LLVM_ABI Value * CreateTypeSize(Type *Ty, TypeSize Size)
Create an expression which evaluates to the number of units in Size at runtime.
Value * CreateIntCast(Value *V, Type *DestTy, bool isSigned, const Twine &Name="")
void SetInsertPoint(BasicBlock *TheBB)
This specifies that created instructions should be appended to the end of the specified block.
Type * getVoidTy()
Fetch the type representing void.
StoreInst * CreateAlignedStore(Value *Val, Value *Ptr, MaybeAlign Align, bool isVolatile=false)
Value * CreateOr(Value *LHS, Value *RHS, const Twine &Name="", bool IsDisjoint=false)
IntegerType * getInt8Ty()
Fetch the type representing an 8-bit integer.
Value * CreateAddrSpaceCast(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateMul(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
static LLVM_ABI InlineAsm * get(FunctionType *Ty, StringRef AsmString, StringRef Constraints, bool hasSideEffects, bool isAlignStack=false, AsmDialect asmDialect=AD_ATT, bool canThrow=false)
InlineAsm::get - Return the specified uniqued inline asm string.
Base class for instruction visitors.
const DebugLoc & getDebugLoc() const
Return the debug location for this node as a DebugLoc.
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
LLVM_ABI void moveBefore(InstListType::iterator InsertPos)
Unlink this instruction from its current basic block and insert it into the basic block that MovePos ...
LLVM_ABI InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
MDNode * getMetadata(unsigned KindID) const
Get the metadata of given kind attached to this Instruction.
LLVM_ABI BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
LLVM_ABI const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
static LLVM_ABI IntegerType * get(LLVMContext &C, unsigned NumBits)
This static method is the primary way of constructing an IntegerType.
A wrapper class for inspecting calls to intrinsic functions.
LLVM_ABI void emitError(const Instruction *I, const Twine &ErrorStr)
emitError - Emit an error message to the currently installed error handler with optional location inf...
An instruction for reading from memory.
static Error ParseSectionSpecifier(StringRef Spec, StringRef &Segment, StringRef &Section, unsigned &TAA, bool &TAAParsed, unsigned &StubSize)
Parse the section specifier indicated by "Spec".
LLVM_ABI MDNode * createUnlikelyBranchWeights()
Return metadata containing two branch weights, with significant bias towards false destination.
ArrayRef< MDOperand > operands() const
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
This is the common base class for memset/memcpy/memmove.
A Module instance is used to store all the information related to an LLVM module.
Evaluate the size and offset of an object pointed to by a Value* statically.
LLVM_ABI SizeOffsetAPInt compute(Value *V)
Pass interface - Implemented by all 'passes'.
static PointerType * getUnqual(Type *ElementType)
This constructs a pointer to an object of the specified type in the default address space (address sp...
static LLVM_ABI PointerType * get(Type *ElementType, unsigned AddressSpace)
This constructs a pointer to an object of the specified type in a numbered address space.
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses none()
Convenience factory function for the empty preserved set.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
PreservedAnalyses & abandon()
Mark an analysis as abandoned.
Return a value (possibly void), from a function.
static ReturnInst * Create(LLVMContext &C, Value *retVal=nullptr, InsertPosition InsertBefore=nullptr)
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void reserve(size_type N)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
This pass performs the global (interprocedural) stack safety analysis (new pass manager).
bool stackAccessIsSafe(const Instruction &I) const
bool isSafe(const AllocaInst &AI) const
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
bool starts_with(StringRef Prefix) const
Check if this string starts with the given Prefix.
constexpr bool empty() const
empty - Check if the string is empty.
Class to represent struct types.
static LLVM_ABI StructType * get(LLVMContext &Context, ArrayRef< Type * > Elements, bool isPacked=false)
This static method is the primary way to create a literal StructType.
Analysis pass providing the TargetTransformInfo.
Analysis pass providing the TargetLibraryInfo.
Provides information about what library functions are available for the current target.
AttributeList getAttrList(LLVMContext *C, ArrayRef< unsigned > ArgNos, bool Signed, bool Ret=false, AttributeList AL=AttributeList()) const
Triple - Helper class for working with autoconf configuration names.
bool isThumb() const
Tests whether the target is Thumb (little and big endian).
bool isDriverKit() const
Is this an Apple DriverKit triple.
bool isAndroid() const
Tests whether the target is Android.
bool isMIPS64() const
Tests whether the target is MIPS 64-bit (little and big endian).
ArchType getArch() const
Get the parsed architecture type of this triple.
bool isLoongArch64() const
Tests whether the target is 64-bit LoongArch.
bool isMIPS32() const
Tests whether the target is MIPS 32-bit (little and big endian).
bool isOSWindows() const
Tests whether the OS is Windows.
bool isARM() const
Tests whether the target is ARM (little and big endian).
bool isOSLinux() const
Tests whether the OS is Linux.
bool isMacOSX() const
Is this a Mac OS X triple.
bool isOSEmscripten() const
Tests whether the OS is Emscripten.
bool isWatchOS() const
Is this an Apple watchOS triple.
bool isiOS() const
Is this an iOS triple.
bool isPS() const
Tests whether the target is the PS4 or PS5 platform.
bool isWasm() const
Tests whether the target is wasm (32- and 64-bit).
bool isOSHaiku() const
Tests whether the OS is Haiku.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
LLVM_ABI unsigned getIntegerBitWidth() const
static LLVM_ABI IntegerType * getInt32Ty(LLVMContext &C)
LLVM_ABI unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
static LLVM_ABI Type * getVoidTy(LLVMContext &C)
static LLVM_ABI IntegerType * getInt8Ty(LLVMContext &C)
Type * getScalarType() const
If this is a vector type, return the element type, otherwise return 'this'.
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
Value * getOperand(unsigned i) const
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
LLVM_ABI void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
LLVM_ABI void takeName(Value *V)
Transfer the name from V to this value.
Base class of all SIMD vector types.
static LLVM_ABI VectorType * get(Type *ElementType, ElementCount EC)
This static method is the primary way to construct an VectorType.
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
An efficient, type-erasing, non-owning reference to a callable.
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
This file contains the declaration of the Comdat class, which represents a single COMDAT in LLVM.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
void getInterestingMemoryOperands(Module &M, Instruction *I, SmallVectorImpl< InterestingMemoryOperand > &Interesting)
Get all the memory operands from the instruction that needs to be instrumented.
void instrumentAddress(Module &M, IRBuilder<> &IRB, Instruction *OrigIns, Instruction *InsertBefore, Value *Addr, Align Alignment, TypeSize TypeStoreSize, bool IsWrite, Value *SizeArgument, bool UseCalls, bool Recover, int AsanScale, int AsanOffset)
Instrument the memory operand Addr.
uint64_t getRedzoneSizeForGlobal(int AsanScale, uint64_t SizeInBytes)
Given SizeInBytes of the Value to be instrumented, returns the redzone size corresponding to it.
constexpr std::underlying_type_t< E > Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
unsigned ID
LLVM IR allows to use arbitrary numbers as calling convention identifiers.
@ C
The default llvm calling convention, compatible with C.
@ BasicBlock
Various leaf nodes.
@ S_CSTRING_LITERALS
S_CSTRING_LITERALS - Section with literal C strings.
@ OB
OB - OneByte - Set if this instruction has a one byte opcode.
ValuesClass values(OptsTy... Options)
Helper to build a ValuesClass by forwarding a variable number of arguments as an initializer list to ...
initializer< Ty > init(const Ty &Val)
uint64_t getAllocaSizeInBytes(const AllocaInst &AI)
Context & getContext() const
friend class Instruction
Iterator for Instructions in a `BasicBlock`.
This is an optimization pass for GlobalISel generic memory operations.
LLVM_ABI void ReplaceInstWithInst(BasicBlock *BB, BasicBlock::iterator &BI, Instruction *I)
Replace the instruction specified by BI with the instruction specified by I.
FunctionAddr VTableAddr Value
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
LLVM_ABI SmallVector< uint8_t, 64 > GetShadowBytesAfterScope(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
LLVM_ABI GlobalVariable * createPrivateGlobalForString(Module &M, StringRef Str, bool AllowMerging, Twine NamePrefix="")
LLVM_ABI AllocaInst * findAllocaForValue(Value *V, bool OffsetZero=false)
Returns unique alloca where the value comes from, or nullptr.
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
FunctionAddr VTableAddr uintptr_t uintptr_t Int32Ty
LLVM_ABI Function * createSanitizerCtor(Module &M, StringRef CtorName)
Creates sanitizer constructor function.
AsanDetectStackUseAfterReturnMode
Mode of ASan detect stack use after return.
@ Always
Always detect stack use after return.
@ Never
Never detect stack use after return.
@ Runtime
Detect stack use after return if not disabled at runtime (ASAN_OPTIONS=detect_stack_use_after_return).
LLVM_ABI DenseMap< BasicBlock *, ColorVector > colorEHFunclets(Function &F)
If an EH funclet personality is in use (see isFuncletEHPersonality), this will recompute which blocks...
iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)
Make a range that does early increment to allow mutation of the underlying range without disrupting i...
InnerAnalysisManagerProxy< FunctionAnalysisManager, Module > FunctionAnalysisManagerModuleProxy
Provide the FunctionAnalysisManager to Module proxy.
LLVM_ABI bool isAllocaPromotable(const AllocaInst *AI)
Return true if this alloca is legal for promotion.
LLVM_ABI SmallString< 64 > ComputeASanStackFrameDescription(const SmallVectorImpl< ASanStackVariableDescription > &Vars)
LLVM_ABI SmallVector< uint8_t, 64 > GetShadowBytes(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
int countr_zero(T Val)
Count number of 0's from the least significant bit to the most stopping at the first 1.
auto dyn_cast_or_null(const Y &Val)
LLVM_ABI FunctionCallee declareSanitizerInitFunction(Module &M, StringRef InitName, ArrayRef< Type * > InitArgTypes, bool Weak=false)
FunctionAddr VTableAddr uintptr_t uintptr_t Version
LLVM_ABI std::string getUniqueModuleId(Module *M)
Produce a unique identifier for this module by taking the MD5 sum of the names of the module's strong...
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
LLVM_ABI std::pair< Function *, FunctionCallee > createSanitizerCtorAndInitFunctions(Module &M, StringRef CtorName, StringRef InitName, ArrayRef< Type * > InitArgTypes, ArrayRef< Value * > InitArgs, StringRef VersionCheckName=StringRef(), bool Weak=false)
Creates sanitizer constructor function, and calls sanitizer's init function from it.
decltype(auto) get(const PointerIntPair< PointerTy, IntBits, IntType, PtrTraits, Info > &Pair)
LLVM_ABI void SplitBlockAndInsertIfThenElse(Value *Cond, BasicBlock::iterator SplitBefore, Instruction **ThenTerm, Instruction **ElseTerm, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr)
SplitBlockAndInsertIfThenElse is similar to SplitBlockAndInsertIfThen, but also creates the ElseBlock...
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
LLVM_ABI void report_fatal_error(Error Err, bool gen_crash_diag=true)
bool isAlnum(char C)
Checks whether character C is either a decimal digit or an uppercase or lowercase letter as classifie...
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
AsanDtorKind
Types of ASan module destructors supported.
@ Invalid
Not a valid destructor Kind.
@ Global
Append to llvm.global_dtors.
@ None
Do not emit any destructors for ASan.
LLVM_ABI ASanStackFrameLayout ComputeASanStackFrameLayout(SmallVectorImpl< ASanStackVariableDescription > &Vars, uint64_t Granularity, uint64_t MinHeaderSize)
void cantFail(Error Err, const char *Msg=nullptr)
Report a fatal error if Err is a failure value.
IRBuilder(LLVMContext &, FolderTy, InserterTy, MDNode *, ArrayRef< OperandBundleDef >) -> IRBuilder< FolderTy, InserterTy >
OperandBundleDefT< Value * > OperandBundleDef
LLVM_ABI void appendToCompilerUsed(Module &M, ArrayRef< GlobalValue * > Values)
Adds global values to the llvm.compiler.used list.
static const int kAsanStackUseAfterReturnMagic
LLVM_ABI void setGlobalVariableLargeSection(const Triple &TargetTriple, GlobalVariable &GV)
void removeASanIncompatibleFnAttributes(Function &F, bool ReadsArgMem)
Remove memory attributes that are incompatible with the instrumentation added by AddressSanitizer and...
@ Dynamic
Denotes mode unknown at compile time.
ArrayRef(const T &OneElt) -> ArrayRef< T >
LLVM_ABI void appendToGlobalCtors(Module &M, Function *F, int Priority, Constant *Data=nullptr)
Append F to the list of global ctors of module M with the given Priority.
TinyPtrVector< BasicBlock * > ColorVector
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
iterator_range< df_iterator< T > > depth_first(const T &G)
LLVM_ABI Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
LLVM_ABI const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=MaxLookupSearchDepth)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
AsanCtorKind
Types of ASan module constructors supported.
LLVM_ABI void maybeMarkSanitizerLibraryCallNoBuiltin(CallInst *CI, const TargetLibraryInfo *TLI)
Given a CallInst, check if it calls a string function known to CodeGen, and mark it with NoBuiltin if...
LLVM_ABI void appendToUsed(Module &M, ArrayRef< GlobalValue * > Values)
Adds global values to the llvm.used list.
LLVM_ABI void appendToGlobalDtors(Module &M, Function *F, int Priority, Constant *Data=nullptr)
Same as appendToGlobalCtors(), but for global dtors.
LLVM_ABI bool checkIfAlreadyInstrumented(Module &M, StringRef Flag)
Check if module has flag attached, if not add the flag.
void getAddressSanitizerParams(const Triple &TargetTriple, int LongSize, bool IsKasan, uint64_t *ShadowBase, int *MappingScale, bool *OrShadowOffset)
DEMANGLE_ABI std::string demangle(std::string_view MangledName)
Attempt to demangle a string using different demangling schemes.
std::string itostr(int64_t X)
LLVM_ABI void SplitBlockAndInsertForEachLane(ElementCount EC, Type *IndexTy, BasicBlock::iterator InsertBefore, std::function< void(IRBuilderBase &, Value *)> Func)
Utility function for performing a given action on each lane of a vector with EC elements.
AnalysisManager< Module > ModuleAnalysisManager
Convenience typedef for the Module analysis manager.
LLVM_ABI bool replaceDbgDeclare(Value *Address, Value *NewAddress, DIBuilder &Builder, uint8_t DIExprFlags, int Offset)
Replaces dbg.declare record when the address it describes is replaced with a new value.
LLVM_ABI ASanAccessInfo(int32_t Packed)
const uint8_t AccessSizeIndex
This struct is a compact representation of a valid (non-zero power of two) alignment.
constexpr uint64_t value() const
This is a hole in the type system and should not be abused.
This struct is a compact representation of a valid (power of two) or undefined (0) alignment.
Align valueOrOne() const
For convenience, returns a valid alignment or 1 if undefined.
Information about a load/store intrinsic defined by the target.
SmallVector< InterestingMemoryOperand, 1 > InterestingOperands
A CRTP mix-in to automatically provide informational APIs needed for passes.
SizeOffsetAPInt - Used by ObjectSizeOffsetVisitor, which works with APInts.