94#define DEBUG_TYPE "asan"
100 std::numeric_limits<uint64_t>::max();
141 "__asan_unregister_image_globals";
154 "__asan_stack_malloc_always_";
168 "__asan_option_detect_stack_use_after_return";
171 "__asan_shadow_memory_dynamic_address";
197 "asan-kernel",
cl::desc(
"Enable KernelAddressSanitizer instrumentation"),
202 cl::desc(
"Enable recovery mode (continue-after-error)."),
206 "asan-guard-against-version-mismatch",
212 cl::desc(
"instrument read instructions"),
216 "asan-instrument-writes",
cl::desc(
"instrument write instructions"),
225 "asan-instrument-atomics",
235 "asan-always-slow-path",
240 "asan-force-dynamic-shadow",
241 cl::desc(
"Load shadow address into a local variable for each function"),
246 cl::desc(
"Access dynamic shadow through an ifunc global on "
247 "platforms that support this"),
251 "asan-with-ifunc-suppress-remat",
252 cl::desc(
"Suppress rematerialization of dynamic shadow address by passing "
253 "it through inline asm in prologue."),
261 "asan-max-ins-per-bb",
cl::init(10000),
262 cl::desc(
"maximal number of instructions to instrument in any given BB"),
269 "asan-max-inline-poisoning-size",
271 "Inline shadow poisoning for blocks up to the given size in bytes."),
275 "asan-use-after-return",
276 cl::desc(
"Sets the mode of detection for stack-use-after-return."),
278 clEnumValN(AsanDetectStackUseAfterReturnMode::Never,
"never",
279 "Never detect stack use after return."),
281 AsanDetectStackUseAfterReturnMode::Runtime,
"runtime",
282 "Detect stack use after return if "
283 "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."),
284 clEnumValN(AsanDetectStackUseAfterReturnMode::Always,
"always",
285 "Always detect stack use after return.")),
289 cl::desc(
"Create redzones for byval "
290 "arguments (extra copy "
295 cl::desc(
"Check stack-use-after-scope"),
304 cl::desc(
"Handle C++ initializer order"),
308 "asan-detect-invalid-pointer-pair",
313 "asan-detect-invalid-pointer-cmp",
318 "asan-detect-invalid-pointer-sub",
323 "asan-realign-stack",
324 cl::desc(
"Realign stack to the value of this flag (power of two)"),
328 "asan-instrumentation-with-call-threshold",
329 cl::desc(
"If the function being instrumented contains more than "
330 "this number of memory accesses, use callbacks instead of "
331 "inline checks (-1 means never use callbacks)."),
335 "asan-memory-access-callback-prefix",
340 "asan-kernel-mem-intrinsic-prefix",
346 cl::desc(
"instrument dynamic allocas"),
350 "asan-skip-promotable-allocas",
355 "asan-constructor-kind",
356 cl::desc(
"Sets the ASan constructor kind"),
359 "Use global constructors")),
366 cl::desc(
"scale of asan shadow mapping"),
371 cl::desc(
"offset of asan shadow mapping [EXPERIMENTAL]"),
385 "asan-opt-same-temp",
cl::desc(
"Instrument the same temp just once"),
389 cl::desc(
"Don't instrument scalar globals"),
393 "asan-opt-stack",
cl::desc(
"Don't instrument scalar stack variables"),
397 "asan-stack-dynamic-alloca",
402 "asan-force-experiment",
408 cl::desc(
"Use private aliases for global variables"),
413 cl::desc(
"Use odr indicators to improve ODR reporting"),
418 cl::desc(
"Use linker features to support dead "
419 "code stripping of globals"),
426 cl::desc(
"Place ASan constructors in comdat sections"),
430 "asan-destructor-kind",
431 cl::desc(
"Sets the ASan destructor kind. The default is to use the value "
432 "provided to the pass constructor"),
435 "Use global destructors")),
455STATISTIC(NumInstrumentedReads,
"Number of instrumented reads");
456STATISTIC(NumInstrumentedWrites,
"Number of instrumented writes");
458 "Number of optimized accesses to global vars");
460 "Number of optimized accesses to stack vars");
469struct ShadowMapping {
480 bool IsAndroid = TargetTriple.
isAndroid();
483 bool IsMacOS = TargetTriple.
isMacOSX();
486 bool IsPS = TargetTriple.
isPS();
493 bool IsMIPS32 = TargetTriple.
isMIPS32();
494 bool IsMIPS64 = TargetTriple.
isMIPS64();
495 bool IsArmOrThumb = TargetTriple.
isARM() || TargetTriple.
isThumb();
503 bool IsAMDGPU = TargetTriple.
isAMDGPU();
505 ShadowMapping Mapping;
512 if (LongSize == 32) {
515 else if (IsMIPSN32ABI)
527 else if (IsEmscripten)
540 else if (IsFreeBSD && IsAArch64)
542 else if (IsFreeBSD && !IsMIPS64) {
547 }
else if (IsNetBSD) {
554 else if (IsLinux && IsX86_64) {
560 }
else if (IsWindows && IsX86_64) {
566 else if (IsMacOS && IsAArch64)
570 else if (IsLoongArch64)
594 Mapping.OrShadowOffset = !IsAArch64 && !IsPPC64 && !IsSystemZ && !IsPS &&
595 !IsRISCV64 && !IsLoongArch64 &&
596 !(Mapping.Offset & (Mapping.Offset - 1)) &&
598 bool IsAndroidWithIfuncSupport =
600 Mapping.InGlobal =
ClWithIfunc && IsAndroidWithIfuncSupport && IsArmOrThumb;
608 int *MappingScale,
bool *OrShadowOffset) {
610 *ShadowBase = Mapping.Offset;
611 *MappingScale = Mapping.Scale;
612 *OrShadowOffset = Mapping.OrShadowOffset;
622 uint8_t AccessSizeIndex)
626 AccessSizeIndex(AccessSizeIndex), IsWrite(IsWrite),
627 CompileKernel(CompileKernel) {}
634 return std::max(32U, 1U << MappingScale);
649class RuntimeCallInserter {
651 bool TrackInsertedCalls =
false;
655 RuntimeCallInserter(
Function &Fn) : OwnerFn(&Fn) {
659 TrackInsertedCalls =
true;
663 ~RuntimeCallInserter() {
664 if (InsertedCalls.
empty())
666 assert(TrackInsertedCalls &&
"Calls were wrongly tracked");
669 for (
CallInst *CI : InsertedCalls) {
671 assert(BB &&
"Instruction doesn't belong to a BasicBlock");
673 "Instruction doesn't belong to the expected Function!");
681 if (Colors.
size() != 1) {
683 "Instruction's BasicBlock is not monochromatic");
690 if (EHPad && EHPad->
isEHPad()) {
694 OB, CI->getIterator());
695 NewCall->copyMetadata(*CI);
696 CI->replaceAllUsesWith(NewCall);
697 CI->eraseFromParent();
708 if (TrackInsertedCalls)
709 InsertedCalls.push_back(Inst);
715struct AddressSanitizer {
717 int InstrumentationWithCallsThreshold,
718 uint32_t MaxInlinePoisoningSize,
bool CompileKernel =
false,
719 bool Recover =
false,
bool UseAfterScope =
false,
721 AsanDetectStackUseAfterReturnMode::Runtime)
729 InstrumentationWithCallsThreshold(
732 : InstrumentationWithCallsThreshold),
735 : MaxInlinePoisoningSize) {
736 C = &(
M.getContext());
737 DL = &
M.getDataLayout();
738 LongSize =
M.getDataLayout().getPointerSizeInBits();
740 PtrTy = PointerType::getUnqual(*C);
742 TargetTriple =
Triple(
M.getTargetTriple());
746 assert(this->UseAfterReturn != AsanDetectStackUseAfterReturnMode::Invalid);
754 bool isInterestingAlloca(
const AllocaInst &AI);
762 const DataLayout &DL, RuntimeCallInserter &RTCI);
763 void instrumentPointerComparisonOrSubtraction(
Instruction *
I,
764 RuntimeCallInserter &RTCI);
767 uint32_t TypeStoreSize,
bool IsWrite,
769 RuntimeCallInserter &RTCI);
772 uint32_t TypeStoreSize,
bool IsWrite,
773 Value *SizeArgument);
778 TypeSize TypeStoreSize,
bool IsWrite,
779 Value *SizeArgument,
bool UseCalls,
781 RuntimeCallInserter &RTCI);
782 void instrumentMaskedLoadOrStore(AddressSanitizer *
Pass,
const DataLayout &DL,
786 Type *OpType,
bool IsWrite,
787 Value *SizeArgument,
bool UseCalls,
788 uint32_t Exp, RuntimeCallInserter &RTCI);
792 bool IsWrite,
size_t AccessSizeIndex,
794 RuntimeCallInserter &RTCI);
795 void instrumentMemIntrinsic(
MemIntrinsic *
MI, RuntimeCallInserter &RTCI);
797 bool suppressInstrumentationSiteForDebug(
int &Instrumented);
799 bool maybeInsertAsanInitAtFunctionEntry(
Function &
F);
800 bool maybeInsertDynamicShadowAtFunctionEntry(
Function &
F);
801 void markEscapedLocalAllocas(
Function &
F);
804 friend struct FunctionStackPoisoner;
814 struct FunctionStateRAII {
815 AddressSanitizer *
Pass;
817 FunctionStateRAII(AddressSanitizer *
Pass) :
Pass(
Pass) {
819 "last pass forgot to clear cache");
823 ~FunctionStateRAII() {
824 Pass->LocalDynamicShadow =
nullptr;
825 Pass->ProcessedAllocas.clear();
840 ShadowMapping Mapping;
854 Value *LocalDynamicShadow =
nullptr;
860 int InstrumentationWithCallsThreshold;
864class ModuleAddressSanitizer {
866 ModuleAddressSanitizer(
Module &M,
bool InsertVersionCheck,
867 bool CompileKernel =
false,
bool Recover =
false,
868 bool UseGlobalsGC =
true,
bool UseOdrIndicator =
true,
875 : InsertVersionCheck),
877 UseGlobalsGC(UseGlobalsGC &&
ClUseGlobalsGC && !this->CompileKernel),
892 UseCtorComdat(UseGlobalsGC &&
ClWithComdat && !this->CompileKernel),
893 DestructorKind(DestructorKind),
897 C = &(
M.getContext());
898 int LongSize =
M.getDataLayout().getPointerSizeInBits();
900 PtrTy = PointerType::getUnqual(*C);
901 TargetTriple =
Triple(
M.getTargetTriple());
906 assert(this->DestructorKind != AsanDtorKind::Invalid);
909 bool instrumentModule(
Module &);
912 void initializeCallbacks(
Module &M);
921 const std::string &UniqueModuleId);
938 bool ShouldUseMachOGlobalsSection()
const;
939 StringRef getGlobalMetadataSection()
const;
942 uint64_t getMinRedzoneSizeForGlobal()
const {
946 int GetAsanVersion(
const Module &M)
const;
949 bool InsertVersionCheck;
952 bool UsePrivateAlias;
953 bool UseOdrIndicator;
961 ShadowMapping Mapping;
971 Function *AsanCtorFunction =
nullptr;
972 Function *AsanDtorFunction =
nullptr;
984struct FunctionStackPoisoner :
public InstVisitor<FunctionStackPoisoner> {
986 AddressSanitizer &ASan;
987 RuntimeCallInserter &RTCI;
992 ShadowMapping Mapping;
1001 FunctionCallee AsanPoisonStackMemoryFunc, AsanUnpoisonStackMemoryFunc;
1005 struct AllocaPoisonCall {
1013 bool HasUntracedLifetimeIntrinsic =
false;
1020 bool HasInlineAsm =
false;
1021 bool HasReturnsTwiceCall =
false;
1024 FunctionStackPoisoner(
Function &F, AddressSanitizer &ASan,
1025 RuntimeCallInserter &RTCI)
1026 :
F(
F), ASan(ASan), RTCI(RTCI),
1028 IntptrTy(ASan.IntptrTy), IntptrPtrTy(
PointerType::
get(IntptrTy, 0)),
1029 Mapping(ASan.Mapping),
1038 copyArgsPassedByValToAllocas();
1043 if (AllocaVec.
empty() && DynamicAllocaVec.
empty())
return false;
1045 initializeCallbacks(*
F.getParent());
1047 if (HasUntracedLifetimeIntrinsic) {
1051 StaticAllocaPoisonCallVec.
clear();
1052 DynamicAllocaPoisonCallVec.
clear();
1055 processDynamicAllocas();
1056 processStaticAllocas();
1067 void copyArgsPassedByValToAllocas();
1072 void processStaticAllocas();
1073 void processDynamicAllocas();
1075 void createDynamicAllocasInitStorage();
1093 void unpoisonDynamicAllocasBeforeInst(
Instruction *InstBefore,
1094 Value *SavedStack) {
1101 if (!isa<ReturnInst>(InstBefore)) {
1103 InstBefore->
getModule(), Intrinsic::get_dynamic_area_offset,
1112 RTCI.createRuntimeCall(
1113 IRB, AsanAllocasUnpoisonFunc,
1114 {IRB.
CreateLoad(IntptrTy, DynamicAllocaLayout), DynamicAreaPtr});
1118 void unpoisonDynamicAllocas() {
1120 unpoisonDynamicAllocasBeforeInst(Ret, DynamicAllocaLayout);
1122 for (
Instruction *StackRestoreInst : StackRestoreVec)
1123 unpoisonDynamicAllocasBeforeInst(StackRestoreInst,
1124 StackRestoreInst->getOperand(0));
1137 void handleDynamicAllocaCall(
AllocaInst *AI);
1143 const auto *STy = dyn_cast<StructType>(AllocaType);
1144 if (!ASan.isInterestingAlloca(AI) || isa<ScalableVectorType>(AllocaType) ||
1145 (STy && STy->containsHomogeneousScalableVectorTypes())) {
1149 if (AllocaVec.
empty())
1167 if (
ID == Intrinsic::stackrestore) StackRestoreVec.
push_back(&
II);
1168 if (
ID == Intrinsic::localescape) LocalEscapeCall = &
II;
1169 if (!ASan.UseAfterScope)
1171 if (!
II.isLifetimeStartOrEnd())
1174 auto *
Size = cast<ConstantInt>(
II.getArgOperand(0));
1176 if (
Size->isMinusOne())
return;
1179 const uint64_t SizeValue =
Size->getValue().getLimitedValue();
1180 if (SizeValue == ~0ULL ||
1188 HasUntracedLifetimeIntrinsic =
true;
1192 if (!ASan.isInterestingAlloca(*AI))
1194 bool DoPoison = (
ID == Intrinsic::lifetime_end);
1195 AllocaPoisonCall APC = {&
II, AI, SizeValue, DoPoison};
1197 StaticAllocaPoisonCallVec.
push_back(APC);
1199 DynamicAllocaPoisonCallVec.
push_back(APC);
1203 if (
CallInst *CI = dyn_cast<CallInst>(&CB)) {
1204 HasInlineAsm |= CI->isInlineAsm() && &CB != ASan.LocalDynamicShadow;
1205 HasReturnsTwiceCall |= CI->canReturnTwice();
1210 void initializeCallbacks(
Module &M);
1237 OS, MapClassName2PassName);
1249 UseOdrIndicator(UseOdrIndicator), DestructorKind(DestructorKind),
1250 ConstructorKind(ConstructorKind) {}
1259 ModuleAddressSanitizer ModuleSanitizer(
1261 UseGlobalGC, UseOdrIndicator, DestructorKind, ConstructorKind);
1267 AddressSanitizer FunctionSanitizer(
1272 Modified |= FunctionSanitizer.instrumentFunction(
F, &TLI);
1274 Modified |= ModuleSanitizer.instrumentModule(M);
1295 if (
G->getName().starts_with(
"llvm.") ||
1297 G->getName().starts_with(
"__llvm_gcov_ctr") ||
1299 G->getName().starts_with(
"__llvm_rtti_proxy"))
1312 Type *PtrTy = cast<PointerType>(
Addr->getType()->getScalarType());
1314 if (AddrSpace == 3 || AddrSpace == 5)
1321 Shadow = IRB.
CreateLShr(Shadow, Mapping.Scale);
1322 if (Mapping.Offset == 0)
return Shadow;
1325 if (LocalDynamicShadow)
1326 ShadowBase = LocalDynamicShadow;
1328 ShadowBase = ConstantInt::get(IntptrTy, Mapping.Offset);
1329 if (Mapping.OrShadowOffset)
1330 return IRB.
CreateOr(Shadow, ShadowBase);
1332 return IRB.
CreateAdd(Shadow, ShadowBase);
1337 RuntimeCallInserter &RTCI) {
1339 if (isa<MemTransferInst>(
MI)) {
1340 RTCI.createRuntimeCall(
1341 IRB, isa<MemMoveInst>(
MI) ? AsanMemmove : AsanMemcpy,
1345 }
else if (isa<MemSetInst>(
MI)) {
1346 RTCI.createRuntimeCall(
1352 MI->eraseFromParent();
1356bool AddressSanitizer::isInterestingAlloca(
const AllocaInst &AI) {
1357 auto PreviouslySeenAllocaInfo = ProcessedAllocas.find(&AI);
1359 if (PreviouslySeenAllocaInfo != ProcessedAllocas.end())
1360 return PreviouslySeenAllocaInfo->getSecond();
1362 bool IsInteresting =
1375 !(SSGI && SSGI->
isSafe(AI)));
1377 ProcessedAllocas[&AI] = IsInteresting;
1378 return IsInteresting;
1383 Type *PtrTy = cast<PointerType>(
Ptr->getType()->getScalarType());
1392 if (
Ptr->isSwiftError())
1398 if (
auto AI = dyn_cast_or_null<AllocaInst>(
Ptr))
1409void AddressSanitizer::getInterestingMemoryOperands(
1412 if (LocalDynamicShadow ==
I)
1415 if (
LoadInst *LI = dyn_cast<LoadInst>(
I)) {
1418 Interesting.
emplace_back(
I, LI->getPointerOperandIndex(),
false,
1419 LI->getType(), LI->getAlign());
1420 }
else if (
StoreInst *SI = dyn_cast<StoreInst>(
I)) {
1424 SI->getValueOperand()->getType(),
SI->getAlign());
1428 Interesting.
emplace_back(
I, RMW->getPointerOperandIndex(),
true,
1429 RMW->getValOperand()->getType(), std::nullopt);
1433 Interesting.
emplace_back(
I, XCHG->getPointerOperandIndex(),
true,
1434 XCHG->getCompareOperand()->getType(),
1436 }
else if (
auto CI = dyn_cast<CallInst>(
I)) {
1437 switch (CI->getIntrinsicID()) {
1438 case Intrinsic::masked_load:
1439 case Intrinsic::masked_store:
1440 case Intrinsic::masked_gather:
1441 case Intrinsic::masked_scatter: {
1442 bool IsWrite = CI->getType()->isVoidTy();
1444 unsigned OpOffset = IsWrite ? 1 : 0;
1448 auto BasePtr = CI->getOperand(OpOffset);
1449 if (ignoreAccess(
I, BasePtr))
1451 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1454 if (
auto *
Op = dyn_cast<ConstantInt>(CI->getOperand(1 + OpOffset)))
1455 Alignment =
Op->getMaybeAlignValue();
1456 Value *
Mask = CI->getOperand(2 + OpOffset);
1457 Interesting.
emplace_back(
I, OpOffset, IsWrite, Ty, Alignment, Mask);
1460 case Intrinsic::masked_expandload:
1461 case Intrinsic::masked_compressstore: {
1462 bool IsWrite = CI->getIntrinsicID() == Intrinsic::masked_compressstore;
1463 unsigned OpOffset = IsWrite ? 1 : 0;
1466 auto BasePtr = CI->getOperand(OpOffset);
1467 if (ignoreAccess(
I, BasePtr))
1470 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1473 Value *
Mask = CI->getOperand(1 + OpOffset);
1476 Value *ExtMask =
IB.CreateZExt(Mask, ExtTy);
1477 Value *EVL =
IB.CreateAddReduce(ExtMask);
1478 Value *TrueMask = ConstantInt::get(
Mask->getType(), 1);
1479 Interesting.
emplace_back(
I, OpOffset, IsWrite, Ty, Alignment, TrueMask,
1483 case Intrinsic::vp_load:
1484 case Intrinsic::vp_store:
1485 case Intrinsic::experimental_vp_strided_load:
1486 case Intrinsic::experimental_vp_strided_store: {
1487 auto *VPI = cast<VPIntrinsic>(CI);
1488 unsigned IID = CI->getIntrinsicID();
1489 bool IsWrite = CI->getType()->isVoidTy();
1492 unsigned PtrOpNo = *VPI->getMemoryPointerParamPos(IID);
1493 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1494 MaybeAlign Alignment = VPI->getOperand(PtrOpNo)->getPointerAlignment(*
DL);
1495 Value *Stride =
nullptr;
1496 if (IID == Intrinsic::experimental_vp_strided_store ||
1497 IID == Intrinsic::experimental_vp_strided_load) {
1498 Stride = VPI->getOperand(PtrOpNo + 1);
1503 if (!isa<ConstantInt>(Stride) ||
1504 cast<ConstantInt>(Stride)->getZExtValue() % PointerAlign != 0)
1505 Alignment =
Align(1);
1507 Interesting.
emplace_back(
I, PtrOpNo, IsWrite, Ty, Alignment,
1508 VPI->getMaskParam(), VPI->getVectorLengthParam(),
1512 case Intrinsic::vp_gather:
1513 case Intrinsic::vp_scatter: {
1514 auto *VPI = cast<VPIntrinsic>(CI);
1515 unsigned IID = CI->getIntrinsicID();
1516 bool IsWrite = IID == Intrinsic::vp_scatter;
1519 unsigned PtrOpNo = *VPI->getMemoryPointerParamPos(IID);
1520 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1521 MaybeAlign Alignment = VPI->getPointerAlignment();
1522 Interesting.
emplace_back(
I, PtrOpNo, IsWrite, Ty, Alignment,
1523 VPI->getMaskParam(),
1524 VPI->getVectorLengthParam());
1528 for (
unsigned ArgNo = 0; ArgNo < CI->arg_size(); ArgNo++) {
1530 ignoreAccess(
I, CI->getArgOperand(ArgNo)))
1532 Type *Ty = CI->getParamByValType(ArgNo);
1540 return V->getType()->isPointerTy() || isa<PtrToIntInst>(V);
1547 if (
ICmpInst *Cmp = dyn_cast<ICmpInst>(
I)) {
1548 if (!Cmp->isRelational())
1562 if (BO->getOpcode() != Instruction::Sub)
1575 if (!
G->hasInitializer())
1578 if (
G->hasSanitizerMetadata() &&
G->getSanitizerMetadata().IsDynInit)
1584void AddressSanitizer::instrumentPointerComparisonOrSubtraction(
1587 FunctionCallee F = isa<ICmpInst>(
I) ? AsanPtrCmpFunction : AsanPtrSubFunction;
1588 Value *
Param[2] = {
I->getOperand(0),
I->getOperand(1)};
1589 for (
Value *&i : Param) {
1590 if (i->getType()->isPointerTy())
1593 RTCI.createRuntimeCall(IRB,
F, Param);
1599 TypeSize TypeStoreSize,
bool IsWrite,
1600 Value *SizeArgument,
bool UseCalls,
1601 uint32_t Exp, RuntimeCallInserter &RTCI) {
1606 switch (FixedSize) {
1612 if (!Alignment || *Alignment >= Granularity ||
1613 *Alignment >= FixedSize / 8)
1614 return Pass->instrumentAddress(
I, InsertBefore,
Addr, Alignment,
1615 FixedSize, IsWrite,
nullptr, UseCalls,
1619 Pass->instrumentUnusualSizeOrAlignment(
I, InsertBefore,
Addr, TypeStoreSize,
1620 IsWrite,
nullptr, UseCalls, Exp, RTCI);
1623void AddressSanitizer::instrumentMaskedLoadOrStore(
1626 MaybeAlign Alignment,
unsigned Granularity,
Type *OpType,
bool IsWrite,
1628 RuntimeCallInserter &RTCI) {
1629 auto *VTy = cast<VectorType>(OpType);
1630 TypeSize ElemTypeSize =
DL.getTypeStoreSizeInBits(VTy->getScalarType());
1631 auto Zero = ConstantInt::get(IntptrTy, 0);
1639 Value *IsEVLZero =
IB.CreateICmpNE(EVL, ConstantInt::get(EVLType, 0));
1641 IB.SetInsertPoint(LoopInsertBefore);
1643 EVL =
IB.CreateZExtOrTrunc(EVL, IntptrTy);
1646 Value *
EC =
IB.CreateElementCount(IntptrTy, VTy->getElementCount());
1647 EVL =
IB.CreateBinaryIntrinsic(Intrinsic::umin, EVL, EC);
1649 EVL =
IB.CreateElementCount(IntptrTy, VTy->getElementCount());
1654 Stride =
IB.CreateZExtOrTrunc(Stride, IntptrTy);
1659 if (
auto *MaskElemC = dyn_cast<ConstantInt>(MaskElem)) {
1660 if (MaskElemC->isZero())
1671 Value *InstrumentedAddress;
1672 if (isa<VectorType>(
Addr->getType())) {
1674 cast<VectorType>(
Addr->getType())->getElementType()->isPointerTy() &&
1675 "Expected vector of pointer.");
1677 }
else if (Stride) {
1684 Alignment, Granularity, ElemTypeSize, IsWrite,
1685 SizeArgument, UseCalls, Exp, RTCI);
1692 RuntimeCallInserter &RTCI) {
1713 isSafeAccess(ObjSizeVis,
Addr,
O.TypeStoreSize)) {
1714 NumOptimizedAccessesToGlobalVar++;
1722 isSafeAccess(ObjSizeVis,
Addr,
O.TypeStoreSize)) {
1723 NumOptimizedAccessesToStackVar++;
1729 NumInstrumentedWrites++;
1731 NumInstrumentedReads++;
1733 unsigned Granularity = 1 << Mapping.Scale;
1735 instrumentMaskedLoadOrStore(
this,
DL, IntptrTy,
O.MaybeMask,
O.MaybeEVL,
1736 O.MaybeStride,
O.getInsn(),
Addr,
O.Alignment,
1737 Granularity,
O.OpType,
O.IsWrite,
nullptr,
1738 UseCalls, Exp, RTCI);
1741 Granularity,
O.TypeStoreSize,
O.IsWrite,
nullptr,
1742 UseCalls, Exp, RTCI);
1748 size_t AccessSizeIndex,
1749 Value *SizeArgument,
1751 RuntimeCallInserter &RTCI) {
1757 Call = RTCI.createRuntimeCall(IRB, AsanErrorCallbackSized[IsWrite][0],
1758 {
Addr, SizeArgument});
1760 Call = RTCI.createRuntimeCall(IRB, AsanErrorCallbackSized[IsWrite][1],
1761 {
Addr, SizeArgument, ExpVal});
1764 Call = RTCI.createRuntimeCall(
1765 IRB, AsanErrorCallback[IsWrite][0][AccessSizeIndex],
Addr);
1767 Call = RTCI.createRuntimeCall(
1768 IRB, AsanErrorCallback[IsWrite][1][AccessSizeIndex], {
Addr, ExpVal});
1771 Call->setCannotMerge();
1778 size_t Granularity =
static_cast<size_t>(1) << Mapping.Scale;
1780 Value *LastAccessedByte =
1781 IRB.
CreateAnd(AddrLong, ConstantInt::get(IntptrTy, Granularity - 1));
1783 if (TypeStoreSize / 8 > 1)
1785 LastAccessedByte, ConstantInt::get(IntptrTy, TypeStoreSize / 8 - 1));
1793Instruction *AddressSanitizer::instrumentAMDGPUAddress(
1795 uint32_t TypeStoreSize,
bool IsWrite,
Value *SizeArgument) {
1799 Type *PtrTy = cast<PointerType>(
Addr->getType()->getScalarType());
1802 return InsertBefore;
1807 Value *IsSharedOrPrivate = IRB.
CreateOr(IsShared, IsPrivate);
1809 Value *AddrSpaceZeroLanding =
1811 InsertBefore = cast<Instruction>(AddrSpaceZeroLanding);
1812 return InsertBefore;
1828 Trm->getParent()->setName(
"asan.report");
1839void AddressSanitizer::instrumentAddress(
Instruction *OrigIns,
1842 uint32_t TypeStoreSize,
bool IsWrite,
1843 Value *SizeArgument,
bool UseCalls,
1845 RuntimeCallInserter &RTCI) {
1846 if (TargetTriple.isAMDGPU()) {
1847 InsertBefore = instrumentAMDGPUAddress(OrigIns, InsertBefore,
Addr,
1848 TypeStoreSize, IsWrite, SizeArgument);
1857 const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
1862 ConstantInt::get(Int32Ty, AccessInfo.Packed)});
1869 RTCI.createRuntimeCall(
1870 IRB, AsanMemoryAccessCallback[IsWrite][0][AccessSizeIndex], AddrLong);
1872 RTCI.createRuntimeCall(
1873 IRB, AsanMemoryAccessCallback[IsWrite][1][AccessSizeIndex],
1874 {AddrLong, ConstantInt::get(IRB.
getInt32Ty(), Exp)});
1881 Value *ShadowPtr = memToShadow(AddrLong, IRB);
1883 std::max<uint64_t>(Alignment.
valueOrOne().
value() >> Mapping.Scale, 1);
1888 size_t Granularity = 1ULL << Mapping.Scale;
1891 bool GenSlowPath = (
ClAlwaysSlowPath || (TypeStoreSize < 8 * Granularity));
1893 if (TargetTriple.isAMDGCN()) {
1895 auto *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeStoreSize);
1898 CrashTerm = genAMDGPUReportBlock(IRB, Cmp, Recover);
1899 }
else if (GenSlowPath) {
1904 assert(cast<BranchInst>(CheckTerm)->isUnconditional());
1907 Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeStoreSize);
1922 CrashTerm, AddrLong, IsWrite, AccessSizeIndex, SizeArgument, Exp, RTCI);
1931void AddressSanitizer::instrumentUnusualSizeOrAlignment(
1933 TypeSize TypeStoreSize,
bool IsWrite,
Value *SizeArgument,
bool UseCalls,
1934 uint32_t Exp, RuntimeCallInserter &RTCI) {
1942 RTCI.createRuntimeCall(IRB, AsanMemoryAccessCallbackSized[IsWrite][0],
1945 RTCI.createRuntimeCall(
1946 IRB, AsanMemoryAccessCallbackSized[IsWrite][1],
1960void ModuleAddressSanitizer::poisonOneInitializer(
Function &GlobalInit,
1968 IRB.
CreateCall(AsanPoisonGlobals, ModuleNameAddr);
1971 for (
auto &BB : GlobalInit)
1976void ModuleAddressSanitizer::createInitializerPoisonCalls(
1987 if (isa<ConstantAggregateZero>(
OP))
continue;
1993 auto *Priority = cast<ConstantInt>(CS->
getOperand(0));
2003ModuleAddressSanitizer::getExcludedAliasedGlobal(
const GlobalAlias &GA)
const {
2008 assert(CompileKernel &&
"Only expecting to be called when compiling kernel");
2015 return dyn_cast<GlobalVariable>(
C->stripPointerCastsAndAliases());
2020bool ModuleAddressSanitizer::shouldInstrumentGlobal(
GlobalVariable *
G)
const {
2021 Type *Ty =
G->getValueType();
2024 if (
G->hasSanitizerMetadata() &&
G->getSanitizerMetadata().NoAddress)
2026 if (!Ty->
isSized())
return false;
2027 if (!
G->hasInitializer())
return false;
2029 if (
G->getAddressSpace() &&
2036 if (
G->isThreadLocal())
return false;
2038 if (
G->getAlign() && *
G->getAlign() > getMinRedzoneSizeForGlobal())
return false;
2044 if (!TargetTriple.isOSBinFormatCOFF()) {
2045 if (!
G->hasExactDefinition() ||
G->hasComdat())
2049 if (
G->isInterposable())
2053 if (
G->hasAvailableExternallyLinkage())
2060 switch (
C->getSelectionKind()) {
2071 if (
G->hasSection()) {
2081 if (Section ==
"llvm.metadata")
return false;
2088 if (
Section.starts_with(
".preinit_array") ||
2089 Section.starts_with(
".init_array") ||
2090 Section.starts_with(
".fini_array")) {
2096 if (TargetTriple.isOSBinFormatELF()) {
2098 [](
char c) {
return llvm::isAlnum(c) || c ==
'_'; }))
2110 if (TargetTriple.isOSBinFormatCOFF() &&
Section.contains(
'$')) {
2111 LLVM_DEBUG(
dbgs() <<
"Ignoring global in sorted section (contains '$'): "
2116 if (TargetTriple.isOSBinFormatMachO()) {
2118 unsigned TAA = 0, StubSize = 0;
2121 Section, ParsedSegment, ParsedSection, TAA, TAAParsed, StubSize));
2126 if (ParsedSegment ==
"__OBJC" ||
2127 (ParsedSegment ==
"__DATA" && ParsedSection.
starts_with(
"__objc_"))) {
2139 if (ParsedSegment ==
"__DATA" && ParsedSection ==
"__cfstring") {
2152 if (CompileKernel) {
2155 if (
G->getName().starts_with(
"__"))
2165bool ModuleAddressSanitizer::ShouldUseMachOGlobalsSection()
const {
2166 if (!TargetTriple.isOSBinFormatMachO())
2169 if (TargetTriple.isMacOSX() && !TargetTriple.isMacOSXVersionLT(10, 11))
2171 if (TargetTriple.isiOS() && !TargetTriple.isOSVersionLT(9))
2173 if (TargetTriple.isWatchOS() && !TargetTriple.isOSVersionLT(2))
2175 if (TargetTriple.isDriverKit())
2177 if (TargetTriple.isXROS())
2183StringRef ModuleAddressSanitizer::getGlobalMetadataSection()
const {
2184 switch (TargetTriple.getObjectFormat()) {
2194 "ModuleAddressSanitizer not implemented for object file format");
2201void ModuleAddressSanitizer::initializeCallbacks(
Module &M) {
2207 AsanUnpoisonGlobals =
2211 AsanRegisterGlobals =
M.getOrInsertFunction(
2213 AsanUnregisterGlobals =
M.getOrInsertFunction(
2218 AsanRegisterImageGlobals =
M.getOrInsertFunction(
2220 AsanUnregisterImageGlobals =
M.getOrInsertFunction(
2223 AsanRegisterElfGlobals =
2225 IntptrTy, IntptrTy, IntptrTy);
2226 AsanUnregisterElfGlobals =
2228 IntptrTy, IntptrTy, IntptrTy);
2233void ModuleAddressSanitizer::SetComdatForGlobalMetadata(
2238 if (!
G->hasName()) {
2245 if (!InternalSuffix.
empty() &&
G->hasLocalLinkage()) {
2246 std::string
Name = std::string(
G->getName());
2247 Name += InternalSuffix;
2248 C =
M.getOrInsertComdat(
Name);
2250 C =
M.getOrInsertComdat(
G->getName());
2256 if (TargetTriple.isOSBinFormatCOFF()) {
2258 if (
G->hasPrivateLinkage())
2271ModuleAddressSanitizer::CreateMetadataGlobal(
Module &M,
Constant *Initializer,
2273 auto Linkage = TargetTriple.isOSBinFormatMachO()
2277 M, Initializer->
getType(),
false, Linkage, Initializer,
2279 Metadata->setSection(getGlobalMetadataSection());
2290 AsanDtorFunction->addFnAttr(Attribute::NoUnwind);
2298void ModuleAddressSanitizer::InstrumentGlobalsCOFF(
2302 auto &
DL =
M.getDataLayout();
2305 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2306 Constant *Initializer = MetadataInitializers[i];
2309 CreateMetadataGlobal(M, Initializer,
G->getName());
2311 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2317 unsigned SizeOfGlobalStruct =
DL.getTypeAllocSize(Initializer->
getType());
2319 "global metadata will not be padded appropriately");
2322 SetComdatForGlobalMetadata(
G,
Metadata,
"");
2327 if (!MetadataGlobals.empty())
2331void ModuleAddressSanitizer::instrumentGlobalsELF(
2334 const std::string &UniqueModuleId) {
2341 bool UseComdatForGlobalsGC = UseOdrIndicator && !UniqueModuleId.empty();
2344 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2347 CreateMetadataGlobal(M, MetadataInitializers[i],
G->getName());
2349 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2352 if (UseComdatForGlobalsGC)
2353 SetComdatForGlobalMetadata(
G,
Metadata, UniqueModuleId);
2358 if (!MetadataGlobals.empty())
2375 "__start_" + getGlobalMetadataSection());
2379 "__stop_" + getGlobalMetadataSection());
2393 IrbDtor.CreateCall(AsanUnregisterElfGlobals,
2400void ModuleAddressSanitizer::InstrumentGlobalsMachO(
2411 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2412 Constant *Initializer = MetadataInitializers[i];
2415 CreateMetadataGlobal(M, Initializer,
G->getName());
2419 auto LivenessBinder =
2424 Twine(
"__asan_binder_") +
G->getName());
2425 Liveness->
setSection(
"__DATA,__asan_liveness,regular,live_support");
2426 LivenessGlobals[i] = Liveness;
2433 if (!LivenessGlobals.empty())
2455 IrbDtor.CreateCall(AsanUnregisterImageGlobals,
2460void ModuleAddressSanitizer::InstrumentGlobalsWithMetadataArray(
2464 unsigned N = ExtendedGlobals.
size();
2474 if (Mapping.Scale > 3)
2475 AllGlobals->setAlignment(
Align(1ULL << Mapping.Scale));
2480 ConstantInt::get(IntptrTy,
N)});
2486 IrbDtor.CreateCall(AsanUnregisterGlobals,
2488 ConstantInt::get(IntptrTy,
N)});
2502 if (CompileKernel) {
2503 for (
auto &GA :
M.aliases()) {
2505 AliasedGlobalExclusions.
insert(GV);
2510 for (
auto &
G :
M.globals()) {
2511 if (!AliasedGlobalExclusions.
count(&
G) && shouldInstrumentGlobal(&
G))
2515 size_t n = GlobalsToChange.
size();
2516 auto &
DL =
M.getDataLayout();
2530 IntptrTy, IntptrTy, IntptrTy);
2534 bool HasDynamicallyInitializedGlobals =
false;
2544 for (
size_t i = 0; i < n; i++) {
2548 if (
G->hasSanitizerMetadata())
2549 MD =
G->getSanitizerMetadata();
2554 std::string NameForGlobal =
G->getName().str();
2559 Type *Ty =
G->getValueType();
2560 const uint64_t SizeInBytes =
DL.getTypeAllocSize(Ty);
2573 M, NewTy,
G->isConstant(), Linkage, NewInitializer,
"",
G,
2574 G->getThreadLocalMode(),
G->getAddressSpace());
2584 if (TargetTriple.isOSBinFormatMachO() && !
G->hasSection() &&
2586 auto Seq = dyn_cast<ConstantDataSequential>(
G->getInitializer());
2587 if (Seq && Seq->isCString())
2588 NewGlobal->
setSection(
"__TEXT,__asan_cstring,regular");
2599 G->replaceAllUsesWith(
2602 G->eraseFromParent();
2603 NewGlobals[i] = NewGlobal;
2608 bool CanUsePrivateAliases =
2609 TargetTriple.isOSBinFormatELF() || TargetTriple.isOSBinFormatMachO() ||
2610 TargetTriple.isOSBinFormatWasm();
2611 if (CanUsePrivateAliases && UsePrivateAlias) {
2614 InstrumentedGlobal =
2622 }
else if (UseOdrIndicator) {
2625 auto *ODRIndicatorSym =
2634 ODRIndicatorSym->setAlignment(
Align(1));
2635 ODRIndicator = ODRIndicatorSym;
2641 ConstantInt::get(IntptrTy, SizeInBytes),
2642 ConstantInt::get(IntptrTy, SizeInBytes + RightRedzoneSize),
2645 ConstantInt::get(IntptrTy, MD.
IsDynInit),
2650 HasDynamicallyInitializedGlobals =
true;
2654 Initializers[i] = Initializer;
2660 for (
size_t i = 0; i < n; i++) {
2662 if (
G->getName().empty())
continue;
2667 if (UseGlobalsGC && TargetTriple.isOSBinFormatELF()) {
2673 instrumentGlobalsELF(IRB, M, NewGlobals, Initializers,
2675 }
else if (n == 0) {
2678 *CtorComdat = TargetTriple.isOSBinFormatELF();
2680 *CtorComdat =
false;
2681 if (UseGlobalsGC && TargetTriple.isOSBinFormatCOFF()) {
2682 InstrumentGlobalsCOFF(IRB, M, NewGlobals, Initializers);
2683 }
else if (UseGlobalsGC && ShouldUseMachOGlobalsSection()) {
2684 InstrumentGlobalsMachO(IRB, M, NewGlobals, Initializers);
2686 InstrumentGlobalsWithMetadataArray(IRB, M, NewGlobals, Initializers);
2691 if (HasDynamicallyInitializedGlobals)
2698ModuleAddressSanitizer::getRedzoneSizeForGlobal(
uint64_t SizeInBytes)
const {
2699 constexpr uint64_t kMaxRZ = 1 << 18;
2700 const uint64_t MinRZ = getMinRedzoneSizeForGlobal();
2703 if (SizeInBytes <= MinRZ / 2) {
2707 RZ = MinRZ - SizeInBytes;
2710 RZ = std::clamp((SizeInBytes / MinRZ / 4) * MinRZ, MinRZ, kMaxRZ);
2713 if (SizeInBytes % MinRZ)
2714 RZ += MinRZ - (SizeInBytes % MinRZ);
2717 assert((RZ + SizeInBytes) % MinRZ == 0);
2722int ModuleAddressSanitizer::GetAsanVersion(
const Module &M)
const {
2723 int LongSize =
M.getDataLayout().getPointerSizeInBits();
2728 Version += (LongSize == 32 && isAndroid);
2732bool ModuleAddressSanitizer::instrumentModule(
Module &M) {
2733 initializeCallbacks(M);
2738 if (CompileKernel) {
2743 std::string AsanVersion = std::to_string(GetAsanVersion(M));
2744 std::string VersionCheckName =
2746 std::tie(AsanCtorFunction, std::ignore) =
2749 {}, VersionCheckName);
2753 bool CtorComdat =
true;
2756 if (AsanCtorFunction) {
2757 IRBuilder<> IRB(AsanCtorFunction->getEntryBlock().getTerminator());
2758 instrumentGlobals(IRB, M, &CtorComdat);
2761 instrumentGlobals(IRB, M, &CtorComdat);
2770 if (UseCtorComdat && TargetTriple.isOSBinFormatELF() && CtorComdat) {
2771 if (AsanCtorFunction) {
2775 if (AsanDtorFunction) {
2780 if (AsanCtorFunction)
2782 if (AsanDtorFunction)
2793 for (
int Exp = 0;
Exp < 2;
Exp++) {
2794 for (
size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
2795 const std::string TypeStr = AccessIsWrite ?
"store" :
"load";
2796 const std::string ExpStr =
Exp ?
"exp_" :
"";
2797 const std::string EndingStr = Recover ?
"_noabort" :
"";
2806 Args1.push_back(ExpType);
2807 if (
auto AK = TLI->getExtAttrForI32Param(
false)) {
2812 AsanErrorCallbackSized[AccessIsWrite][
Exp] =
M.getOrInsertFunction(
2816 AsanMemoryAccessCallbackSized[AccessIsWrite][
Exp] =
M.getOrInsertFunction(
2821 AccessSizeIndex++) {
2822 const std::string Suffix = TypeStr + itostr(1ULL << AccessSizeIndex);
2823 AsanErrorCallback[AccessIsWrite][
Exp][AccessSizeIndex] =
2824 M.getOrInsertFunction(
2828 AsanMemoryAccessCallback[AccessIsWrite][
Exp][AccessSizeIndex] =
2829 M.getOrInsertFunction(
2836 const std::string MemIntrinCallbackPrefix =
2840 AsanMemmove =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memmove",
2841 PtrTy, PtrTy, PtrTy, IntptrTy);
2842 AsanMemcpy =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memcpy", PtrTy,
2843 PtrTy, PtrTy, IntptrTy);
2844 AsanMemset =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memset",
2848 AsanHandleNoReturnFunc =
2851 AsanPtrCmpFunction =
2853 AsanPtrSubFunction =
2855 if (Mapping.InGlobal)
2856 AsanShadowGlobal =
M.getOrInsertGlobal(
"__asan_shadow",
2859 AMDGPUAddressShared =
2861 AMDGPUAddressPrivate =
2865bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(
Function &
F) {
2873 if (
F.getName().contains(
" load]")) {
2883bool AddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(
Function &
F) {
2889 if (Mapping.InGlobal) {
2897 LocalDynamicShadow =
2898 IRB.
CreateCall(Asm, {AsanShadowGlobal},
".asan.shadow");
2900 LocalDynamicShadow =
2904 Value *GlobalDynamicAddress =
F.getParent()->getOrInsertGlobal(
2906 LocalDynamicShadow = IRB.
CreateLoad(IntptrTy, GlobalDynamicAddress);
2911void AddressSanitizer::markEscapedLocalAllocas(
Function &
F) {
2916 assert(ProcessedAllocas.empty() &&
"must process localescape before allocas");
2920 if (!
F.getParent()->getFunction(
"llvm.localescape"))
return;
2926 if (
II &&
II->getIntrinsicID() == Intrinsic::localescape) {
2928 for (
Value *Arg :
II->args()) {
2929 AllocaInst *AI = dyn_cast<AllocaInst>(Arg->stripPointerCasts());
2931 "non-static alloca arg to localescape");
2932 ProcessedAllocas[AI] =
false;
2939bool AddressSanitizer::suppressInstrumentationSiteForDebug(
int &Instrumented) {
2940 bool ShouldInstrument =
2944 return !ShouldInstrument;
2947bool AddressSanitizer::instrumentFunction(
Function &
F,
2953 if (
F.getName().starts_with(
"__asan_"))
return false;
2954 if (
F.isPresplitCoroutine())
2957 bool FunctionModified =
false;
2962 if (maybeInsertAsanInitAtFunctionEntry(
F))
2963 FunctionModified =
true;
2966 if (!
F.hasFnAttribute(Attribute::SanitizeAddress))
return FunctionModified;
2968 if (
F.hasFnAttribute(Attribute::DisableSanitizerInstrumentation))
2969 return FunctionModified;
2973 initializeCallbacks(*
F.getParent(), TLI);
2975 FunctionStateRAII CleanupObj(
this);
2977 RuntimeCallInserter RTCI(
F);
2979 FunctionModified |= maybeInsertDynamicShadowAtFunctionEntry(
F);
2983 markEscapedLocalAllocas(
F);
2995 for (
auto &BB :
F) {
2997 TempsToInstrument.
clear();
2998 int NumInsnsPerBB = 0;
2999 for (
auto &Inst : BB) {
3000 if (LooksLikeCodeInBug11395(&Inst))
return false;
3007 if (!InterestingOperands.
empty()) {
3008 for (
auto &Operand : InterestingOperands) {
3014 if (Operand.MaybeMask) {
3018 if (!TempsToInstrument.
insert(
Ptr).second)
3022 OperandsToInstrument.
push_back(Operand);
3029 PointerComparisonsOrSubtracts.
push_back(&Inst);
3035 if (
auto *CB = dyn_cast<CallBase>(&Inst)) {
3037 TempsToInstrument.
clear();
3041 if (
CallInst *CI = dyn_cast<CallInst>(&Inst))
3048 bool UseCalls = (InstrumentationWithCallsThreshold >= 0 &&
3049 OperandsToInstrument.
size() + IntrinToInstrument.
size() >
3050 (
unsigned)InstrumentationWithCallsThreshold);
3057 int NumInstrumented = 0;
3058 for (
auto &Operand : OperandsToInstrument) {
3059 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
3060 instrumentMop(ObjSizeVis, Operand, UseCalls,
3061 F.getDataLayout(), RTCI);
3062 FunctionModified =
true;
3064 for (
auto *Inst : IntrinToInstrument) {
3065 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
3066 instrumentMemIntrinsic(Inst, RTCI);
3067 FunctionModified =
true;
3070 FunctionStackPoisoner FSP(
F, *
this, RTCI);
3071 bool ChangedStack = FSP.runOnFunction();
3075 for (
auto *CI : NoReturnCalls) {
3077 RTCI.createRuntimeCall(IRB, AsanHandleNoReturnFunc, {});
3080 for (
auto *Inst : PointerComparisonsOrSubtracts) {
3081 instrumentPointerComparisonOrSubtraction(Inst, RTCI);
3082 FunctionModified =
true;
3085 if (ChangedStack || !NoReturnCalls.empty())
3086 FunctionModified =
true;
3088 LLVM_DEBUG(
dbgs() <<
"ASAN done instrumenting: " << FunctionModified <<
" "
3091 return FunctionModified;
3097bool AddressSanitizer::LooksLikeCodeInBug11395(
Instruction *
I) {
3098 if (LongSize != 32)
return false;
3107void FunctionStackPoisoner::initializeCallbacks(
Module &M) {
3111 const char *MallocNameTemplate =
3116 std::string Suffix = itostr(
Index);
3117 AsanStackMallocFunc[
Index] =
M.getOrInsertFunction(
3118 MallocNameTemplate + Suffix, IntptrTy, IntptrTy);
3119 AsanStackFreeFunc[
Index] =
3124 if (ASan.UseAfterScope) {
3125 AsanPoisonStackMemoryFunc =
M.getOrInsertFunction(
3127 AsanUnpoisonStackMemoryFunc =
M.getOrInsertFunction(
3131 for (
size_t Val : {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0xf1, 0xf2,
3132 0xf3, 0xf5, 0xf8}) {
3133 std::ostringstream
Name;
3135 Name << std::setw(2) << std::setfill(
'0') << std::hex << Val;
3136 AsanSetShadowFunc[Val] =
3137 M.getOrInsertFunction(
Name.str(), IRB.
getVoidTy(), IntptrTy, IntptrTy);
3140 AsanAllocaPoisonFunc =
M.getOrInsertFunction(
3142 AsanAllocasUnpoisonFunc =
M.getOrInsertFunction(
3148 size_t Begin,
size_t End,
3150 Value *ShadowBase) {
3154 const size_t LargestStoreSizeInBytes =
3155 std::min<size_t>(
sizeof(
uint64_t), ASan.LongSize / 8);
3157 const bool IsLittleEndian =
F.getDataLayout().isLittleEndian();
3163 for (
size_t i = Begin; i <
End;) {
3164 if (!ShadowMask[i]) {
3170 size_t StoreSizeInBytes = LargestStoreSizeInBytes;
3172 while (StoreSizeInBytes >
End - i)
3173 StoreSizeInBytes /= 2;
3176 for (
size_t j = StoreSizeInBytes - 1;
j && !ShadowMask[i +
j]; --
j) {
3177 while (j <= StoreSizeInBytes / 2)
3178 StoreSizeInBytes /= 2;
3182 for (
size_t j = 0;
j < StoreSizeInBytes;
j++) {
3184 Val |= (
uint64_t)ShadowBytes[i + j] << (8 * j);
3186 Val = (Val << 8) | ShadowBytes[i + j];
3195 i += StoreSizeInBytes;
3202 copyToShadow(ShadowMask, ShadowBytes, 0, ShadowMask.
size(), IRB, ShadowBase);
3207 size_t Begin,
size_t End,
3210 size_t Done = Begin;
3211 for (
size_t i = Begin, j = Begin + 1; i <
End; i =
j++) {
3212 if (!ShadowMask[i]) {
3216 uint8_t Val = ShadowBytes[i];
3217 if (!AsanSetShadowFunc[Val])
3221 for (;
j <
End && ShadowMask[
j] && Val == ShadowBytes[
j]; ++
j) {
3224 if (j - i >= ASan.MaxInlinePoisoningSize) {
3225 copyToShadowInline(ShadowMask, ShadowBytes,
Done, i, IRB, ShadowBase);
3226 RTCI.createRuntimeCall(
3227 IRB, AsanSetShadowFunc[Val],
3228 {IRB.
CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i)),
3229 ConstantInt::get(IntptrTy, j - i)});
3234 copyToShadowInline(ShadowMask, ShadowBytes,
Done,
End, IRB, ShadowBase);
3242 for (
int i = 0;; i++, MaxSize *= 2)
3243 if (LocalStackSize <= MaxSize)
return i;
3247void FunctionStackPoisoner::copyArgsPassedByValToAllocas() {
3249 if (CopyInsertPoint == ASan.LocalDynamicShadow) {
3257 if (Arg.hasByValAttr()) {
3258 Type *Ty = Arg.getParamByValType();
3259 const Align Alignment =
3260 DL.getValueOrABITypeAlignment(Arg.getParamAlign(), Ty);
3264 (Arg.hasName() ? Arg.getName() :
"Arg" +
Twine(Arg.getArgNo())) +
3267 Arg.replaceAllUsesWith(AI);
3269 uint64_t AllocSize =
DL.getTypeAllocSize(Ty);
3270 IRB.
CreateMemCpy(AI, Alignment, &Arg, Alignment, AllocSize);
3278 Value *ValueIfFalse) {
3281 PHI->addIncoming(ValueIfFalse, CondBlock);
3283 PHI->addIncoming(ValueIfTrue, ThenBlock);
3287Value *FunctionStackPoisoner::createAllocaForLayout(
3296 nullptr,
"MyAlloca");
3305void FunctionStackPoisoner::createDynamicAllocasInitStorage() {
3308 DynamicAllocaLayout = IRB.
CreateAlloca(IntptrTy,
nullptr);
3313void FunctionStackPoisoner::processDynamicAllocas() {
3320 for (
const auto &APC : DynamicAllocaPoisonCallVec) {
3323 assert(ASan.isInterestingAlloca(*APC.AI));
3324 assert(!APC.AI->isStaticAlloca());
3327 poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
3334 createDynamicAllocasInitStorage();
3335 for (
auto &AI : DynamicAllocaVec)
3336 handleDynamicAllocaCall(AI);
3337 unpoisonDynamicAllocas();
3349 for (
Instruction *It = Start; It; It = It->getNextNonDebugInstruction()) {
3359 if (isa<AllocaInst>(It) || isa<CastInst>(It))
3361 if (
auto *Store = dyn_cast<StoreInst>(It)) {
3365 auto *Alloca = dyn_cast<AllocaInst>(Store->getPointerOperand());
3366 if (!Alloca || ASan.isInterestingAlloca(*Alloca))
3369 Value *Val = Store->getValueOperand();
3370 bool IsDirectArgInit = isa<Argument>(Val);
3371 bool IsArgInitViaCast =
3372 isa<CastInst>(Val) &&
3373 isa<Argument>(cast<CastInst>(Val)->getOperand(0)) &&
3376 Val == It->getPrevNonDebugInstruction();
3377 bool IsArgInit = IsDirectArgInit || IsArgInitViaCast;
3381 if (IsArgInitViaCast)
3382 InitInsts.
push_back(cast<Instruction>(Val));
3393void FunctionStackPoisoner::processStaticAllocas() {
3394 if (AllocaVec.
empty()) {
3399 int StackMallocIdx = -1;
3401 if (
auto SP =
F.getSubprogram())
3402 EntryDebugLocation =
3411 auto InsBeforeB = InsBefore->
getParent();
3412 assert(InsBeforeB == &
F.getEntryBlock());
3413 for (
auto *AI : StaticAllocasToMoveUp)
3424 ArgInitInst->moveBefore(InsBefore);
3427 if (LocalEscapeCall) LocalEscapeCall->
moveBefore(InsBefore);
3433 ASan.getAllocaSizeInBytes(*AI),
3444 uint64_t Granularity = 1ULL << Mapping.Scale;
3445 uint64_t MinHeaderSize = std::max((
uint64_t)ASan.LongSize / 2, Granularity);
3451 for (
auto &
Desc : SVD)
3455 for (
const auto &APC : StaticAllocaPoisonCallVec) {
3458 assert(ASan.isInterestingAlloca(*APC.AI));
3459 assert(APC.AI->isStaticAlloca());
3464 if (
const DILocation *LifetimeLoc = APC.InsBefore->getDebugLoc().get()) {
3465 if (LifetimeLoc->getFile() == FnLoc->getFile())
3466 if (
unsigned Line = LifetimeLoc->getLine())
3467 Desc.Line = std::min(
Desc.Line ?
Desc.Line : Line, Line);
3473 LLVM_DEBUG(
dbgs() << DescriptionString <<
" --- " <<
L.FrameSize <<
"\n");
3475 bool DoStackMalloc =
3485 DoDynamicAlloca &= !HasInlineAsm && !HasReturnsTwiceCall;
3486 DoStackMalloc &= !HasInlineAsm && !HasReturnsTwiceCall;
3488 Value *StaticAlloca =
3489 DoDynamicAlloca ? nullptr : createAllocaForLayout(IRB, L,
false);
3492 Value *LocalStackBase;
3493 Value *LocalStackBaseAlloca;
3496 if (DoStackMalloc) {
3497 LocalStackBaseAlloca =
3498 IRB.
CreateAlloca(IntptrTy,
nullptr,
"asan_local_stack_base");
3505 Constant *OptionDetectUseAfterReturn =
F.getParent()->getOrInsertGlobal(
3515 Value *FakeStackValue =
3516 RTCI.createRuntimeCall(IRBIf, AsanStackMallocFunc[StackMallocIdx],
3517 ConstantInt::get(IntptrTy, LocalStackSize));
3519 FakeStack = createPHI(IRB, UseAfterReturnIsEnabled, FakeStackValue, Term,
3520 ConstantInt::get(IntptrTy, 0));
3528 RTCI.createRuntimeCall(IRB, AsanStackMallocFunc[StackMallocIdx],
3529 ConstantInt::get(IntptrTy, LocalStackSize));
3531 Value *NoFakeStack =
3536 Value *AllocaValue =
3537 DoDynamicAlloca ? createAllocaForLayout(IRBIf, L,
true) : StaticAlloca;
3540 LocalStackBase = createPHI(IRB, NoFakeStack, AllocaValue, Term, FakeStack);
3541 IRB.
CreateStore(LocalStackBase, LocalStackBaseAlloca);
3546 FakeStack = ConstantInt::get(IntptrTy, 0);
3548 DoDynamicAlloca ? createAllocaForLayout(IRB, L,
true) : StaticAlloca;
3549 LocalStackBaseAlloca = LocalStackBase;
3555 Value *LocalStackBaseAllocaPtr =
3556 isa<PtrToIntInst>(LocalStackBaseAlloca)
3557 ? cast<PtrToIntInst>(LocalStackBaseAlloca)->getPointerOperand()
3558 : LocalStackBaseAlloca;
3559 assert(isa<AllocaInst>(LocalStackBaseAllocaPtr) &&
3560 "Variable descriptions relative to ASan stack base will be dropped");
3563 for (
const auto &
Desc : SVD) {
3568 IRB.
CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy,
Desc.Offset)),
3581 ConstantInt::get(IntptrTy, ASan.LongSize / 8)),
3591 ConstantInt::get(IntptrTy, 2 * ASan.LongSize / 8)),
3598 Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB);
3601 copyToShadow(ShadowAfterScope, ShadowAfterScope, IRB, ShadowBase);
3603 if (!StaticAllocaPoisonCallVec.empty()) {
3607 for (
const auto &APC : StaticAllocaPoisonCallVec) {
3610 size_t Begin =
Desc.Offset /
L.Granularity;
3611 size_t End = Begin + (APC.Size +
L.Granularity - 1) /
L.Granularity;
3614 copyToShadow(ShadowAfterScope,
3615 APC.DoPoison ? ShadowAfterScope : ShadowInScope, Begin,
End,
3629 if (DoStackMalloc) {
3630 assert(StackMallocIdx >= 0);
3647 if (ASan.MaxInlinePoisoningSize != 0 && StackMallocIdx <= 4) {
3649 ShadowAfterReturn.
resize(ClassSize /
L.Granularity,
3651 copyToShadow(ShadowAfterReturn, ShadowAfterReturn, IRBPoison,
3653 Value *SavedFlagPtrPtr = IRBPoison.CreateAdd(
3655 ConstantInt::get(IntptrTy, ClassSize - ASan.LongSize / 8));
3656 Value *SavedFlagPtr = IRBPoison.CreateLoad(
3657 IntptrTy, IRBPoison.CreateIntToPtr(SavedFlagPtrPtr, IntptrPtrTy));
3658 IRBPoison.CreateStore(
3660 IRBPoison.CreateIntToPtr(SavedFlagPtr, IRBPoison.getPtrTy()));
3663 RTCI.createRuntimeCall(
3664 IRBPoison, AsanStackFreeFunc[StackMallocIdx],
3665 {FakeStack, ConstantInt::get(IntptrTy, LocalStackSize)});
3669 copyToShadow(ShadowAfterScope, ShadowClean, IRBElse, ShadowBase);
3671 copyToShadow(ShadowAfterScope, ShadowClean, IRBRet, ShadowBase);
3676 for (
auto *AI : AllocaVec)
3684 Value *SizeArg = ConstantInt::get(IntptrTy,
Size);
3685 RTCI.createRuntimeCall(
3686 IRB, DoPoison ? AsanPoisonStackMemoryFunc : AsanUnpoisonStackMemoryFunc,
3687 {AddrArg, SizeArg});
3698void FunctionStackPoisoner::handleDynamicAllocaCall(
AllocaInst *AI) {
3706 Value *AllocaRzMask = ConstantInt::get(IntptrTy, AllocaRedzoneMask);
3712 const unsigned ElementSize =
3716 ConstantInt::get(IntptrTy, ElementSize));
3744 ConstantInt::get(IntptrTy, Alignment.
value()));
3747 RTCI.createRuntimeCall(IRB, AsanAllocaPoisonFunc, {NewAddress, OldSize});
static cl::opt< bool > ClUseStackSafety("stack-tagging-use-stack-safety", cl::Hidden, cl::init(true), cl::desc("Use Stack Safety analysis results"))
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
static void findStoresToUninstrumentedArgAllocas(AddressSanitizer &ASan, Instruction &InsBefore, SmallVectorImpl< Instruction * > &InitInsts)
Collect instructions in the entry block after InsBefore which initialize permanent storage for a func...
static void doInstrumentAddress(AddressSanitizer *Pass, Instruction *I, Instruction *InsertBefore, Value *Addr, MaybeAlign Alignment, unsigned Granularity, TypeSize TypeStoreSize, bool IsWrite, Value *SizeArgument, bool UseCalls, uint32_t Exp, RuntimeCallInserter &RTCI)
static const uint64_t kDefaultShadowScale
const char kAMDGPUUnreachableName[]
constexpr size_t kAccessSizeIndexMask
static cl::opt< int > ClDebugMin("asan-debug-min", cl::desc("Debug min inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClUsePrivateAlias("asan-use-private-alias", cl::desc("Use private aliases for global variables"), cl::Hidden, cl::init(true))
static const uint64_t kPS_ShadowOffset64
static const uint64_t kFreeBSD_ShadowOffset32
constexpr size_t kIsWriteShift
static const uint64_t kSmallX86_64ShadowOffsetAlignMask
static bool isInterestingPointerSubtraction(Instruction *I)
const char kAMDGPUAddressSharedName[]
const char kAsanStackFreeNameTemplate[]
constexpr size_t kCompileKernelMask
static cl::opt< bool > ClForceDynamicShadow("asan-force-dynamic-shadow", cl::desc("Load shadow address into a local variable for each function"), cl::Hidden, cl::init(false))
const char kAsanOptionDetectUseAfterReturn[]
static cl::opt< std::string > ClMemoryAccessCallbackPrefix("asan-memory-access-callback-prefix", cl::desc("Prefix for memory access callbacks"), cl::Hidden, cl::init("__asan_"))
static const uint64_t kRISCV64_ShadowOffset64
static cl::opt< bool > ClInsertVersionCheck("asan-guard-against-version-mismatch", cl::desc("Guard against compiler/runtime version mismatch."), cl::Hidden, cl::init(true))
const char kAsanSetShadowPrefix[]
static cl::opt< AsanDtorKind > ClOverrideDestructorKind("asan-destructor-kind", cl::desc("Sets the ASan destructor kind. The default is to use the value " "provided to the pass constructor"), cl::values(clEnumValN(AsanDtorKind::None, "none", "No destructors"), clEnumValN(AsanDtorKind::Global, "global", "Use global destructors")), cl::init(AsanDtorKind::Invalid), cl::Hidden)
static cl::opt< bool > ClInstrumentWrites("asan-instrument-writes", cl::desc("instrument write instructions"), cl::Hidden, cl::init(true))
static uint64_t GetCtorAndDtorPriority(Triple &TargetTriple)
const char kAsanStackMallocNameTemplate[]
static cl::opt< bool > ClInstrumentByval("asan-instrument-byval", cl::desc("instrument byval call arguments"), cl::Hidden, cl::init(true))
const char kAsanInitName[]
static cl::opt< bool > ClGlobals("asan-globals", cl::desc("Handle global objects"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClRedzoneByvalArgs("asan-redzone-byval-args", cl::desc("Create redzones for byval " "arguments (extra copy " "required)"), cl::Hidden, cl::init(true))
static const uint64_t kWindowsShadowOffset64
static const uint64_t kEmscriptenShadowOffset
const char kAsanGenPrefix[]
constexpr size_t kIsWriteMask
static uint64_t getRedzoneSizeForScale(int MappingScale)
static const uint64_t kDefaultShadowOffset64
static cl::opt< bool > ClOptimizeCallbacks("asan-optimize-callbacks", cl::desc("Optimize callbacks"), cl::Hidden, cl::init(false))
const char kAsanUnregisterGlobalsName[]
static const uint64_t kAsanCtorAndDtorPriority
const char kAsanUnpoisonGlobalsName[]
static cl::opt< bool > ClWithIfuncSuppressRemat("asan-with-ifunc-suppress-remat", cl::desc("Suppress rematerialization of dynamic shadow address by passing " "it through inline asm in prologue."), cl::Hidden, cl::init(true))
static cl::opt< int > ClDebugStack("asan-debug-stack", cl::desc("debug stack"), cl::Hidden, cl::init(0))
const char kAsanUnregisterElfGlobalsName[]
static bool isUnsupportedAMDGPUAddrspace(Value *Addr)
const char kAsanRegisterImageGlobalsName[]
static cl::opt< bool > ClOpt("asan-opt", cl::desc("Optimize instrumentation"), cl::Hidden, cl::init(true))
static const uint64_t kAllocaRzSize
const char kODRGenPrefix[]
static const uint64_t kSystemZ_ShadowOffset64
static const uint64_t kDefaultShadowOffset32
const char kAsanShadowMemoryDynamicAddress[]
static cl::opt< bool > ClUseOdrIndicator("asan-use-odr-indicator", cl::desc("Use odr indicators to improve ODR reporting"), cl::Hidden, cl::init(true))
static bool GlobalWasGeneratedByCompiler(GlobalVariable *G)
Check if G has been created by a trusted compiler pass.
const char kAsanStackMallocAlwaysNameTemplate[]
static cl::opt< bool > ClInvalidPointerCmp("asan-detect-invalid-pointer-cmp", cl::desc("Instrument <, <=, >, >= with pointer operands"), cl::Hidden, cl::init(false))
static const uint64_t kAsanEmscriptenCtorAndDtorPriority
static cl::opt< int > ClInstrumentationWithCallsThreshold("asan-instrumentation-with-call-threshold", cl::desc("If the function being instrumented contains more than " "this number of memory accesses, use callbacks instead of " "inline checks (-1 means never use callbacks)."), cl::Hidden, cl::init(7000))
static cl::opt< int > ClDebugMax("asan-debug-max", cl::desc("Debug max inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClInvalidPointerSub("asan-detect-invalid-pointer-sub", cl::desc("Instrument - operations with pointer operands"), cl::Hidden, cl::init(false))
static const uint64_t kFreeBSD_ShadowOffset64
static cl::opt< uint32_t > ClForceExperiment("asan-force-experiment", cl::desc("Force optimization experiment (for testing)"), cl::Hidden, cl::init(0))
const char kSanCovGenPrefix[]
static const uint64_t kFreeBSDKasan_ShadowOffset64
const char kAsanModuleDtorName[]
static const uint64_t kDynamicShadowSentinel
static bool isInterestingPointerComparison(Instruction *I)
static cl::opt< bool > ClStack("asan-stack", cl::desc("Handle stack memory"), cl::Hidden, cl::init(true))
static const uint64_t kMIPS64_ShadowOffset64
static const uint64_t kLinuxKasan_ShadowOffset64
static int StackMallocSizeClass(uint64_t LocalStackSize)
static cl::opt< uint32_t > ClMaxInlinePoisoningSize("asan-max-inline-poisoning-size", cl::desc("Inline shadow poisoning for blocks up to the given size in bytes."), cl::Hidden, cl::init(64))
static cl::opt< bool > ClInstrumentAtomics("asan-instrument-atomics", cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClUseAfterScope("asan-use-after-scope", cl::desc("Check stack-use-after-scope"), cl::Hidden, cl::init(false))
constexpr size_t kAccessSizeIndexShift
static cl::opt< int > ClMappingScale("asan-mapping-scale", cl::desc("scale of asan shadow mapping"), cl::Hidden, cl::init(0))
const char kAsanPoisonStackMemoryName[]
static cl::opt< bool > ClEnableKasan("asan-kernel", cl::desc("Enable KernelAddressSanitizer instrumentation"), cl::Hidden, cl::init(false))
static cl::opt< std::string > ClDebugFunc("asan-debug-func", cl::Hidden, cl::desc("Debug func"))
static cl::opt< bool > ClUseGlobalsGC("asan-globals-live-support", cl::desc("Use linker features to support dead " "code stripping of globals"), cl::Hidden, cl::init(true))
static const size_t kNumberOfAccessSizes
const char kAsanUnpoisonStackMemoryName[]
static const uint64_t kLoongArch64_ShadowOffset64
const char kAsanRegisterGlobalsName[]
static cl::opt< bool > ClInstrumentDynamicAllocas("asan-instrument-dynamic-allocas", cl::desc("instrument dynamic allocas"), cl::Hidden, cl::init(true))
const char kAsanModuleCtorName[]
const char kAsanGlobalsRegisteredFlagName[]
static const size_t kMaxStackMallocSize
static cl::opt< bool > ClRecover("asan-recover", cl::desc("Enable recovery mode (continue-after-error)."), cl::Hidden, cl::init(false))
static cl::opt< bool > ClOptSameTemp("asan-opt-same-temp", cl::desc("Instrument the same temp just once"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClDynamicAllocaStack("asan-stack-dynamic-alloca", cl::desc("Use dynamic alloca to represent stack variables"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClOptStack("asan-opt-stack", cl::desc("Don't instrument scalar stack variables"), cl::Hidden, cl::init(false))
static const uint64_t kMIPS_ShadowOffsetN32
const char kAsanUnregisterImageGlobalsName[]
static cl::opt< AsanDetectStackUseAfterReturnMode > ClUseAfterReturn("asan-use-after-return", cl::desc("Sets the mode of detection for stack-use-after-return."), cl::values(clEnumValN(AsanDetectStackUseAfterReturnMode::Never, "never", "Never detect stack use after return."), clEnumValN(AsanDetectStackUseAfterReturnMode::Runtime, "runtime", "Detect stack use after return if " "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."), clEnumValN(AsanDetectStackUseAfterReturnMode::Always, "always", "Always detect stack use after return.")), cl::Hidden, cl::init(AsanDetectStackUseAfterReturnMode::Runtime))
static cl::opt< bool > ClOptGlobals("asan-opt-globals", cl::desc("Don't instrument scalar globals"), cl::Hidden, cl::init(true))
static const uintptr_t kCurrentStackFrameMagic
static ShadowMapping getShadowMapping(const Triple &TargetTriple, int LongSize, bool IsKasan)
static const uint64_t kPPC64_ShadowOffset64
static cl::opt< AsanCtorKind > ClConstructorKind("asan-constructor-kind", cl::desc("Sets the ASan constructor kind"), cl::values(clEnumValN(AsanCtorKind::None, "none", "No constructors"), clEnumValN(AsanCtorKind::Global, "global", "Use global constructors")), cl::init(AsanCtorKind::Global), cl::Hidden)
static const int kMaxAsanStackMallocSizeClass
static const uint64_t kMIPS32_ShadowOffset32
static cl::opt< bool > ClAlwaysSlowPath("asan-always-slow-path", cl::desc("use instrumentation with slow path for all accesses"), cl::Hidden, cl::init(false))
static const uint64_t kNetBSD_ShadowOffset32
static const uint64_t kFreeBSDAArch64_ShadowOffset64
static const uint64_t kSmallX86_64ShadowOffsetBase
static cl::opt< bool > ClInitializers("asan-initialization-order", cl::desc("Handle C++ initializer order"), cl::Hidden, cl::init(true))
static const uint64_t kNetBSD_ShadowOffset64
static cl::opt< unsigned > ClRealignStack("asan-realign-stack", cl::desc("Realign stack to the value of this flag (power of two)"), cl::Hidden, cl::init(32))
static const uint64_t kWindowsShadowOffset32
static cl::opt< bool > ClInstrumentReads("asan-instrument-reads", cl::desc("instrument read instructions"), cl::Hidden, cl::init(true))
static size_t TypeStoreSizeToSizeIndex(uint32_t TypeSize)
const char kAsanAllocaPoison[]
constexpr size_t kCompileKernelShift
static cl::opt< bool > ClWithIfunc("asan-with-ifunc", cl::desc("Access dynamic shadow through an ifunc global on " "platforms that support this"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClKasanMemIntrinCallbackPrefix("asan-kernel-mem-intrinsic-prefix", cl::desc("Use prefix for memory intrinsics in KASAN mode"), cl::Hidden, cl::init(false))
const char kAsanVersionCheckNamePrefix[]
const char kAMDGPUAddressPrivateName[]
static const uint64_t kNetBSDKasan_ShadowOffset64
const char kAMDGPUBallotName[]
const char kAsanRegisterElfGlobalsName[]
static cl::opt< uint64_t > ClMappingOffset("asan-mapping-offset", cl::desc("offset of asan shadow mapping [EXPERIMENTAL]"), cl::Hidden, cl::init(0))
const char kAsanReportErrorTemplate[]
static cl::opt< bool > ClWithComdat("asan-with-comdat", cl::desc("Place ASan constructors in comdat sections"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClSkipPromotableAllocas("asan-skip-promotable-allocas", cl::desc("Do not instrument promotable allocas"), cl::Hidden, cl::init(true))
static cl::opt< int > ClMaxInsnsToInstrumentPerBB("asan-max-ins-per-bb", cl::init(10000), cl::desc("maximal number of instructions to instrument in any given BB"), cl::Hidden)
static const uintptr_t kRetiredStackFrameMagic
static cl::opt< bool > ClUseStackSafety("asan-use-stack-safety", cl::Hidden, cl::init(true), cl::Hidden, cl::desc("Use Stack Safety analysis results"), cl::Optional)
const char kAsanPoisonGlobalsName[]
const char kAsanHandleNoReturnName[]
static const size_t kMinStackMallocSize
static cl::opt< int > ClDebug("asan-debug", cl::desc("debug"), cl::Hidden, cl::init(0))
const char kAsanAllocasUnpoison[]
static const uint64_t kAArch64_ShadowOffset64
static cl::opt< bool > ClInvalidPointerPairs("asan-detect-invalid-pointer-pair", cl::desc("Instrument <, <=, >, >=, - with pointer operands"), cl::Hidden, cl::init(false))
This file contains the simple types necessary to represent the attributes associated with functions a...
static bool isPointerOperand(Value *I, User *U)
static const Function * getParent(const Value *V)
static GCRegistry::Add< StatepointGC > D("statepoint-example", "an example strategy for statepoint")
#define clEnumValN(ENUMVAL, FLAGNAME, DESC)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file defines the DenseMap class.
This file builds on the ADT/GraphTraits.h file to build generic depth first graph iterator.
static bool runOnFunction(Function &F, bool PostInlining)
This is the interface for a simple mod/ref and alias analysis over globals.
This defines the Use class.
Module.h This file contains the declarations for the Module class.
uint64_t IntrinsicInst * II
FunctionAnalysisManager FAM
ModuleAnalysisManager MAM
const SmallVectorImpl< MachineOperand > & Cond
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
uint64_t getZExtValue() const
Get zero extended value.
int64_t getSExtValue() const
Get sign extended value.
AddressSanitizerPass(const AddressSanitizerOptions &Options, bool UseGlobalGC=true, bool UseOdrIndicator=true, AsanDtorKind DestructorKind=AsanDtorKind::Global, AsanCtorKind ConstructorKind=AsanCtorKind::Global)
PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM)
void printPipeline(raw_ostream &OS, function_ref< StringRef(StringRef)> MapClassName2PassName)
an instruction to allocate memory on the stack
bool isSwiftError() const
Return true if this alloca is used as a swifterror argument to a call.
bool isStaticAlloca() const
Return true if this alloca is in the entry block of the function and is a constant size.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
PointerType * getType() const
Overload to return most specific pointer type.
Type * getAllocatedType() const
Return the type that is being allocated by the instruction.
bool isUsedWithInAlloca() const
Return true if this alloca is used as an inalloca argument to a call.
std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
void setAlignment(Align Align)
const Value * getArraySize() const
Get the number of elements allocated.
A container for analyses that lazily runs them and caches their results.
PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)
Get the result of an analysis pass for a given IR unit.
This class represents an incoming formal argument to a Function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
size_t size() const
size - Get the array size.
static ArrayType * get(Type *ElementType, uint64_t NumElements)
This static method is the primary way to construct an ArrayType.
An instruction that atomically checks whether a specified value is in a memory location,...
an instruction that atomically reads a memory location, combines it with another value,...
AttributeList addParamAttribute(LLVMContext &C, unsigned ArgNo, Attribute::AttrKind Kind) const
Add an argument attribute to the list.
LLVM Basic Block Representation.
iterator begin()
Instruction iterator methods.
const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Function * getParent() const
Return the enclosing method, or null if none.
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
const Module * getModule() const
Return the module owning the function this basic block belongs to, or nullptr if the function does no...
Conditional or Unconditional Branch instruction.
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
bool isInlineAsm() const
Check if this call is an inline asm statement.
static CallBase * addOperandBundle(CallBase *CB, uint32_t ID, OperandBundleDef OB, InsertPosition InsertPt=nullptr)
Create a clone of CB with operand bundle OB added.
bool doesNotReturn() const
Determine if the call cannot return.
unsigned arg_size() const
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
@ Largest
The linker will choose the largest COMDAT.
@ SameSize
The data referenced by the COMDAT must be the same size.
@ Any
The linker may choose any COMDAT.
@ NoDeduplicate
No deduplication is performed.
@ ExactMatch
The data referenced by the COMDAT must be the same.
ConstantArray - Constant Array Declarations.
static Constant * get(ArrayType *T, ArrayRef< Constant * > V)
static Constant * getIntToPtr(Constant *C, Type *Ty, bool OnlyIfReduced=false)
static Constant * getPointerCast(Constant *C, Type *Ty)
Create a BitCast, AddrSpaceCast, or a PtrToInt cast constant expression.
static Constant * getGetElementPtr(Type *Ty, Constant *C, ArrayRef< Constant * > IdxList, GEPNoWrapFlags NW=GEPNoWrapFlags::none(), std::optional< ConstantRange > InRange=std::nullopt, Type *OnlyIfReducedTy=nullptr)
Getelementptr form.
static bool isValueValidForType(Type *Ty, uint64_t V)
This static method returns true if the type Ty is big enough to represent the value V.
static ConstantPointerNull * get(PointerType *T)
Static factory methods - Return objects of the specified value.
static Constant * get(StructType *T, ArrayRef< Constant * > V)
This is an important base class in LLVM.
static Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
Constant * getAggregateElement(unsigned Elt) const
For aggregates (struct/array/vector) return the constant that corresponds to the specified element if...
This class represents an Operation in the Expression.
A parsed version of the target data layout string in and methods for querying it.
DILocation * get() const
Get the underlying DILocation.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
static FunctionType * get(Type *Result, ArrayRef< Type * > Params, bool isVarArg)
This static method is the primary way of constructing a FunctionType.
const BasicBlock & front() const
static Function * createWithDefaultAttr(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
Creates a function with some attributes recorded in llvm.module.flags and the LLVMContext applied.
bool hasPersonalityFn() const
Check whether this function has a personality function.
Constant * getPersonalityFn() const
Get the personality function associated with this function.
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
const Constant * getAliasee() const
static GlobalAlias * create(Type *Ty, unsigned AddressSpace, LinkageTypes Linkage, const Twine &Name, Constant *Aliasee, Module *Parent)
If a parent module is specified, the alias is automatically inserted into the end of the specified mo...
void setAlignment(Align Align)
Sets the alignment attribute of the GlobalObject.
void copyMetadata(const GlobalObject *Src, unsigned Offset)
Copy metadata from Src, adjusting offsets by Offset.
void setComdat(Comdat *C)
void setSection(StringRef S)
Change the section for this global.
VisibilityTypes getVisibility() const
void setUnnamedAddr(UnnamedAddr Val)
bool hasLocalLinkage() const
static StringRef dropLLVMManglingEscape(StringRef Name)
If the given string begins with the GlobalValue name mangling escape character '\1',...
ThreadLocalMode getThreadLocalMode() const
Module * getParent()
Get the module that this global value is contained inside of...
@ HiddenVisibility
The GV is hidden.
void setVisibility(VisibilityTypes V)
LinkageTypes
An enumeration for the kinds of linkage for global values.
@ PrivateLinkage
Like Internal, but omit from symbol table.
@ CommonLinkage
Tentative definitions.
@ InternalLinkage
Rename collisions when linking (static functions).
@ AvailableExternallyLinkage
Available for inspection, not emission.
@ ExternalWeakLinkage
ExternalWeak linkage description.
DLLStorageClassTypes getDLLStorageClass() const
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
void copyAttributesFrom(const GlobalVariable *Src)
copyAttributesFrom - copy all additional attributes (those not needed to create a GlobalVariable) fro...
Analysis pass providing a never-invalidated alias analysis result.
This instruction compares its operands according to the predicate given to the constructor.
Common base class shared among various IRBuilders.
AllocaInst * CreateAlloca(Type *Ty, unsigned AddrSpace, Value *ArraySize=nullptr, const Twine &Name="")
IntegerType * getInt1Ty()
Fetch the type representing a single bit.
Value * CreateExtractElement(Value *Vec, Value *Idx, const Twine &Name="")
LoadInst * CreateAlignedLoad(Type *Ty, Value *Ptr, MaybeAlign Align, const char *Name)
Value * CreatePointerCast(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateICmpSGE(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateSelect(Value *C, Value *True, Value *False, const Twine &Name="", Instruction *MDFrom=nullptr)
BasicBlock::iterator GetInsertPoint() const
Value * CreateIntToPtr(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateTypeSize(Type *DstType, TypeSize Size)
Create an expression which evaluates to the number of units in Size at runtime.
Value * CreateLShr(Value *LHS, Value *RHS, const Twine &Name="", bool isExact=false)
IntegerType * getInt32Ty()
Fetch the type representing a 32-bit integer.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
BasicBlock * GetInsertBlock() const
IntegerType * getInt64Ty()
Fetch the type representing a 64-bit integer.
Value * CreateICmpNE(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateGEP(Type *Ty, Value *Ptr, ArrayRef< Value * > IdxList, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
ConstantInt * getInt32(uint32_t C)
Get a constant 32-bit value.
PHINode * CreatePHI(Type *Ty, unsigned NumReservedValues, const Twine &Name="")
Value * CreateNot(Value *V, const Twine &Name="")
Value * CreateICmpEQ(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateSub(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
ConstantInt * getIntN(unsigned N, uint64_t C)
Get a constant N-bit value, zero extended or truncated from a 64-bit value.
LoadInst * CreateLoad(Type *Ty, Value *Ptr, const char *Name)
Provided to resolve 'CreateLoad(Ty, Ptr, "...")' correctly, instead of converting the string to 'bool...
Value * CreateAnd(Value *LHS, Value *RHS, const Twine &Name="")
StoreInst * CreateStore(Value *Val, Value *Ptr, bool isVolatile=false)
Value * CreateAdd(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
Value * CreatePtrToInt(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateIsNotNull(Value *Arg, const Twine &Name="")
Return a boolean value testing if Arg != 0.
Value * CreateOr(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateIntCast(Value *V, Type *DestTy, bool isSigned, const Twine &Name="")
void SetInsertPoint(BasicBlock *TheBB)
This specifies that created instructions should be appended to the end of the specified block.
Type * getVoidTy()
Fetch the type representing void.
StoreInst * CreateAlignedStore(Value *Val, Value *Ptr, MaybeAlign Align, bool isVolatile=false)
CallInst * CreateCall(FunctionType *FTy, Value *Callee, ArrayRef< Value * > Args=std::nullopt, const Twine &Name="", MDNode *FPMathTag=nullptr)
IntegerType * getInt8Ty()
Fetch the type representing an 8-bit integer.
CallInst * CreateMemCpy(Value *Dst, MaybeAlign DstAlign, Value *Src, MaybeAlign SrcAlign, uint64_t Size, bool isVolatile=false, MDNode *TBAATag=nullptr, MDNode *TBAAStructTag=nullptr, MDNode *ScopeTag=nullptr, MDNode *NoAliasTag=nullptr)
Create and insert a memcpy between the specified pointers.
Value * CreateAddrSpaceCast(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateMul(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
static InlineAsm * get(FunctionType *Ty, StringRef AsmString, StringRef Constraints, bool hasSideEffects, bool isAlignStack=false, AsmDialect asmDialect=AD_ATT, bool canThrow=false)
InlineAsm::get - Return the specified uniqued inline asm string.
An analysis over an "outer" IR unit that provides access to an analysis manager over an "inner" IR un...
Base class for instruction visitors.
RetTy visitCallBase(CallBase &I)
RetTy visitCleanupReturnInst(CleanupReturnInst &I)
RetTy visitIntrinsicInst(IntrinsicInst &I)
void visit(Iterator Start, Iterator End)
RetTy visitReturnInst(ReturnInst &I)
RetTy visitAllocaInst(AllocaInst &I)
RetTy visitResumeInst(ResumeInst &I)
const DebugLoc & getDebugLoc() const
Return the debug location for this node as a DebugLoc.
const Module * getModule() const
Return the module owning the function this instruction belongs to, or nullptr if the function does not...
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
bool isEHPad() const
Return true if the instruction is a variety of EH-block.
InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
const Instruction * getNextNonDebugInstruction(bool SkipPseudoOp=false) const
Return a pointer to the next non-debug instruction in the same basic block as 'this',...
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
void moveBefore(Instruction *MovePos)
Unlink this instruction from its current basic block and insert it into the basic block that MovePos ...
static IntegerType * get(LLVMContext &C, unsigned NumBits)
This static method is the primary way of constructing an IntegerType.
A wrapper class for inspecting calls to intrinsic functions.
This is an important class for using LLVM in a threaded context.
void emitError(uint64_t LocCookie, const Twine &ErrorStr)
emitError - Emit an error message to the currently installed error handler with optional location inf...
An instruction for reading from memory.
static Error ParseSectionSpecifier(StringRef Spec, StringRef &Segment, StringRef &Section, unsigned &TAA, bool &TAAParsed, unsigned &StubSize)
Parse the section specifier indicated by "Spec".
MDNode * createUnlikelyBranchWeights()
Return metadata containing two branch weights, with significant bias towards false destination.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
This is the common base class for memset/memcpy/memmove.
A Module instance is used to store all the information related to an LLVM module.
Evaluate the size and offset of an object pointed to by a Value* statically.
SizeOffsetAPInt compute(Value *V)
A container for an operand bundle being viewed as a set of values rather than a set of uses.
Pass interface - Implemented by all 'passes'.
static PointerType * get(Type *ElementType, unsigned AddressSpace)
This constructs a pointer to an object of the specified type in a numbered address space.
static PointerType * getUnqual(Type *ElementType)
This constructs a pointer to an object of the specified type in the default address space (address sp...
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses none()
Convenience factory function for the empty preserved set.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
void abandon()
Mark an analysis as abandoned.
Resume the propagation of an exception.
Return a value (possibly void), from a function.
static ReturnInst * Create(LLVMContext &C, Value *retVal=nullptr, InsertPosition InsertBefore=nullptr)
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void reserve(size_type N)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
This pass performs the global (interprocedural) stack safety analysis (new pass manager).
bool stackAccessIsSafe(const Instruction &I) const
bool isSafe(const AllocaInst &AI) const
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
bool starts_with(StringRef Prefix) const
Check if this string starts with the given Prefix.
constexpr bool empty() const
empty - Check if the string is empty.
constexpr const char * data() const
data - Get a pointer to the start of the string (which may not be null terminated).
Class to represent struct types.
static StructType * get(LLVMContext &Context, ArrayRef< Type * > Elements, bool isPacked=false)
This static method is the primary way to create a literal StructType.
Analysis pass providing the TargetLibraryInfo.
Provides information about what library functions are available for the current target.
AttributeList getAttrList(LLVMContext *C, ArrayRef< unsigned > ArgNos, bool Signed, bool Ret=false, AttributeList AL=AttributeList()) const
TinyPtrVector - This class is specialized for cases where there are normally 0 or 1 element in a vect...
Triple - Helper class for working with autoconf configuration names.
bool isAndroidVersionLT(unsigned Major) const
bool isThumb() const
Tests whether the target is Thumb (little and big endian).
bool isDriverKit() const
Is this an Apple DriverKit triple.
bool isAndroid() const
Tests whether the target is Android.
bool isMIPS64() const
Tests whether the target is MIPS 64-bit (little and big endian).
ArchType getArch() const
Get the parsed architecture type of this triple.
bool isLoongArch64() const
Tests whether the target is 64-bit LoongArch.
EnvironmentType getEnvironment() const
Get the parsed environment type of this triple.
bool isMIPS32() const
Tests whether the target is MIPS 32-bit (little and big endian).
bool isOSWindows() const
Tests whether the OS is Windows.
bool isARM() const
Tests whether the target is ARM (little and big endian).
bool isOSLinux() const
Tests whether the OS is Linux.
bool isMacOSX() const
Is this a Mac OS X triple.
bool isOSEmscripten() const
Tests whether the OS is Emscripten.
bool isWatchOS() const
Is this an Apple watchOS triple.
bool isiOS() const
Is this an iOS triple.
bool isPS() const
Tests whether the target is the PS4 or PS5 platform.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
static IntegerType * getIntNTy(LLVMContext &C, unsigned N)
static Type * getVoidTy(LLVMContext &C)
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
static IntegerType * getInt32Ty(LLVMContext &C)
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
Value * getOperand(unsigned i) const
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
StringRef getName() const
Return a constant reference to the value's name.
void takeName(Value *V)
Transfer the name from V to this value.
static VectorType * get(Type *ElementType, ElementCount EC)
This static method is the primary way to construct an VectorType.
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable(