94#define DEBUG_TYPE "asan"
100 std::numeric_limits<uint64_t>::max();
141 "__asan_unregister_image_globals";
154 "__asan_stack_malloc_always_";
168 "__asan_option_detect_stack_use_after_return";
171 "__asan_shadow_memory_dynamic_address";
197 "asan-kernel",
cl::desc(
"Enable KernelAddressSanitizer instrumentation"),
202 cl::desc(
"Enable recovery mode (continue-after-error)."),
206 "asan-guard-against-version-mismatch",
212 cl::desc(
"instrument read instructions"),
216 "asan-instrument-writes",
cl::desc(
"instrument write instructions"),
225 "asan-instrument-atomics",
235 "asan-always-slow-path",
240 "asan-force-dynamic-shadow",
241 cl::desc(
"Load shadow address into a local variable for each function"),
246 cl::desc(
"Access dynamic shadow through an ifunc global on "
247 "platforms that support this"),
251 "asan-with-ifunc-suppress-remat",
252 cl::desc(
"Suppress rematerialization of dynamic shadow address by passing "
253 "it through inline asm in prologue."),
261 "asan-max-ins-per-bb",
cl::init(10000),
262 cl::desc(
"maximal number of instructions to instrument in any given BB"),
269 "asan-max-inline-poisoning-size",
271 "Inline shadow poisoning for blocks up to the given size in bytes."),
275 "asan-use-after-return",
276 cl::desc(
"Sets the mode of detection for stack-use-after-return."),
278 clEnumValN(AsanDetectStackUseAfterReturnMode::Never,
"never",
279 "Never detect stack use after return."),
281 AsanDetectStackUseAfterReturnMode::Runtime,
"runtime",
282 "Detect stack use after return if "
283 "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."),
284 clEnumValN(AsanDetectStackUseAfterReturnMode::Always,
"always",
285 "Always detect stack use after return.")),
289 cl::desc(
"Create redzones for byval "
290 "arguments (extra copy "
295 cl::desc(
"Check stack-use-after-scope"),
304 cl::desc(
"Handle C++ initializer order"),
308 "asan-detect-invalid-pointer-pair",
313 "asan-detect-invalid-pointer-cmp",
318 "asan-detect-invalid-pointer-sub",
323 "asan-realign-stack",
324 cl::desc(
"Realign stack to the value of this flag (power of two)"),
328 "asan-instrumentation-with-call-threshold",
329 cl::desc(
"If the function being instrumented contains more than "
330 "this number of memory accesses, use callbacks instead of "
331 "inline checks (-1 means never use callbacks)."),
335 "asan-memory-access-callback-prefix",
340 "asan-kernel-mem-intrinsic-prefix",
346 cl::desc(
"instrument dynamic allocas"),
350 "asan-skip-promotable-allocas",
355 "asan-constructor-kind",
356 cl::desc(
"Sets the ASan constructor kind"),
359 "Use global constructors")),
366 cl::desc(
"scale of asan shadow mapping"),
371 cl::desc(
"offset of asan shadow mapping [EXPERIMENTAL]"),
385 "asan-opt-same-temp",
cl::desc(
"Instrument the same temp just once"),
389 cl::desc(
"Don't instrument scalar globals"),
393 "asan-opt-stack",
cl::desc(
"Don't instrument scalar stack variables"),
397 "asan-stack-dynamic-alloca",
402 "asan-force-experiment",
408 cl::desc(
"Use private aliases for global variables"),
413 cl::desc(
"Use odr indicators to improve ODR reporting"),
418 cl::desc(
"Use linker features to support dead "
419 "code stripping of globals"),
426 cl::desc(
"Place ASan constructors in comdat sections"),
430 "asan-destructor-kind",
431 cl::desc(
"Sets the ASan destructor kind. The default is to use the value "
432 "provided to the pass constructor"),
435 "Use global destructors")),
455STATISTIC(NumInstrumentedReads,
"Number of instrumented reads");
456STATISTIC(NumInstrumentedWrites,
"Number of instrumented writes");
458 "Number of optimized accesses to global vars");
460 "Number of optimized accesses to stack vars");
469struct ShadowMapping {
480 bool IsAndroid = TargetTriple.
isAndroid();
483 bool IsMacOS = TargetTriple.
isMacOSX();
486 bool IsPS = TargetTriple.
isPS();
492 bool IsMIPSN32ABI = TargetTriple.
isABIN32();
493 bool IsMIPS32 = TargetTriple.
isMIPS32();
494 bool IsMIPS64 = TargetTriple.
isMIPS64();
495 bool IsArmOrThumb = TargetTriple.
isARM() || TargetTriple.
isThumb();
503 bool IsAMDGPU = TargetTriple.
isAMDGPU();
505 ShadowMapping Mapping;
512 if (LongSize == 32) {
515 else if (IsMIPSN32ABI)
527 else if (IsEmscripten)
540 else if (IsFreeBSD && IsAArch64)
542 else if (IsFreeBSD && !IsMIPS64) {
547 }
else if (IsNetBSD) {
554 else if (IsLinux && IsX86_64) {
560 }
else if (IsWindows && IsX86_64) {
566 else if (IsMacOS && IsAArch64)
570 else if (IsLoongArch64)
594 Mapping.OrShadowOffset = !IsAArch64 && !IsPPC64 && !IsSystemZ && !IsPS &&
595 !IsRISCV64 && !IsLoongArch64 &&
596 !(Mapping.Offset & (Mapping.Offset - 1)) &&
598 bool IsAndroidWithIfuncSupport =
600 Mapping.InGlobal =
ClWithIfunc && IsAndroidWithIfuncSupport && IsArmOrThumb;
608 int *MappingScale,
bool *OrShadowOffset) {
610 *ShadowBase = Mapping.Offset;
611 *MappingScale = Mapping.Scale;
612 *OrShadowOffset = Mapping.OrShadowOffset;
626 AccessSizeIndex(AccessSizeIndex), IsWrite(IsWrite),
627 CompileKernel(CompileKernel) {}
634 return std::max(32U, 1U << MappingScale);
653class RuntimeCallInserter {
655 bool TrackInsertedCalls =
false;
659 RuntimeCallInserter(
Function &Fn) : OwnerFn(&Fn) {
663 TrackInsertedCalls =
true;
667 ~RuntimeCallInserter() {
668 if (InsertedCalls.
empty())
670 assert(TrackInsertedCalls &&
"Calls were wrongly tracked");
673 for (
CallInst *CI : InsertedCalls) {
675 assert(BB &&
"Instruction doesn't belong to a BasicBlock");
677 "Instruction doesn't belong to the expected Function!");
685 if (Colors.
size() != 1) {
687 "Instruction's BasicBlock is not monochromatic");
694 if (EHPad && EHPad->
isEHPad()) {
698 OB, CI->getIterator());
699 NewCall->copyMetadata(*CI);
700 CI->replaceAllUsesWith(NewCall);
701 CI->eraseFromParent();
712 if (TrackInsertedCalls)
713 InsertedCalls.push_back(Inst);
719struct AddressSanitizer {
721 int InstrumentationWithCallsThreshold,
722 uint32_t MaxInlinePoisoningSize,
bool CompileKernel =
false,
723 bool Recover =
false,
bool UseAfterScope =
false,
725 AsanDetectStackUseAfterReturnMode::Runtime)
734 InstrumentationWithCallsThreshold(
737 : InstrumentationWithCallsThreshold),
740 : MaxInlinePoisoningSize) {
741 C = &(
M.getContext());
742 DL = &
M.getDataLayout();
743 LongSize =
M.getDataLayout().getPointerSizeInBits();
745 PtrTy = PointerType::getUnqual(*C);
747 TargetTriple =
Triple(
M.getTargetTriple());
751 assert(this->UseAfterReturn != AsanDetectStackUseAfterReturnMode::Invalid);
759 bool isInterestingAlloca(
const AllocaInst &AI);
767 const DataLayout &DL, RuntimeCallInserter &RTCI);
768 void instrumentPointerComparisonOrSubtraction(
Instruction *
I,
769 RuntimeCallInserter &RTCI);
772 uint32_t TypeStoreSize,
bool IsWrite,
774 RuntimeCallInserter &RTCI);
777 uint32_t TypeStoreSize,
bool IsWrite,
778 Value *SizeArgument);
783 TypeSize TypeStoreSize,
bool IsWrite,
784 Value *SizeArgument,
bool UseCalls,
786 RuntimeCallInserter &RTCI);
787 void instrumentMaskedLoadOrStore(AddressSanitizer *
Pass,
const DataLayout &DL,
791 Type *OpType,
bool IsWrite,
792 Value *SizeArgument,
bool UseCalls,
793 uint32_t Exp, RuntimeCallInserter &RTCI);
797 bool IsWrite,
size_t AccessSizeIndex,
799 RuntimeCallInserter &RTCI);
800 void instrumentMemIntrinsic(
MemIntrinsic *
MI, RuntimeCallInserter &RTCI);
802 bool suppressInstrumentationSiteForDebug(
int &Instrumented);
804 bool maybeInsertAsanInitAtFunctionEntry(
Function &
F);
805 bool maybeInsertDynamicShadowAtFunctionEntry(
Function &
F);
806 void markEscapedLocalAllocas(
Function &
F);
809 friend struct FunctionStackPoisoner;
819 struct FunctionStateRAII {
820 AddressSanitizer *
Pass;
822 FunctionStateRAII(AddressSanitizer *
Pass) :
Pass(
Pass) {
824 "last pass forgot to clear cache");
828 ~FunctionStateRAII() {
829 Pass->LocalDynamicShadow =
nullptr;
830 Pass->ProcessedAllocas.clear();
846 ShadowMapping Mapping;
860 Value *LocalDynamicShadow =
nullptr;
866 int InstrumentationWithCallsThreshold;
870class ModuleAddressSanitizer {
872 ModuleAddressSanitizer(
Module &M,
bool InsertVersionCheck,
873 bool CompileKernel =
false,
bool Recover =
false,
874 bool UseGlobalsGC =
true,
bool UseOdrIndicator =
true,
882 : InsertVersionCheck),
884 UseGlobalsGC(UseGlobalsGC &&
ClUseGlobalsGC && !this->CompileKernel),
899 UseCtorComdat(UseGlobalsGC &&
ClWithComdat && !this->CompileKernel),
900 DestructorKind(DestructorKind),
904 C = &(
M.getContext());
905 int LongSize =
M.getDataLayout().getPointerSizeInBits();
907 PtrTy = PointerType::getUnqual(*C);
908 TargetTriple =
Triple(
M.getTargetTriple());
913 assert(this->DestructorKind != AsanDtorKind::Invalid);
916 bool instrumentModule();
919 void initializeCallbacks();
921 void instrumentGlobals(
IRBuilder<> &IRB,
bool *CtorComdat);
928 const std::string &UniqueModuleId);
933 InstrumentGlobalsWithMetadataArray(
IRBuilder<> &IRB,
945 bool ShouldUseMachOGlobalsSection()
const;
946 StringRef getGlobalMetadataSection()
const;
947 void poisonOneInitializer(
Function &GlobalInit);
948 void createInitializerPoisonCalls();
949 uint64_t getMinRedzoneSizeForGlobal()
const {
953 int GetAsanVersion()
const;
958 bool InsertVersionCheck;
961 bool UsePrivateAlias;
962 bool UseOdrIndicator;
970 ShadowMapping Mapping;
980 Function *AsanCtorFunction =
nullptr;
981 Function *AsanDtorFunction =
nullptr;
994struct FunctionStackPoisoner :
public InstVisitor<FunctionStackPoisoner> {
996 AddressSanitizer &ASan;
997 RuntimeCallInserter &RTCI;
1002 ShadowMapping Mapping;
1011 FunctionCallee AsanPoisonStackMemoryFunc, AsanUnpoisonStackMemoryFunc;
1015 struct AllocaPoisonCall {
1023 bool HasUntracedLifetimeIntrinsic =
false;
1030 bool HasInlineAsm =
false;
1031 bool HasReturnsTwiceCall =
false;
1034 FunctionStackPoisoner(
Function &F, AddressSanitizer &ASan,
1035 RuntimeCallInserter &RTCI)
1036 :
F(
F), ASan(ASan), RTCI(RTCI),
1038 IntptrTy(ASan.IntptrTy),
1040 Mapping(ASan.Mapping),
1049 copyArgsPassedByValToAllocas();
1054 if (AllocaVec.
empty() && DynamicAllocaVec.
empty())
return false;
1056 initializeCallbacks(*
F.getParent());
1058 if (HasUntracedLifetimeIntrinsic) {
1062 StaticAllocaPoisonCallVec.
clear();
1063 DynamicAllocaPoisonCallVec.
clear();
1066 processDynamicAllocas();
1067 processStaticAllocas();
1078 void copyArgsPassedByValToAllocas();
1083 void processStaticAllocas();
1084 void processDynamicAllocas();
1086 void createDynamicAllocasInitStorage();
1104 void unpoisonDynamicAllocasBeforeInst(
Instruction *InstBefore,
1105 Value *SavedStack) {
1112 if (!isa<ReturnInst>(InstBefore)) {
1114 Intrinsic::get_dynamic_area_offset, {IntptrTy}, {});
1120 RTCI.createRuntimeCall(
1121 IRB, AsanAllocasUnpoisonFunc,
1122 {IRB.
CreateLoad(IntptrTy, DynamicAllocaLayout), DynamicAreaPtr});
1126 void unpoisonDynamicAllocas() {
1128 unpoisonDynamicAllocasBeforeInst(Ret, DynamicAllocaLayout);
1130 for (
Instruction *StackRestoreInst : StackRestoreVec)
1131 unpoisonDynamicAllocasBeforeInst(StackRestoreInst,
1132 StackRestoreInst->getOperand(0));
1145 void handleDynamicAllocaCall(
AllocaInst *AI);
1151 const auto *STy = dyn_cast<StructType>(AllocaType);
1152 if (!ASan.isInterestingAlloca(AI) || isa<ScalableVectorType>(AllocaType) ||
1153 (STy && STy->containsHomogeneousScalableVectorTypes())) {
1157 if (AllocaVec.
empty())
1175 if (
ID == Intrinsic::stackrestore) StackRestoreVec.
push_back(&
II);
1176 if (
ID == Intrinsic::localescape) LocalEscapeCall = &
II;
1177 if (!ASan.UseAfterScope)
1179 if (!
II.isLifetimeStartOrEnd())
1182 auto *
Size = cast<ConstantInt>(
II.getArgOperand(0));
1184 if (
Size->isMinusOne())
return;
1187 const uint64_t SizeValue =
Size->getValue().getLimitedValue();
1188 if (SizeValue == ~0ULL ||
1196 HasUntracedLifetimeIntrinsic =
true;
1200 if (!ASan.isInterestingAlloca(*AI))
1202 bool DoPoison = (
ID == Intrinsic::lifetime_end);
1203 AllocaPoisonCall APC = {&
II, AI, SizeValue, DoPoison};
1205 StaticAllocaPoisonCallVec.
push_back(APC);
1207 DynamicAllocaPoisonCallVec.
push_back(APC);
1211 if (
CallInst *CI = dyn_cast<CallInst>(&CB)) {
1212 HasInlineAsm |= CI->isInlineAsm() && &CB != ASan.LocalDynamicShadow;
1213 HasReturnsTwiceCall |= CI->canReturnTwice();
1218 void initializeCallbacks(
Module &M);
1245 OS, MapClassName2PassName);
1257 UseOdrIndicator(UseOdrIndicator), DestructorKind(DestructorKind),
1258 ConstructorKind(ConstructorKind) {}
1267 ModuleAddressSanitizer ModuleSanitizer(
1269 UseGlobalGC, UseOdrIndicator, DestructorKind, ConstructorKind);
1275 AddressSanitizer FunctionSanitizer(
1280 Modified |= FunctionSanitizer.instrumentFunction(
F, &TLI);
1282 Modified |= ModuleSanitizer.instrumentModule();
1303 if (
G->getName().starts_with(
"llvm.") ||
1305 G->getName().starts_with(
"__llvm_gcov_ctr") ||
1307 G->getName().starts_with(
"__llvm_rtti_proxy"))
1320 Type *PtrTy = cast<PointerType>(
Addr->getType()->getScalarType());
1322 if (AddrSpace == 3 || AddrSpace == 5)
1329 Shadow = IRB.
CreateLShr(Shadow, Mapping.Scale);
1330 if (Mapping.Offset == 0)
return Shadow;
1333 if (LocalDynamicShadow)
1334 ShadowBase = LocalDynamicShadow;
1336 ShadowBase = ConstantInt::get(IntptrTy, Mapping.Offset);
1337 if (Mapping.OrShadowOffset)
1338 return IRB.
CreateOr(Shadow, ShadowBase);
1340 return IRB.
CreateAdd(Shadow, ShadowBase);
1345 RuntimeCallInserter &RTCI) {
1347 if (isa<MemTransferInst>(
MI)) {
1348 RTCI.createRuntimeCall(
1349 IRB, isa<MemMoveInst>(
MI) ? AsanMemmove : AsanMemcpy,
1353 }
else if (isa<MemSetInst>(
MI)) {
1354 RTCI.createRuntimeCall(
1360 MI->eraseFromParent();
1364bool AddressSanitizer::isInterestingAlloca(
const AllocaInst &AI) {
1365 auto PreviouslySeenAllocaInfo = ProcessedAllocas.find(&AI);
1367 if (PreviouslySeenAllocaInfo != ProcessedAllocas.end())
1368 return PreviouslySeenAllocaInfo->getSecond();
1370 bool IsInteresting =
1383 !(SSGI && SSGI->
isSafe(AI)));
1385 ProcessedAllocas[&AI] = IsInteresting;
1386 return IsInteresting;
1391 Type *PtrTy = cast<PointerType>(
Ptr->getType()->getScalarType());
1400 if (
Ptr->isSwiftError())
1406 if (
auto AI = dyn_cast_or_null<AllocaInst>(
Ptr))
1417void AddressSanitizer::getInterestingMemoryOperands(
1420 if (LocalDynamicShadow ==
I)
1423 if (
LoadInst *LI = dyn_cast<LoadInst>(
I)) {
1426 Interesting.
emplace_back(
I, LI->getPointerOperandIndex(),
false,
1427 LI->getType(), LI->getAlign());
1428 }
else if (
StoreInst *SI = dyn_cast<StoreInst>(
I)) {
1432 SI->getValueOperand()->getType(),
SI->getAlign());
1436 Interesting.
emplace_back(
I, RMW->getPointerOperandIndex(),
true,
1437 RMW->getValOperand()->getType(), std::nullopt);
1441 Interesting.
emplace_back(
I, XCHG->getPointerOperandIndex(),
true,
1442 XCHG->getCompareOperand()->getType(),
1444 }
else if (
auto CI = dyn_cast<CallInst>(
I)) {
1445 switch (CI->getIntrinsicID()) {
1446 case Intrinsic::masked_load:
1447 case Intrinsic::masked_store:
1448 case Intrinsic::masked_gather:
1449 case Intrinsic::masked_scatter: {
1450 bool IsWrite = CI->getType()->isVoidTy();
1452 unsigned OpOffset = IsWrite ? 1 : 0;
1456 auto BasePtr = CI->getOperand(OpOffset);
1457 if (ignoreAccess(
I, BasePtr))
1459 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1462 if (
auto *
Op = dyn_cast<ConstantInt>(CI->getOperand(1 + OpOffset)))
1463 Alignment =
Op->getMaybeAlignValue();
1464 Value *
Mask = CI->getOperand(2 + OpOffset);
1465 Interesting.
emplace_back(
I, OpOffset, IsWrite, Ty, Alignment, Mask);
1468 case Intrinsic::masked_expandload:
1469 case Intrinsic::masked_compressstore: {
1470 bool IsWrite = CI->getIntrinsicID() == Intrinsic::masked_compressstore;
1471 unsigned OpOffset = IsWrite ? 1 : 0;
1474 auto BasePtr = CI->getOperand(OpOffset);
1475 if (ignoreAccess(
I, BasePtr))
1478 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1481 Value *
Mask = CI->getOperand(1 + OpOffset);
1484 Value *ExtMask =
IB.CreateZExt(Mask, ExtTy);
1485 Value *EVL =
IB.CreateAddReduce(ExtMask);
1486 Value *TrueMask = ConstantInt::get(
Mask->getType(), 1);
1487 Interesting.
emplace_back(
I, OpOffset, IsWrite, Ty, Alignment, TrueMask,
1491 case Intrinsic::vp_load:
1492 case Intrinsic::vp_store:
1493 case Intrinsic::experimental_vp_strided_load:
1494 case Intrinsic::experimental_vp_strided_store: {
1495 auto *VPI = cast<VPIntrinsic>(CI);
1496 unsigned IID = CI->getIntrinsicID();
1497 bool IsWrite = CI->getType()->isVoidTy();
1500 unsigned PtrOpNo = *VPI->getMemoryPointerParamPos(IID);
1501 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1502 MaybeAlign Alignment = VPI->getOperand(PtrOpNo)->getPointerAlignment(*
DL);
1503 Value *Stride =
nullptr;
1504 if (IID == Intrinsic::experimental_vp_strided_store ||
1505 IID == Intrinsic::experimental_vp_strided_load) {
1506 Stride = VPI->getOperand(PtrOpNo + 1);
1511 if (!isa<ConstantInt>(Stride) ||
1512 cast<ConstantInt>(Stride)->getZExtValue() % PointerAlign != 0)
1513 Alignment =
Align(1);
1515 Interesting.
emplace_back(
I, PtrOpNo, IsWrite, Ty, Alignment,
1516 VPI->getMaskParam(), VPI->getVectorLengthParam(),
1520 case Intrinsic::vp_gather:
1521 case Intrinsic::vp_scatter: {
1522 auto *VPI = cast<VPIntrinsic>(CI);
1523 unsigned IID = CI->getIntrinsicID();
1524 bool IsWrite = IID == Intrinsic::vp_scatter;
1527 unsigned PtrOpNo = *VPI->getMemoryPointerParamPos(IID);
1528 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1529 MaybeAlign Alignment = VPI->getPointerAlignment();
1530 Interesting.
emplace_back(
I, PtrOpNo, IsWrite, Ty, Alignment,
1531 VPI->getMaskParam(),
1532 VPI->getVectorLengthParam());
1536 for (
unsigned ArgNo = 0; ArgNo < CI->arg_size(); ArgNo++) {
1538 ignoreAccess(
I, CI->getArgOperand(ArgNo)))
1540 Type *Ty = CI->getParamByValType(ArgNo);
1548 return V->getType()->isPointerTy() || isa<PtrToIntInst>(V);
1555 if (
ICmpInst *Cmp = dyn_cast<ICmpInst>(
I)) {
1556 if (!Cmp->isRelational())
1570 if (BO->getOpcode() != Instruction::Sub)
1583 if (!
G->hasInitializer())
1586 if (
G->hasSanitizerMetadata() &&
G->getSanitizerMetadata().IsDynInit)
1592void AddressSanitizer::instrumentPointerComparisonOrSubtraction(
1595 FunctionCallee F = isa<ICmpInst>(
I) ? AsanPtrCmpFunction : AsanPtrSubFunction;
1596 Value *
Param[2] = {
I->getOperand(0),
I->getOperand(1)};
1597 for (
Value *&i : Param) {
1598 if (i->getType()->isPointerTy())
1601 RTCI.createRuntimeCall(IRB,
F, Param);
1607 TypeSize TypeStoreSize,
bool IsWrite,
1608 Value *SizeArgument,
bool UseCalls,
1609 uint32_t Exp, RuntimeCallInserter &RTCI) {
1614 switch (FixedSize) {
1620 if (!Alignment || *Alignment >= Granularity ||
1621 *Alignment >= FixedSize / 8)
1622 return Pass->instrumentAddress(
I, InsertBefore,
Addr, Alignment,
1623 FixedSize, IsWrite,
nullptr, UseCalls,
1627 Pass->instrumentUnusualSizeOrAlignment(
I, InsertBefore,
Addr, TypeStoreSize,
1628 IsWrite,
nullptr, UseCalls, Exp, RTCI);
1631void AddressSanitizer::instrumentMaskedLoadOrStore(
1634 MaybeAlign Alignment,
unsigned Granularity,
Type *OpType,
bool IsWrite,
1636 RuntimeCallInserter &RTCI) {
1637 auto *VTy = cast<VectorType>(OpType);
1638 TypeSize ElemTypeSize =
DL.getTypeStoreSizeInBits(VTy->getScalarType());
1639 auto Zero = ConstantInt::get(IntptrTy, 0);
1647 Value *IsEVLZero =
IB.CreateICmpNE(EVL, ConstantInt::get(EVLType, 0));
1649 IB.SetInsertPoint(LoopInsertBefore);
1651 EVL =
IB.CreateZExtOrTrunc(EVL, IntptrTy);
1654 Value *
EC =
IB.CreateElementCount(IntptrTy, VTy->getElementCount());
1655 EVL =
IB.CreateBinaryIntrinsic(Intrinsic::umin, EVL, EC);
1657 EVL =
IB.CreateElementCount(IntptrTy, VTy->getElementCount());
1662 Stride =
IB.CreateZExtOrTrunc(Stride, IntptrTy);
1667 if (
auto *MaskElemC = dyn_cast<ConstantInt>(MaskElem)) {
1668 if (MaskElemC->isZero())
1679 Value *InstrumentedAddress;
1680 if (isa<VectorType>(
Addr->getType())) {
1682 cast<VectorType>(
Addr->getType())->getElementType()->isPointerTy() &&
1683 "Expected vector of pointer.");
1685 }
else if (Stride) {
1692 Alignment, Granularity, ElemTypeSize, IsWrite,
1693 SizeArgument, UseCalls, Exp, RTCI);
1700 RuntimeCallInserter &RTCI) {
1721 isSafeAccess(ObjSizeVis,
Addr,
O.TypeStoreSize)) {
1722 NumOptimizedAccessesToGlobalVar++;
1730 isSafeAccess(ObjSizeVis,
Addr,
O.TypeStoreSize)) {
1731 NumOptimizedAccessesToStackVar++;
1737 NumInstrumentedWrites++;
1739 NumInstrumentedReads++;
1741 unsigned Granularity = 1 << Mapping.Scale;
1743 instrumentMaskedLoadOrStore(
this,
DL, IntptrTy,
O.MaybeMask,
O.MaybeEVL,
1744 O.MaybeStride,
O.getInsn(),
Addr,
O.Alignment,
1745 Granularity,
O.OpType,
O.IsWrite,
nullptr,
1746 UseCalls, Exp, RTCI);
1749 Granularity,
O.TypeStoreSize,
O.IsWrite,
nullptr,
1750 UseCalls, Exp, RTCI);
1756 size_t AccessSizeIndex,
1757 Value *SizeArgument,
1759 RuntimeCallInserter &RTCI) {
1765 Call = RTCI.createRuntimeCall(IRB, AsanErrorCallbackSized[IsWrite][0],
1766 {
Addr, SizeArgument});
1768 Call = RTCI.createRuntimeCall(IRB, AsanErrorCallbackSized[IsWrite][1],
1769 {
Addr, SizeArgument, ExpVal});
1772 Call = RTCI.createRuntimeCall(
1773 IRB, AsanErrorCallback[IsWrite][0][AccessSizeIndex],
Addr);
1775 Call = RTCI.createRuntimeCall(
1776 IRB, AsanErrorCallback[IsWrite][1][AccessSizeIndex], {
Addr, ExpVal});
1779 Call->setCannotMerge();
1786 size_t Granularity =
static_cast<size_t>(1) << Mapping.Scale;
1788 Value *LastAccessedByte =
1789 IRB.
CreateAnd(AddrLong, ConstantInt::get(IntptrTy, Granularity - 1));
1791 if (TypeStoreSize / 8 > 1)
1793 LastAccessedByte, ConstantInt::get(IntptrTy, TypeStoreSize / 8 - 1));
1801Instruction *AddressSanitizer::instrumentAMDGPUAddress(
1803 uint32_t TypeStoreSize,
bool IsWrite,
Value *SizeArgument) {
1807 Type *PtrTy = cast<PointerType>(
Addr->getType()->getScalarType());
1810 return InsertBefore;
1815 Value *IsSharedOrPrivate = IRB.
CreateOr(IsShared, IsPrivate);
1817 Value *AddrSpaceZeroLanding =
1819 InsertBefore = cast<Instruction>(AddrSpaceZeroLanding);
1820 return InsertBefore;
1836 Trm->getParent()->setName(
"asan.report");
1847void AddressSanitizer::instrumentAddress(
Instruction *OrigIns,
1850 uint32_t TypeStoreSize,
bool IsWrite,
1851 Value *SizeArgument,
bool UseCalls,
1853 RuntimeCallInserter &RTCI) {
1854 if (TargetTriple.isAMDGPU()) {
1855 InsertBefore = instrumentAMDGPUAddress(OrigIns, InsertBefore,
Addr,
1856 TypeStoreSize, IsWrite, SizeArgument);
1865 const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
1868 ConstantInt::get(Int32Ty, AccessInfo.Packed)});
1875 RTCI.createRuntimeCall(
1876 IRB, AsanMemoryAccessCallback[IsWrite][0][AccessSizeIndex], AddrLong);
1878 RTCI.createRuntimeCall(
1879 IRB, AsanMemoryAccessCallback[IsWrite][1][AccessSizeIndex],
1880 {AddrLong, ConstantInt::get(IRB.
getInt32Ty(), Exp)});
1887 Value *ShadowPtr = memToShadow(AddrLong, IRB);
1889 std::max<uint64_t>(Alignment.
valueOrOne().
value() >> Mapping.Scale, 1);
1894 size_t Granularity = 1ULL << Mapping.Scale;
1897 bool GenSlowPath = (
ClAlwaysSlowPath || (TypeStoreSize < 8 * Granularity));
1899 if (TargetTriple.isAMDGCN()) {
1901 auto *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeStoreSize);
1904 CrashTerm = genAMDGPUReportBlock(IRB, Cmp, Recover);
1905 }
else if (GenSlowPath) {
1910 assert(cast<BranchInst>(CheckTerm)->isUnconditional());
1913 Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeStoreSize);
1928 CrashTerm, AddrLong, IsWrite, AccessSizeIndex, SizeArgument, Exp, RTCI);
1937void AddressSanitizer::instrumentUnusualSizeOrAlignment(
1939 TypeSize TypeStoreSize,
bool IsWrite,
Value *SizeArgument,
bool UseCalls,
1940 uint32_t Exp, RuntimeCallInserter &RTCI) {
1948 RTCI.createRuntimeCall(IRB, AsanMemoryAccessCallbackSized[IsWrite][0],
1951 RTCI.createRuntimeCall(
1952 IRB, AsanMemoryAccessCallbackSized[IsWrite][1],
1966void ModuleAddressSanitizer::poisonOneInitializer(
Function &GlobalInit) {
1972 Value *ModuleNameAddr =
1974 IRB.
CreateCall(AsanPoisonGlobals, ModuleNameAddr);
1977 for (
auto &BB : GlobalInit)
1982void ModuleAddressSanitizer::createInitializerPoisonCalls() {
1992 if (isa<ConstantAggregateZero>(
OP))
continue;
1998 auto *Priority = cast<ConstantInt>(CS->
getOperand(0));
2002 poisonOneInitializer(*
F);
2008ModuleAddressSanitizer::getExcludedAliasedGlobal(
const GlobalAlias &GA)
const {
2013 assert(CompileKernel &&
"Only expecting to be called when compiling kernel");
2020 return dyn_cast<GlobalVariable>(
C->stripPointerCastsAndAliases());
2025bool ModuleAddressSanitizer::shouldInstrumentGlobal(
GlobalVariable *
G)
const {
2026 Type *Ty =
G->getValueType();
2029 if (
G->hasSanitizerMetadata() &&
G->getSanitizerMetadata().NoAddress)
2031 if (!Ty->
isSized())
return false;
2032 if (!
G->hasInitializer())
return false;
2034 if (
G->getAddressSpace() &&
2041 if (
G->isThreadLocal())
return false;
2043 if (
G->getAlign() && *
G->getAlign() > getMinRedzoneSizeForGlobal())
return false;
2049 if (!TargetTriple.isOSBinFormatCOFF()) {
2050 if (!
G->hasExactDefinition() ||
G->hasComdat())
2054 if (
G->isInterposable())
2058 if (
G->hasAvailableExternallyLinkage())
2065 switch (
C->getSelectionKind()) {
2076 if (
G->hasSection()) {
2086 if (Section ==
"llvm.metadata")
return false;
2093 if (
Section.starts_with(
".preinit_array") ||
2094 Section.starts_with(
".init_array") ||
2095 Section.starts_with(
".fini_array")) {
2101 if (TargetTriple.isOSBinFormatELF()) {
2103 [](
char c) {
return llvm::isAlnum(c) || c ==
'_'; }))
2115 if (TargetTriple.isOSBinFormatCOFF() &&
Section.contains(
'$')) {
2116 LLVM_DEBUG(
dbgs() <<
"Ignoring global in sorted section (contains '$'): "
2121 if (TargetTriple.isOSBinFormatMachO()) {
2123 unsigned TAA = 0, StubSize = 0;
2126 Section, ParsedSegment, ParsedSection, TAA, TAAParsed, StubSize));
2131 if (ParsedSegment ==
"__OBJC" ||
2132 (ParsedSegment ==
"__DATA" && ParsedSection.
starts_with(
"__objc_"))) {
2144 if (ParsedSegment ==
"__DATA" && ParsedSection ==
"__cfstring") {
2157 if (CompileKernel) {
2160 if (
G->getName().starts_with(
"__"))
2170bool ModuleAddressSanitizer::ShouldUseMachOGlobalsSection()
const {
2171 if (!TargetTriple.isOSBinFormatMachO())
2174 if (TargetTriple.isMacOSX() && !TargetTriple.isMacOSXVersionLT(10, 11))
2176 if (TargetTriple.isiOS() && !TargetTriple.isOSVersionLT(9))
2178 if (TargetTriple.isWatchOS() && !TargetTriple.isOSVersionLT(2))
2180 if (TargetTriple.isDriverKit())
2182 if (TargetTriple.isXROS())
2188StringRef ModuleAddressSanitizer::getGlobalMetadataSection()
const {
2189 switch (TargetTriple.getObjectFormat()) {
2199 "ModuleAddressSanitizer not implemented for object file format");
2206void ModuleAddressSanitizer::initializeCallbacks() {
2212 AsanUnpoisonGlobals =
2216 AsanRegisterGlobals =
M.getOrInsertFunction(
2218 AsanUnregisterGlobals =
M.getOrInsertFunction(
2223 AsanRegisterImageGlobals =
M.getOrInsertFunction(
2225 AsanUnregisterImageGlobals =
M.getOrInsertFunction(
2228 AsanRegisterElfGlobals =
2230 IntptrTy, IntptrTy, IntptrTy);
2231 AsanUnregisterElfGlobals =
2233 IntptrTy, IntptrTy, IntptrTy);
2238void ModuleAddressSanitizer::SetComdatForGlobalMetadata(
2243 if (!
G->hasName()) {
2247 G->setName(
genName(
"anon_global"));
2250 if (!InternalSuffix.
empty() &&
G->hasLocalLinkage()) {
2251 std::string
Name = std::string(
G->getName());
2252 Name += InternalSuffix;
2253 C =
M.getOrInsertComdat(
Name);
2255 C =
M.getOrInsertComdat(
G->getName());
2261 if (TargetTriple.isOSBinFormatCOFF()) {
2263 if (
G->hasPrivateLinkage())
2276ModuleAddressSanitizer::CreateMetadataGlobal(
Constant *Initializer,
2278 auto Linkage = TargetTriple.isOSBinFormatMachO()
2282 M, Initializer->
getType(),
false, Linkage, Initializer,
2284 Metadata->setSection(getGlobalMetadataSection());
2291Instruction *ModuleAddressSanitizer::CreateAsanModuleDtor() {
2295 AsanDtorFunction->addFnAttr(Attribute::NoUnwind);
2303void ModuleAddressSanitizer::InstrumentGlobalsCOFF(
2307 auto &
DL =
M.getDataLayout();
2310 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2311 Constant *Initializer = MetadataInitializers[i];
2315 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2321 unsigned SizeOfGlobalStruct =
DL.getTypeAllocSize(Initializer->
getType());
2323 "global metadata will not be padded appropriately");
2326 SetComdatForGlobalMetadata(
G,
Metadata,
"");
2331 if (!MetadataGlobals.empty())
2335void ModuleAddressSanitizer::instrumentGlobalsELF(
2338 const std::string &UniqueModuleId) {
2345 bool UseComdatForGlobalsGC = UseOdrIndicator && !UniqueModuleId.empty();
2348 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2351 CreateMetadataGlobal(MetadataInitializers[i],
G->getName());
2353 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2356 if (UseComdatForGlobalsGC)
2357 SetComdatForGlobalMetadata(
G,
Metadata, UniqueModuleId);
2362 if (!MetadataGlobals.empty())
2379 "__start_" + getGlobalMetadataSection());
2383 "__stop_" + getGlobalMetadataSection());
2397 IrbDtor.CreateCall(AsanUnregisterElfGlobals,
2404void ModuleAddressSanitizer::InstrumentGlobalsMachO(
2415 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2416 Constant *Initializer = MetadataInitializers[i];
2422 auto LivenessBinder =
2427 Twine(
"__asan_binder_") +
G->getName());
2428 Liveness->
setSection(
"__DATA,__asan_liveness,regular,live_support");
2429 LivenessGlobals[i] = Liveness;
2436 if (!LivenessGlobals.empty())
2458 IrbDtor.CreateCall(AsanUnregisterImageGlobals,
2463void ModuleAddressSanitizer::InstrumentGlobalsWithMetadataArray(
2467 unsigned N = ExtendedGlobals.
size();
2477 if (Mapping.Scale > 3)
2478 AllGlobals->setAlignment(
Align(1ULL << Mapping.Scale));
2483 ConstantInt::get(IntptrTy,
N)});
2489 IrbDtor.CreateCall(AsanUnregisterGlobals,
2491 ConstantInt::get(IntptrTy,
N)});
2500void ModuleAddressSanitizer::instrumentGlobals(
IRBuilder<> &IRB,
2505 if (CompileKernel) {
2506 for (
auto &GA :
M.aliases()) {
2508 AliasedGlobalExclusions.
insert(GV);
2513 for (
auto &
G :
M.globals()) {
2514 if (!AliasedGlobalExclusions.
count(&
G) && shouldInstrumentGlobal(&
G))
2518 size_t n = GlobalsToChange.
size();
2519 auto &
DL =
M.getDataLayout();
2533 IntptrTy, IntptrTy, IntptrTy);
2537 for (
size_t i = 0; i < n; i++) {
2541 if (
G->hasSanitizerMetadata())
2542 MD =
G->getSanitizerMetadata();
2547 std::string NameForGlobal =
G->getName().str();
2552 Type *Ty =
G->getValueType();
2553 const uint64_t SizeInBytes =
DL.getTypeAllocSize(Ty);
2566 M, NewTy,
G->isConstant(), Linkage, NewInitializer,
"",
G,
2567 G->getThreadLocalMode(),
G->getAddressSpace());
2577 if (TargetTriple.isOSBinFormatMachO() && !
G->hasSection() &&
2579 auto Seq = dyn_cast<ConstantDataSequential>(
G->getInitializer());
2580 if (Seq && Seq->isCString())
2581 NewGlobal->
setSection(
"__TEXT,__asan_cstring,regular");
2592 G->replaceAllUsesWith(
2595 G->eraseFromParent();
2596 NewGlobals[i] = NewGlobal;
2601 bool CanUsePrivateAliases =
2602 TargetTriple.isOSBinFormatELF() || TargetTriple.isOSBinFormatMachO() ||
2603 TargetTriple.isOSBinFormatWasm();
2604 if (CanUsePrivateAliases && UsePrivateAlias) {
2607 InstrumentedGlobal =
2615 }
else if (UseOdrIndicator) {
2618 auto *ODRIndicatorSym =
2627 ODRIndicatorSym->setAlignment(
Align(1));
2628 ODRIndicator = ODRIndicatorSym;
2634 ConstantInt::get(IntptrTy, SizeInBytes),
2635 ConstantInt::get(IntptrTy, SizeInBytes + RightRedzoneSize),
2638 ConstantInt::get(IntptrTy, MD.
IsDynInit),
2644 Initializers[i] = Initializer;
2650 for (
size_t i = 0; i < n; i++) {
2652 if (
G->getName().empty())
continue;
2657 if (UseGlobalsGC && TargetTriple.isOSBinFormatELF()) {
2664 }
else if (n == 0) {
2667 *CtorComdat = TargetTriple.isOSBinFormatELF();
2669 *CtorComdat =
false;
2670 if (UseGlobalsGC && TargetTriple.isOSBinFormatCOFF()) {
2671 InstrumentGlobalsCOFF(IRB, NewGlobals, Initializers);
2672 }
else if (UseGlobalsGC && ShouldUseMachOGlobalsSection()) {
2673 InstrumentGlobalsMachO(IRB, NewGlobals, Initializers);
2675 InstrumentGlobalsWithMetadataArray(IRB, NewGlobals, Initializers);
2681 createInitializerPoisonCalls();
2687ModuleAddressSanitizer::getRedzoneSizeForGlobal(
uint64_t SizeInBytes)
const {
2688 constexpr uint64_t kMaxRZ = 1 << 18;
2689 const uint64_t MinRZ = getMinRedzoneSizeForGlobal();
2692 if (SizeInBytes <= MinRZ / 2) {
2696 RZ = MinRZ - SizeInBytes;
2699 RZ = std::clamp((SizeInBytes / MinRZ / 4) * MinRZ, MinRZ, kMaxRZ);
2702 if (SizeInBytes % MinRZ)
2703 RZ += MinRZ - (SizeInBytes % MinRZ);
2706 assert((RZ + SizeInBytes) % MinRZ == 0);
2711int ModuleAddressSanitizer::GetAsanVersion()
const {
2712 int LongSize =
M.getDataLayout().getPointerSizeInBits();
2717 Version += (LongSize == 32 && isAndroid);
2732bool ModuleAddressSanitizer::instrumentModule() {
2733 initializeCallbacks();
2738 if (CompileKernel) {
2743 std::string AsanVersion = std::to_string(GetAsanVersion());
2744 std::string VersionCheckName =
2746 std::tie(AsanCtorFunction, std::ignore) =
2749 {}, VersionCheckName);
2753 bool CtorComdat =
true;
2756 if (AsanCtorFunction) {
2757 IRBuilder<> IRB(AsanCtorFunction->getEntryBlock().getTerminator());
2758 instrumentGlobals(IRB, &CtorComdat);
2761 instrumentGlobals(IRB, &CtorComdat);
2770 if (UseCtorComdat && TargetTriple.isOSBinFormatELF() && CtorComdat) {
2771 if (AsanCtorFunction) {
2775 if (AsanDtorFunction) {
2780 if (AsanCtorFunction)
2782 if (AsanDtorFunction)
2793 for (
int Exp = 0;
Exp < 2;
Exp++) {
2794 for (
size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
2795 const std::string TypeStr = AccessIsWrite ?
"store" :
"load";
2796 const std::string ExpStr =
Exp ?
"exp_" :
"";
2797 const std::string EndingStr = Recover ?
"_noabort" :
"";
2806 Args1.push_back(ExpType);
2807 if (
auto AK = TLI->getExtAttrForI32Param(
false)) {
2812 AsanErrorCallbackSized[AccessIsWrite][
Exp] =
M.getOrInsertFunction(
2816 AsanMemoryAccessCallbackSized[AccessIsWrite][
Exp] =
M.getOrInsertFunction(
2821 AccessSizeIndex++) {
2822 const std::string Suffix = TypeStr + itostr(1ULL << AccessSizeIndex);
2823 AsanErrorCallback[AccessIsWrite][
Exp][AccessSizeIndex] =
2824 M.getOrInsertFunction(
2828 AsanMemoryAccessCallback[AccessIsWrite][
Exp][AccessSizeIndex] =
2829 M.getOrInsertFunction(
2836 const std::string MemIntrinCallbackPrefix =
2840 AsanMemmove =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memmove",
2841 PtrTy, PtrTy, PtrTy, IntptrTy);
2842 AsanMemcpy =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memcpy", PtrTy,
2843 PtrTy, PtrTy, IntptrTy);
2844 AsanMemset =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memset",
2848 AsanHandleNoReturnFunc =
2851 AsanPtrCmpFunction =
2853 AsanPtrSubFunction =
2855 if (Mapping.InGlobal)
2856 AsanShadowGlobal =
M.getOrInsertGlobal(
"__asan_shadow",
2859 AMDGPUAddressShared =
2861 AMDGPUAddressPrivate =
2865bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(
Function &
F) {
2873 if (
F.getName().contains(
" load]")) {
2883bool AddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(
Function &
F) {
2889 if (Mapping.InGlobal) {
2897 LocalDynamicShadow =
2898 IRB.
CreateCall(Asm, {AsanShadowGlobal},
".asan.shadow");
2900 LocalDynamicShadow =
2904 Value *GlobalDynamicAddress =
F.getParent()->getOrInsertGlobal(
2906 LocalDynamicShadow = IRB.
CreateLoad(IntptrTy, GlobalDynamicAddress);
2911void AddressSanitizer::markEscapedLocalAllocas(
Function &
F) {
2916 assert(ProcessedAllocas.empty() &&
"must process localescape before allocas");
2920 if (!
F.getParent()->getFunction(
"llvm.localescape"))
return;
2926 if (
II &&
II->getIntrinsicID() == Intrinsic::localescape) {
2928 for (
Value *Arg :
II->args()) {
2929 AllocaInst *AI = dyn_cast<AllocaInst>(Arg->stripPointerCasts());
2931 "non-static alloca arg to localescape");
2932 ProcessedAllocas[AI] =
false;
2939bool AddressSanitizer::suppressInstrumentationSiteForDebug(
int &Instrumented) {
2940 bool ShouldInstrument =
2944 return !ShouldInstrument;
2947bool AddressSanitizer::instrumentFunction(
Function &
F,
2953 if (
F.getName().starts_with(
"__asan_"))
return false;
2954 if (
F.isPresplitCoroutine())
2957 bool FunctionModified =
false;
2960 if (
F.hasFnAttribute(Attribute::Naked))
2961 return FunctionModified;
2966 if (maybeInsertAsanInitAtFunctionEntry(
F))
2967 FunctionModified =
true;
2970 if (!
F.hasFnAttribute(Attribute::SanitizeAddress))
return FunctionModified;
2972 if (
F.hasFnAttribute(Attribute::DisableSanitizerInstrumentation))
2973 return FunctionModified;
2977 initializeCallbacks(TLI);
2979 FunctionStateRAII CleanupObj(
this);
2981 RuntimeCallInserter RTCI(
F);
2983 FunctionModified |= maybeInsertDynamicShadowAtFunctionEntry(
F);
2987 markEscapedLocalAllocas(
F);
2999 for (
auto &BB :
F) {
3001 TempsToInstrument.
clear();
3002 int NumInsnsPerBB = 0;
3003 for (
auto &Inst : BB) {
3004 if (LooksLikeCodeInBug11395(&Inst))
return false;
3011 if (!InterestingOperands.
empty()) {
3012 for (
auto &Operand : InterestingOperands) {
3018 if (Operand.MaybeMask) {
3022 if (!TempsToInstrument.
insert(
Ptr).second)
3026 OperandsToInstrument.
push_back(Operand);
3033 PointerComparisonsOrSubtracts.
push_back(&Inst);
3039 if (
auto *CB = dyn_cast<CallBase>(&Inst)) {
3041 TempsToInstrument.
clear();
3045 if (
CallInst *CI = dyn_cast<CallInst>(&Inst))
3052 bool UseCalls = (InstrumentationWithCallsThreshold >= 0 &&
3053 OperandsToInstrument.
size() + IntrinToInstrument.
size() >
3054 (
unsigned)InstrumentationWithCallsThreshold);
3059 int NumInstrumented = 0;
3060 for (
auto &Operand : OperandsToInstrument) {
3061 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
3062 instrumentMop(ObjSizeVis, Operand, UseCalls,
3063 F.getDataLayout(), RTCI);
3064 FunctionModified =
true;
3066 for (
auto *Inst : IntrinToInstrument) {
3067 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
3068 instrumentMemIntrinsic(Inst, RTCI);
3069 FunctionModified =
true;
3072 FunctionStackPoisoner FSP(
F, *
this, RTCI);
3073 bool ChangedStack = FSP.runOnFunction();
3077 for (
auto *CI : NoReturnCalls) {
3079 RTCI.createRuntimeCall(IRB, AsanHandleNoReturnFunc, {});
3082 for (
auto *Inst : PointerComparisonsOrSubtracts) {
3083 instrumentPointerComparisonOrSubtraction(Inst, RTCI);
3084 FunctionModified =
true;
3087 if (ChangedStack || !NoReturnCalls.empty())
3088 FunctionModified =
true;
3090 LLVM_DEBUG(
dbgs() <<
"ASAN done instrumenting: " << FunctionModified <<
" "
3093 return FunctionModified;
3099bool AddressSanitizer::LooksLikeCodeInBug11395(
Instruction *
I) {
3100 if (LongSize != 32)
return false;
3109void FunctionStackPoisoner::initializeCallbacks(
Module &M) {
3113 const char *MallocNameTemplate =
3118 std::string Suffix = itostr(Index);
3119 AsanStackMallocFunc[
Index] =
M.getOrInsertFunction(
3120 MallocNameTemplate + Suffix, IntptrTy, IntptrTy);
3121 AsanStackFreeFunc[
Index] =
3126 if (ASan.UseAfterScope) {
3127 AsanPoisonStackMemoryFunc =
M.getOrInsertFunction(
3129 AsanUnpoisonStackMemoryFunc =
M.getOrInsertFunction(
3133 for (
size_t Val : {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0xf1, 0xf2,
3134 0xf3, 0xf5, 0xf8}) {
3135 std::ostringstream
Name;
3137 Name << std::setw(2) << std::setfill(
'0') << std::hex << Val;
3138 AsanSetShadowFunc[Val] =
3139 M.getOrInsertFunction(
Name.str(), IRB.
getVoidTy(), IntptrTy, IntptrTy);
3142 AsanAllocaPoisonFunc =
M.getOrInsertFunction(
3144 AsanAllocasUnpoisonFunc =
M.getOrInsertFunction(
3150 size_t Begin,
size_t End,
3152 Value *ShadowBase) {
3156 const size_t LargestStoreSizeInBytes =
3157 std::min<size_t>(
sizeof(
uint64_t), ASan.LongSize / 8);
3159 const bool IsLittleEndian =
F.getDataLayout().isLittleEndian();
3165 for (
size_t i = Begin; i <
End;) {
3166 if (!ShadowMask[i]) {
3172 size_t StoreSizeInBytes = LargestStoreSizeInBytes;
3174 while (StoreSizeInBytes >
End - i)
3175 StoreSizeInBytes /= 2;
3178 for (
size_t j = StoreSizeInBytes - 1;
j && !ShadowMask[i +
j]; --
j) {
3179 while (j <= StoreSizeInBytes / 2)
3180 StoreSizeInBytes /= 2;
3184 for (
size_t j = 0;
j < StoreSizeInBytes;
j++) {
3186 Val |= (
uint64_t)ShadowBytes[i + j] << (8 * j);
3188 Val = (Val << 8) | ShadowBytes[i + j];
3197 i += StoreSizeInBytes;
3204 copyToShadow(ShadowMask, ShadowBytes, 0, ShadowMask.
size(), IRB, ShadowBase);
3209 size_t Begin,
size_t End,
3212 size_t Done = Begin;
3213 for (
size_t i = Begin, j = Begin + 1; i <
End; i =
j++) {
3214 if (!ShadowMask[i]) {
3219 if (!AsanSetShadowFunc[Val])
3223 for (;
j <
End && ShadowMask[
j] && Val == ShadowBytes[
j]; ++
j) {
3226 if (j - i >= ASan.MaxInlinePoisoningSize) {
3227 copyToShadowInline(ShadowMask, ShadowBytes,
Done, i, IRB, ShadowBase);
3228 RTCI.createRuntimeCall(
3229 IRB, AsanSetShadowFunc[Val],
3230 {IRB.
CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i)),
3231 ConstantInt::get(IntptrTy, j - i)});
3236 copyToShadowInline(ShadowMask, ShadowBytes,
Done,
End, IRB, ShadowBase);
3244 for (
int i = 0;; i++, MaxSize *= 2)
3245 if (LocalStackSize <= MaxSize)
return i;
3249void FunctionStackPoisoner::copyArgsPassedByValToAllocas() {
3251 if (CopyInsertPoint == ASan.LocalDynamicShadow) {
3259 if (Arg.hasByValAttr()) {
3260 Type *Ty = Arg.getParamByValType();
3261 const Align Alignment =
3262 DL.getValueOrABITypeAlignment(Arg.getParamAlign(), Ty);
3266 (Arg.hasName() ? Arg.getName() :
"Arg" +
Twine(Arg.getArgNo())) +
3269 Arg.replaceAllUsesWith(AI);
3271 uint64_t AllocSize =
DL.getTypeAllocSize(Ty);
3272 IRB.
CreateMemCpy(AI, Alignment, &Arg, Alignment, AllocSize);
3280 Value *ValueIfFalse) {
3283 PHI->addIncoming(ValueIfFalse, CondBlock);
3285 PHI->addIncoming(ValueIfTrue, ThenBlock);
3289Value *FunctionStackPoisoner::createAllocaForLayout(
3298 nullptr,
"MyAlloca");
3307void FunctionStackPoisoner::createDynamicAllocasInitStorage() {
3310 DynamicAllocaLayout = IRB.
CreateAlloca(IntptrTy,
nullptr);
3315void FunctionStackPoisoner::processDynamicAllocas() {
3322 for (
const auto &APC : DynamicAllocaPoisonCallVec) {
3325 assert(ASan.isInterestingAlloca(*APC.AI));
3326 assert(!APC.AI->isStaticAlloca());
3329 poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
3336 createDynamicAllocasInitStorage();
3337 for (
auto &AI : DynamicAllocaVec)
3338 handleDynamicAllocaCall(AI);
3339 unpoisonDynamicAllocas();
3351 for (
Instruction *It = Start; It; It = It->getNextNonDebugInstruction()) {
3361 if (isa<AllocaInst>(It) || isa<CastInst>(It))
3363 if (
auto *Store = dyn_cast<StoreInst>(It)) {
3367 auto *Alloca = dyn_cast<AllocaInst>(Store->getPointerOperand());
3368 if (!Alloca || ASan.isInterestingAlloca(*Alloca))
3371 Value *Val = Store->getValueOperand();
3372 bool IsDirectArgInit = isa<Argument>(Val);
3373 bool IsArgInitViaCast =
3374 isa<CastInst>(Val) &&
3375 isa<Argument>(cast<CastInst>(Val)->getOperand(0)) &&
3378 Val == It->getPrevNonDebugInstruction();
3379 bool IsArgInit = IsDirectArgInit || IsArgInitViaCast;
3383 if (IsArgInitViaCast)
3384 InitInsts.
push_back(cast<Instruction>(Val));
3395void FunctionStackPoisoner::processStaticAllocas() {
3396 if (AllocaVec.
empty()) {
3401 int StackMallocIdx = -1;
3403 if (
auto SP =
F.getSubprogram())
3404 EntryDebugLocation =
3413 auto InsBeforeB = InsBefore->
getParent();
3414 assert(InsBeforeB == &
F.getEntryBlock());
3415 for (
auto *AI : StaticAllocasToMoveUp)
3426 ArgInitInst->moveBefore(InsBefore);
3429 if (LocalEscapeCall) LocalEscapeCall->
moveBefore(InsBefore);
3435 ASan.getAllocaSizeInBytes(*AI),
3446 uint64_t Granularity = 1ULL << Mapping.Scale;
3447 uint64_t MinHeaderSize = std::max((
uint64_t)ASan.LongSize / 2, Granularity);
3453 for (
auto &
Desc : SVD)
3457 for (
const auto &APC : StaticAllocaPoisonCallVec) {
3460 assert(ASan.isInterestingAlloca(*APC.AI));
3461 assert(APC.AI->isStaticAlloca());
3466 if (
const DILocation *LifetimeLoc = APC.InsBefore->getDebugLoc().get()) {
3467 if (LifetimeLoc->getFile() == FnLoc->getFile())
3468 if (
unsigned Line = LifetimeLoc->getLine())
3469 Desc.Line = std::min(
Desc.Line ?
Desc.Line : Line, Line);
3475 LLVM_DEBUG(
dbgs() << DescriptionString <<
" --- " <<
L.FrameSize <<
"\n");
3477 bool DoStackMalloc =
3487 DoDynamicAlloca &= !HasInlineAsm && !HasReturnsTwiceCall;
3488 DoStackMalloc &= !HasInlineAsm && !HasReturnsTwiceCall;
3490 Value *StaticAlloca =
3491 DoDynamicAlloca ? nullptr : createAllocaForLayout(IRB, L,
false);
3494 Value *LocalStackBase;
3495 Value *LocalStackBaseAlloca;
3498 if (DoStackMalloc) {
3499 LocalStackBaseAlloca =
3500 IRB.
CreateAlloca(IntptrTy,
nullptr,
"asan_local_stack_base");
3507 Constant *OptionDetectUseAfterReturn =
F.getParent()->getOrInsertGlobal(
3517 Value *FakeStackValue =
3518 RTCI.createRuntimeCall(IRBIf, AsanStackMallocFunc[StackMallocIdx],
3519 ConstantInt::get(IntptrTy, LocalStackSize));
3521 FakeStack = createPHI(IRB, UseAfterReturnIsEnabled, FakeStackValue, Term,
3522 ConstantInt::get(IntptrTy, 0));
3530 RTCI.createRuntimeCall(IRB, AsanStackMallocFunc[StackMallocIdx],
3531 ConstantInt::get(IntptrTy, LocalStackSize));
3533 Value *NoFakeStack =
3538 Value *AllocaValue =
3539 DoDynamicAlloca ? createAllocaForLayout(IRBIf, L,
true) : StaticAlloca;
3542 LocalStackBase = createPHI(IRB, NoFakeStack, AllocaValue, Term, FakeStack);
3543 IRB.
CreateStore(LocalStackBase, LocalStackBaseAlloca);
3548 FakeStack = ConstantInt::get(IntptrTy, 0);
3550 DoDynamicAlloca ? createAllocaForLayout(IRB, L,
true) : StaticAlloca;
3551 LocalStackBaseAlloca = LocalStackBase;
3557 Value *LocalStackBaseAllocaPtr =
3558 isa<PtrToIntInst>(LocalStackBaseAlloca)
3559 ? cast<PtrToIntInst>(LocalStackBaseAlloca)->getPointerOperand()
3560 : LocalStackBaseAlloca;
3561 assert(isa<AllocaInst>(LocalStackBaseAllocaPtr) &&
3562 "Variable descriptions relative to ASan stack base will be dropped");
3565 for (
const auto &
Desc : SVD) {
3570 IRB.
CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy,
Desc.Offset)),
3583 ConstantInt::get(IntptrTy, ASan.LongSize / 8)),
3593 ConstantInt::get(IntptrTy, 2 * ASan.LongSize / 8)),
3600 Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB);
3603 copyToShadow(ShadowAfterScope, ShadowAfterScope, IRB, ShadowBase);
3605 if (!StaticAllocaPoisonCallVec.empty()) {
3609 for (
const auto &APC : StaticAllocaPoisonCallVec) {
3612 size_t Begin =
Desc.Offset /
L.Granularity;
3613 size_t End = Begin + (APC.Size +
L.Granularity - 1) /
L.Granularity;
3616 copyToShadow(ShadowAfterScope,
3617 APC.DoPoison ? ShadowAfterScope : ShadowInScope, Begin,
End,
3631 if (DoStackMalloc) {
3632 assert(StackMallocIdx >= 0);
3649 if (ASan.MaxInlinePoisoningSize != 0 && StackMallocIdx <= 4) {
3651 ShadowAfterReturn.
resize(ClassSize /
L.Granularity,
3653 copyToShadow(ShadowAfterReturn, ShadowAfterReturn, IRBPoison,
3655 Value *SavedFlagPtrPtr = IRBPoison.CreateAdd(
3657 ConstantInt::get(IntptrTy, ClassSize - ASan.LongSize / 8));
3658 Value *SavedFlagPtr = IRBPoison.CreateLoad(
3659 IntptrTy, IRBPoison.CreateIntToPtr(SavedFlagPtrPtr, IntptrPtrTy));
3660 IRBPoison.CreateStore(
3662 IRBPoison.CreateIntToPtr(SavedFlagPtr, IRBPoison.getPtrTy()));
3665 RTCI.createRuntimeCall(
3666 IRBPoison, AsanStackFreeFunc[StackMallocIdx],
3667 {FakeStack, ConstantInt::get(IntptrTy, LocalStackSize)});
3671 copyToShadow(ShadowAfterScope, ShadowClean, IRBElse, ShadowBase);
3673 copyToShadow(ShadowAfterScope, ShadowClean, IRBRet, ShadowBase);
3678 for (
auto *AI : AllocaVec)
3686 Value *SizeArg = ConstantInt::get(IntptrTy,
Size);
3687 RTCI.createRuntimeCall(
3688 IRB, DoPoison ? AsanPoisonStackMemoryFunc : AsanUnpoisonStackMemoryFunc,
3689 {AddrArg, SizeArg});
3700void FunctionStackPoisoner::handleDynamicAllocaCall(
AllocaInst *AI) {
3708 Value *AllocaRzMask = ConstantInt::get(IntptrTy, AllocaRedzoneMask);
3714 const unsigned ElementSize =
3718 ConstantInt::get(IntptrTy, ElementSize));
3746 ConstantInt::get(IntptrTy, Alignment.
value()));
3749 RTCI.createRuntimeCall(IRB, AsanAllocaPoisonFunc, {NewAddress, OldSize});
static cl::opt< bool > ClUseStackSafety("stack-tagging-use-stack-safety", cl::Hidden, cl::init(true), cl::desc("Use Stack Safety analysis results"))
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
static void findStoresToUninstrumentedArgAllocas(AddressSanitizer &ASan, Instruction &InsBefore, SmallVectorImpl< Instruction * > &InitInsts)
Collect instructions in the entry block after InsBefore which initialize permanent storage for a func...
static void doInstrumentAddress(AddressSanitizer *Pass, Instruction *I, Instruction *InsertBefore, Value *Addr, MaybeAlign Alignment, unsigned Granularity, TypeSize TypeStoreSize, bool IsWrite, Value *SizeArgument, bool UseCalls, uint32_t Exp, RuntimeCallInserter &RTCI)
static const uint64_t kDefaultShadowScale
const char kAMDGPUUnreachableName[]
constexpr size_t kAccessSizeIndexMask
static cl::opt< int > ClDebugMin("asan-debug-min", cl::desc("Debug min inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClUsePrivateAlias("asan-use-private-alias", cl::desc("Use private aliases for global variables"), cl::Hidden, cl::init(true))
static const uint64_t kPS_ShadowOffset64
static const uint64_t kFreeBSD_ShadowOffset32
constexpr size_t kIsWriteShift
static const uint64_t kSmallX86_64ShadowOffsetAlignMask
static bool isInterestingPointerSubtraction(Instruction *I)
const char kAMDGPUAddressSharedName[]
const char kAsanStackFreeNameTemplate[]
constexpr size_t kCompileKernelMask
static cl::opt< bool > ClForceDynamicShadow("asan-force-dynamic-shadow", cl::desc("Load shadow address into a local variable for each function"), cl::Hidden, cl::init(false))
const char kAsanOptionDetectUseAfterReturn[]
static cl::opt< std::string > ClMemoryAccessCallbackPrefix("asan-memory-access-callback-prefix", cl::desc("Prefix for memory access callbacks"), cl::Hidden, cl::init("__asan_"))
static const uint64_t kRISCV64_ShadowOffset64
static cl::opt< bool > ClInsertVersionCheck("asan-guard-against-version-mismatch", cl::desc("Guard against compiler/runtime version mismatch."), cl::Hidden, cl::init(true))
const char kAsanSetShadowPrefix[]
static cl::opt< AsanDtorKind > ClOverrideDestructorKind("asan-destructor-kind", cl::desc("Sets the ASan destructor kind. The default is to use the value " "provided to the pass constructor"), cl::values(clEnumValN(AsanDtorKind::None, "none", "No destructors"), clEnumValN(AsanDtorKind::Global, "global", "Use global destructors")), cl::init(AsanDtorKind::Invalid), cl::Hidden)
static Twine genName(StringRef suffix)
static cl::opt< bool > ClInstrumentWrites("asan-instrument-writes", cl::desc("instrument write instructions"), cl::Hidden, cl::init(true))
static uint64_t GetCtorAndDtorPriority(Triple &TargetTriple)
const char kAsanStackMallocNameTemplate[]
static cl::opt< bool > ClInstrumentByval("asan-instrument-byval", cl::desc("instrument byval call arguments"), cl::Hidden, cl::init(true))
const char kAsanInitName[]
static cl::opt< bool > ClGlobals("asan-globals", cl::desc("Handle global objects"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClRedzoneByvalArgs("asan-redzone-byval-args", cl::desc("Create redzones for byval " "arguments (extra copy " "required)"), cl::Hidden, cl::init(true))
static const uint64_t kWindowsShadowOffset64
static const uint64_t kEmscriptenShadowOffset
const char kAsanGenPrefix[]
constexpr size_t kIsWriteMask
static uint64_t getRedzoneSizeForScale(int MappingScale)
static const uint64_t kDefaultShadowOffset64
static cl::opt< bool > ClOptimizeCallbacks("asan-optimize-callbacks", cl::desc("Optimize callbacks"), cl::Hidden, cl::init(false))
const char kAsanUnregisterGlobalsName[]
static const uint64_t kAsanCtorAndDtorPriority
const char kAsanUnpoisonGlobalsName[]
static cl::opt< bool > ClWithIfuncSuppressRemat("asan-with-ifunc-suppress-remat", cl::desc("Suppress rematerialization of dynamic shadow address by passing " "it through inline asm in prologue."), cl::Hidden, cl::init(true))
static cl::opt< int > ClDebugStack("asan-debug-stack", cl::desc("debug stack"), cl::Hidden, cl::init(0))
const char kAsanUnregisterElfGlobalsName[]
static bool isUnsupportedAMDGPUAddrspace(Value *Addr)
const char kAsanRegisterImageGlobalsName[]
static cl::opt< bool > ClOpt("asan-opt", cl::desc("Optimize instrumentation"), cl::Hidden, cl::init(true))
static const uint64_t kAllocaRzSize
const char kODRGenPrefix[]
static const uint64_t kSystemZ_ShadowOffset64
static const uint64_t kDefaultShadowOffset32
const char kAsanShadowMemoryDynamicAddress[]
static cl::opt< bool > ClUseOdrIndicator("asan-use-odr-indicator", cl::desc("Use odr indicators to improve ODR reporting"), cl::Hidden, cl::init(true))
static bool GlobalWasGeneratedByCompiler(GlobalVariable *G)
Check if G has been created by a trusted compiler pass.
const char kAsanStackMallocAlwaysNameTemplate[]
static cl::opt< bool > ClInvalidPointerCmp("asan-detect-invalid-pointer-cmp", cl::desc("Instrument <, <=, >, >= with pointer operands"), cl::Hidden, cl::init(false))
static const uint64_t kAsanEmscriptenCtorAndDtorPriority
static cl::opt< int > ClInstrumentationWithCallsThreshold("asan-instrumentation-with-call-threshold", cl::desc("If the function being instrumented contains more than " "this number of memory accesses, use callbacks instead of " "inline checks (-1 means never use callbacks)."), cl::Hidden, cl::init(7000))
static cl::opt< int > ClDebugMax("asan-debug-max", cl::desc("Debug max inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClInvalidPointerSub("asan-detect-invalid-pointer-sub", cl::desc("Instrument - operations with pointer operands"), cl::Hidden, cl::init(false))
static const uint64_t kFreeBSD_ShadowOffset64
static cl::opt< uint32_t > ClForceExperiment("asan-force-experiment", cl::desc("Force optimization experiment (for testing)"), cl::Hidden, cl::init(0))
const char kSanCovGenPrefix[]
static const uint64_t kFreeBSDKasan_ShadowOffset64
const char kAsanModuleDtorName[]
static const uint64_t kDynamicShadowSentinel
static bool isInterestingPointerComparison(Instruction *I)
static cl::opt< bool > ClStack("asan-stack", cl::desc("Handle stack memory"), cl::Hidden, cl::init(true))
static const uint64_t kMIPS64_ShadowOffset64
static const uint64_t kLinuxKasan_ShadowOffset64
static int StackMallocSizeClass(uint64_t LocalStackSize)
static cl::opt< uint32_t > ClMaxInlinePoisoningSize("asan-max-inline-poisoning-size", cl::desc("Inline shadow poisoning for blocks up to the given size in bytes."), cl::Hidden, cl::init(64))
static cl::opt< bool > ClInstrumentAtomics("asan-instrument-atomics", cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClUseAfterScope("asan-use-after-scope", cl::desc("Check stack-use-after-scope"), cl::Hidden, cl::init(false))
constexpr size_t kAccessSizeIndexShift
static cl::opt< int > ClMappingScale("asan-mapping-scale", cl::desc("scale of asan shadow mapping"), cl::Hidden, cl::init(0))
const char kAsanPoisonStackMemoryName[]
static cl::opt< bool > ClEnableKasan("asan-kernel", cl::desc("Enable KernelAddressSanitizer instrumentation"), cl::Hidden, cl::init(false))
static cl::opt< std::string > ClDebugFunc("asan-debug-func", cl::Hidden, cl::desc("Debug func"))
static cl::opt< bool > ClUseGlobalsGC("asan-globals-live-support", cl::desc("Use linker features to support dead " "code stripping of globals"), cl::Hidden, cl::init(true))
static const size_t kNumberOfAccessSizes
const char kAsanUnpoisonStackMemoryName[]
static const uint64_t kLoongArch64_ShadowOffset64
const char kAsanRegisterGlobalsName[]
static cl::opt< bool > ClInstrumentDynamicAllocas("asan-instrument-dynamic-allocas", cl::desc("instrument dynamic allocas"), cl::Hidden, cl::init(true))
const char kAsanModuleCtorName[]
const char kAsanGlobalsRegisteredFlagName[]
static const size_t kMaxStackMallocSize
static cl::opt< bool > ClRecover("asan-recover", cl::desc("Enable recovery mode (continue-after-error)."), cl::Hidden, cl::init(false))
static cl::opt< bool > ClOptSameTemp("asan-opt-same-temp", cl::desc("Instrument the same temp just once"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClDynamicAllocaStack("asan-stack-dynamic-alloca", cl::desc("Use dynamic alloca to represent stack variables"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClOptStack("asan-opt-stack", cl::desc("Don't instrument scalar stack variables"), cl::Hidden, cl::init(false))
static const uint64_t kMIPS_ShadowOffsetN32
const char kAsanUnregisterImageGlobalsName[]
static cl::opt< AsanDetectStackUseAfterReturnMode > ClUseAfterReturn("asan-use-after-return", cl::desc("Sets the mode of detection for stack-use-after-return."), cl::values(clEnumValN(AsanDetectStackUseAfterReturnMode::Never, "never", "Never detect stack use after return."), clEnumValN(AsanDetectStackUseAfterReturnMode::Runtime, "runtime", "Detect stack use after return if " "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."), clEnumValN(AsanDetectStackUseAfterReturnMode::Always, "always", "Always detect stack use after return.")), cl::Hidden, cl::init(AsanDetectStackUseAfterReturnMode::Runtime))
static cl::opt< bool > ClOptGlobals("asan-opt-globals", cl::desc("Don't instrument scalar globals"), cl::Hidden, cl::init(true))
static const uintptr_t kCurrentStackFrameMagic
static ShadowMapping getShadowMapping(const Triple &TargetTriple, int LongSize, bool IsKasan)
static const uint64_t kPPC64_ShadowOffset64
static cl::opt< AsanCtorKind > ClConstructorKind("asan-constructor-kind", cl::desc("Sets the ASan constructor kind"), cl::values(clEnumValN(AsanCtorKind::None, "none", "No constructors"), clEnumValN(AsanCtorKind::Global, "global", "Use global constructors")), cl::init(AsanCtorKind::Global), cl::Hidden)
static const int kMaxAsanStackMallocSizeClass
static const uint64_t kMIPS32_ShadowOffset32
static cl::opt< bool > ClAlwaysSlowPath("asan-always-slow-path", cl::desc("use instrumentation with slow path for all accesses"), cl::Hidden, cl::init(false))
static const uint64_t kNetBSD_ShadowOffset32
static const uint64_t kFreeBSDAArch64_ShadowOffset64
static const uint64_t kSmallX86_64ShadowOffsetBase
static cl::opt< bool > ClInitializers("asan-initialization-order", cl::desc("Handle C++ initializer order"), cl::Hidden, cl::init(true))
static const uint64_t kNetBSD_ShadowOffset64
static cl::opt< unsigned > ClRealignStack("asan-realign-stack", cl::desc("Realign stack to the value of this flag (power of two)"), cl::Hidden, cl::init(32))
static const uint64_t kWindowsShadowOffset32
static cl::opt< bool > ClInstrumentReads("asan-instrument-reads", cl::desc("instrument read instructions"), cl::Hidden, cl::init(true))
static size_t TypeStoreSizeToSizeIndex(uint32_t TypeSize)
const char kAsanAllocaPoison[]
constexpr size_t kCompileKernelShift
static cl::opt< bool > ClWithIfunc("asan-with-ifunc", cl::desc("Access dynamic shadow through an ifunc global on " "platforms that support this"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClKasanMemIntrinCallbackPrefix("asan-kernel-mem-intrinsic-prefix", cl::desc("Use prefix for memory intrinsics in KASAN mode"), cl::Hidden, cl::init(false))
const char kAsanVersionCheckNamePrefix[]
const char kAMDGPUAddressPrivateName[]
static const uint64_t kNetBSDKasan_ShadowOffset64
const char kAMDGPUBallotName[]
const char kAsanRegisterElfGlobalsName[]
static cl::opt< uint64_t > ClMappingOffset("asan-mapping-offset", cl::desc("offset of asan shadow mapping [EXPERIMENTAL]"), cl::Hidden, cl::init(0))
const char kAsanReportErrorTemplate[]
static cl::opt< bool > ClWithComdat("asan-with-comdat", cl::desc("Place ASan constructors in comdat sections"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClSkipPromotableAllocas("asan-skip-promotable-allocas", cl::desc("Do not instrument promotable allocas"), cl::Hidden, cl::init(true))
static cl::opt< int > ClMaxInsnsToInstrumentPerBB("asan-max-ins-per-bb", cl::init(10000), cl::desc("maximal number of instructions to instrument in any given BB"), cl::Hidden)
static const uintptr_t kRetiredStackFrameMagic
static cl::opt< bool > ClUseStackSafety("asan-use-stack-safety", cl::Hidden, cl::init(true), cl::Hidden, cl::desc("Use Stack Safety analysis results"), cl::Optional)
const char kAsanPoisonGlobalsName[]
const char kAsanHandleNoReturnName[]
static const size_t kMinStackMallocSize
static cl::opt< int > ClDebug("asan-debug", cl::desc("debug"), cl::Hidden, cl::init(0))
const char kAsanAllocasUnpoison[]
static const uint64_t kAArch64_ShadowOffset64
static cl::opt< bool > ClInvalidPointerPairs("asan-detect-invalid-pointer-pair", cl::desc("Instrument <, <=, >, >=, - with pointer operands"), cl::Hidden, cl::init(false))
This file contains the simple types necessary to represent the attributes associated with functions a...
static bool isPointerOperand(Value *I, User *U)
static const Function * getParent(const Value *V)
static GCRegistry::Add< StatepointGC > D("statepoint-example", "an example strategy for statepoint")
#define clEnumValN(ENUMVAL, FLAGNAME, DESC)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file defines the DenseMap class.
This file builds on the ADT/GraphTraits.h file to build generic depth first graph iterator.
static bool runOnFunction(Function &F, bool PostInlining)
This is the interface for a simple mod/ref and alias analysis over globals.
Module.h This file contains the declarations for the Module class.
This defines the Use class.
uint64_t IntrinsicInst * II
FunctionAnalysisManager FAM
ModuleAnalysisManager MAM
const SmallVectorImpl< MachineOperand > & Cond
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
uint64_t getZExtValue() const
Get zero extended value.
int64_t getSExtValue() const
Get sign extended value.
AddressSanitizerPass(const AddressSanitizerOptions &Options, bool UseGlobalGC=true, bool UseOdrIndicator=true, AsanDtorKind DestructorKind=AsanDtorKind::Global, AsanCtorKind ConstructorKind=AsanCtorKind::Global)
PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM)
void printPipeline(raw_ostream &OS, function_ref< StringRef(StringRef)> MapClassName2PassName)
an instruction to allocate memory on the stack
bool isSwiftError() const
Return true if this alloca is used as a swifterror argument to a call.
bool isStaticAlloca() const
Return true if this alloca is in the entry block of the function and is a constant size.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
PointerType * getType() const
Overload to return most specific pointer type.
Type * getAllocatedType() const
Return the type that is being allocated by the instruction.
bool isUsedWithInAlloca() const
Return true if this alloca is used as an inalloca argument to a call.
std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
void setAlignment(Align Align)
const Value * getArraySize() const
Get the number of elements allocated.
A container for analyses that lazily runs them and caches their results.
PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)
Get the result of an analysis pass for a given IR unit.
This class represents an incoming formal argument to a Function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
size_t size() const
size - Get the array size.
static ArrayType * get(Type *ElementType, uint64_t NumElements)
This static method is the primary way to construct an ArrayType.
An instruction that atomically checks whether a specified value is in a memory location,...
an instruction that atomically reads a memory location, combines it with another value,...
AttributeList addParamAttribute(LLVMContext &C, unsigned ArgNo, Attribute::AttrKind Kind) const
Add an argument attribute to the list.
LLVM Basic Block Representation.
iterator begin()
Instruction iterator methods.
const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Function * getParent() const
Return the enclosing method, or null if none.
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
const Module * getModule() const
Return the module owning the function this basic block belongs to, or nullptr if the function does no...
Conditional or Unconditional Branch instruction.
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
bool isInlineAsm() const
Check if this call is an inline asm statement.
static CallBase * addOperandBundle(CallBase *CB, uint32_t ID, OperandBundleDef OB, InsertPosition InsertPt=nullptr)
Create a clone of CB with operand bundle OB added.
bool doesNotReturn() const
Determine if the call cannot return.
unsigned arg_size() const
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
@ Largest
The linker will choose the largest COMDAT.
@ SameSize
The data referenced by the COMDAT must be the same size.
@ Any
The linker may choose any COMDAT.
@ NoDeduplicate
No deduplication is performed.
@ ExactMatch
The data referenced by the COMDAT must be the same.
ConstantArray - Constant Array Declarations.
static Constant * get(ArrayType *T, ArrayRef< Constant * > V)
static Constant * getIntToPtr(Constant *C, Type *Ty, bool OnlyIfReduced=false)
static Constant * getPointerCast(Constant *C, Type *Ty)
Create a BitCast, AddrSpaceCast, or a PtrToInt cast constant expression.
static Constant * getGetElementPtr(Type *Ty, Constant *C, ArrayRef< Constant * > IdxList, GEPNoWrapFlags NW=GEPNoWrapFlags::none(), std::optional< ConstantRange > InRange=std::nullopt, Type *OnlyIfReducedTy=nullptr)
Getelementptr form.
static bool isValueValidForType(Type *Ty, uint64_t V)
This static method returns true if the type Ty is big enough to represent the value V.
static ConstantPointerNull * get(PointerType *T)
Static factory methods - Return objects of the specified value.
static Constant * get(StructType *T, ArrayRef< Constant * > V)
This is an important base class in LLVM.
static Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
Constant * getAggregateElement(unsigned Elt) const
For aggregates (struct/array/vector) return the constant that corresponds to the specified element if...
This class represents an Operation in the Expression.
A parsed version of the target data layout string in and methods for querying it.
DILocation * get() const
Get the underlying DILocation.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
static FunctionType * get(Type *Result, ArrayRef< Type * > Params, bool isVarArg)
This static method is the primary way of constructing a FunctionType.
const BasicBlock & front() const
static Function * createWithDefaultAttr(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
Creates a function with some attributes recorded in llvm.module.flags and the LLVMContext applied.
bool hasPersonalityFn() const
Check whether this function has a personality function.
Constant * getPersonalityFn() const
Get the personality function associated with this function.
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
const Constant * getAliasee() const
static GlobalAlias * create(Type *Ty, unsigned AddressSpace, LinkageTypes Linkage, const Twine &Name, Constant *Aliasee, Module *Parent)
If a parent module is specified, the alias is automatically inserted into the end of the specified mo...
void setAlignment(Align Align)
Sets the alignment attribute of the GlobalObject.
void copyMetadata(const GlobalObject *Src, unsigned Offset)
Copy metadata from Src, adjusting offsets by Offset.
void setComdat(Comdat *C)
void setSection(StringRef S)
Change the section for this global.
VisibilityTypes getVisibility() const
void setUnnamedAddr(UnnamedAddr Val)
bool hasLocalLinkage() const
static StringRef dropLLVMManglingEscape(StringRef Name)
If the given string begins with the GlobalValue name mangling escape character '\1',...
ThreadLocalMode getThreadLocalMode() const
@ HiddenVisibility
The GV is hidden.
void setVisibility(VisibilityTypes V)
LinkageTypes
An enumeration for the kinds of linkage for global values.
@ PrivateLinkage
Like Internal, but omit from symbol table.
@ CommonLinkage
Tentative definitions.
@ InternalLinkage
Rename collisions when linking (static functions).
@ AvailableExternallyLinkage
Available for inspection, not emission.
@ ExternalWeakLinkage
ExternalWeak linkage description.
DLLStorageClassTypes getDLLStorageClass() const
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
void copyAttributesFrom(const GlobalVariable *Src)
copyAttributesFrom - copy all additional attributes (those not needed to create a GlobalVariable) fro...
Analysis pass providing a never-invalidated alias analysis result.
This instruction compares its operands according to the predicate given to the constructor.
Common base class shared among various IRBuilders.
AllocaInst * CreateAlloca(Type *Ty, unsigned AddrSpace, Value *ArraySize=nullptr, const Twine &Name="")
IntegerType * getInt1Ty()
Fetch the type representing a single bit.
Value * CreateExtractElement(Value *Vec, Value *Idx, const Twine &Name="")
LoadInst * CreateAlignedLoad(Type *Ty, Value *Ptr, MaybeAlign Align, const char *Name)
Value * CreatePointerCast(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateICmpSGE(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateSelect(Value *C, Value *True, Value *False, const Twine &Name="", Instruction *MDFrom=nullptr)
BasicBlock::iterator GetInsertPoint() const
Value * CreateIntToPtr(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateTypeSize(Type *DstType, TypeSize Size)
Create an expression which evaluates to the number of units in Size at runtime.
Value * CreateLShr(Value *LHS, Value *RHS, const Twine &Name="", bool isExact=false)
IntegerType * getInt32Ty()
Fetch the type representing a 32-bit integer.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
BasicBlock * GetInsertBlock() const
IntegerType * getInt64Ty()
Fetch the type representing a 64-bit integer.
Value * CreateICmpNE(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateGEP(Type *Ty, Value *Ptr, ArrayRef< Value * > IdxList, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
CallInst * CreateIntrinsic(Intrinsic::ID ID, ArrayRef< Type * > Types, ArrayRef< Value * > Args, FMFSource FMFSource={}, const Twine &Name="")
Create a call to intrinsic ID with Args, mangled using Types.
ConstantInt * getInt32(uint32_t C)
Get a constant 32-bit value.
PHINode * CreatePHI(Type *Ty, unsigned NumReservedValues, const Twine &Name="")
Value * CreateNot(Value *V, const Twine &Name="")
Value * CreateICmpEQ(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateSub(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
ConstantInt * getIntN(unsigned N, uint64_t C)
Get a constant N-bit value, zero extended or truncated from a 64-bit value.
LoadInst * CreateLoad(Type *Ty, Value *Ptr, const char *Name)
Provided to resolve 'CreateLoad(Ty, Ptr, "...")' correctly, instead of converting the string to 'bool...
Value * CreateAnd(Value *LHS, Value *RHS, const Twine &Name="")
StoreInst * CreateStore(Value *Val, Value *Ptr, bool isVolatile=false)
Value * CreateAdd(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
Value * CreatePtrToInt(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateIsNotNull(Value *Arg, const Twine &Name="")
Return a boolean value testing if Arg != 0.
CallInst * CreateCall(FunctionType *FTy, Value *Callee, ArrayRef< Value * > Args={}, const Twine &Name="", MDNode *FPMathTag=nullptr)
Value * CreateOr(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateIntCast(Value *V, Type *DestTy, bool isSigned, const Twine &Name="")
void SetInsertPoint(BasicBlock *TheBB)
This specifies that created instructions should be appended to the end of the specified block.
Type * getVoidTy()
Fetch the type representing void.
StoreInst * CreateAlignedStore(Value *Val, Value *Ptr, MaybeAlign Align, bool isVolatile=false)
IntegerType * getInt8Ty()
Fetch the type representing an 8-bit integer.
CallInst * CreateMemCpy(Value *Dst, MaybeAlign DstAlign, Value *Src, MaybeAlign SrcAlign, uint64_t Size, bool isVolatile=false, MDNode *TBAATag=nullptr, MDNode *TBAAStructTag=nullptr, MDNode *ScopeTag=nullptr, MDNode *NoAliasTag=nullptr)
Create and insert a memcpy between the specified pointers.
Value * CreateAddrSpaceCast(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateMul(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
static InlineAsm * get(FunctionType *Ty, StringRef AsmString, StringRef Constraints, bool hasSideEffects, bool isAlignStack=false, AsmDialect asmDialect=AD_ATT, bool canThrow=false)
InlineAsm::get - Return the specified uniqued inline asm string.
An analysis over an "outer" IR unit that provides access to an analysis manager over an "inner" IR un...
Base class for instruction visitors.
RetTy visitCallBase(CallBase &I)
RetTy visitCleanupReturnInst(CleanupReturnInst &I)
RetTy visitIntrinsicInst(IntrinsicInst &I)
void visit(Iterator Start, Iterator End)
RetTy visitReturnInst(ReturnInst &I)
RetTy visitAllocaInst(AllocaInst &I)
RetTy visitResumeInst(ResumeInst &I)
const DebugLoc & getDebugLoc() const
Return the debug location for this node as a DebugLoc.
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
bool isEHPad() const
Return true if the instruction is a variety of EH-block.
InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
const Instruction * getNextNonDebugInstruction(bool SkipPseudoOp=false) const
Return a pointer to the next non-debug instruction in the same basic block as 'this',...
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
void moveBefore(Instruction *MovePos)
Unlink this instruction from its current basic block and insert it into the basic block that MovePos ...
static IntegerType * get(LLVMContext &C, unsigned NumBits)
This static method is the primary way of constructing an IntegerType.
A wrapper class for inspecting calls to intrinsic functions.
This is an important class for using LLVM in a threaded context.
void emitError(const Instruction *I, const Twine &ErrorStr)
emitError - Emit an error message to the currently installed error handler with optional location inf...
An instruction for reading from memory.
static Error ParseSectionSpecifier(StringRef Spec, StringRef &Segment, StringRef &Section, unsigned &TAA, bool &TAAParsed, unsigned &StubSize)
Parse the section specifier indicated by "Spec".
MDNode * createUnlikelyBranchWeights()
Return metadata containing two branch weights, with significant bias towards false destination.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
This is the common base class for memset/memcpy/memmove.
A Module instance is used to store all the information related to an LLVM module.
Evaluate the size and offset of an object pointed to by a Value* statically.
SizeOffsetAPInt compute(Value *V)
A container for an operand bundle being viewed as a set of values rather than a set of uses.
Pass interface - Implemented by all 'passes'.
static PointerType * get(Type *ElementType, unsigned AddressSpace)
This constructs a pointer to an object of the specified type in a numbered address space.
static PointerType * getUnqual(Type *ElementType)
This constructs a pointer to an object of the specified type in the default address space (address sp...
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses none()
Convenience factory function for the empty preserved set.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
void abandon()
Mark an analysis as abandoned.
Resume the propagation of an exception.
Return a value (possibly void), from a function.
static ReturnInst * Create(LLVMContext &C, Value *retVal=nullptr, InsertPosition InsertBefore=nullptr)
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void reserve(size_type N)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
This pass performs the global (interprocedural) stack safety analysis (new pass manager).
bool stackAccessIsSafe(const Instruction &I) const
bool isSafe(const AllocaInst &AI) const
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
bool starts_with(StringRef Prefix) const
Check if this string starts with the given Prefix.
constexpr bool empty() const
empty - Check if the string is empty.
constexpr const char * data() const
data - Get a pointer to the start of the string (which may not be null terminated).
Class to represent struct types.
static StructType * get(LLVMContext &Context, ArrayRef< Type * > Elements, bool isPacked=false)
This static method is the primary way to create a literal StructType.
Analysis pass providing the TargetLibraryInfo.
Provides information about what library functions are available for the current target.
AttributeList getAttrList(LLVMContext *C, ArrayRef< unsigned > ArgNos, bool Signed, bool Ret=false, AttributeList AL=AttributeList()) const
TinyPtrVector - This class is specialized for cases where there are normally 0 or 1 element in a vect...
Triple - Helper class for working with autoconf configuration names.
bool isAndroidVersionLT(unsigned Major) const
bool isThumb() const
Tests whether the target is Thumb (little and big endian).
bool isDriverKit() const
Is this an Apple DriverKit triple.
bool isAndroid() const
Tests whether the target is Android.
bool isMIPS64() const
Tests whether the target is MIPS 64-bit (little and big endian).
ArchType getArch() const
Get the parsed architecture type of this triple.
bool isLoongArch64() const
Tests whether the target is 64-bit LoongArch.
bool isMIPS32() const
Tests whether the target is MIPS 32-bit (little and big endian).
bool isOSWindows() const
Tests whether the OS is Windows.
bool isARM() const
Tests whether the target is ARM (little and big endian).
bool isOSLinux() const
Tests whether the OS is Linux.
bool isMacOSX() const
Is this a Mac OS X triple.
bool isOSEmscripten() const
Tests whether the OS is Emscripten.
bool isWatchOS() const
Is this an Apple watchOS triple.
bool isiOS() const
Is this an iOS triple.
bool isPS() const
Tests whether the target is the PS4 or PS5 platform.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
static IntegerType * getIntNTy(LLVMContext &C, unsigned N)
static Type * getVoidTy(LLVMContext &C)
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
static IntegerType * getInt32Ty(LLVMContext &C)
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
Value * getOperand(unsigned i) const
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
StringRef getName() const
Return a constant reference to the value's name.
void takeName(Value *V)
Transfer the name from V to this value.
static VectorType * get(Type *ElementType, ElementCount EC)
This static method is the primary way to construct a VectorType.
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
An efficient, type-erasing, non-owning reference to a callable.
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
This file contains the declaration of the Comdat class, which represents a single COMDAT in LLVM.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
void getInterestingMemoryOperands(Module &M, Instruction *I, SmallVectorImpl< InterestingMemoryOperand > &Interesting)
Get all the memory operands from the instruction that needs to be instrumented.
void instrumentAddress(Module &M, IRBuilder<> &IRB, Instruction *OrigIns, Instruction *InsertBefore, Value *Addr, Align Alignment, TypeSize TypeStoreSize, bool IsWrite, Value *SizeArgument, bool UseCalls, bool Recover, int AsanScale, int AsanOffset)
Instrument the memory operand Addr.
uint64_t getRedzoneSizeForGlobal(int AsanScale, uint64_t SizeInBytes)
Given SizeInBytes of the Value to be instrumented, returns the redzone size corresponding to it.
constexpr std::underlying_type_t< E > Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
@ C
The default llvm calling convention, compatible with C.
@ S_CSTRING_LITERALS
S_CSTRING_LITERALS - Section with literal C strings.
@ OB
OB - OneByte - Set if this instruction has a one byte opcode.
ValuesClass values(OptsTy... Options)
Helper to build a ValuesClass by forwarding a variable number of arguments as an initializer list to ...
initializer< Ty > init(const Ty &Val)
Linkage
Describes symbol linkage. This can be used to resolve definition clashes.
uint64_t getAllocaSizeInBytes(const AllocaInst &AI)
This is an optimization pass for GlobalISel generic memory operations.
void ReplaceInstWithInst(BasicBlock *BB, BasicBlock::iterator &BI, Instruction *I)
Replace the instruction specified by BI with the instruction specified by I.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
SmallVector< uint8_t, 64 > GetShadowBytesAfterScope(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
GlobalVariable * createPrivateGlobalForString(Module &M, StringRef Str, bool AllowMerging, Twine NamePrefix="")
AllocaInst * findAllocaForValue(Value *V, bool OffsetZero=false)
Returns unique alloca where the value comes from, or nullptr.
Function * createSanitizerCtor(Module &M, StringRef CtorName)
Creates sanitizer constructor function.
AsanDetectStackUseAfterReturnMode
Mode of ASan detect stack use after return.
@ Always
Always detect stack use after return.
@ Never
Never detect stack use after return.
@ Runtime
Detect stack use after return if not disabled runtime with (ASAN_OPTIONS=detect_stack_use_after_retur...
DenseMap< BasicBlock *, ColorVector > colorEHFunclets(Function &F)
If an EH funclet personality is in use (see isFuncletEHPersonality), this will recompute which blocks...
const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=6)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
bool isAllocaPromotable(const AllocaInst *AI)
Return true if this alloca is legal for promotion.
bool isScopedEHPersonality(EHPersonality Pers)
Returns true if this personality uses scope-style EH IR instructions: catchswitch,...
SmallString< 64 > ComputeASanStackFrameDescription(const SmallVectorImpl< ASanStackVariableDescription > &Vars)
SmallVector< uint8_t, 64 > GetShadowBytes(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
int countr_zero(T Val)
Count number of 0's from the least significant bit to the most stopping at the first 1.
FunctionCallee declareSanitizerInitFunction(Module &M, StringRef InitName, ArrayRef< Type * > InitArgTypes, bool Weak=false)
std::string getUniqueModuleId(Module *M)
Produce a unique identifier for this module by taking the MD5 sum of the names of the module's strong...
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
std::pair< Function *, FunctionCallee > createSanitizerCtorAndInitFunctions(Module &M, StringRef CtorName, StringRef InitName, ArrayRef< Type * > InitArgTypes, ArrayRef< Value * > InitArgs, StringRef VersionCheckName=StringRef(), bool Weak=false)
Creates sanitizer constructor function, and calls sanitizer's init function from it.
decltype(auto) get(const PointerIntPair< PointerTy, IntBits, IntType, PtrTraits, Info > &Pair)
void SplitBlockAndInsertIfThenElse(Value *Cond, BasicBlock::iterator SplitBefore, Instruction **ThenTerm, Instruction **ElseTerm, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr)
SplitBlockAndInsertIfThenElse is similar to SplitBlockAndInsertIfThen, but also creates the ElseBlock...
void SplitBlockAndInsertForEachLane(ElementCount EC, Type *IndexTy, Instruction *InsertBefore, std::function< void(IRBuilderBase &, Value *)> Func)
Utility function for performing a given action on each lane of a vector with EC elements.
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
void report_fatal_error(Error Err, bool gen_crash_diag=true)
Report a serious error, calling any installed error handler.
EHPersonality classifyEHPersonality(const Value *Pers)
See if the given exception handling personality function is one that we understand.
AsanDtorKind
Types of ASan module destructors supported.
@ None
Do not emit any destructors for ASan.
ASanStackFrameLayout ComputeASanStackFrameLayout(SmallVectorImpl< ASanStackVariableDescription > &Vars, uint64_t Granularity, uint64_t MinHeaderSize)
void cantFail(Error Err, const char *Msg=nullptr)
Report a fatal error if Err is a failure value.
void appendToCompilerUsed(Module &M, ArrayRef< GlobalValue * > Values)
Adds global values to the llvm.compiler.used list.
static const int kAsanStackUseAfterReturnMagic
void setGlobalVariableLargeSection(const Triple &TargetTriple, GlobalVariable &GV)
@ Dynamic
Denotes mode unknown at compile time.
void appendToGlobalCtors(Module &M, Function *F, int Priority, Constant *Data=nullptr)
Append F to the list of global ctors of module M with the given Priority.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
iterator_range< df_iterator< T > > depth_first(const T &G)
Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
AsanCtorKind
Types of ASan module constructors supported.
void maybeMarkSanitizerLibraryCallNoBuiltin(CallInst *CI, const TargetLibraryInfo *TLI)
Given a CallInst, check if it calls a string function known to CodeGen, and mark it with NoBuiltin if...
void appendToUsed(Module &M, ArrayRef< GlobalValue * > Values)
Adds global values to the llvm.used list.
void appendToGlobalDtors(Module &M, Function *F, int Priority, Constant *Data=nullptr)
Same as appendToGlobalCtors(), but for global dtors.
bool checkIfAlreadyInstrumented(Module &M, StringRef Flag)
Check if module has flag attached, if not add the flag.
void getAddressSanitizerParams(const Triple &TargetTriple, int LongSize, bool IsKasan, uint64_t *ShadowBase, int *MappingScale, bool *OrShadowOffset)
std::string demangle(std::string_view MangledName)
Attempt to demangle a string using different demangling schemes.
bool replaceDbgDeclare(Value *Address, Value *NewAddress, DIBuilder &Builder, uint8_t DIExprFlags, int Offset)
Replaces llvm.dbg.declare instruction when the address it describes is replaced with a new value.
ASanAccessInfo(int32_t Packed)
AsanDetectStackUseAfterReturnMode UseAfterReturn
int InstrumentationWithCallsThreshold
uint32_t MaxInlinePoisoningSize
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
Description of the encoding of one expression Op.
This struct is a compact representation of a valid (power of two) or undefined (0) alignment.
Align valueOrOne() const
For convenience, returns a valid alignment or 1 if undefined.
A CRTP mix-in to automatically provide informational APIs needed for passes.
SizeOffsetAPInt - Used by ObjectSizeOffsetVisitor, which works with APInts.