93#define DEBUG_TYPE "asan"
99 std::numeric_limits<uint64_t>::max();
140 "__asan_unregister_image_globals";
153 "__asan_stack_malloc_always_";
167 "__asan_option_detect_stack_use_after_return";
170 "__asan_shadow_memory_dynamic_address";
194 "asan-kernel",
cl::desc(
"Enable KernelAddressSanitizer instrumentation"),
199 cl::desc(
"Enable recovery mode (continue-after-error)."),
203 "asan-guard-against-version-mismatch",
204 cl::desc(
"Guard against compiler/runtime version mismatch."),
209 cl::desc(
"instrument read instructions"),
213 "asan-instrument-writes",
cl::desc(
"instrument write instructions"),
222 "asan-instrument-atomics",
232 "asan-always-slow-path",
237 "asan-force-dynamic-shadow",
238 cl::desc(
"Load shadow address into a local variable for each function"),
243 cl::desc(
"Access dynamic shadow through an ifunc global on "
244 "platforms that support this"),
248 "asan-with-ifunc-suppress-remat",
249 cl::desc(
"Suppress rematerialization of dynamic shadow address by passing "
250 "it through inline asm in prologue."),
258 "asan-max-ins-per-bb",
cl::init(10000),
259 cl::desc(
"maximal number of instructions to instrument in any given BB"),
266 "asan-max-inline-poisoning-size",
268 "Inline shadow poisoning for blocks up to the given size in bytes."),
272 "asan-use-after-return",
273 cl::desc(
"Sets the mode of detection for stack-use-after-return."),
275 clEnumValN(AsanDetectStackUseAfterReturnMode::Never,
"never",
276 "Never detect stack use after return."),
278 AsanDetectStackUseAfterReturnMode::Runtime,
"runtime",
279 "Detect stack use after return if "
280 "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."),
281 clEnumValN(AsanDetectStackUseAfterReturnMode::Always,
"always",
282 "Always detect stack use after return.")),
286 cl::desc(
"Create redzones for byval "
287 "arguments (extra copy "
292 cl::desc(
"Check stack-use-after-scope"),
301 cl::desc(
"Handle C++ initializer order"),
305 "asan-detect-invalid-pointer-pair",
310 "asan-detect-invalid-pointer-cmp",
315 "asan-detect-invalid-pointer-sub",
320 "asan-realign-stack",
321 cl::desc(
"Realign stack to the value of this flag (power of two)"),
325 "asan-instrumentation-with-call-threshold",
327 "If the function being instrumented contains more than "
328 "this number of memory accesses, use callbacks instead of "
329 "inline checks (-1 means never use callbacks)."),
333 "asan-memory-access-callback-prefix",
338 "asan-kernel-mem-intrinsic-prefix",
344 cl::desc(
"instrument dynamic allocas"),
348 "asan-skip-promotable-allocas",
353 "asan-constructor-kind",
354 cl::desc(
"Sets the ASan constructor kind"),
357 "Use global constructors")),
364 cl::desc(
"scale of asan shadow mapping"),
369 cl::desc(
"offset of asan shadow mapping [EXPERIMENTAL]"),
383 "asan-opt-same-temp",
cl::desc(
"Instrument the same temp just once"),
387 cl::desc(
"Don't instrument scalar globals"),
391 "asan-opt-stack",
cl::desc(
"Don't instrument scalar stack variables"),
395 "asan-stack-dynamic-alloca",
400 "asan-force-experiment",
406 cl::desc(
"Use private aliases for global variables"),
411 cl::desc(
"Use odr indicators to improve ODR reporting"),
416 cl::desc(
"Use linker features to support dead "
417 "code stripping of globals"),
424 cl::desc(
"Place ASan constructors in comdat sections"),
428 "asan-destructor-kind",
429 cl::desc(
"Sets the ASan destructor kind. The default is to use the value "
430 "provided to the pass constructor"),
433 "Use global destructors")),
453STATISTIC(NumInstrumentedReads,
"Number of instrumented reads");
454STATISTIC(NumInstrumentedWrites,
"Number of instrumented writes");
456 "Number of optimized accesses to global vars");
458 "Number of optimized accesses to stack vars");
467struct ShadowMapping {
478 bool IsAndroid = TargetTriple.
isAndroid();
481 bool IsMacOS = TargetTriple.
isMacOSX();
484 bool IsPS = TargetTriple.
isPS();
491 bool IsMIPS32 = TargetTriple.
isMIPS32();
492 bool IsMIPS64 = TargetTriple.
isMIPS64();
493 bool IsArmOrThumb = TargetTriple.
isARM() || TargetTriple.
isThumb();
500 bool IsAMDGPU = TargetTriple.
isAMDGPU();
502 ShadowMapping Mapping;
509 if (LongSize == 32) {
512 else if (IsMIPSN32ABI)
524 else if (IsEmscripten)
537 else if (IsFreeBSD && IsAArch64)
539 else if (IsFreeBSD && !IsMIPS64) {
544 }
else if (IsNetBSD) {
551 else if (IsLinux && IsX86_64) {
557 }
else if (IsWindows && IsX86_64) {
563 else if (IsMacOS && IsAArch64)
567 else if (IsLoongArch64)
591 Mapping.OrShadowOffset = !IsAArch64 && !IsPPC64 && !IsSystemZ && !IsPS &&
592 !IsRISCV64 && !IsLoongArch64 &&
593 !(Mapping.Offset & (Mapping.Offset - 1)) &&
595 bool IsAndroidWithIfuncSupport =
597 Mapping.InGlobal =
ClWithIfunc && IsAndroidWithIfuncSupport && IsArmOrThumb;
605 int *MappingScale,
bool *OrShadowOffset) {
607 *ShadowBase = Mapping.Offset;
608 *MappingScale = Mapping.Scale;
609 *OrShadowOffset = Mapping.OrShadowOffset;
619 uint8_t AccessSizeIndex)
623 AccessSizeIndex(AccessSizeIndex), IsWrite(IsWrite),
624 CompileKernel(CompileKernel) {}
631 return std::max(32U, 1U << MappingScale);
645struct AddressSanitizer {
647 bool CompileKernel =
false,
bool Recover =
false,
648 bool UseAfterScope =
false,
650 AsanDetectStackUseAfterReturnMode::Runtime)
658 C = &(
M.getContext());
659 DL = &
M.getDataLayout();
660 LongSize =
M.getDataLayout().getPointerSizeInBits();
664 TargetTriple =
Triple(
M.getTargetTriple());
668 assert(this->UseAfterReturn != AsanDetectStackUseAfterReturnMode::Invalid);
676 bool isInterestingAlloca(
const AllocaInst &AI);
679 void getInterestingMemoryOperands(
685 void instrumentPointerComparisonOrSubtraction(
Instruction *
I);
688 uint32_t TypeStoreSize,
bool IsWrite,
692 uint32_t TypeStoreSize,
bool IsWrite,
693 Value *SizeArgument);
696 TypeSize TypeStoreSize,
bool IsWrite,
697 Value *SizeArgument,
bool UseCalls,
699 void instrumentMaskedLoadOrStore(AddressSanitizer *
Pass,
const DataLayout &DL,
703 Type *OpType,
bool IsWrite,
704 Value *SizeArgument,
bool UseCalls,
709 bool IsWrite,
size_t AccessSizeIndex,
713 bool suppressInstrumentationSiteForDebug(
int &Instrumented);
715 bool maybeInsertAsanInitAtFunctionEntry(
Function &
F);
716 bool maybeInsertDynamicShadowAtFunctionEntry(
Function &
F);
717 void markEscapedLocalAllocas(
Function &
F);
720 friend struct FunctionStackPoisoner;
730 struct FunctionStateRAII {
731 AddressSanitizer *
Pass;
733 FunctionStateRAII(AddressSanitizer *
Pass) :
Pass(
Pass) {
735 "last pass forgot to clear cache");
739 ~FunctionStateRAII() {
740 Pass->LocalDynamicShadow =
nullptr;
741 Pass->ProcessedAllocas.clear();
756 ShadowMapping Mapping;
770 Value *LocalDynamicShadow =
nullptr;
778class ModuleAddressSanitizer {
780 ModuleAddressSanitizer(
Module &M,
bool CompileKernel =
false,
781 bool Recover =
false,
bool UseGlobalsGC =
true,
782 bool UseOdrIndicator =
true,
788 UseGlobalsGC(UseGlobalsGC &&
ClUseGlobalsGC && !this->CompileKernel),
803 UseCtorComdat(UseGlobalsGC &&
ClWithComdat && !this->CompileKernel),
804 DestructorKind(DestructorKind),
805 ConstructorKind(ConstructorKind) {
806 C = &(
M.getContext());
807 int LongSize =
M.getDataLayout().getPointerSizeInBits();
809 TargetTriple =
Triple(
M.getTargetTriple());
814 assert(this->DestructorKind != AsanDtorKind::Invalid);
817 bool instrumentModule(
Module &);
820 void initializeCallbacks(
Module &M);
829 const std::string &UniqueModuleId);
846 bool ShouldUseMachOGlobalsSection()
const;
847 StringRef getGlobalMetadataSection()
const;
850 uint64_t getMinRedzoneSizeForGlobal()
const {
854 int GetAsanVersion(
const Module &M)
const;
859 bool UsePrivateAlias;
860 bool UseOdrIndicator;
867 ShadowMapping Mapping;
877 Function *AsanCtorFunction =
nullptr;
878 Function *AsanDtorFunction =
nullptr;
890struct FunctionStackPoisoner :
public InstVisitor<FunctionStackPoisoner> {
892 AddressSanitizer &ASan;
897 ShadowMapping Mapping;
906 FunctionCallee AsanPoisonStackMemoryFunc, AsanUnpoisonStackMemoryFunc;
910 struct AllocaPoisonCall {
918 bool HasUntracedLifetimeIntrinsic =
false;
925 bool HasInlineAsm =
false;
926 bool HasReturnsTwiceCall =
false;
929 FunctionStackPoisoner(
Function &F, AddressSanitizer &ASan)
931 C(ASan.
C), IntptrTy(ASan.IntptrTy),
932 IntptrPtrTy(
PointerType::
get(IntptrTy, 0)), Mapping(ASan.Mapping),
941 copyArgsPassedByValToAllocas();
946 if (AllocaVec.
empty() && DynamicAllocaVec.
empty())
return false;
948 initializeCallbacks(*
F.getParent());
950 if (HasUntracedLifetimeIntrinsic) {
954 StaticAllocaPoisonCallVec.
clear();
955 DynamicAllocaPoisonCallVec.
clear();
958 processDynamicAllocas();
959 processStaticAllocas();
970 void copyArgsPassedByValToAllocas();
975 void processStaticAllocas();
976 void processDynamicAllocas();
978 void createDynamicAllocasInitStorage();
996 void unpoisonDynamicAllocasBeforeInst(
Instruction *InstBefore,
999 Value *DynamicAreaPtr = IRB.CreatePtrToInt(SavedStack, IntptrTy);
1004 if (!isa<ReturnInst>(InstBefore)) {
1006 InstBefore->
getModule(), Intrinsic::get_dynamic_area_offset,
1009 Value *DynamicAreaOffset = IRB.CreateCall(DynamicAreaOffsetFunc, {});
1011 DynamicAreaPtr = IRB.CreateAdd(IRB.CreatePtrToInt(SavedStack, IntptrTy),
1016 AsanAllocasUnpoisonFunc,
1017 {IRB.CreateLoad(IntptrTy, DynamicAllocaLayout), DynamicAreaPtr});
1021 void unpoisonDynamicAllocas() {
1023 unpoisonDynamicAllocasBeforeInst(Ret, DynamicAllocaLayout);
1025 for (
Instruction *StackRestoreInst : StackRestoreVec)
1026 unpoisonDynamicAllocasBeforeInst(StackRestoreInst,
1027 StackRestoreInst->getOperand(0));
1040 void handleDynamicAllocaCall(
AllocaInst *AI);
1045 if (!ASan.isInterestingAlloca(AI) ||
1050 if (AllocaVec.
empty())
1068 if (
ID == Intrinsic::stackrestore) StackRestoreVec.
push_back(&II);
1069 if (
ID == Intrinsic::localescape) LocalEscapeCall = &II;
1070 if (!ASan.UseAfterScope)
1077 if (
Size->isMinusOne())
return;
1080 const uint64_t SizeValue =
Size->getValue().getLimitedValue();
1081 if (SizeValue == ~0ULL ||
1089 HasUntracedLifetimeIntrinsic =
true;
1093 if (!ASan.isInterestingAlloca(*AI))
1095 bool DoPoison = (
ID == Intrinsic::lifetime_end);
1096 AllocaPoisonCall APC = {&II, AI, SizeValue, DoPoison};
1098 StaticAllocaPoisonCallVec.
push_back(APC);
1100 DynamicAllocaPoisonCallVec.
push_back(APC);
1104 if (
CallInst *CI = dyn_cast<CallInst>(&CB)) {
1105 HasInlineAsm |= CI->isInlineAsm() && &CB != ASan.LocalDynamicShadow;
1106 HasReturnsTwiceCall |= CI->canReturnTwice();
1111 void initializeCallbacks(
Module &M);
1138 OS, MapClassName2PassName);
1150 UseOdrIndicator(UseOdrIndicator), DestructorKind(DestructorKind),
1155 ModuleAddressSanitizer ModuleSanitizer(M, Options.
CompileKernel,
1157 UseOdrIndicator, DestructorKind,
1164 AddressSanitizer FunctionSanitizer(M, SSGI, Options.
CompileKernel,
1168 Modified |= FunctionSanitizer.instrumentFunction(
F, &TLI);
1170 Modified |= ModuleSanitizer.instrumentModule(M);
1191 if (
G->getName().startswith(
"llvm.") ||
1193 G->getName().startswith(
"__llvm_gcov_ctr") ||
1195 G->getName().startswith(
"__llvm_rtti_proxy"))
1208 Type *PtrTy = cast<PointerType>(
Addr->getType()->getScalarType());
1210 if (AddrSpace == 3 || AddrSpace == 5)
1217 Shadow = IRB.
CreateLShr(Shadow, Mapping.Scale);
1218 if (Mapping.Offset == 0)
return Shadow;
1221 if (LocalDynamicShadow)
1222 ShadowBase = LocalDynamicShadow;
1225 if (Mapping.OrShadowOffset)
1226 return IRB.
CreateOr(Shadow, ShadowBase);
1228 return IRB.
CreateAdd(Shadow, ShadowBase);
1234 if (isa<MemTransferInst>(
MI)) {
1236 isa<MemMoveInst>(
MI) ? AsanMemmove : AsanMemcpy,
1240 }
else if (isa<MemSetInst>(
MI)) {
1247 MI->eraseFromParent();
1251bool AddressSanitizer::isInterestingAlloca(
const AllocaInst &AI) {
1252 auto PreviouslySeenAllocaInfo = ProcessedAllocas.find(&AI);
1254 if (PreviouslySeenAllocaInfo != ProcessedAllocas.end())
1255 return PreviouslySeenAllocaInfo->getSecond();
1257 bool IsInteresting =
1270 !(SSGI && SSGI->
isSafe(AI)));
1272 ProcessedAllocas[&AI] = IsInteresting;
1273 return IsInteresting;
1278 Type *PtrTy = cast<PointerType>(
Ptr->getType()->getScalarType());
1287 if (
Ptr->isSwiftError())
1293 if (
auto AI = dyn_cast_or_null<AllocaInst>(
Ptr))
1304void AddressSanitizer::getInterestingMemoryOperands(
1307 if (LocalDynamicShadow ==
I)
1310 if (
LoadInst *LI = dyn_cast<LoadInst>(
I)) {
1313 Interesting.
emplace_back(
I, LI->getPointerOperandIndex(),
false,
1314 LI->getType(), LI->getAlign());
1315 }
else if (
StoreInst *SI = dyn_cast<StoreInst>(
I)) {
1319 SI->getValueOperand()->getType(),
SI->getAlign());
1323 Interesting.
emplace_back(
I, RMW->getPointerOperandIndex(),
true,
1324 RMW->getValOperand()->getType(), std::nullopt);
1328 Interesting.
emplace_back(
I, XCHG->getPointerOperandIndex(),
true,
1329 XCHG->getCompareOperand()->getType(),
1331 }
else if (
auto CI = dyn_cast<CallInst>(
I)) {
1332 switch (CI->getIntrinsicID()) {
1333 case Intrinsic::masked_load:
1334 case Intrinsic::masked_store:
1335 case Intrinsic::masked_gather:
1336 case Intrinsic::masked_scatter: {
1337 bool IsWrite = CI->getType()->isVoidTy();
1339 unsigned OpOffset = IsWrite ? 1 : 0;
1343 auto BasePtr = CI->getOperand(OpOffset);
1344 if (ignoreAccess(
I, BasePtr))
1346 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1349 if (
auto *
Op = dyn_cast<ConstantInt>(CI->getOperand(1 + OpOffset)))
1350 Alignment =
Op->getMaybeAlignValue();
1351 Value *
Mask = CI->getOperand(2 + OpOffset);
1352 Interesting.
emplace_back(
I, OpOffset, IsWrite, Ty, Alignment, Mask);
1355 case Intrinsic::masked_expandload:
1356 case Intrinsic::masked_compressstore: {
1357 bool IsWrite = CI->getIntrinsicID() == Intrinsic::masked_compressstore;
1358 unsigned OpOffset = IsWrite ? 1 : 0;
1361 auto BasePtr = CI->getOperand(OpOffset);
1362 if (ignoreAccess(
I, BasePtr))
1365 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1368 Value *
Mask = CI->getOperand(1 + OpOffset);
1371 Value *ExtMask =
IB.CreateZExt(Mask, ExtTy);
1372 Value *EVL =
IB.CreateAddReduce(ExtMask);
1374 Interesting.
emplace_back(
I, OpOffset, IsWrite, Ty, Alignment, TrueMask,
1378 case Intrinsic::vp_load:
1379 case Intrinsic::vp_store:
1380 case Intrinsic::experimental_vp_strided_load:
1381 case Intrinsic::experimental_vp_strided_store: {
1382 auto *VPI = cast<VPIntrinsic>(CI);
1383 unsigned IID = CI->getIntrinsicID();
1384 bool IsWrite = CI->getType()->isVoidTy();
1387 unsigned PtrOpNo = *VPI->getMemoryPointerParamPos(IID);
1388 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1389 MaybeAlign Alignment = VPI->getOperand(PtrOpNo)->getPointerAlignment(*
DL);
1390 Value *Stride =
nullptr;
1391 if (IID == Intrinsic::experimental_vp_strided_store ||
1392 IID == Intrinsic::experimental_vp_strided_load) {
1393 Stride = VPI->getOperand(PtrOpNo + 1);
1398 if (!isa<ConstantInt>(Stride) ||
1399 cast<ConstantInt>(Stride)->getZExtValue() % PointerAlign != 0)
1400 Alignment =
Align(1);
1402 Interesting.
emplace_back(
I, PtrOpNo, IsWrite, Ty, Alignment,
1403 VPI->getMaskParam(), VPI->getVectorLengthParam(),
1407 case Intrinsic::vp_gather:
1408 case Intrinsic::vp_scatter: {
1409 auto *VPI = cast<VPIntrinsic>(CI);
1410 unsigned IID = CI->getIntrinsicID();
1411 bool IsWrite = IID == Intrinsic::vp_scatter;
1414 unsigned PtrOpNo = *VPI->getMemoryPointerParamPos(IID);
1415 Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
1416 MaybeAlign Alignment = VPI->getPointerAlignment();
1417 Interesting.
emplace_back(
I, PtrOpNo, IsWrite, Ty, Alignment,
1418 VPI->getMaskParam(),
1419 VPI->getVectorLengthParam());
1423 for (
unsigned ArgNo = 0; ArgNo < CI->arg_size(); ArgNo++) {
1425 ignoreAccess(
I, CI->getArgOperand(ArgNo)))
1427 Type *Ty = CI->getParamByValType(ArgNo);
1435 return V->getType()->isPointerTy() || isa<PtrToIntInst>(V);
1442 if (
ICmpInst *Cmp = dyn_cast<ICmpInst>(
I)) {
1443 if (!Cmp->isRelational())
1457 if (BO->getOpcode() != Instruction::Sub)
1470 if (!
G->hasInitializer())
1473 if (
G->hasSanitizerMetadata() &&
G->getSanitizerMetadata().IsDynInit)
1479void AddressSanitizer::instrumentPointerComparisonOrSubtraction(
1482 FunctionCallee F = isa<ICmpInst>(
I) ? AsanPtrCmpFunction : AsanPtrSubFunction;
1483 Value *
Param[2] = {
I->getOperand(0),
I->getOperand(1)};
1484 for (
Value *&i : Param) {
1485 if (i->getType()->isPointerTy())
1494 TypeSize TypeStoreSize,
bool IsWrite,
1495 Value *SizeArgument,
bool UseCalls,
1501 switch (FixedSize) {
1507 if (!Alignment || *Alignment >= Granularity ||
1508 *Alignment >= FixedSize / 8)
1509 return Pass->instrumentAddress(
I, InsertBefore,
Addr, Alignment,
1510 FixedSize, IsWrite,
nullptr, UseCalls,
1514 Pass->instrumentUnusualSizeOrAlignment(
I, InsertBefore,
Addr, TypeStoreSize,
1515 IsWrite,
nullptr, UseCalls, Exp);
1518void AddressSanitizer::instrumentMaskedLoadOrStore(
1521 MaybeAlign Alignment,
unsigned Granularity,
Type *OpType,
bool IsWrite,
1523 auto *VTy = cast<VectorType>(OpType);
1524 TypeSize ElemTypeSize =
DL.getTypeStoreSizeInBits(VTy->getScalarType());
1535 IB.SetInsertPoint(LoopInsertBefore);
1537 EVL =
IB.CreateZExtOrTrunc(EVL, IntptrTy);
1540 Value *
EC =
IB.CreateElementCount(IntptrTy, VTy->getElementCount());
1541 EVL =
IB.CreateBinaryIntrinsic(Intrinsic::umin, EVL, EC);
1543 EVL =
IB.CreateElementCount(IntptrTy, VTy->getElementCount());
1548 Stride =
IB.CreateZExtOrTrunc(Stride, IntptrTy);
1553 if (
auto *MaskElemC = dyn_cast<ConstantInt>(MaskElem)) {
1554 if (MaskElemC->isZero())
1565 Value *InstrumentedAddress;
1566 if (isa<VectorType>(
Addr->getType())) {
1568 cast<VectorType>(
Addr->getType())->getElementType()->isPointerTy() &&
1569 "Expected vector of pointer.");
1571 }
else if (Stride) {
1579 InstrumentedAddress, Alignment, Granularity,
1580 ElemTypeSize, IsWrite, SizeArgument, UseCalls, Exp);
1607 isSafeAccess(ObjSizeVis,
Addr,
O.TypeStoreSize)) {
1608 NumOptimizedAccessesToGlobalVar++;
1616 isSafeAccess(ObjSizeVis,
Addr,
O.TypeStoreSize)) {
1617 NumOptimizedAccessesToStackVar++;
1623 NumInstrumentedWrites++;
1625 NumInstrumentedReads++;
1627 unsigned Granularity = 1 << Mapping.Scale;
1629 instrumentMaskedLoadOrStore(
this,
DL, IntptrTy,
O.MaybeMask,
O.MaybeEVL,
1630 O.MaybeStride,
O.getInsn(),
Addr,
O.Alignment,
1631 Granularity,
O.OpType,
O.IsWrite,
nullptr,
1635 Granularity,
O.TypeStoreSize,
O.IsWrite,
nullptr, UseCalls,
1642 size_t AccessSizeIndex,
1643 Value *SizeArgument,
1651 {
Addr, SizeArgument});
1654 {
Addr, SizeArgument, ExpVal});
1658 IRB.
CreateCall(AsanErrorCallback[IsWrite][0][AccessSizeIndex],
Addr);
1660 Call = IRB.
CreateCall(AsanErrorCallback[IsWrite][1][AccessSizeIndex],
1664 Call->setCannotMerge();
1671 size_t Granularity =
static_cast<size_t>(1) << Mapping.Scale;
1673 Value *LastAccessedByte =
1676 if (TypeStoreSize / 8 > 1)
1686Instruction *AddressSanitizer::instrumentAMDGPUAddress(
1688 uint32_t TypeStoreSize,
bool IsWrite,
Value *SizeArgument) {
1692 Type *PtrTy = cast<PointerType>(
Addr->getType()->getScalarType());
1695 return InsertBefore;
1701 Value *IsSharedOrPrivate = IRB.
CreateOr(IsShared, IsPrivate);
1703 Value *AddrSpaceZeroLanding =
1705 InsertBefore = cast<Instruction>(AddrSpaceZeroLanding);
1706 return InsertBefore;
1709void AddressSanitizer::instrumentAddress(
Instruction *OrigIns,
1712 uint32_t TypeStoreSize,
bool IsWrite,
1713 Value *SizeArgument,
bool UseCalls,
1715 if (TargetTriple.isAMDGPU()) {
1716 InsertBefore = instrumentAMDGPUAddress(OrigIns, InsertBefore,
Addr,
1717 TypeStoreSize, IsWrite, SizeArgument);
1724 const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
1727 const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
1739 IRB.
CreateCall(AsanMemoryAccessCallback[IsWrite][0][AccessSizeIndex],
1742 IRB.
CreateCall(AsanMemoryAccessCallback[IsWrite][1][AccessSizeIndex],
1750 Value *ShadowPtr = memToShadow(AddrLong, IRB);
1752 std::max<uint64_t>(Alignment.
valueOrOne().
value() >> Mapping.Scale, 1);
1757 size_t Granularity = 1ULL << Mapping.Scale;
1765 assert(cast<BranchInst>(CheckTerm)->isUnconditional());
1768 Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeStoreSize);
1782 Instruction *Crash = generateCrashCode(CrashTerm, AddrLong, IsWrite,
1783 AccessSizeIndex, SizeArgument, Exp);
1792void AddressSanitizer::instrumentUnusualSizeOrAlignment(
1794 bool IsWrite,
Value *SizeArgument,
bool UseCalls,
uint32_t Exp) {
1802 IRB.
CreateCall(AsanMemoryAccessCallbackSized[IsWrite][0],
1805 IRB.
CreateCall(AsanMemoryAccessCallbackSized[IsWrite][1],
1812 instrumentAddress(
I, InsertBefore,
Addr, {}, 8, IsWrite,
Size,
false,
Exp);
1813 instrumentAddress(
I, InsertBefore, LastByte, {}, 8, IsWrite,
Size,
false,
1818void ModuleAddressSanitizer::poisonOneInitializer(
Function &GlobalInit,
1826 IRB.
CreateCall(AsanPoisonGlobals, ModuleNameAddr);
1829 for (
auto &BB : GlobalInit)
1830 if (
ReturnInst *RI = dyn_cast<ReturnInst>(BB.getTerminator()))
1834void ModuleAddressSanitizer::createInitializerPoisonCalls(
1845 if (isa<ConstantAggregateZero>(
OP))
continue;
1851 auto *Priority = cast<ConstantInt>(CS->
getOperand(0));
1861ModuleAddressSanitizer::getExcludedAliasedGlobal(
const GlobalAlias &GA)
const {
1866 assert(CompileKernel &&
"Only expecting to be called when compiling kernel");
1873 return dyn_cast<GlobalVariable>(
C->stripPointerCastsAndAliases());
1878bool ModuleAddressSanitizer::shouldInstrumentGlobal(
GlobalVariable *
G)
const {
1879 Type *Ty =
G->getValueType();
1882 if (
G->hasSanitizerMetadata() &&
G->getSanitizerMetadata().NoAddress)
1884 if (!Ty->
isSized())
return false;
1885 if (!
G->hasInitializer())
return false;
1887 if (
G->getAddressSpace() &&
1894 if (
G->isThreadLocal())
return false;
1896 if (
G->getAlign() && *
G->getAlign() > getMinRedzoneSizeForGlobal())
return false;
1902 if (!TargetTriple.isOSBinFormatCOFF()) {
1903 if (!
G->hasExactDefinition() ||
G->hasComdat())
1907 if (
G->isInterposable())
1914 switch (
C->getSelectionKind()) {
1925 if (
G->hasSection()) {
1935 if (Section ==
"llvm.metadata")
return false;
1942 if (
Section.startswith(
".preinit_array") ||
1943 Section.startswith(
".init_array") ||
1944 Section.startswith(
".fini_array")) {
1950 if (TargetTriple.isOSBinFormatELF()) {
1952 [](
char c) {
return llvm::isAlnum(c) || c ==
'_'; }))
1964 if (TargetTriple.isOSBinFormatCOFF() &&
Section.contains(
'$')) {
1965 LLVM_DEBUG(
dbgs() <<
"Ignoring global in sorted section (contains '$'): "
1970 if (TargetTriple.isOSBinFormatMachO()) {
1972 unsigned TAA = 0, StubSize = 0;
1975 Section, ParsedSegment, ParsedSection, TAA, TAAParsed, StubSize));
1980 if (ParsedSegment ==
"__OBJC" ||
1981 (ParsedSegment ==
"__DATA" && ParsedSection.
startswith(
"__objc_"))) {
1993 if (ParsedSegment ==
"__DATA" && ParsedSection ==
"__cfstring") {
2006 if (CompileKernel) {
2009 if (
G->getName().startswith(
"__"))
2019bool ModuleAddressSanitizer::ShouldUseMachOGlobalsSection()
const {
2020 if (!TargetTriple.isOSBinFormatMachO())
2023 if (TargetTriple.isMacOSX() && !TargetTriple.isMacOSXVersionLT(10, 11))
2025 if (TargetTriple.isiOS() && !TargetTriple.isOSVersionLT(9))
2027 if (TargetTriple.isWatchOS() && !TargetTriple.isOSVersionLT(2))
2029 if (TargetTriple.isDriverKit())
2035StringRef ModuleAddressSanitizer::getGlobalMetadataSection()
const {
2036 switch (TargetTriple.getObjectFormat()) {
2046 "ModuleAddressSanitizer not implemented for object file format");
2053void ModuleAddressSanitizer::initializeCallbacks(
Module &M) {
2059 AsanUnpoisonGlobals =
2063 AsanRegisterGlobals =
M.getOrInsertFunction(
2065 AsanUnregisterGlobals =
M.getOrInsertFunction(
2070 AsanRegisterImageGlobals =
M.getOrInsertFunction(
2072 AsanUnregisterImageGlobals =
M.getOrInsertFunction(
2075 AsanRegisterElfGlobals =
2077 IntptrTy, IntptrTy, IntptrTy);
2078 AsanUnregisterElfGlobals =
2080 IntptrTy, IntptrTy, IntptrTy);
2085void ModuleAddressSanitizer::SetComdatForGlobalMetadata(
2090 if (!
G->hasName()) {
2097 if (!InternalSuffix.
empty() &&
G->hasLocalLinkage()) {
2098 std::string
Name = std::string(
G->getName());
2099 Name += InternalSuffix;
2100 C =
M.getOrInsertComdat(
Name);
2102 C =
M.getOrInsertComdat(
G->getName());
2108 if (TargetTriple.isOSBinFormatCOFF()) {
2110 if (
G->hasPrivateLinkage())
2123ModuleAddressSanitizer::CreateMetadataGlobal(
Module &M,
Constant *Initializer,
2125 auto Linkage = TargetTriple.isOSBinFormatMachO()
2129 M, Initializer->
getType(),
false, Linkage, Initializer,
2131 Metadata->setSection(getGlobalMetadataSection());
2139 AsanDtorFunction->addFnAttr(Attribute::NoUnwind);
2147void ModuleAddressSanitizer::InstrumentGlobalsCOFF(
2151 auto &
DL =
M.getDataLayout();
2154 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2155 Constant *Initializer = MetadataInitializers[i];
2158 CreateMetadataGlobal(M, Initializer,
G->getName());
2160 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2166 unsigned SizeOfGlobalStruct =
DL.getTypeAllocSize(Initializer->
getType());
2168 "global metadata will not be padded appropriately");
2171 SetComdatForGlobalMetadata(
G,
Metadata,
"");
2176 if (!MetadataGlobals.empty())
2180void ModuleAddressSanitizer::InstrumentGlobalsELF(
2183 const std::string &UniqueModuleId) {
2190 bool UseComdatForGlobalsGC = UseOdrIndicator;
2193 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2196 CreateMetadataGlobal(M, MetadataInitializers[i],
G->getName());
2198 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2201 if (UseComdatForGlobalsGC)
2202 SetComdatForGlobalMetadata(
G,
Metadata, UniqueModuleId);
2207 if (!MetadataGlobals.empty())
2224 "__start_" + getGlobalMetadataSection());
2228 "__stop_" + getGlobalMetadataSection());
2242 IrbDtor.CreateCall(AsanUnregisterElfGlobals,
2249void ModuleAddressSanitizer::InstrumentGlobalsMachO(
2260 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2261 Constant *Initializer = MetadataInitializers[i];
2264 CreateMetadataGlobal(M, Initializer,
G->getName());
2268 auto LivenessBinder =
2273 Twine(
"__asan_binder_") +
G->getName());
2274 Liveness->
setSection(
"__DATA,__asan_liveness,regular,live_support");
2275 LivenessGlobals[i] = Liveness;
2282 if (!LivenessGlobals.empty())
2304 IrbDtor.CreateCall(AsanUnregisterImageGlobals,
2309void ModuleAddressSanitizer::InstrumentGlobalsWithMetadataArray(
2313 unsigned N = ExtendedGlobals.
size();
2323 if (Mapping.Scale > 3)
2324 AllGlobals->setAlignment(
Align(1ULL << Mapping.Scale));
2335 IrbDtor.CreateCall(AsanUnregisterGlobals,
2348 *CtorComdat =
false;
2353 if (CompileKernel) {
2354 for (
auto &GA :
M.aliases()) {
2356 AliasedGlobalExclusions.
insert(GV);
2361 for (
auto &
G :
M.globals()) {
2362 if (!AliasedGlobalExclusions.
count(&
G) && shouldInstrumentGlobal(&
G))
2366 size_t n = GlobalsToChange.
size();
2372 auto &
DL =
M.getDataLayout();
2386 IntptrTy, IntptrTy, IntptrTy);
2390 bool HasDynamicallyInitializedGlobals =
false;
2397 for (
size_t i = 0; i < n; i++) {
2401 if (
G->hasSanitizerMetadata())
2402 MD =
G->getSanitizerMetadata();
2407 std::string NameForGlobal =
G->getName().str();
2412 Type *Ty =
G->getValueType();
2413 const uint64_t SizeInBytes =
DL.getTypeAllocSize(Ty);
2414 const uint64_t RightRedzoneSize = getRedzoneSizeForGlobal(SizeInBytes);
2426 M, NewTy,
G->isConstant(), Linkage, NewInitializer,
"",
G,
2427 G->getThreadLocalMode(),
G->getAddressSpace());
2437 if (TargetTriple.isOSBinFormatMachO() && !
G->hasSection() &&
2439 auto Seq = dyn_cast<ConstantDataSequential>(
G->getInitializer());
2440 if (Seq && Seq->isCString())
2441 NewGlobal->
setSection(
"__TEXT,__asan_cstring,regular");
2452 G->replaceAllUsesWith(
2455 G->eraseFromParent();
2456 NewGlobals[i] = NewGlobal;
2461 bool CanUsePrivateAliases =
2462 TargetTriple.isOSBinFormatELF() || TargetTriple.isOSBinFormatMachO() ||
2463 TargetTriple.isOSBinFormatWasm();
2464 if (CanUsePrivateAliases && UsePrivateAlias) {
2467 InstrumentedGlobal =
2475 }
else if (UseOdrIndicator) {
2478 auto *ODRIndicatorSym =
2487 ODRIndicatorSym->setAlignment(
Align(1));
2488 ODRIndicator = ODRIndicatorSym;
2503 HasDynamicallyInitializedGlobals =
true;
2507 Initializers[i] = Initializer;
2513 for (
size_t i = 0; i < n; i++) {
2515 if (
G->getName().empty())
continue;
2520 std::string ELFUniqueModuleId =
2524 if (!ELFUniqueModuleId.empty()) {
2525 InstrumentGlobalsELF(IRB, M, NewGlobals, Initializers, ELFUniqueModuleId);
2527 }
else if (UseGlobalsGC && TargetTriple.isOSBinFormatCOFF()) {
2528 InstrumentGlobalsCOFF(IRB, M, NewGlobals, Initializers);
2529 }
else if (UseGlobalsGC && ShouldUseMachOGlobalsSection()) {
2530 InstrumentGlobalsMachO(IRB, M, NewGlobals, Initializers);
2532 InstrumentGlobalsWithMetadataArray(IRB, M, NewGlobals, Initializers);
2536 if (HasDynamicallyInitializedGlobals)
2544ModuleAddressSanitizer::getRedzoneSizeForGlobal(
uint64_t SizeInBytes)
const {
2545 constexpr uint64_t kMaxRZ = 1 << 18;
2546 const uint64_t MinRZ = getMinRedzoneSizeForGlobal();
2549 if (SizeInBytes <= MinRZ / 2) {
2553 RZ = MinRZ - SizeInBytes;
2556 RZ = std::clamp((SizeInBytes / MinRZ / 4) * MinRZ, MinRZ, kMaxRZ);
2559 if (SizeInBytes % MinRZ)
2560 RZ += MinRZ - (SizeInBytes % MinRZ);
2563 assert((RZ + SizeInBytes) % MinRZ == 0);
2568int ModuleAddressSanitizer::GetAsanVersion(
const Module &M)
const {
2569 int LongSize =
M.getDataLayout().getPointerSizeInBits();
2574 Version += (LongSize == 32 && isAndroid);
2578bool ModuleAddressSanitizer::instrumentModule(
Module &M) {
2579 initializeCallbacks(M);
2584 if (CompileKernel) {
2589 std::string AsanVersion = std::to_string(GetAsanVersion(M));
2590 std::string VersionCheckName =
2592 std::tie(AsanCtorFunction, std::ignore) =
2595 {}, VersionCheckName);
2599 bool CtorComdat =
true;
2602 if (AsanCtorFunction) {
2603 IRBuilder<> IRB(AsanCtorFunction->getEntryBlock().getTerminator());
2604 InstrumentGlobals(IRB, M, &CtorComdat);
2607 InstrumentGlobals(IRB, M, &CtorComdat);
2616 if (UseCtorComdat && TargetTriple.isOSBinFormatELF() && CtorComdat) {
2617 if (AsanCtorFunction) {
2621 if (AsanDtorFunction) {
2626 if (AsanCtorFunction)
2628 if (AsanDtorFunction)
2639 for (
int Exp = 0;
Exp < 2;
Exp++) {
2640 for (
size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
2641 const std::string TypeStr = AccessIsWrite ?
"store" :
"load";
2642 const std::string ExpStr =
Exp ?
"exp_" :
"";
2643 const std::string EndingStr = Recover ?
"_noabort" :
"";
2652 Args1.push_back(ExpType);
2653 if (
auto AK = TLI->getExtAttrForI32Param(
false)) {
2658 AsanErrorCallbackSized[AccessIsWrite][
Exp] =
M.getOrInsertFunction(
2662 AsanMemoryAccessCallbackSized[AccessIsWrite][
Exp] =
M.getOrInsertFunction(
2667 AccessSizeIndex++) {
2668 const std::string Suffix = TypeStr + itostr(1ULL << AccessSizeIndex);
2669 AsanErrorCallback[AccessIsWrite][
Exp][AccessSizeIndex] =
2670 M.getOrInsertFunction(
2674 AsanMemoryAccessCallback[AccessIsWrite][
Exp][AccessSizeIndex] =
2675 M.getOrInsertFunction(
2682 const std::string MemIntrinCallbackPrefix =
2686 AsanMemmove =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memmove",
2689 AsanMemcpy =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memcpy",
2692 AsanMemset =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memset",
2697 AsanHandleNoReturnFunc =
2700 AsanPtrCmpFunction =
2702 AsanPtrSubFunction =
2704 if (Mapping.InGlobal)
2705 AsanShadowGlobal =
M.getOrInsertGlobal(
"__asan_shadow",
2708 AMDGPUAddressShared =
M.getOrInsertFunction(
2710 AMDGPUAddressPrivate =
M.getOrInsertFunction(
2714bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(
Function &
F) {
2722 if (
F.getName().find(
" load]") != std::string::npos) {
2732bool AddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(
Function &
F) {
2738 if (Mapping.InGlobal) {
2746 LocalDynamicShadow =
2747 IRB.
CreateCall(Asm, {AsanShadowGlobal},
".asan.shadow");
2749 LocalDynamicShadow =
2753 Value *GlobalDynamicAddress =
F.getParent()->getOrInsertGlobal(
2755 LocalDynamicShadow = IRB.
CreateLoad(IntptrTy, GlobalDynamicAddress);
2760void AddressSanitizer::markEscapedLocalAllocas(
Function &
F) {
2765 assert(ProcessedAllocas.empty() &&
"must process localescape before allocas");
2769 if (!
F.getParent()->getFunction(
"llvm.localescape"))
return;
2778 AllocaInst *AI = dyn_cast<AllocaInst>(Arg->stripPointerCasts());
2780 "non-static alloca arg to localescape");
2781 ProcessedAllocas[AI] =
false;
2788bool AddressSanitizer::suppressInstrumentationSiteForDebug(
int &Instrumented) {
2789 bool ShouldInstrument =
2793 return !ShouldInstrument;
2796bool AddressSanitizer::instrumentFunction(
Function &
F,
2802 if (
F.getName().startswith(
"__asan_"))
return false;
2804 bool FunctionModified =
false;
2809 if (maybeInsertAsanInitAtFunctionEntry(
F))
2810 FunctionModified =
true;
2813 if (!
F.hasFnAttribute(Attribute::SanitizeAddress))
return FunctionModified;
2815 if (
F.hasFnAttribute(Attribute::DisableSanitizerInstrumentation))
2816 return FunctionModified;
2820 initializeCallbacks(*
F.getParent(), TLI);
2822 FunctionStateRAII CleanupObj(
this);
2824 FunctionModified |= maybeInsertDynamicShadowAtFunctionEntry(
F);
2828 markEscapedLocalAllocas(
F);
2840 for (
auto &BB :
F) {
2842 TempsToInstrument.
clear();
2843 int NumInsnsPerBB = 0;
2844 for (
auto &Inst : BB) {
2845 if (LooksLikeCodeInBug11395(&Inst))
return false;
2850 getInterestingMemoryOperands(&Inst, InterestingOperands);
2852 if (!InterestingOperands.
empty()) {
2853 for (
auto &Operand : InterestingOperands) {
2859 if (Operand.MaybeMask) {
2863 if (!TempsToInstrument.
insert(
Ptr).second)
2867 OperandsToInstrument.
push_back(Operand);
2874 PointerComparisonsOrSubtracts.
push_back(&Inst);
2880 if (
auto *CB = dyn_cast<CallBase>(&Inst)) {
2882 TempsToInstrument.
clear();
2886 if (
CallInst *CI = dyn_cast<CallInst>(&Inst))
2894 OperandsToInstrument.
size() + IntrinToInstrument.
size() >
2902 int NumInstrumented = 0;
2903 for (
auto &Operand : OperandsToInstrument) {
2904 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
2905 instrumentMop(ObjSizeVis, Operand, UseCalls,
2906 F.getParent()->getDataLayout());
2907 FunctionModified =
true;
2909 for (
auto *Inst : IntrinToInstrument) {
2910 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
2911 instrumentMemIntrinsic(Inst);
2912 FunctionModified =
true;
2915 FunctionStackPoisoner FSP(
F, *
this);
2916 bool ChangedStack = FSP.runOnFunction();
2920 for (
auto *CI : NoReturnCalls) {
2925 for (
auto *Inst : PointerComparisonsOrSubtracts) {
2926 instrumentPointerComparisonOrSubtraction(Inst);
2927 FunctionModified =
true;
2930 if (ChangedStack || !NoReturnCalls.empty())
2931 FunctionModified =
true;
2933 LLVM_DEBUG(
dbgs() <<
"ASAN done instrumenting: " << FunctionModified <<
" "
2936 return FunctionModified;
2942bool AddressSanitizer::LooksLikeCodeInBug11395(
Instruction *
I) {
2943 if (LongSize != 32)
return false;
2952void FunctionStackPoisoner::initializeCallbacks(
Module &M) {
2956 const char *MallocNameTemplate =
2961 std::string Suffix = itostr(
Index);
2962 AsanStackMallocFunc[
Index] =
M.getOrInsertFunction(
2963 MallocNameTemplate + Suffix, IntptrTy, IntptrTy);
2964 AsanStackFreeFunc[
Index] =
2969 if (ASan.UseAfterScope) {
2970 AsanPoisonStackMemoryFunc =
M.getOrInsertFunction(
2972 AsanUnpoisonStackMemoryFunc =
M.getOrInsertFunction(
2976 for (
size_t Val : {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0xf1, 0xf2,
2977 0xf3, 0xf5, 0xf8}) {
2978 std::ostringstream
Name;
2980 Name << std::setw(2) << std::setfill(
'0') << std::hex << Val;
2981 AsanSetShadowFunc[Val] =
2982 M.getOrInsertFunction(
Name.str(), IRB.
getVoidTy(), IntptrTy, IntptrTy);
2985 AsanAllocaPoisonFunc =
M.getOrInsertFunction(
2987 AsanAllocasUnpoisonFunc =
M.getOrInsertFunction(
2993 size_t Begin,
size_t End,
2995 Value *ShadowBase) {
2999 const size_t LargestStoreSizeInBytes =
3000 std::min<size_t>(
sizeof(
uint64_t), ASan.LongSize / 8);
3002 const bool IsLittleEndian =
F.getParent()->getDataLayout().isLittleEndian();
3008 for (
size_t i = Begin; i <
End;) {
3009 if (!ShadowMask[i]) {
3015 size_t StoreSizeInBytes = LargestStoreSizeInBytes;
3017 while (StoreSizeInBytes >
End - i)
3018 StoreSizeInBytes /= 2;
3021 for (
size_t j = StoreSizeInBytes - 1;
j && !ShadowMask[i +
j]; --
j) {
3022 while (j <= StoreSizeInBytes / 2)
3023 StoreSizeInBytes /= 2;
3027 for (
size_t j = 0;
j < StoreSizeInBytes;
j++) {
3029 Val |= (
uint64_t)ShadowBytes[i + j] << (8 * j);
3031 Val = (Val << 8) | ShadowBytes[i + j];
3035 Value *Poison = IRB.
getIntN(StoreSizeInBytes * 8, Val);
3040 i += StoreSizeInBytes;
3047 copyToShadow(ShadowMask, ShadowBytes, 0, ShadowMask.
size(), IRB, ShadowBase);
3052 size_t Begin,
size_t End,
3055 size_t Done = Begin;
3056 for (
size_t i = Begin, j = Begin + 1; i <
End; i =
j++) {
3057 if (!ShadowMask[i]) {
3061 uint8_t Val = ShadowBytes[i];
3062 if (!AsanSetShadowFunc[Val])
3066 for (;
j <
End && ShadowMask[
j] && Val == ShadowBytes[
j]; ++
j) {
3070 copyToShadowInline(ShadowMask, ShadowBytes,
Done, i, IRB, ShadowBase);
3078 copyToShadowInline(ShadowMask, ShadowBytes,
Done,
End, IRB, ShadowBase);
3086 for (
int i = 0;; i++, MaxSize *= 2)
3087 if (LocalStackSize <= MaxSize)
return i;
3091void FunctionStackPoisoner::copyArgsPassedByValToAllocas() {
3093 if (CopyInsertPoint == ASan.LocalDynamicShadow) {
3101 if (Arg.hasByValAttr()) {
3102 Type *Ty = Arg.getParamByValType();
3103 const Align Alignment =
3104 DL.getValueOrABITypeAlignment(Arg.getParamAlign(), Ty);
3108 (Arg.hasName() ? Arg.getName() :
"Arg" +
Twine(Arg.getArgNo())) +
3111 Arg.replaceAllUsesWith(AI);
3113 uint64_t AllocSize =
DL.getTypeAllocSize(Ty);
3114 IRB.
CreateMemCpy(AI, Alignment, &Arg, Alignment, AllocSize);
3122 Value *ValueIfFalse) {
3125 PHI->addIncoming(ValueIfFalse, CondBlock);
3127 PHI->addIncoming(ValueIfTrue, ThenBlock);
3131Value *FunctionStackPoisoner::createAllocaForLayout(
3140 nullptr,
"MyAlloca");
3149void FunctionStackPoisoner::createDynamicAllocasInitStorage() {
3152 DynamicAllocaLayout = IRB.
CreateAlloca(IntptrTy,
nullptr);
3157void FunctionStackPoisoner::processDynamicAllocas() {
3164 for (
const auto &APC : DynamicAllocaPoisonCallVec) {
3167 assert(ASan.isInterestingAlloca(*APC.AI));
3168 assert(!APC.AI->isStaticAlloca());
3171 poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
3178 createDynamicAllocasInitStorage();
3179 for (
auto &AI : DynamicAllocaVec)
3180 handleDynamicAllocaCall(AI);
3181 unpoisonDynamicAllocas();
3193 for (
Instruction *It = Start; It; It = It->getNextNonDebugInstruction()) {
3203 if (isa<AllocaInst>(It) || isa<CastInst>(It))
3205 if (
auto *Store = dyn_cast<StoreInst>(It)) {
3209 auto *Alloca = dyn_cast<AllocaInst>(Store->getPointerOperand());
3210 if (!Alloca || ASan.isInterestingAlloca(*Alloca))
3213 Value *Val = Store->getValueOperand();
3214 bool IsDirectArgInit = isa<Argument>(Val);
3215 bool IsArgInitViaCast =
3216 isa<CastInst>(Val) &&
3217 isa<Argument>(cast<CastInst>(Val)->getOperand(0)) &&
3220 Val == It->getPrevNonDebugInstruction();
3221 bool IsArgInit = IsDirectArgInit || IsArgInitViaCast;
3225 if (IsArgInitViaCast)
3226 InitInsts.
push_back(cast<Instruction>(Val));
3237void FunctionStackPoisoner::processStaticAllocas() {
3238 if (AllocaVec.
empty()) {
3243 int StackMallocIdx = -1;
3245 if (
auto SP =
F.getSubprogram())
3246 EntryDebugLocation =
3255 auto InsBeforeB = InsBefore->
getParent();
3256 assert(InsBeforeB == &
F.getEntryBlock());
3257 for (
auto *AI : StaticAllocasToMoveUp)
3268 ArgInitInst->moveBefore(InsBefore);
3271 if (LocalEscapeCall) LocalEscapeCall->
moveBefore(InsBefore);
3277 ASan.getAllocaSizeInBytes(*AI),
3288 uint64_t Granularity = 1ULL << Mapping.Scale;
3289 uint64_t MinHeaderSize = std::max((
uint64_t)ASan.LongSize / 2, Granularity);
3295 for (
auto &
Desc : SVD)
3299 for (
const auto &APC : StaticAllocaPoisonCallVec) {
3302 assert(ASan.isInterestingAlloca(*APC.AI));
3303 assert(APC.AI->isStaticAlloca());
3308 if (
const DILocation *LifetimeLoc = APC.InsBefore->getDebugLoc().get()) {
3309 if (LifetimeLoc->getFile() == FnLoc->getFile())
3310 if (
unsigned Line = LifetimeLoc->getLine())
3311 Desc.Line = std::min(
Desc.Line ?
Desc.Line : Line, Line);
3317 LLVM_DEBUG(
dbgs() << DescriptionString <<
" --- " <<
L.FrameSize <<
"\n");
3319 bool DoStackMalloc =
3329 DoDynamicAlloca &= !HasInlineAsm && !HasReturnsTwiceCall;
3330 DoStackMalloc &= !HasInlineAsm && !HasReturnsTwiceCall;
3332 Value *StaticAlloca =
3333 DoDynamicAlloca ? nullptr : createAllocaForLayout(IRB, L,
false);
3336 Value *LocalStackBase;
3337 Value *LocalStackBaseAlloca;
3340 if (DoStackMalloc) {
3341 LocalStackBaseAlloca =
3342 IRB.
CreateAlloca(IntptrTy,
nullptr,
"asan_local_stack_base");
3349 Constant *OptionDetectUseAfterReturn =
F.getParent()->getOrInsertGlobal(
3359 Value *FakeStackValue =
3360 IRBIf.CreateCall(AsanStackMallocFunc[StackMallocIdx],
3363 FakeStack = createPHI(IRB, UseAfterReturnIsEnabled, FakeStackValue, Term,
3371 FakeStack = IRB.
CreateCall(AsanStackMallocFunc[StackMallocIdx],
3374 Value *NoFakeStack =
3379 Value *AllocaValue =
3380 DoDynamicAlloca ? createAllocaForLayout(IRBIf, L,
true) : StaticAlloca;
3383 LocalStackBase = createPHI(IRB, NoFakeStack, AllocaValue, Term, FakeStack);
3384 IRB.
CreateStore(LocalStackBase, LocalStackBaseAlloca);
3391 DoDynamicAlloca ? createAllocaForLayout(IRB, L,
true) : StaticAlloca;
3392 LocalStackBaseAlloca = LocalStackBase;
3398 Value *LocalStackBaseAllocaPtr =
3399 isa<PtrToIntInst>(LocalStackBaseAlloca)
3400 ? cast<PtrToIntInst>(LocalStackBaseAlloca)->getPointerOperand()
3401 : LocalStackBaseAlloca;
3402 assert(isa<AllocaInst>(LocalStackBaseAllocaPtr) &&
3403 "Variable descriptions relative to ASan stack base will be dropped");
3406 for (
const auto &
Desc : SVD) {
3441 Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB);
3444 copyToShadow(ShadowAfterScope, ShadowAfterScope, IRB, ShadowBase);
3446 if (!StaticAllocaPoisonCallVec.empty()) {
3450 for (
const auto &APC : StaticAllocaPoisonCallVec) {
3453 size_t Begin =
Desc.Offset /
L.Granularity;
3454 size_t End = Begin + (APC.Size +
L.Granularity - 1) /
L.Granularity;
3457 copyToShadow(ShadowAfterScope,
3458 APC.DoPoison ? ShadowAfterScope : ShadowInScope, Begin,
End,
3472 if (DoStackMalloc) {
3473 assert(StackMallocIdx >= 0);
3490 if (StackMallocIdx <= 4) {
3492 ShadowAfterReturn.
resize(ClassSize /
L.Granularity,
3494 copyToShadow(ShadowAfterReturn, ShadowAfterReturn, IRBPoison,
3496 Value *SavedFlagPtrPtr = IRBPoison.CreateAdd(
3499 Value *SavedFlagPtr = IRBPoison.CreateLoad(
3500 IntptrTy, IRBPoison.CreateIntToPtr(SavedFlagPtrPtr, IntptrPtrTy));
3501 IRBPoison.CreateStore(
3503 IRBPoison.CreateIntToPtr(SavedFlagPtr, IRBPoison.getInt8PtrTy()));
3506 IRBPoison.CreateCall(
3507 AsanStackFreeFunc[StackMallocIdx],
3512 copyToShadow(ShadowAfterScope, ShadowClean, IRBElse, ShadowBase);
3514 copyToShadow(ShadowAfterScope, ShadowClean, IRBRet, ShadowBase);
3519 for (
auto *AI : AllocaVec)
3529 DoPoison ? AsanPoisonStackMemoryFunc : AsanUnpoisonStackMemoryFunc,
3530 {AddrArg, SizeArg});
3541void FunctionStackPoisoner::handleDynamicAllocaCall(
AllocaInst *AI) {
3555 const unsigned ElementSize =
3590 IRB.
CreateCall(AsanAllocaPoisonFunc, {NewAddress, OldSize});
3615 if (!ObjSizeVis.
bothKnown(SizeOffset))
return false;
3617 int64_t
Offset = SizeOffset.second.getSExtValue();
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
static cl::opt< bool > ClUseStackSafety("stack-tagging-use-stack-safety", cl::Hidden, cl::init(true), cl::desc("Use Stack Safety analysis results"))
static void findStoresToUninstrumentedArgAllocas(AddressSanitizer &ASan, Instruction &InsBefore, SmallVectorImpl< Instruction * > &InitInsts)
Collect instructions in the entry block after InsBefore which initialize permanent storage for a func...
static const uint64_t kDefaultShadowScale
constexpr size_t kAccessSizeIndexMask
static cl::opt< int > ClDebugMin("asan-debug-min", cl::desc("Debug min inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClUsePrivateAlias("asan-use-private-alias", cl::desc("Use private aliases for global variables"), cl::Hidden, cl::init(true))
static const uint64_t kPS_ShadowOffset64
static const uint64_t kFreeBSD_ShadowOffset32
constexpr size_t kIsWriteShift
static const uint64_t kSmallX86_64ShadowOffsetAlignMask
static bool isInterestingPointerSubtraction(Instruction *I)
const char kAMDGPUAddressSharedName[]
const char kAsanStackFreeNameTemplate[]
constexpr size_t kCompileKernelMask
static cl::opt< bool > ClForceDynamicShadow("asan-force-dynamic-shadow", cl::desc("Load shadow address into a local variable for each function"), cl::Hidden, cl::init(false))
const char kAsanOptionDetectUseAfterReturn[]
static cl::opt< std::string > ClMemoryAccessCallbackPrefix("asan-memory-access-callback-prefix", cl::desc("Prefix for memory access callbacks"), cl::Hidden, cl::init("__asan_"))
static const uint64_t kRISCV64_ShadowOffset64
static cl::opt< bool > ClInsertVersionCheck("asan-guard-against-version-mismatch", cl::desc("Guard against compiler/runtime version mismatch."), cl::Hidden, cl::init(true))
const char kAsanSetShadowPrefix[]
static cl::opt< AsanDtorKind > ClOverrideDestructorKind("asan-destructor-kind", cl::desc("Sets the ASan destructor kind. The default is to use the value " "provided to the pass constructor"), cl::values(clEnumValN(AsanDtorKind::None, "none", "No destructors"), clEnumValN(AsanDtorKind::Global, "global", "Use global destructors")), cl::init(AsanDtorKind::Invalid), cl::Hidden)
static cl::opt< bool > ClInstrumentWrites("asan-instrument-writes", cl::desc("instrument write instructions"), cl::Hidden, cl::init(true))
static uint64_t GetCtorAndDtorPriority(Triple &TargetTriple)
const char kAsanStackMallocNameTemplate[]
static cl::opt< bool > ClInstrumentByval("asan-instrument-byval", cl::desc("instrument byval call arguments"), cl::Hidden, cl::init(true))
const char kAsanInitName[]
static cl::opt< bool > ClGlobals("asan-globals", cl::desc("Handle global objects"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClRedzoneByvalArgs("asan-redzone-byval-args", cl::desc("Create redzones for byval " "arguments (extra copy " "required)"), cl::Hidden, cl::init(true))
static const uint64_t kWindowsShadowOffset64
static const uint64_t kEmscriptenShadowOffset
const char kAsanGenPrefix[]
constexpr size_t kIsWriteMask
static uint64_t getRedzoneSizeForScale(int MappingScale)
static void doInstrumentAddress(AddressSanitizer *Pass, Instruction *I, Instruction *InsertBefore, Value *Addr, MaybeAlign Alignment, unsigned Granularity, TypeSize TypeStoreSize, bool IsWrite, Value *SizeArgument, bool UseCalls, uint32_t Exp)
static const uint64_t kDefaultShadowOffset64
static cl::opt< bool > ClOptimizeCallbacks("asan-optimize-callbacks", cl::desc("Optimize callbacks"), cl::Hidden, cl::init(false))
const char kAsanUnregisterGlobalsName[]
static const uint64_t kAsanCtorAndDtorPriority
static cl::opt< bool > ClUseStackSafety("asan-use-stack-safety", cl::Hidden, cl::init(false), cl::Hidden, cl::desc("Use Stack Safety analysis results"), cl::Optional)
const char kAsanUnpoisonGlobalsName[]
static cl::opt< bool > ClWithIfuncSuppressRemat("asan-with-ifunc-suppress-remat", cl::desc("Suppress rematerialization of dynamic shadow address by passing " "it through inline asm in prologue."), cl::Hidden, cl::init(true))
static cl::opt< int > ClDebugStack("asan-debug-stack", cl::desc("debug stack"), cl::Hidden, cl::init(0))
const char kAsanUnregisterElfGlobalsName[]
static bool isUnsupportedAMDGPUAddrspace(Value *Addr)
const char kAsanRegisterImageGlobalsName[]
static cl::opt< bool > ClOpt("asan-opt", cl::desc("Optimize instrumentation"), cl::Hidden, cl::init(true))
static const uint64_t kAllocaRzSize
const char kODRGenPrefix[]
static const uint64_t kSystemZ_ShadowOffset64
static const uint64_t kDefaultShadowOffset32
const char kAsanShadowMemoryDynamicAddress[]
static cl::opt< bool > ClUseOdrIndicator("asan-use-odr-indicator", cl::desc("Use odr indicators to improve ODR reporting"), cl::Hidden, cl::init(true))
static bool GlobalWasGeneratedByCompiler(GlobalVariable *G)
Check if G has been created by a trusted compiler pass.
const char kAsanStackMallocAlwaysNameTemplate[]
static cl::opt< bool > ClInvalidPointerCmp("asan-detect-invalid-pointer-cmp", cl::desc("Instrument <, <=, >, >= with pointer operands"), cl::Hidden, cl::init(false))
static const uint64_t kAsanEmscriptenCtorAndDtorPriority
static cl::opt< int > ClDebugMax("asan-debug-max", cl::desc("Debug max inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClInvalidPointerSub("asan-detect-invalid-pointer-sub", cl::desc("Instrument - operations with pointer operands"), cl::Hidden, cl::init(false))
static bool isPointerOperand(Value *V)
static const uint64_t kFreeBSD_ShadowOffset64
static cl::opt< uint32_t > ClForceExperiment("asan-force-experiment", cl::desc("Force optimization experiment (for testing)"), cl::Hidden, cl::init(0))
const char kSanCovGenPrefix[]
static const uint64_t kFreeBSDKasan_ShadowOffset64
const char kAsanModuleDtorName[]
static const uint64_t kDynamicShadowSentinel
static bool isInterestingPointerComparison(Instruction *I)
static cl::opt< bool > ClStack("asan-stack", cl::desc("Handle stack memory"), cl::Hidden, cl::init(true))
static const uint64_t kMIPS64_ShadowOffset64
static const uint64_t kLinuxKasan_ShadowOffset64
static int StackMallocSizeClass(uint64_t LocalStackSize)
static cl::opt< uint32_t > ClMaxInlinePoisoningSize("asan-max-inline-poisoning-size", cl::desc("Inline shadow poisoning for blocks up to the given size in bytes."), cl::Hidden, cl::init(64))
static cl::opt< bool > ClInstrumentAtomics("asan-instrument-atomics", cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClUseAfterScope("asan-use-after-scope", cl::desc("Check stack-use-after-scope"), cl::Hidden, cl::init(false))
constexpr size_t kAccessSizeIndexShift
static cl::opt< int > ClMappingScale("asan-mapping-scale", cl::desc("scale of asan shadow mapping"), cl::Hidden, cl::init(0))
const char kAsanPoisonStackMemoryName[]
static cl::opt< bool > ClEnableKasan("asan-kernel", cl::desc("Enable KernelAddressSanitizer instrumentation"), cl::Hidden, cl::init(false))
static cl::opt< std::string > ClDebugFunc("asan-debug-func", cl::Hidden, cl::desc("Debug func"))
static cl::opt< bool > ClUseGlobalsGC("asan-globals-live-support", cl::desc("Use linker features to support dead " "code stripping of globals"), cl::Hidden, cl::init(true))
static const size_t kNumberOfAccessSizes
const char kAsanUnpoisonStackMemoryName[]
static const uint64_t kLoongArch64_ShadowOffset64
const char kAsanRegisterGlobalsName[]
static cl::opt< bool > ClInstrumentDynamicAllocas("asan-instrument-dynamic-allocas", cl::desc("instrument dynamic allocas"), cl::Hidden, cl::init(true))
const char kAsanModuleCtorName[]
const char kAsanGlobalsRegisteredFlagName[]
static const size_t kMaxStackMallocSize
static cl::opt< bool > ClRecover("asan-recover", cl::desc("Enable recovery mode (continue-after-error)."), cl::Hidden, cl::init(false))
static cl::opt< bool > ClOptSameTemp("asan-opt-same-temp", cl::desc("Instrument the same temp just once"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClDynamicAllocaStack("asan-stack-dynamic-alloca", cl::desc("Use dynamic alloca to represent stack variables"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClOptStack("asan-opt-stack", cl::desc("Don't instrument scalar stack variables"), cl::Hidden, cl::init(false))
static const uint64_t kMIPS_ShadowOffsetN32
const char kAsanUnregisterImageGlobalsName[]
static cl::opt< AsanDetectStackUseAfterReturnMode > ClUseAfterReturn("asan-use-after-return", cl::desc("Sets the mode of detection for stack-use-after-return."), cl::values(clEnumValN(AsanDetectStackUseAfterReturnMode::Never, "never", "Never detect stack use after return."), clEnumValN(AsanDetectStackUseAfterReturnMode::Runtime, "runtime", "Detect stack use after return if " "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."), clEnumValN(AsanDetectStackUseAfterReturnMode::Always, "always", "Always detect stack use after return.")), cl::Hidden, cl::init(AsanDetectStackUseAfterReturnMode::Runtime))
static cl::opt< bool > ClOptGlobals("asan-opt-globals", cl::desc("Don't instrument scalar globals"), cl::Hidden, cl::init(true))
static const uintptr_t kCurrentStackFrameMagic
static cl::opt< int > ClInstrumentationWithCallsThreshold("asan-instrumentation-with-call-threshold", cl::desc("If the function being instrumented contains more than " "this number of memory accesses, use callbacks instead of " "inline checks (-1 means never use callbacks)."), cl::Hidden, cl::init(7000))
static ShadowMapping getShadowMapping(const Triple &TargetTriple, int LongSize, bool IsKasan)
static const uint64_t kPPC64_ShadowOffset64
static cl::opt< AsanCtorKind > ClConstructorKind("asan-constructor-kind", cl::desc("Sets the ASan constructor kind"), cl::values(clEnumValN(AsanCtorKind::None, "none", "No constructors"), clEnumValN(AsanCtorKind::Global, "global", "Use global constructors")), cl::init(AsanCtorKind::Global), cl::Hidden)
static const int kMaxAsanStackMallocSizeClass
static const uint64_t kMIPS32_ShadowOffset32
static cl::opt< bool > ClAlwaysSlowPath("asan-always-slow-path", cl::desc("use instrumentation with slow path for all accesses"), cl::Hidden, cl::init(false))
static const uint64_t kNetBSD_ShadowOffset32
static const uint64_t kFreeBSDAArch64_ShadowOffset64
static const uint64_t kSmallX86_64ShadowOffsetBase
static cl::opt< bool > ClInitializers("asan-initialization-order", cl::desc("Handle C++ initializer order"), cl::Hidden, cl::init(true))
static const uint64_t kNetBSD_ShadowOffset64
static cl::opt< unsigned > ClRealignStack("asan-realign-stack", cl::desc("Realign stack to the value of this flag (power of two)"), cl::Hidden, cl::init(32))
static const uint64_t kWindowsShadowOffset32
static cl::opt< bool > ClInstrumentReads("asan-instrument-reads", cl::desc("instrument read instructions"), cl::Hidden, cl::init(true))
static size_t TypeStoreSizeToSizeIndex(uint32_t TypeSize)
const char kAsanAllocaPoison[]
constexpr size_t kCompileKernelShift
static cl::opt< bool > ClWithIfunc("asan-with-ifunc", cl::desc("Access dynamic shadow through an ifunc global on " "platforms that support this"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClKasanMemIntrinCallbackPrefix("asan-kernel-mem-intrinsic-prefix", cl::desc("Use prefix for memory intrinsics in KASAN mode"), cl::Hidden, cl::init(false))
const char kAsanVersionCheckNamePrefix[]
const char kAMDGPUAddressPrivateName[]
static const uint64_t kNetBSDKasan_ShadowOffset64
const char kAsanRegisterElfGlobalsName[]
static cl::opt< uint64_t > ClMappingOffset("asan-mapping-offset", cl::desc("offset of asan shadow mapping [EXPERIMENTAL]"), cl::Hidden, cl::init(0))
const char kAsanReportErrorTemplate[]
static cl::opt< bool > ClWithComdat("asan-with-comdat", cl::desc("Place ASan constructors in comdat sections"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClSkipPromotableAllocas("asan-skip-promotable-allocas", cl::desc("Do not instrument promotable allocas"), cl::Hidden, cl::init(true))
static cl::opt< int > ClMaxInsnsToInstrumentPerBB("asan-max-ins-per-bb", cl::init(10000), cl::desc("maximal number of instructions to instrument in any given BB"), cl::Hidden)
static const uintptr_t kRetiredStackFrameMagic
const char kAsanPoisonGlobalsName[]
const char kAsanHandleNoReturnName[]
static const size_t kMinStackMallocSize
static cl::opt< int > ClDebug("asan-debug", cl::desc("debug"), cl::Hidden, cl::init(0))
const char kAsanAllocasUnpoison[]
static const uint64_t kAArch64_ShadowOffset64
static cl::opt< bool > ClInvalidPointerPairs("asan-detect-invalid-pointer-pair", cl::desc("Instrument <, <=, >, >=, - with pointer operands"), cl::Hidden, cl::init(false))
This file contains the simple types necessary to represent the attributes associated with functions a...
static const Function * getParent(const Value *V)
static GCRegistry::Add< StatepointGC > D("statepoint-example", "an example strategy for statepoint")
#define clEnumValN(ENUMVAL, FLAGNAME, DESC)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file defines the DenseMap class.
This file builds on the ADT/GraphTraits.h file to build generic depth first graph iterator.
static bool runOnFunction(Function &F, bool PostInlining)
This is the interface for a simple mod/ref and alias analysis over globals.
Module.h This file contains the declarations for the Module class.
FunctionAnalysisManager FAM
ModuleAnalysisManager MAM
const SmallVectorImpl< MachineOperand > & Cond
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
This defines the Use class.
AddressSanitizerPass(const AddressSanitizerOptions &Options, bool UseGlobalGC=true, bool UseOdrIndicator=true, AsanDtorKind DestructorKind=AsanDtorKind::Global, AsanCtorKind ConstructorKind=AsanCtorKind::Global)
PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM)
void printPipeline(raw_ostream &OS, function_ref< StringRef(StringRef)> MapClassName2PassName)
an instruction to allocate memory on the stack
bool isSwiftError() const
Return true if this alloca is used as a swifterror argument to a call.
bool isStaticAlloca() const
Return true if this alloca is in the entry block of the function and is a constant size.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
PointerType * getType() const
Overload to return most specific pointer type.
Type * getAllocatedType() const
Return the type that is being allocated by the instruction.
bool isUsedWithInAlloca() const
Return true if this alloca is used as an inalloca argument to a call.
std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
void setAlignment(Align Align)
const Value * getArraySize() const
Get the number of elements allocated.
A container for analyses that lazily runs them and caches their results.
PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)
Get the result of an analysis pass for a given IR unit.
This class represents an incoming formal argument to a Function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
size_t size() const
size - Get the array size.
static ArrayType * get(Type *ElementType, uint64_t NumElements)
This static method is the primary way to construct an ArrayType.
An instruction that atomically checks whether a specified value is in a memory location,...
an instruction that atomically reads a memory location, combines it with another value,...
AttributeList addParamAttribute(LLVMContext &C, unsigned ArgNo, Attribute::AttrKind Kind) const
Add an argument attribute to the list.
LLVM Basic Block Representation.
iterator begin()
Instruction iterator methods.
const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Function * getParent() const
Return the enclosing method, or null if none.
const CallInst * getTerminatingMustTailCall() const
Returns the call instruction marked 'musttail' prior to the terminating return instruction of this ba...
Conditional or Unconditional Branch instruction.
static BranchInst * Create(BasicBlock *IfTrue, Instruction *InsertBefore=nullptr)
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
bool isInlineAsm() const
Check if this call is an inline asm statement.
Value * getArgOperand(unsigned i) const
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
bool doesNotReturn() const
Determine if the call cannot return.
unsigned arg_size() const
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", Instruction *InsertBefore=nullptr)
@ Largest
The linker will choose the largest COMDAT.
@ SameSize
The data referenced by the COMDAT must be the same size.
@ Any
The linker may choose any COMDAT.
@ NoDeduplicate
No deduplication is performed.
@ ExactMatch
The data referenced by the COMDAT must be the same.
ConstantArray - Constant Array Declarations.
static Constant * get(ArrayType *T, ArrayRef< Constant * > V)
static Constant * getIntToPtr(Constant *C, Type *Ty, bool OnlyIfReduced=false)
static Constant * getPointerCast(Constant *C, Type *Ty)
Create a BitCast, AddrSpaceCast, or a PtrToInt cast constant expression.
static Constant * getGetElementPtr(Type *Ty, Constant *C, ArrayRef< Constant * > IdxList, bool InBounds=false, std::optional< unsigned > InRangeIndex=std::nullopt, Type *OnlyIfReducedTy=nullptr)
Getelementptr form.
static bool isValueValidForType(Type *Ty, uint64_t V)
This static method returns true if the type Ty is big enough to represent the value V.
static Constant * get(Type *Ty, uint64_t V, bool IsSigned=false)
If Ty is a vector type, return a Constant with a splat of the given value.
static Constant * get(StructType *T, ArrayRef< Constant * > V)
This is an important base class in LLVM.
static Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
Constant * getAggregateElement(unsigned Elt) const
For aggregates (struct/array/vector) return the constant that corresponds to the specified element if...
This class represents an Operation in the Expression.
A parsed version of the target data layout string in and methods for querying it.
DILocation * get() const
Get the underlying DILocation.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
static FunctionType * get(Type *Result, ArrayRef< Type * > Params, bool isVarArg)
This static method is the primary way of constructing a FunctionType.
const BasicBlock & front() const
static Function * createWithDefaultAttr(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
Creates a function with some attributes recorded in llvm.module.flags applied.
const Constant * getAliasee() const
static GlobalAlias * create(Type *Ty, unsigned AddressSpace, LinkageTypes Linkage, const Twine &Name, Constant *Aliasee, Module *Parent)
If a parent module is specified, the alias is automatically inserted into the end of the specified mo...
void setAlignment(Align Align)
Sets the alignment attribute of the GlobalObject.
void copyMetadata(const GlobalObject *Src, unsigned Offset)
Copy metadata from Src, adjusting offsets by Offset.
void setComdat(Comdat *C)
void setSection(StringRef S)
Change the section for this global.
VisibilityTypes getVisibility() const
void setUnnamedAddr(UnnamedAddr Val)
bool hasLocalLinkage() const
static StringRef dropLLVMManglingEscape(StringRef Name)
If the given string begins with the GlobalValue name mangling escape character '\1',...
ThreadLocalMode getThreadLocalMode() const
Module * getParent()
Get the module that this global value is contained inside of...
@ HiddenVisibility
The GV is hidden.
void setVisibility(VisibilityTypes V)
LinkageTypes
An enumeration for the kinds of linkage for global values.
@ PrivateLinkage
Like Internal, but omit from symbol table.
@ CommonLinkage
Tentative definitions.
@ InternalLinkage
Rename collisions when linking (static functions).
@ AvailableExternallyLinkage
Available for inspection, not emission.
@ ExternalWeakLinkage
ExternalWeak linkage description.
DLLStorageClassTypes getDLLStorageClass() const
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
void copyAttributesFrom(const GlobalVariable *Src)
copyAttributesFrom - copy all additional attributes (those not needed to create a GlobalVariable) from the GlobalVariable Src to this one.
Analysis pass providing a never-invalidated alias analysis result.
This instruction compares its operands according to the predicate given to the constructor.
Common base class shared among various IRBuilders.
AllocaInst * CreateAlloca(Type *Ty, unsigned AddrSpace, Value *ArraySize=nullptr, const Twine &Name="")
IntegerType * getInt1Ty()
Fetch the type representing a single bit.
Value * CreateExtractElement(Value *Vec, Value *Idx, const Twine &Name="")
LoadInst * CreateAlignedLoad(Type *Ty, Value *Ptr, MaybeAlign Align, const char *Name)
Value * CreatePointerCast(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateICmpSGE(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateSelect(Value *C, Value *True, Value *False, const Twine &Name="", Instruction *MDFrom=nullptr)
BasicBlock::iterator GetInsertPoint() const
Value * CreateIntToPtr(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateTypeSize(Type *DstType, TypeSize Size)
Create an expression which evaluates to the number of units in Size at runtime.
Value * CreateLShr(Value *LHS, Value *RHS, const Twine &Name="", bool isExact=false)
IntegerType * getInt32Ty()
Fetch the type representing a 32-bit integer.
BasicBlock * GetInsertBlock() const
IntegerType * getInt64Ty()
Fetch the type representing a 64-bit integer.
Value * CreateICmpNE(Value *LHS, Value *RHS, const Twine &Name="")
ConstantInt * getInt32(uint32_t C)
Get a constant 32-bit value.
PHINode * CreatePHI(Type *Ty, unsigned NumReservedValues, const Twine &Name="")
Value * CreateNot(Value *V, const Twine &Name="")
Value * CreateICmpEQ(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateSub(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
Value * CreateBitCast(Value *V, Type *DestTy, const Twine &Name="")
ConstantInt * getIntN(unsigned N, uint64_t C)
Get a constant N-bit value, zero extended or truncated from a 64-bit value.
LoadInst * CreateLoad(Type *Ty, Value *Ptr, const char *Name)
Provided to resolve 'CreateLoad(Ty, Ptr, "...")' correctly, instead of converting the string to 'bool' for the isVolatile parameter.
PointerType * getInt8PtrTy(unsigned AddrSpace=0)
Fetch the type representing a pointer to an 8-bit integer value.
Value * CreateAnd(Value *LHS, Value *RHS, const Twine &Name="")
StoreInst * CreateStore(Value *Val, Value *Ptr, bool isVolatile=false)
Value * CreateAdd(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
Value * CreatePtrToInt(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateIsNotNull(Value *Arg, const Twine &Name="")
Return a boolean value testing if Arg != 0.
Value * CreateOr(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateIntCast(Value *V, Type *DestTy, bool isSigned, const Twine &Name="")
void SetInsertPoint(BasicBlock *TheBB)
This specifies that created instructions should be appended to the end of the specified block.
Type * getVoidTy()
Fetch the type representing void.
StoreInst * CreateAlignedStore(Value *Val, Value *Ptr, MaybeAlign Align, bool isVolatile=false)
CallInst * CreateCall(FunctionType *FTy, Value *Callee, ArrayRef< Value * > Args=std::nullopt, const Twine &Name="", MDNode *FPMathTag=nullptr)
Value * CreateGEP(Type *Ty, Value *Ptr, ArrayRef< Value * > IdxList, const Twine &Name="", bool IsInBounds=false)
IntegerType * getInt8Ty()
Fetch the type representing an 8-bit integer.
CallInst * CreateMemCpy(Value *Dst, MaybeAlign DstAlign, Value *Src, MaybeAlign SrcAlign, uint64_t Size, bool isVolatile=false, MDNode *TBAATag=nullptr, MDNode *TBAAStructTag=nullptr, MDNode *ScopeTag=nullptr, MDNode *NoAliasTag=nullptr)
Create and insert a memcpy between the specified pointers.
Value * CreateMul(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
static InlineAsm * get(FunctionType *Ty, StringRef AsmString, StringRef Constraints, bool hasSideEffects, bool isAlignStack=false, AsmDialect asmDialect=AD_ATT, bool canThrow=false)
InlineAsm::get - Return the specified uniqued inline asm string.
An analysis over an "outer" IR unit that provides access to an analysis manager over an "inner" IR un...
Base class for instruction visitors.
RetTy visitCallBase(CallBase &I)
RetTy visitCleanupReturnInst(CleanupReturnInst &I)
RetTy visitIntrinsicInst(IntrinsicInst &I)
void visit(Iterator Start, Iterator End)
RetTy visitReturnInst(ReturnInst &I)
RetTy visitAllocaInst(AllocaInst &I)
RetTy visitResumeInst(ResumeInst &I)
bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
const DebugLoc & getDebugLoc() const
Return the debug location for this node as a DebugLoc.
const Module * getModule() const
Return the module owning the function this instruction belongs to, or nullptr if the function does not have a module.
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
const BasicBlock * getParent() const
BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
const Instruction * getNextNonDebugInstruction(bool SkipPseudoOp=false) const
Return a pointer to the next non-debug instruction in the same basic block as 'this', or nullptr if no such instruction exists.
SymbolTableList< Instruction >::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
void moveBefore(Instruction *MovePos)
Unlink this instruction from its current basic block and insert it into the basic block that MovePos lives in, right before MovePos.
static IntegerType * get(LLVMContext &C, unsigned NumBits)
This static method is the primary way of constructing an IntegerType.
A wrapper class for inspecting calls to intrinsic functions.
Intrinsic::ID getIntrinsicID() const
Return the intrinsic ID of this intrinsic.
This is an important class for using LLVM in a threaded context.
An instruction for reading from memory.
static Error ParseSectionSpecifier(StringRef Spec, StringRef &Segment, StringRef &Section, unsigned &TAA, bool &TAAParsed, unsigned &StubSize)
Parse the section specifier indicated by "Spec".
MDNode * createBranchWeights(uint32_t TrueWeight, uint32_t FalseWeight)
Return metadata containing two branch weights.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
This is the common base class for memset/memcpy/memmove.
A Module instance is used to store all the information related to an LLVM module.
const DataLayout & getDataLayout() const
Get the data layout for the module's target platform.
Evaluate the size and offset of an object pointed to by a Value* statically.
static bool bothKnown(const SizeOffsetType &SizeOffset)
SizeOffsetType compute(Value *V)
Pass interface - Implemented by all 'passes'.
static PointerType * get(Type *ElementType, unsigned AddressSpace)
This constructs a pointer to an object of the specified type in a numbered address space.
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses none()
Convenience factory function for the empty preserved set.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
void abandon()
Mark an analysis as abandoned.
Resume the propagation of an exception.
Return a value (possibly void), from a function.
static ReturnInst * Create(LLVMContext &C, Value *retVal=nullptr, Instruction *InsertBefore=nullptr)
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void reserve(size_type N)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
This pass performs the global (interprocedural) stack safety analysis (new pass manager).
bool stackAccessIsSafe(const Instruction &I) const
bool isSafe(const AllocaInst &AI) const
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
constexpr bool empty() const
empty - Check if the string is empty.
bool startswith(StringRef Prefix) const
const char * data() const
data - Get a pointer to the start of the string (which may not be null terminated).
Class to represent struct types.
static StructType * get(LLVMContext &Context, ArrayRef< Type * > Elements, bool isPacked=false)
This static method is the primary way to create a literal StructType.
Analysis pass providing the TargetLibraryInfo.
Provides information about what library functions are available for the current target.
AttributeList getAttrList(LLVMContext *C, ArrayRef< unsigned > ArgNos, bool Signed, bool Ret=false, AttributeList AL=AttributeList()) const
Triple - Helper class for working with autoconf configuration names.
bool isAndroidVersionLT(unsigned Major) const
bool isThumb() const
Tests whether the target is Thumb (little and big endian).
bool isDriverKit() const
Is this an Apple DriverKit triple.
bool isAndroid() const
Tests whether the target is Android.
bool isMIPS64() const
Tests whether the target is MIPS 64-bit (little and big endian).
ArchType getArch() const
Get the parsed architecture type of this triple.
bool isLoongArch64() const
Tests whether the target is 64-bit LoongArch.
EnvironmentType getEnvironment() const
Get the parsed environment type of this triple.
bool isMIPS32() const
Tests whether the target is MIPS 32-bit (little and big endian).
bool isOSWindows() const
Tests whether the OS is Windows.
bool isARM() const
Tests whether the target is ARM (little and big endian).
bool isOSLinux() const
Tests whether the OS is Linux.
bool isMacOSX() const
Is this a Mac OS X triple.
bool isOSEmscripten() const
Tests whether the OS is Emscripten.
bool isWatchOS() const
Is this an Apple watchOS triple.
bool isiOS() const
Is this an iOS triple.
bool isPS() const
Tests whether the target is the PS4 or PS5 platform.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary values as strings.
The instances of the Type class are immutable: once they are created, they are never changed.
PointerType * getPointerTo(unsigned AddrSpace=0) const
Return a pointer to the current type.
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
static IntegerType * getIntNTy(LLVMContext &C, unsigned N)
static Type * getVoidTy(LLVMContext &C)
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
static IntegerType * getInt8Ty(LLVMContext &C)
static PointerType * getInt8PtrTy(LLVMContext &C, unsigned AS=0)
static IntegerType * getInt32Ty(LLVMContext &C)
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
Value * getOperand(unsigned i) const
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
StringRef getName() const
Return a constant reference to the value's name.
void takeName(Value *V)
Transfer the name from V to this value.
static VectorType * get(Type *ElementType, ElementCount EC)
This static method is the primary way to construct an VectorType.
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
An efficient, type-erasing, non-owning reference to a callable.
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
This file contains the declaration of the Comdat class, which represents a single COMDAT in LLVM.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr std::underlying_type_t< E > Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
@ C
The default llvm calling convention, compatible with C.
Function * getDeclaration(Module *M, ID id, ArrayRef< Type * > Tys=std::nullopt)
Create or insert an LLVM Function declaration for an intrinsic, and return it.
@ S_CSTRING_LITERALS
S_CSTRING_LITERALS - Section with literal C strings.
ValuesClass values(OptsTy... Options)
Helper to build a ValuesClass by forwarding a variable number of arguments as an initializer list to ...
initializer< Ty > init(const Ty &Val)
Linkage
Describes symbol linkage. This can be used to resolve definition clashes.
uint64_t getAllocaSizeInBytes(const AllocaInst &AI)
This is an optimization pass for GlobalISel generic memory operations.
void ReplaceInstWithInst(BasicBlock *BB, BasicBlock::iterator &BI, Instruction *I)
Replace the instruction specified by BI with the instruction specified by I.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
SmallVector< uint8_t, 64 > GetShadowBytesAfterScope(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
std::pair< APInt, APInt > SizeOffsetType
AllocaInst * findAllocaForValue(Value *V, bool OffsetZero=false)
Returns unique alloca where the value comes from, or nullptr.
Function * createSanitizerCtor(Module &M, StringRef CtorName)
Creates sanitizer constructor function.
AsanDetectStackUseAfterReturnMode
Mode of ASan detect stack use after return.
@ Always
Always detect stack use after return.
@ Never
Never detect stack use after return.
@ Runtime
Detect stack use after return if not disabled at runtime with (ASAN_OPTIONS=detect_stack_use_after_return=0).
GlobalVariable * createPrivateGlobalForString(Module &M, StringRef Str, bool AllowMerging, const char *NamePrefix="")
const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=6)
This method strips off any GEP address adjustments and pointer casts from the specified value, returning the original object being addressed.
bool isAllocaPromotable(const AllocaInst *AI)
Return true if this alloca is legal for promotion.
SmallString< 64 > ComputeASanStackFrameDescription(const SmallVectorImpl< ASanStackVariableDescription > &Vars)
SmallVector< uint8_t, 64 > GetShadowBytes(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
int countr_zero(T Val)
Count number of 0's from the least significant bit to the most stopping at the first 1.
FunctionCallee declareSanitizerInitFunction(Module &M, StringRef InitName, ArrayRef< Type * > InitArgTypes, bool Weak=false)
std::string getUniqueModuleId(Module *M)
Produce a unique identifier for this module by taking the MD5 sum of the names of the module's strong...
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
std::pair< Function *, FunctionCallee > createSanitizerCtorAndInitFunctions(Module &M, StringRef CtorName, StringRef InitName, ArrayRef< Type * > InitArgTypes, ArrayRef< Value * > InitArgs, StringRef VersionCheckName=StringRef(), bool Weak=false)
Creates sanitizer constructor function, and calls sanitizer's init function from it.
decltype(auto) get(const PointerIntPair< PointerTy, IntBits, IntType, PtrTraits, Info > &Pair)
void SplitBlockAndInsertIfThenElse(Value *Cond, BasicBlock::iterator SplitBefore, Instruction **ThenTerm, Instruction **ElseTerm, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr)
SplitBlockAndInsertIfThenElse is similar to SplitBlockAndInsertIfThen, but also creates the ElseBlock...
void SplitBlockAndInsertForEachLane(ElementCount EC, Type *IndexTy, Instruction *InsertBefore, std::function< void(IRBuilderBase &, Value *)> Func)
Utility function for performing a given action on each lane of a vector with EC elements.
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
void report_fatal_error(Error Err, bool gen_crash_diag=true)
Report a serious error, calling any installed error handler.
AsanDtorKind
Types of ASan module destructors supported.
@ None
Do not emit any destructors for ASan.
ASanStackFrameLayout ComputeASanStackFrameLayout(SmallVectorImpl< ASanStackVariableDescription > &Vars, uint64_t Granularity, uint64_t MinHeaderSize)
void cantFail(Error Err, const char *Msg=nullptr)
Report a fatal error if Err is a failure value.
void appendToCompilerUsed(Module &M, ArrayRef< GlobalValue * > Values)
Adds global values to the llvm.compiler.used list.
static const int kAsanStackUseAfterReturnMagic
@ Dynamic
Denotes mode unknown at compile time.