93#define DEBUG_TYPE "asan"
99 std::numeric_limits<uint64_t>::max();
140 "__asan_unregister_image_globals";
153 "__asan_stack_malloc_always_";
167 "__asan_option_detect_stack_use_after_return";
170 "__asan_shadow_memory_dynamic_address";
194 "asan-kernel",
cl::desc(
"Enable KernelAddressSanitizer instrumentation"),
199 cl::desc(
"Enable recovery mode (continue-after-error)."),
203 "asan-guard-against-version-mismatch",
204 cl::desc(
"Guard against compiler/runtime version mismatch."),
209 cl::desc(
"instrument read instructions"),
213 "asan-instrument-writes",
cl::desc(
"instrument write instructions"),
222 "asan-instrument-atomics",
232 "asan-always-slow-path",
237 "asan-force-dynamic-shadow",
238 cl::desc(
"Load shadow address into a local variable for each function"),
243 cl::desc(
"Access dynamic shadow through an ifunc global on "
244 "platforms that support this"),
248 "asan-with-ifunc-suppress-remat",
249 cl::desc(
"Suppress rematerialization of dynamic shadow address by passing "
250 "it through inline asm in prologue."),
258 "asan-max-ins-per-bb",
cl::init(10000),
259 cl::desc(
"maximal number of instructions to instrument in any given BB"),
266 "asan-max-inline-poisoning-size",
268 "Inline shadow poisoning for blocks up to the given size in bytes."),
272 "asan-use-after-return",
273 cl::desc(
"Sets the mode of detection for stack-use-after-return."),
275 clEnumValN(AsanDetectStackUseAfterReturnMode::Never,
"never",
276 "Never detect stack use after return."),
278 AsanDetectStackUseAfterReturnMode::Runtime,
"runtime",
279 "Detect stack use after return if "
280 "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."),
281 clEnumValN(AsanDetectStackUseAfterReturnMode::Always,
"always",
282 "Always detect stack use after return.")),
286 cl::desc(
"Create redzones for byval "
287 "arguments (extra copy "
292 cl::desc(
"Check stack-use-after-scope"),
301 cl::desc(
"Handle C++ initializer order"),
305 "asan-detect-invalid-pointer-pair",
310 "asan-detect-invalid-pointer-cmp",
315 "asan-detect-invalid-pointer-sub",
320 "asan-realign-stack",
321 cl::desc(
"Realign stack to the value of this flag (power of two)"),
325 "asan-instrumentation-with-call-threshold",
327 "If the function being instrumented contains more than "
328 "this number of memory accesses, use callbacks instead of "
329 "inline checks (-1 means never use callbacks)."),
333 "asan-memory-access-callback-prefix",
338 "asan-kernel-mem-intrinsic-prefix",
344 cl::desc(
"instrument dynamic allocas"),
348 "asan-skip-promotable-allocas",
353 "asan-constructor-kind",
354 cl::desc(
"Sets the ASan constructor kind"),
357 "Use global constructors")),
364 cl::desc(
"scale of asan shadow mapping"),
369 cl::desc(
"offset of asan shadow mapping [EXPERIMENTAL]"),
383 "asan-opt-same-temp",
cl::desc(
"Instrument the same temp just once"),
387 cl::desc(
"Don't instrument scalar globals"),
391 "asan-opt-stack",
cl::desc(
"Don't instrument scalar stack variables"),
395 "asan-stack-dynamic-alloca",
400 "asan-force-experiment",
406 cl::desc(
"Use private aliases for global variables"),
411 cl::desc(
"Use odr indicators to improve ODR reporting"),
416 cl::desc(
"Use linker features to support dead "
417 "code stripping of globals"),
424 cl::desc(
"Place ASan constructors in comdat sections"),
428 "asan-destructor-kind",
429 cl::desc(
"Sets the ASan destructor kind. The default is to use the value "
430 "provided to the pass constructor"),
433 "Use global destructors")),
453STATISTIC(NumInstrumentedReads,
"Number of instrumented reads");
454STATISTIC(NumInstrumentedWrites,
"Number of instrumented writes");
456 "Number of optimized accesses to global vars");
458 "Number of optimized accesses to stack vars");
467struct ShadowMapping {
478 bool IsAndroid = TargetTriple.
isAndroid();
481 bool IsMacOS = TargetTriple.
isMacOSX();
484 bool IsPS = TargetTriple.
isPS();
491 bool IsMIPS32 = TargetTriple.
isMIPS32();
492 bool IsMIPS64 = TargetTriple.
isMIPS64();
493 bool IsArmOrThumb = TargetTriple.
isARM() || TargetTriple.
isThumb();
500 bool IsAMDGPU = TargetTriple.
isAMDGPU();
502 ShadowMapping Mapping;
509 if (LongSize == 32) {
512 else if (IsMIPSN32ABI)
524 else if (IsEmscripten)
537 else if (IsFreeBSD && IsAArch64)
539 else if (IsFreeBSD && !IsMIPS64) {
544 }
else if (IsNetBSD) {
551 else if (IsLinux && IsX86_64) {
557 }
else if (IsWindows && IsX86_64) {
563 else if (IsMacOS && IsAArch64)
567 else if (IsLoongArch64)
591 Mapping.OrShadowOffset = !IsAArch64 && !IsPPC64 && !IsSystemZ && !IsPS &&
592 !IsRISCV64 && !IsLoongArch64 &&
593 !(Mapping.Offset & (Mapping.Offset - 1)) &&
595 bool IsAndroidWithIfuncSupport =
597 Mapping.InGlobal =
ClWithIfunc && IsAndroidWithIfuncSupport && IsArmOrThumb;
605 int *MappingScale,
bool *OrShadowOffset) {
607 *ShadowBase = Mapping.Offset;
608 *MappingScale = Mapping.Scale;
609 *OrShadowOffset = Mapping.OrShadowOffset;
619 uint8_t AccessSizeIndex)
623 AccessSizeIndex(AccessSizeIndex), IsWrite(IsWrite),
624 CompileKernel(CompileKernel) {}
631 return std::max(32U, 1U << MappingScale);
645struct AddressSanitizer {
647 bool CompileKernel =
false,
bool Recover =
false,
648 bool UseAfterScope =
false,
650 AsanDetectStackUseAfterReturnMode::Runtime)
658 C = &(
M.getContext());
659 LongSize =
M.getDataLayout().getPointerSizeInBits();
663 TargetTriple =
Triple(
M.getTargetTriple());
667 assert(this->UseAfterReturn != AsanDetectStackUseAfterReturnMode::Invalid);
674 assert(CI &&
"non-constant array size");
680 return SizeInBytes * ArraySize;
684 bool isInterestingAlloca(
const AllocaInst &AI);
687 void getInterestingMemoryOperands(
693 void instrumentPointerComparisonOrSubtraction(
Instruction *
I);
700 Value *SizeArgument);
704 Value *SizeArgument,
bool UseCalls,
709 bool IsWrite,
size_t AccessSizeIndex,
713 bool suppressInstrumentationSiteForDebug(
int &Instrumented);
715 bool maybeInsertAsanInitAtFunctionEntry(
Function &
F);
716 bool maybeInsertDynamicShadowAtFunctionEntry(
Function &
F);
717 void markEscapedLocalAllocas(
Function &
F);
720 friend struct FunctionStackPoisoner;
730 struct FunctionStateRAII {
731 AddressSanitizer *
Pass;
733 FunctionStateRAII(AddressSanitizer *
Pass) :
Pass(
Pass) {
735 "last pass forgot to clear cache");
739 ~FunctionStateRAII() {
740 Pass->LocalDynamicShadow =
nullptr;
741 Pass->ProcessedAllocas.clear();
755 ShadowMapping Mapping;
769 Value *LocalDynamicShadow =
nullptr;
777class ModuleAddressSanitizer {
779 ModuleAddressSanitizer(
Module &M,
bool CompileKernel =
false,
780 bool Recover =
false,
bool UseGlobalsGC =
true,
781 bool UseOdrIndicator =
true,
787 UseGlobalsGC(UseGlobalsGC &&
ClUseGlobalsGC && !this->CompileKernel),
802 UseCtorComdat(UseGlobalsGC &&
ClWithComdat && !this->CompileKernel),
803 DestructorKind(DestructorKind),
804 ConstructorKind(ConstructorKind) {
805 C = &(
M.getContext());
806 int LongSize =
M.getDataLayout().getPointerSizeInBits();
808 TargetTriple =
Triple(
M.getTargetTriple());
813 assert(this->DestructorKind != AsanDtorKind::Invalid);
816 bool instrumentModule(
Module &);
819 void initializeCallbacks(
Module &M);
828 const std::string &UniqueModuleId);
845 bool ShouldUseMachOGlobalsSection()
const;
846 StringRef getGlobalMetadataSection()
const;
849 uint64_t getMinRedzoneSizeForGlobal()
const {
853 int GetAsanVersion(
const Module &M)
const;
858 bool UsePrivateAlias;
859 bool UseOdrIndicator;
866 ShadowMapping Mapping;
876 Function *AsanCtorFunction =
nullptr;
877 Function *AsanDtorFunction =
nullptr;
889struct FunctionStackPoisoner :
public InstVisitor<FunctionStackPoisoner> {
891 AddressSanitizer &ASan;
896 ShadowMapping Mapping;
905 FunctionCallee AsanPoisonStackMemoryFunc, AsanUnpoisonStackMemoryFunc;
909 struct AllocaPoisonCall {
917 bool HasUntracedLifetimeIntrinsic =
false;
924 bool HasInlineAsm =
false;
925 bool HasReturnsTwiceCall =
false;
928 FunctionStackPoisoner(
Function &F, AddressSanitizer &ASan)
930 C(ASan.
C), IntptrTy(ASan.IntptrTy),
931 IntptrPtrTy(
PointerType::
get(IntptrTy, 0)), Mapping(ASan.Mapping),
940 copyArgsPassedByValToAllocas();
945 if (AllocaVec.
empty() && DynamicAllocaVec.
empty())
return false;
947 initializeCallbacks(*
F.getParent());
949 if (HasUntracedLifetimeIntrinsic) {
953 StaticAllocaPoisonCallVec.
clear();
954 DynamicAllocaPoisonCallVec.
clear();
957 processDynamicAllocas();
958 processStaticAllocas();
969 void copyArgsPassedByValToAllocas();
974 void processStaticAllocas();
975 void processDynamicAllocas();
977 void createDynamicAllocasInitStorage();
995 void unpoisonDynamicAllocasBeforeInst(
Instruction *InstBefore,
998 Value *DynamicAreaPtr = IRB.CreatePtrToInt(SavedStack, IntptrTy);
1003 if (!isa<ReturnInst>(InstBefore)) {
1005 InstBefore->
getModule(), Intrinsic::get_dynamic_area_offset,
1008 Value *DynamicAreaOffset = IRB.CreateCall(DynamicAreaOffsetFunc, {});
1010 DynamicAreaPtr = IRB.CreateAdd(IRB.CreatePtrToInt(SavedStack, IntptrTy),
1015 AsanAllocasUnpoisonFunc,
1016 {IRB.CreateLoad(IntptrTy, DynamicAllocaLayout), DynamicAreaPtr});
1020 void unpoisonDynamicAllocas() {
1022 unpoisonDynamicAllocasBeforeInst(Ret, DynamicAllocaLayout);
1024 for (
Instruction *StackRestoreInst : StackRestoreVec)
1025 unpoisonDynamicAllocasBeforeInst(StackRestoreInst,
1026 StackRestoreInst->getOperand(0));
1039 void handleDynamicAllocaCall(
AllocaInst *AI);
1043 if (!ASan.isInterestingAlloca(AI)) {
1047 if (AllocaVec.
empty())
1065 if (
ID == Intrinsic::stackrestore) StackRestoreVec.
push_back(&II);
1066 if (
ID == Intrinsic::localescape) LocalEscapeCall = &II;
1067 if (!ASan.UseAfterScope)
1074 if (
Size->isMinusOne())
return;
1077 const uint64_t SizeValue =
Size->getValue().getLimitedValue();
1078 if (SizeValue == ~0ULL ||
1086 HasUntracedLifetimeIntrinsic =
true;
1090 if (!ASan.isInterestingAlloca(*AI))
1092 bool DoPoison = (
ID == Intrinsic::lifetime_end);
1093 AllocaPoisonCall APC = {&II, AI, SizeValue, DoPoison};
1095 StaticAllocaPoisonCallVec.
push_back(APC);
1097 DynamicAllocaPoisonCallVec.
push_back(APC);
1101 if (
CallInst *CI = dyn_cast<CallInst>(&CB)) {
1102 HasInlineAsm |= CI->isInlineAsm() && &CB != ASan.LocalDynamicShadow;
1103 HasReturnsTwiceCall |= CI->canReturnTwice();
1108 void initializeCallbacks(
Module &M);
1135 OS, MapClassName2PassName);
1147 UseOdrIndicator(UseOdrIndicator), DestructorKind(DestructorKind),
1152 ModuleAddressSanitizer ModuleSanitizer(M, Options.
CompileKernel,
1154 UseOdrIndicator, DestructorKind,
1161 AddressSanitizer FunctionSanitizer(M, SSGI, Options.
CompileKernel,
1165 Modified |= FunctionSanitizer.instrumentFunction(
F, &TLI);
1167 Modified |= ModuleSanitizer.instrumentModule(M);
1188 if (
G->getName().startswith(
"llvm.") ||
1190 G->getName().startswith(
"__llvm_gcov_ctr") ||
1192 G->getName().startswith(
"__llvm_rtti_proxy"))
1205 Type *PtrTy = cast<PointerType>(
Addr->getType()->getScalarType());
1207 if (AddrSpace == 3 || AddrSpace == 5)
1214 Shadow = IRB.
CreateLShr(Shadow, Mapping.Scale);
1215 if (Mapping.Offset == 0)
return Shadow;
1218 if (LocalDynamicShadow)
1219 ShadowBase = LocalDynamicShadow;
1222 if (Mapping.OrShadowOffset)
1223 return IRB.
CreateOr(Shadow, ShadowBase);
1225 return IRB.
CreateAdd(Shadow, ShadowBase);
1231 if (isa<MemTransferInst>(
MI)) {
1233 isa<MemMoveInst>(
MI) ? AsanMemmove : AsanMemcpy,
1237 }
else if (isa<MemSetInst>(
MI)) {
1244 MI->eraseFromParent();
1248bool AddressSanitizer::isInterestingAlloca(
const AllocaInst &AI) {
1249 auto PreviouslySeenAllocaInfo = ProcessedAllocas.find(&AI);
1251 if (PreviouslySeenAllocaInfo != ProcessedAllocas.end())
1252 return PreviouslySeenAllocaInfo->getSecond();
1254 bool IsInteresting =
1267 !(SSGI && SSGI->
isSafe(AI)));
1269 ProcessedAllocas[&AI] = IsInteresting;
1270 return IsInteresting;
1275 Type *PtrTy = cast<PointerType>(
Ptr->getType()->getScalarType());
1284 if (
Ptr->isSwiftError())
1290 if (
auto AI = dyn_cast_or_null<AllocaInst>(
Ptr))
1301void AddressSanitizer::getInterestingMemoryOperands(
1304 if (LocalDynamicShadow ==
I)
1307 if (
LoadInst *LI = dyn_cast<LoadInst>(
I)) {
1310 Interesting.
emplace_back(
I, LI->getPointerOperandIndex(),
false,
1311 LI->getType(), LI->getAlign());
1312 }
else if (
StoreInst *SI = dyn_cast<StoreInst>(
I)) {
1316 SI->getValueOperand()->getType(),
SI->getAlign());
1320 Interesting.
emplace_back(
I, RMW->getPointerOperandIndex(),
true,
1321 RMW->getValOperand()->getType(), std::nullopt);
1325 Interesting.
emplace_back(
I, XCHG->getPointerOperandIndex(),
true,
1326 XCHG->getCompareOperand()->getType(),
1328 }
else if (
auto CI = dyn_cast<CallInst>(
I)) {
1329 if (CI->getIntrinsicID() == Intrinsic::masked_load ||
1330 CI->getIntrinsicID() == Intrinsic::masked_store) {
1331 bool IsWrite = CI->getIntrinsicID() == Intrinsic::masked_store;
1333 unsigned OpOffset = IsWrite ? 1 : 0;
1338 if (ignoreAccess(
I, BasePtr))
1343 if (
auto *Op = dyn_cast<ConstantInt>(CI->
getOperand(1 + OpOffset)))
1344 Alignment =
Op->getMaybeAlignValue();
1346 Interesting.
emplace_back(
I, OpOffset, IsWrite, Ty, Alignment, Mask);
1348 for (
unsigned ArgNo = 0; ArgNo < CI->arg_size(); ArgNo++) {
1350 ignoreAccess(
I, CI->getArgOperand(ArgNo)))
1352 Type *Ty = CI->getParamByValType(ArgNo);
1367 if (
ICmpInst *Cmp = dyn_cast<ICmpInst>(
I)) {
1368 if (!Cmp->isRelational())
1382 if (BO->getOpcode() != Instruction::Sub)
1395 if (!
G->hasInitializer())
1398 if (
G->hasSanitizerMetadata() &&
G->getSanitizerMetadata().IsDynInit)
1404void AddressSanitizer::instrumentPointerComparisonOrSubtraction(
1407 FunctionCallee F = isa<ICmpInst>(
I) ? AsanPtrCmpFunction : AsanPtrSubFunction;
1408 Value *
Param[2] = {
I->getOperand(0),
I->getOperand(1)};
1409 for (
Value *&i : Param) {
1410 if (i->getType()->isPointerTy())
1420 Value *SizeArgument,
bool UseCalls,
1426 (!Alignment || *Alignment >= Granularity || *Alignment >=
TypeSize / 8))
1428 nullptr, UseCalls, Exp);
1430 IsWrite,
nullptr, UseCalls, Exp);
1437 unsigned Granularity,
Type *OpType,
1438 bool IsWrite,
Value *SizeArgument,
1440 auto *VTy = cast<FixedVectorType>(OpType);
1441 uint64_t ElemTypeSize =
DL.getTypeStoreSizeInBits(VTy->getScalarType());
1442 unsigned Num = VTy->getNumElements();
1444 for (
unsigned Idx = 0;
Idx < Num; ++
Idx) {
1445 Value *InstrumentedAddress =
nullptr;
1447 if (
auto *
Vector = dyn_cast<ConstantVector>(Mask)) {
1449 if (
auto *Masked = dyn_cast<ConstantInt>(
Vector->getOperand(
Idx))) {
1450 if (Masked->isZero())
1460 InsertBefore = ThenTerm;
1464 InstrumentedAddress =
1467 Granularity, ElemTypeSize, IsWrite, SizeArgument,
1495 isSafeAccess(ObjSizeVis,
Addr,
O.TypeSize)) {
1496 NumOptimizedAccessesToGlobalVar++;
1504 isSafeAccess(ObjSizeVis,
Addr,
O.TypeSize)) {
1505 NumOptimizedAccessesToStackVar++;
1511 NumInstrumentedWrites++;
1513 NumInstrumentedReads++;
1515 unsigned Granularity = 1 << Mapping.Scale;
1518 Addr,
O.Alignment, Granularity,
O.OpType,
1519 O.IsWrite,
nullptr, UseCalls, Exp);
1522 Granularity,
O.TypeSize,
O.IsWrite,
nullptr, UseCalls,
1529 size_t AccessSizeIndex,
1530 Value *SizeArgument,
1538 {
Addr, SizeArgument});
1541 {
Addr, SizeArgument, ExpVal});
1545 IRB.
CreateCall(AsanErrorCallback[IsWrite][0][AccessSizeIndex],
Addr);
1547 Call = IRB.
CreateCall(AsanErrorCallback[IsWrite][1][AccessSizeIndex],
1551 Call->setCannotMerge();
1558 size_t Granularity =
static_cast<size_t>(1) << Mapping.Scale;
1560 Value *LastAccessedByte =
1573Instruction *AddressSanitizer::instrumentAMDGPUAddress(
1579 Type *PtrTy = cast<PointerType>(
Addr->getType()->getScalarType());
1582 return InsertBefore;
1588 Value *IsSharedOrPrivate = IRB.
CreateOr(IsShared, IsPrivate);
1590 Value *AddrSpaceZeroLanding =
1592 InsertBefore = cast<Instruction>(AddrSpaceZeroLanding);
1593 return InsertBefore;
1596void AddressSanitizer::instrumentAddress(
Instruction *OrigIns,
1599 Value *SizeArgument,
bool UseCalls,
1601 if (TargetTriple.isAMDGPU()) {
1602 InsertBefore = instrumentAMDGPUAddress(OrigIns, InsertBefore,
Addr,
1610 const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
1613 const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
1625 IRB.
CreateCall(AsanMemoryAccessCallback[IsWrite][0][AccessSizeIndex],
1628 IRB.
CreateCall(AsanMemoryAccessCallback[IsWrite][1][AccessSizeIndex],
1636 Value *ShadowPtr = memToShadow(AddrLong, IRB);
1637 Value *ShadowValue =
1641 size_t Granularity = 1ULL << Mapping.Scale;
1649 assert(cast<BranchInst>(CheckTerm)->isUnconditional());
1652 Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue,
TypeSize);
1666 Instruction *Crash = generateCrashCode(CrashTerm, AddrLong, IsWrite,
1667 AccessSizeIndex, SizeArgument, Exp);
1675void AddressSanitizer::instrumentUnusualSizeOrAlignment(
1677 bool IsWrite,
Value *SizeArgument,
bool UseCalls,
uint32_t Exp) {
1683 IRB.
CreateCall(AsanMemoryAccessCallbackSized[IsWrite][0],
1686 IRB.
CreateCall(AsanMemoryAccessCallbackSized[IsWrite][1],
1692 instrumentAddress(
I, InsertBefore,
Addr, 8, IsWrite,
Size,
false, Exp);
1693 instrumentAddress(
I, InsertBefore, LastByte, 8, IsWrite,
Size,
false, Exp);
1697void ModuleAddressSanitizer::poisonOneInitializer(
Function &GlobalInit,
1705 IRB.
CreateCall(AsanPoisonGlobals, ModuleNameAddr);
1708 for (
auto &BB : GlobalInit)
1709 if (
ReturnInst *RI = dyn_cast<ReturnInst>(BB.getTerminator()))
1713void ModuleAddressSanitizer::createInitializerPoisonCalls(
1724 if (isa<ConstantAggregateZero>(
OP))
continue;
1730 auto *Priority = cast<ConstantInt>(CS->
getOperand(0));
1740ModuleAddressSanitizer::getExcludedAliasedGlobal(
const GlobalAlias &GA)
const {
1745 assert(CompileKernel &&
"Only expecting to be called when compiling kernel");
1752 return dyn_cast<GlobalVariable>(
C->stripPointerCastsAndAliases());
1757bool ModuleAddressSanitizer::shouldInstrumentGlobal(
GlobalVariable *
G)
const {
1758 Type *Ty =
G->getValueType();
1761 if (
G->hasSanitizerMetadata() &&
G->getSanitizerMetadata().NoAddress)
1763 if (!Ty->
isSized())
return false;
1764 if (!
G->hasInitializer())
return false;
1766 if (
G->getAddressSpace() &&
1773 if (
G->isThreadLocal())
return false;
1775 if (
G->getAlign() && *
G->getAlign() > getMinRedzoneSizeForGlobal())
return false;
1781 if (!TargetTriple.isOSBinFormatCOFF()) {
1782 if (!
G->hasExactDefinition() ||
G->hasComdat())
1786 if (
G->isInterposable())
1793 switch (
C->getSelectionKind()) {
1804 if (
G->hasSection()) {
1814 if (Section ==
"llvm.metadata")
return false;
1821 if (
Section.startswith(
".preinit_array") ||
1822 Section.startswith(
".init_array") ||
1823 Section.startswith(
".fini_array")) {
1829 if (TargetTriple.isOSBinFormatELF()) {
1831 [](
char c) {
return llvm::isAlnum(c) || c ==
'_'; }))
1843 if (TargetTriple.isOSBinFormatCOFF() &&
Section.contains(
'$')) {
1844 LLVM_DEBUG(
dbgs() <<
"Ignoring global in sorted section (contains '$'): "
1849 if (TargetTriple.isOSBinFormatMachO()) {
1851 unsigned TAA = 0, StubSize = 0;
1854 Section, ParsedSegment, ParsedSection, TAA, TAAParsed, StubSize));
1859 if (ParsedSegment ==
"__OBJC" ||
1860 (ParsedSegment ==
"__DATA" && ParsedSection.
startswith(
"__objc_"))) {
1872 if (ParsedSegment ==
"__DATA" && ParsedSection ==
"__cfstring") {
1885 if (CompileKernel) {
1888 if (
G->getName().startswith(
"__"))
1898bool ModuleAddressSanitizer::ShouldUseMachOGlobalsSection()
const {
1899 if (!TargetTriple.isOSBinFormatMachO())
1902 if (TargetTriple.isMacOSX() && !TargetTriple.isMacOSXVersionLT(10, 11))
1904 if (TargetTriple.isiOS() && !TargetTriple.isOSVersionLT(9))
1906 if (TargetTriple.isWatchOS() && !TargetTriple.isOSVersionLT(2))
1908 if (TargetTriple.isDriverKit())
1914StringRef ModuleAddressSanitizer::getGlobalMetadataSection()
const {
1915 switch (TargetTriple.getObjectFormat()) {
1925 "ModuleAddressSanitizer not implemented for object file format");
1932void ModuleAddressSanitizer::initializeCallbacks(
Module &M) {
1938 AsanUnpoisonGlobals =
1942 AsanRegisterGlobals =
M.getOrInsertFunction(
1944 AsanUnregisterGlobals =
M.getOrInsertFunction(
1949 AsanRegisterImageGlobals =
M.getOrInsertFunction(
1951 AsanUnregisterImageGlobals =
M.getOrInsertFunction(
1954 AsanRegisterElfGlobals =
1956 IntptrTy, IntptrTy, IntptrTy);
1957 AsanUnregisterElfGlobals =
1959 IntptrTy, IntptrTy, IntptrTy);
1964void ModuleAddressSanitizer::SetComdatForGlobalMetadata(
1969 if (!
G->hasName()) {
1976 if (!InternalSuffix.
empty() &&
G->hasLocalLinkage()) {
1977 std::string
Name = std::string(
G->getName());
1978 Name += InternalSuffix;
1979 C =
M.getOrInsertComdat(
Name);
1981 C =
M.getOrInsertComdat(
G->getName());
1987 if (TargetTriple.isOSBinFormatCOFF()) {
1989 if (
G->hasPrivateLinkage())
2002ModuleAddressSanitizer::CreateMetadataGlobal(
Module &M,
Constant *Initializer,
2004 auto Linkage = TargetTriple.isOSBinFormatMachO()
2008 M, Initializer->
getType(),
false, Linkage, Initializer,
2010 Metadata->setSection(getGlobalMetadataSection());
2018 AsanDtorFunction->addFnAttr(Attribute::NoUnwind);
2026void ModuleAddressSanitizer::InstrumentGlobalsCOFF(
2030 auto &
DL =
M.getDataLayout();
2033 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2034 Constant *Initializer = MetadataInitializers[i];
2037 CreateMetadataGlobal(M, Initializer,
G->getName());
2039 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2045 unsigned SizeOfGlobalStruct =
DL.getTypeAllocSize(Initializer->
getType());
2047 "global metadata will not be padded appropriately");
2050 SetComdatForGlobalMetadata(
G,
Metadata,
"");
2055 if (!MetadataGlobals.empty())
2059void ModuleAddressSanitizer::InstrumentGlobalsELF(
2062 const std::string &UniqueModuleId) {
2069 bool UseComdatForGlobalsGC = UseOdrIndicator;
2072 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2075 CreateMetadataGlobal(M, MetadataInitializers[i],
G->getName());
2077 Metadata->setMetadata(LLVMContext::MD_associated, MD);
2080 if (UseComdatForGlobalsGC)
2081 SetComdatForGlobalMetadata(
G,
Metadata, UniqueModuleId);
2086 if (!MetadataGlobals.empty())
2103 "__start_" + getGlobalMetadataSection());
2107 "__stop_" + getGlobalMetadataSection());
2121 IrbDtor.CreateCall(AsanUnregisterElfGlobals,
2128void ModuleAddressSanitizer::InstrumentGlobalsMachO(
2139 for (
size_t i = 0; i < ExtendedGlobals.
size(); i++) {
2140 Constant *Initializer = MetadataInitializers[i];
2143 CreateMetadataGlobal(M, Initializer,
G->getName());
2147 auto LivenessBinder =
2152 Twine(
"__asan_binder_") +
G->getName());
2153 Liveness->
setSection(
"__DATA,__asan_liveness,regular,live_support");
2154 LivenessGlobals[i] = Liveness;
2161 if (!LivenessGlobals.empty())
2183 IrbDtor.CreateCall(AsanUnregisterImageGlobals,
2188void ModuleAddressSanitizer::InstrumentGlobalsWithMetadataArray(
2192 unsigned N = ExtendedGlobals.
size();
2202 if (Mapping.Scale > 3)
2203 AllGlobals->setAlignment(
Align(1ULL << Mapping.Scale));
2214 IrbDtor.CreateCall(AsanUnregisterGlobals,
2227 *CtorComdat =
false;
2232 if (CompileKernel) {
2233 for (
auto &GA :
M.aliases()) {
2235 AliasedGlobalExclusions.
insert(GV);
2240 for (
auto &
G :
M.globals()) {
2241 if (!AliasedGlobalExclusions.
count(&
G) && shouldInstrumentGlobal(&
G))
2245 size_t n = GlobalsToChange.
size();
2251 auto &
DL =
M.getDataLayout();
2265 IntptrTy, IntptrTy, IntptrTy);
2269 bool HasDynamicallyInitializedGlobals =
false;
2276 for (
size_t i = 0; i < n; i++) {
2280 if (
G->hasSanitizerMetadata())
2281 MD =
G->getSanitizerMetadata();
2286 std::string NameForGlobal =
G->getName().str();
2291 Type *Ty =
G->getValueType();
2292 const uint64_t SizeInBytes =
DL.getTypeAllocSize(Ty);
2293 const uint64_t RightRedzoneSize = getRedzoneSizeForGlobal(SizeInBytes);
2305 M, NewTy,
G->isConstant(), Linkage, NewInitializer,
"",
G,
2306 G->getThreadLocalMode(),
G->getAddressSpace());
2316 if (TargetTriple.isOSBinFormatMachO() && !
G->hasSection() &&
2318 auto Seq = dyn_cast<ConstantDataSequential>(
G->getInitializer());
2319 if (Seq && Seq->isCString())
2320 NewGlobal->
setSection(
"__TEXT,__asan_cstring,regular");
2331 G->replaceAllUsesWith(
2334 G->eraseFromParent();
2335 NewGlobals[i] = NewGlobal;
2340 bool CanUsePrivateAliases =
2341 TargetTriple.isOSBinFormatELF() || TargetTriple.isOSBinFormatMachO() ||
2342 TargetTriple.isOSBinFormatWasm();
2343 if (CanUsePrivateAliases && UsePrivateAlias) {
2346 InstrumentedGlobal =
2354 }
else if (UseOdrIndicator) {
2357 auto *ODRIndicatorSym =
2366 ODRIndicatorSym->setAlignment(
Align(1));
2367 ODRIndicator = ODRIndicatorSym;
2382 HasDynamicallyInitializedGlobals =
true;
2386 Initializers[i] = Initializer;
2392 for (
size_t i = 0; i < n; i++) {
2394 if (
G->getName().empty())
continue;
2399 std::string ELFUniqueModuleId =
2403 if (!ELFUniqueModuleId.empty()) {
2404 InstrumentGlobalsELF(IRB, M, NewGlobals, Initializers, ELFUniqueModuleId);
2406 }
else if (UseGlobalsGC && TargetTriple.isOSBinFormatCOFF()) {
2407 InstrumentGlobalsCOFF(IRB, M, NewGlobals, Initializers);
2408 }
else if (UseGlobalsGC && ShouldUseMachOGlobalsSection()) {
2409 InstrumentGlobalsMachO(IRB, M, NewGlobals, Initializers);
2411 InstrumentGlobalsWithMetadataArray(IRB, M, NewGlobals, Initializers);
2415 if (HasDynamicallyInitializedGlobals)
2423ModuleAddressSanitizer::getRedzoneSizeForGlobal(
uint64_t SizeInBytes)
const {
2424 constexpr uint64_t kMaxRZ = 1 << 18;
2425 const uint64_t MinRZ = getMinRedzoneSizeForGlobal();
2428 if (SizeInBytes <= MinRZ / 2) {
2432 RZ = MinRZ - SizeInBytes;
2435 RZ = std::clamp((SizeInBytes / MinRZ / 4) * MinRZ, MinRZ, kMaxRZ);
2438 if (SizeInBytes % MinRZ)
2439 RZ += MinRZ - (SizeInBytes % MinRZ);
2442 assert((RZ + SizeInBytes) % MinRZ == 0);
2447int ModuleAddressSanitizer::GetAsanVersion(
const Module &M)
const {
2448 int LongSize =
M.getDataLayout().getPointerSizeInBits();
2453 Version += (LongSize == 32 && isAndroid);
2457bool ModuleAddressSanitizer::instrumentModule(
Module &M) {
2458 initializeCallbacks(M);
2463 if (CompileKernel) {
2468 std::string AsanVersion = std::to_string(GetAsanVersion(M));
2469 std::string VersionCheckName =
2471 std::tie(AsanCtorFunction, std::ignore) =
2474 {}, VersionCheckName);
2478 bool CtorComdat =
true;
2481 if (AsanCtorFunction) {
2482 IRBuilder<> IRB(AsanCtorFunction->getEntryBlock().getTerminator());
2483 InstrumentGlobals(IRB, M, &CtorComdat);
2486 InstrumentGlobals(IRB, M, &CtorComdat);
2495 if (UseCtorComdat && TargetTriple.isOSBinFormatELF() && CtorComdat) {
2496 if (AsanCtorFunction) {
2500 if (AsanDtorFunction) {
2505 if (AsanCtorFunction)
2507 if (AsanDtorFunction)
2518 for (
int Exp = 0;
Exp < 2;
Exp++) {
2519 for (
size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
2520 const std::string TypeStr = AccessIsWrite ?
"store" :
"load";
2521 const std::string ExpStr =
Exp ?
"exp_" :
"";
2522 const std::string EndingStr = Recover ?
"_noabort" :
"";
2531 Args1.push_back(ExpType);
2532 if (
auto AK = TLI->getExtAttrForI32Param(
false)) {
2537 AsanErrorCallbackSized[AccessIsWrite][
Exp] =
M.getOrInsertFunction(
2541 AsanMemoryAccessCallbackSized[AccessIsWrite][
Exp] =
M.getOrInsertFunction(
2546 AccessSizeIndex++) {
2547 const std::string Suffix = TypeStr + itostr(1ULL << AccessSizeIndex);
2548 AsanErrorCallback[AccessIsWrite][
Exp][AccessSizeIndex] =
2549 M.getOrInsertFunction(
2553 AsanMemoryAccessCallback[AccessIsWrite][
Exp][AccessSizeIndex] =
2554 M.getOrInsertFunction(
2561 const std::string MemIntrinCallbackPrefix =
2565 AsanMemmove =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memmove",
2568 AsanMemcpy =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memcpy",
2571 AsanMemset =
M.getOrInsertFunction(MemIntrinCallbackPrefix +
"memset",
2576 AsanHandleNoReturnFunc =
2579 AsanPtrCmpFunction =
2581 AsanPtrSubFunction =
2583 if (Mapping.InGlobal)
2584 AsanShadowGlobal =
M.getOrInsertGlobal(
"__asan_shadow",
2587 AMDGPUAddressShared =
M.getOrInsertFunction(
2589 AMDGPUAddressPrivate =
M.getOrInsertFunction(
2593bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(
Function &
F) {
2601 if (
F.getName().find(
" load]") != std::string::npos) {
2611bool AddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(
Function &
F) {
2617 if (Mapping.InGlobal) {
2625 LocalDynamicShadow =
2626 IRB.
CreateCall(Asm, {AsanShadowGlobal},
".asan.shadow");
2628 LocalDynamicShadow =
2632 Value *GlobalDynamicAddress =
F.getParent()->getOrInsertGlobal(
2634 LocalDynamicShadow = IRB.
CreateLoad(IntptrTy, GlobalDynamicAddress);
2639void AddressSanitizer::markEscapedLocalAllocas(
Function &
F) {
2644 assert(ProcessedAllocas.empty() &&
"must process localescape before allocas");
2648 if (!
F.getParent()->getFunction(
"llvm.localescape"))
return;
2657 AllocaInst *AI = dyn_cast<AllocaInst>(
Arg->stripPointerCasts());
2659 "non-static alloca arg to localescape");
2660 ProcessedAllocas[AI] =
false;
2667bool AddressSanitizer::suppressInstrumentationSiteForDebug(
int &Instrumented) {
2668 bool ShouldInstrument =
2672 return !ShouldInstrument;
2675bool AddressSanitizer::instrumentFunction(
Function &
F,
2681 if (
F.getName().startswith(
"__asan_"))
return false;
2683 bool FunctionModified =
false;
2688 if (maybeInsertAsanInitAtFunctionEntry(
F))
2689 FunctionModified =
true;
2692 if (!
F.hasFnAttribute(Attribute::SanitizeAddress))
return FunctionModified;
2694 if (
F.hasFnAttribute(Attribute::DisableSanitizerInstrumentation))
2695 return FunctionModified;
2699 initializeCallbacks(*
F.getParent(), TLI);
2701 FunctionStateRAII CleanupObj(
this);
2703 FunctionModified |= maybeInsertDynamicShadowAtFunctionEntry(
F);
2707 markEscapedLocalAllocas(
F);
2719 for (
auto &BB :
F) {
2721 TempsToInstrument.
clear();
2722 int NumInsnsPerBB = 0;
2723 for (
auto &Inst : BB) {
2724 if (LooksLikeCodeInBug11395(&Inst))
return false;
2729 getInterestingMemoryOperands(&Inst, InterestingOperands);
2731 if (!InterestingOperands.
empty()) {
2732 for (
auto &Operand : InterestingOperands) {
2738 if (Operand.MaybeMask) {
2742 if (!TempsToInstrument.
insert(
Ptr).second)
2746 OperandsToInstrument.
push_back(Operand);
2753 PointerComparisonsOrSubtracts.
push_back(&Inst);
2759 if (
auto *CB = dyn_cast<CallBase>(&Inst)) {
2761 TempsToInstrument.
clear();
2765 if (
CallInst *CI = dyn_cast<CallInst>(&Inst))
2773 OperandsToInstrument.
size() + IntrinToInstrument.
size() >
2781 int NumInstrumented = 0;
2782 for (
auto &Operand : OperandsToInstrument) {
2783 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
2784 instrumentMop(ObjSizeVis, Operand, UseCalls,
2785 F.getParent()->getDataLayout());
2786 FunctionModified =
true;
2788 for (
auto *Inst : IntrinToInstrument) {
2789 if (!suppressInstrumentationSiteForDebug(NumInstrumented))
2790 instrumentMemIntrinsic(Inst);
2791 FunctionModified =
true;
2794 FunctionStackPoisoner FSP(
F, *
this);
2795 bool ChangedStack = FSP.runOnFunction();
2799 for (
auto *CI : NoReturnCalls) {
2804 for (
auto *Inst : PointerComparisonsOrSubtracts) {
2805 instrumentPointerComparisonOrSubtraction(Inst);
2806 FunctionModified =
true;
2809 if (ChangedStack || !NoReturnCalls.empty())
2810 FunctionModified =
true;
2812 LLVM_DEBUG(
dbgs() <<
"ASAN done instrumenting: " << FunctionModified <<
" "
2815 return FunctionModified;
2821bool AddressSanitizer::LooksLikeCodeInBug11395(
Instruction *
I) {
2822 if (LongSize != 32)
return false;
2831void FunctionStackPoisoner::initializeCallbacks(
Module &M) {
2835 const char *MallocNameTemplate =
2840 std::string Suffix = itostr(
Index);
2841 AsanStackMallocFunc[
Index] =
M.getOrInsertFunction(
2842 MallocNameTemplate + Suffix, IntptrTy, IntptrTy);
2843 AsanStackFreeFunc[
Index] =
2848 if (ASan.UseAfterScope) {
2849 AsanPoisonStackMemoryFunc =
M.getOrInsertFunction(
2851 AsanUnpoisonStackMemoryFunc =
M.getOrInsertFunction(
2855 for (
size_t Val : {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0xf1, 0xf2,
2856 0xf3, 0xf5, 0xf8}) {
2857 std::ostringstream
Name;
2859 Name << std::setw(2) << std::setfill(
'0') << std::hex << Val;
2860 AsanSetShadowFunc[Val] =
2861 M.getOrInsertFunction(
Name.str(), IRB.
getVoidTy(), IntptrTy, IntptrTy);
2864 AsanAllocaPoisonFunc =
M.getOrInsertFunction(
2866 AsanAllocasUnpoisonFunc =
M.getOrInsertFunction(
2872 size_t Begin,
size_t End,
2874 Value *ShadowBase) {
2878 const size_t LargestStoreSizeInBytes =
2879 std::min<size_t>(
sizeof(
uint64_t), ASan.LongSize / 8);
2881 const bool IsLittleEndian =
F.getParent()->getDataLayout().isLittleEndian();
2887 for (
size_t i = Begin; i < End;) {
2888 if (!ShadowMask[i]) {
2894 size_t StoreSizeInBytes = LargestStoreSizeInBytes;
2896 while (StoreSizeInBytes > End - i)
2897 StoreSizeInBytes /= 2;
2900 for (
size_t j = StoreSizeInBytes - 1; j && !ShadowMask[i + j]; --j) {
2901 while (j <= StoreSizeInBytes / 2)
2902 StoreSizeInBytes /= 2;
2906 for (
size_t j = 0; j < StoreSizeInBytes; j++) {
2908 Val |= (
uint64_t)ShadowBytes[i + j] << (8 * j);
2910 Val = (Val << 8) | ShadowBytes[i + j];
2914 Value *Poison = IRB.
getIntN(StoreSizeInBytes * 8, Val);
2919 i += StoreSizeInBytes;
2926 copyToShadow(ShadowMask, ShadowBytes, 0, ShadowMask.
size(), IRB, ShadowBase);
2931 size_t Begin,
size_t End,
2934 size_t Done = Begin;
2935 for (
size_t i = Begin, j = Begin + 1; i < End; i = j++) {
2936 if (!ShadowMask[i]) {
2940 uint8_t Val = ShadowBytes[i];
2941 if (!AsanSetShadowFunc[Val])
2945 for (; j < End && ShadowMask[j] && Val == ShadowBytes[j]; ++j) {
2949 copyToShadowInline(ShadowMask, ShadowBytes,
Done, i, IRB, ShadowBase);
2957 copyToShadowInline(ShadowMask, ShadowBytes,
Done, End, IRB, ShadowBase);
2965 for (
int i = 0;; i++, MaxSize *= 2)
2966 if (LocalStackSize <= MaxSize)
return i;
2970void FunctionStackPoisoner::copyArgsPassedByValToAllocas() {
2972 if (CopyInsertPoint == ASan.LocalDynamicShadow) {
2980 if (
Arg.hasByValAttr()) {
2981 Type *Ty =
Arg.getParamByValType();
2982 const Align Alignment =
2983 DL.getValueOrABITypeAlignment(
Arg.getParamAlign(), Ty);
2987 (
Arg.hasName() ?
Arg.getName() :
"Arg" +
Twine(
Arg.getArgNo())) +
2990 Arg.replaceAllUsesWith(AI);
2992 uint64_t AllocSize =
DL.getTypeAllocSize(Ty);
3001 Value *ValueIfFalse) {
3004 PHI->addIncoming(ValueIfFalse, CondBlock);
3006 PHI->addIncoming(ValueIfTrue, ThenBlock);
3010Value *FunctionStackPoisoner::createAllocaForLayout(
3019 nullptr,
"MyAlloca");
3028void FunctionStackPoisoner::createDynamicAllocasInitStorage() {
3031 DynamicAllocaLayout = IRB.
CreateAlloca(IntptrTy,
nullptr);
3036void FunctionStackPoisoner::processDynamicAllocas() {
3043 for (
const auto &APC : DynamicAllocaPoisonCallVec) {
3046 assert(ASan.isInterestingAlloca(*APC.AI));
3047 assert(!APC.AI->isStaticAlloca());
3050 poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
3057 createDynamicAllocasInitStorage();
3058 for (
auto &AI : DynamicAllocaVec)
3059 handleDynamicAllocaCall(AI);
3060 unpoisonDynamicAllocas();
3072 for (
Instruction *It = Start; It; It = It->getNextNonDebugInstruction()) {
3082 if (isa<AllocaInst>(It) || isa<CastInst>(It))
3084 if (
auto *Store = dyn_cast<StoreInst>(It)) {
3088 auto *Alloca = dyn_cast<AllocaInst>(Store->getPointerOperand());
3089 if (!Alloca || ASan.isInterestingAlloca(*Alloca))
3092 Value *Val = Store->getValueOperand();
3093 bool IsDirectArgInit = isa<Argument>(Val);
3094 bool IsArgInitViaCast =
3095 isa<CastInst>(Val) &&
3096 isa<Argument>(cast<CastInst>(Val)->getOperand(0)) &&
3099 Val == It->getPrevNonDebugInstruction();
3100 bool IsArgInit = IsDirectArgInit || IsArgInitViaCast;
3104 if (IsArgInitViaCast)
3105 InitInsts.
push_back(cast<Instruction>(Val));
3116void FunctionStackPoisoner::processStaticAllocas() {
3117 if (AllocaVec.
empty()) {
3122 int StackMallocIdx = -1;
3124 if (
auto SP =
F.getSubprogram())
3125 EntryDebugLocation =
3134 auto InsBeforeB = InsBefore->
getParent();
3135 assert(InsBeforeB == &
F.getEntryBlock());
3136 for (
auto *AI : StaticAllocasToMoveUp)
3147 ArgInitInst->moveBefore(InsBefore);
3150 if (LocalEscapeCall) LocalEscapeCall->
moveBefore(InsBefore);
3156 ASan.getAllocaSizeInBytes(*AI),
3167 uint64_t Granularity = 1ULL << Mapping.Scale;
3168 uint64_t MinHeaderSize = std::max((
uint64_t)ASan.LongSize / 2, Granularity);
3174 for (
auto &Desc : SVD)
3175 AllocaToSVDMap[Desc.AI] = &Desc;
3178 for (
const auto &APC : StaticAllocaPoisonCallVec) {
3181 assert(ASan.isInterestingAlloca(*APC.AI));
3182 assert(APC.AI->isStaticAlloca());
3187 if (
const DILocation *LifetimeLoc = APC.InsBefore->getDebugLoc().get()) {
3188 if (LifetimeLoc->getFile() == FnLoc->getFile())
3189 if (
unsigned Line = LifetimeLoc->getLine())
3190 Desc.
Line = std::min(Desc.
Line ? Desc.
Line : Line, Line);
3198 bool DoStackMalloc =
3208 DoDynamicAlloca &= !HasInlineAsm && !HasReturnsTwiceCall;
3209 DoStackMalloc &= !HasInlineAsm && !HasReturnsTwiceCall;
3211 Value *StaticAlloca =
3212 DoDynamicAlloca ? nullptr : createAllocaForLayout(IRB, L,
false);
3215 Value *LocalStackBase;
3216 Value *LocalStackBaseAlloca;
3219 if (DoStackMalloc) {
3220 LocalStackBaseAlloca =
3221 IRB.
CreateAlloca(IntptrTy,
nullptr,
"asan_local_stack_base");
3228 Constant *OptionDetectUseAfterReturn =
F.getParent()->getOrInsertGlobal(
3238 Value *FakeStackValue =
3239 IRBIf.CreateCall(AsanStackMallocFunc[StackMallocIdx],
3242 FakeStack = createPHI(IRB, UseAfterReturnIsEnabled, FakeStackValue, Term,
3250 FakeStack = IRB.
CreateCall(AsanStackMallocFunc[StackMallocIdx],
3253 Value *NoFakeStack =
3258 Value *AllocaValue =
3259 DoDynamicAlloca ? createAllocaForLayout(IRBIf, L,
true) : StaticAlloca;
3262 LocalStackBase = createPHI(IRB, NoFakeStack, AllocaValue, Term, FakeStack);
3263 IRB.
CreateStore(LocalStackBase, LocalStackBaseAlloca);
3270 DoDynamicAlloca ? createAllocaForLayout(IRB, L,
true) : StaticAlloca;
3271 LocalStackBaseAlloca = LocalStackBase;
3277 Value *LocalStackBaseAllocaPtr =
3278 isa<PtrToIntInst>(LocalStackBaseAlloca)
3279 ? cast<PtrToIntInst>(LocalStackBaseAlloca)->getPointerOperand()
3280 : LocalStackBaseAlloca;
3281 assert(isa<AllocaInst>(LocalStackBaseAllocaPtr) &&
3282 "Variable descriptions relative to ASan stack base will be dropped");
3285 for (
const auto &Desc : SVD) {
3320 Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB);
3323 copyToShadow(ShadowAfterScope, ShadowAfterScope, IRB, ShadowBase);
3325 if (!StaticAllocaPoisonCallVec.empty()) {
3329 for (
const auto &APC : StaticAllocaPoisonCallVec) {
3336 copyToShadow(ShadowAfterScope,
3337 APC.DoPoison ? ShadowAfterScope : ShadowInScope, Begin, End,
3351 if (DoStackMalloc) {
3352 assert(StackMallocIdx >= 0);
3369 if (StackMallocIdx <= 4) {
3373 copyToShadow(ShadowAfterReturn, ShadowAfterReturn, IRBPoison,
3375 Value *SavedFlagPtrPtr = IRBPoison.CreateAdd(
3378 Value *SavedFlagPtr = IRBPoison.CreateLoad(
3379 IntptrTy, IRBPoison.CreateIntToPtr(SavedFlagPtrPtr, IntptrPtrTy));
3380 IRBPoison.CreateStore(
3382 IRBPoison.CreateIntToPtr(SavedFlagPtr, IRBPoison.getInt8PtrTy()));
3385 IRBPoison.CreateCall(
3386 AsanStackFreeFunc[StackMallocIdx],
3391 copyToShadow(ShadowAfterScope, ShadowClean, IRBElse, ShadowBase);
3393 copyToShadow(ShadowAfterScope, ShadowClean, IRBRet, ShadowBase);
3398 for (
auto *AI : AllocaVec)
3408 DoPoison ? AsanPoisonStackMemoryFunc : AsanUnpoisonStackMemoryFunc,
3409 {AddrArg, SizeArg});
3420void FunctionStackPoisoner::handleDynamicAllocaCall(
AllocaInst *AI) {
3434 const unsigned ElementSize =
3469 IRB.
CreateCall(AsanAllocaPoisonFunc, {NewAddress, OldSize});
3490 if (!ObjSizeVis.
bothKnown(SizeOffset))
return false;
3492 int64_t
Offset = SizeOffset.second.getSExtValue();
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
static cl::opt< bool > ClUseStackSafety("stack-tagging-use-stack-safety", cl::Hidden, cl::init(true), cl::desc("Use Stack Safety analysis results"))
This header is deprecated in favour of llvm/TargetParser/Triple.h.
amdgpu Simplify well known AMD library false FunctionCallee Value * Arg
static void findStoresToUninstrumentedArgAllocas(AddressSanitizer &ASan, Instruction &InsBefore, SmallVectorImpl< Instruction * > &InitInsts)
Collect instructions in the entry block after InsBefore which initialize permanent storage for a func...
static const uint64_t kDefaultShadowScale
constexpr size_t kAccessSizeIndexMask
static size_t TypeSizeToSizeIndex(uint32_t TypeSize)
static cl::opt< int > ClDebugMin("asan-debug-min", cl::desc("Debug min inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClUsePrivateAlias("asan-use-private-alias", cl::desc("Use private aliases for global variables"), cl::Hidden, cl::init(true))
static const uint64_t kPS_ShadowOffset64
static const uint64_t kFreeBSD_ShadowOffset32
constexpr size_t kIsWriteShift
static const uint64_t kSmallX86_64ShadowOffsetAlignMask
static bool isInterestingPointerSubtraction(Instruction *I)
const char kAMDGPUAddressSharedName[]
const char kAsanStackFreeNameTemplate[]
constexpr size_t kCompileKernelMask
static cl::opt< bool > ClForceDynamicShadow("asan-force-dynamic-shadow", cl::desc("Load shadow address into a local variable for each function"), cl::Hidden, cl::init(false))
const char kAsanOptionDetectUseAfterReturn[]
static cl::opt< std::string > ClMemoryAccessCallbackPrefix("asan-memory-access-callback-prefix", cl::desc("Prefix for memory access callbacks"), cl::Hidden, cl::init("__asan_"))
static const uint64_t kRISCV64_ShadowOffset64
static cl::opt< bool > ClInsertVersionCheck("asan-guard-against-version-mismatch", cl::desc("Guard against compiler/runtime version mismatch."), cl::Hidden, cl::init(true))
const char kAsanSetShadowPrefix[]
static cl::opt< AsanDtorKind > ClOverrideDestructorKind("asan-destructor-kind", cl::desc("Sets the ASan destructor kind. The default is to use the value " "provided to the pass constructor"), cl::values(clEnumValN(AsanDtorKind::None, "none", "No destructors"), clEnumValN(AsanDtorKind::Global, "global", "Use global destructors")), cl::init(AsanDtorKind::Invalid), cl::Hidden)
static cl::opt< bool > ClInstrumentWrites("asan-instrument-writes", cl::desc("instrument write instructions"), cl::Hidden, cl::init(true))
static uint64_t GetCtorAndDtorPriority(Triple &TargetTriple)
const char kAsanStackMallocNameTemplate[]
static cl::opt< bool > ClInstrumentByval("asan-instrument-byval", cl::desc("instrument byval call arguments"), cl::Hidden, cl::init(true))
const char kAsanInitName[]
static cl::opt< bool > ClGlobals("asan-globals", cl::desc("Handle global objects"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClRedzoneByvalArgs("asan-redzone-byval-args", cl::desc("Create redzones for byval " "arguments (extra copy " "required)"), cl::Hidden, cl::init(true))
static const uint64_t kWindowsShadowOffset64
static const uint64_t kEmscriptenShadowOffset
const char kAsanGenPrefix[]
constexpr size_t kIsWriteMask
static uint64_t getRedzoneSizeForScale(int MappingScale)
static const uint64_t kDefaultShadowOffset64
static cl::opt< bool > ClOptimizeCallbacks("asan-optimize-callbacks", cl::desc("Optimize callbacks"), cl::Hidden, cl::init(false))
const char kAsanUnregisterGlobalsName[]
static const uint64_t kAsanCtorAndDtorPriority
static cl::opt< bool > ClUseStackSafety("asan-use-stack-safety", cl::Hidden, cl::init(false), cl::Hidden, cl::desc("Use Stack Safety analysis results"), cl::Optional)
const char kAsanUnpoisonGlobalsName[]
static cl::opt< bool > ClWithIfuncSuppressRemat("asan-with-ifunc-suppress-remat", cl::desc("Suppress rematerialization of dynamic shadow address by passing " "it through inline asm in prologue."), cl::Hidden, cl::init(true))
static cl::opt< int > ClDebugStack("asan-debug-stack", cl::desc("debug stack"), cl::Hidden, cl::init(0))
const char kAsanUnregisterElfGlobalsName[]
static bool isUnsupportedAMDGPUAddrspace(Value *Addr)
const char kAsanRegisterImageGlobalsName[]
static cl::opt< bool > ClOpt("asan-opt", cl::desc("Optimize instrumentation"), cl::Hidden, cl::init(true))
static const uint64_t kAllocaRzSize
const char kODRGenPrefix[]
static const uint64_t kSystemZ_ShadowOffset64
static const uint64_t kDefaultShadowOffset32
const char kAsanShadowMemoryDynamicAddress[]
static cl::opt< bool > ClUseOdrIndicator("asan-use-odr-indicator", cl::desc("Use odr indicators to improve ODR reporting"), cl::Hidden, cl::init(true))
static bool GlobalWasGeneratedByCompiler(GlobalVariable *G)
Check if G has been created by a trusted compiler pass.
const char kAsanStackMallocAlwaysNameTemplate[]
static cl::opt< bool > ClInvalidPointerCmp("asan-detect-invalid-pointer-cmp", cl::desc("Instrument <, <=, >, >= with pointer operands"), cl::Hidden, cl::init(false))
static const uint64_t kAsanEmscriptenCtorAndDtorPriority
static cl::opt< int > ClDebugMax("asan-debug-max", cl::desc("Debug max inst"), cl::Hidden, cl::init(-1))
static cl::opt< bool > ClInvalidPointerSub("asan-detect-invalid-pointer-sub", cl::desc("Instrument - operations with pointer operands"), cl::Hidden, cl::init(false))
static bool isPointerOperand(Value *V)
static const uint64_t kFreeBSD_ShadowOffset64
static cl::opt< uint32_t > ClForceExperiment("asan-force-experiment", cl::desc("Force optimization experiment (for testing)"), cl::Hidden, cl::init(0))
const char kSanCovGenPrefix[]
static const uint64_t kFreeBSDKasan_ShadowOffset64
const char kAsanModuleDtorName[]
static const uint64_t kDynamicShadowSentinel
static bool isInterestingPointerComparison(Instruction *I)
static cl::opt< bool > ClStack("asan-stack", cl::desc("Handle stack memory"), cl::Hidden, cl::init(true))
static const uint64_t kMIPS64_ShadowOffset64
static const uint64_t kLinuxKasan_ShadowOffset64
static int StackMallocSizeClass(uint64_t LocalStackSize)
static cl::opt< uint32_t > ClMaxInlinePoisoningSize("asan-max-inline-poisoning-size", cl::desc("Inline shadow poisoning for blocks up to the given size in bytes."), cl::Hidden, cl::init(64))
static cl::opt< bool > ClInstrumentAtomics("asan-instrument-atomics", cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClUseAfterScope("asan-use-after-scope", cl::desc("Check stack-use-after-scope"), cl::Hidden, cl::init(false))
constexpr size_t kAccessSizeIndexShift
static cl::opt< int > ClMappingScale("asan-mapping-scale", cl::desc("scale of asan shadow mapping"), cl::Hidden, cl::init(0))
const char kAsanPoisonStackMemoryName[]
static cl::opt< bool > ClEnableKasan("asan-kernel", cl::desc("Enable KernelAddressSanitizer instrumentation"), cl::Hidden, cl::init(false))
static cl::opt< std::string > ClDebugFunc("asan-debug-func", cl::Hidden, cl::desc("Debug func"))
static cl::opt< bool > ClUseGlobalsGC("asan-globals-live-support", cl::desc("Use linker features to support dead " "code stripping of globals"), cl::Hidden, cl::init(true))
static const size_t kNumberOfAccessSizes
const char kAsanUnpoisonStackMemoryName[]
static const uint64_t kLoongArch64_ShadowOffset64
static void doInstrumentAddress(AddressSanitizer *Pass, Instruction *I, Instruction *InsertBefore, Value *Addr, MaybeAlign Alignment, unsigned Granularity, uint32_t TypeSize, bool IsWrite, Value *SizeArgument, bool UseCalls, uint32_t Exp)
const char kAsanRegisterGlobalsName[]
static cl::opt< bool > ClInstrumentDynamicAllocas("asan-instrument-dynamic-allocas", cl::desc("instrument dynamic allocas"), cl::Hidden, cl::init(true))
const char kAsanModuleCtorName[]
const char kAsanGlobalsRegisteredFlagName[]
static const size_t kMaxStackMallocSize
static cl::opt< bool > ClRecover("asan-recover", cl::desc("Enable recovery mode (continue-after-error)."), cl::Hidden, cl::init(false))
static cl::opt< bool > ClOptSameTemp("asan-opt-same-temp", cl::desc("Instrument the same temp just once"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClDynamicAllocaStack("asan-stack-dynamic-alloca", cl::desc("Use dynamic alloca to represent stack variables"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClOptStack("asan-opt-stack", cl::desc("Don't instrument scalar stack variables"), cl::Hidden, cl::init(false))
static const uint64_t kMIPS_ShadowOffsetN32
const char kAsanUnregisterImageGlobalsName[]
static cl::opt< AsanDetectStackUseAfterReturnMode > ClUseAfterReturn("asan-use-after-return", cl::desc("Sets the mode of detection for stack-use-after-return."), cl::values(clEnumValN(AsanDetectStackUseAfterReturnMode::Never, "never", "Never detect stack use after return."), clEnumValN(AsanDetectStackUseAfterReturnMode::Runtime, "runtime", "Detect stack use after return if " "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."), clEnumValN(AsanDetectStackUseAfterReturnMode::Always, "always", "Always detect stack use after return.")), cl::Hidden, cl::init(AsanDetectStackUseAfterReturnMode::Runtime))
static cl::opt< bool > ClOptGlobals("asan-opt-globals", cl::desc("Don't instrument scalar globals"), cl::Hidden, cl::init(true))
static const uintptr_t kCurrentStackFrameMagic
static cl::opt< int > ClInstrumentationWithCallsThreshold("asan-instrumentation-with-call-threshold", cl::desc("If the function being instrumented contains more than " "this number of memory accesses, use callbacks instead of " "inline checks (-1 means never use callbacks)."), cl::Hidden, cl::init(7000))
static ShadowMapping getShadowMapping(const Triple &TargetTriple, int LongSize, bool IsKasan)
static const uint64_t kPPC64_ShadowOffset64
static cl::opt< AsanCtorKind > ClConstructorKind("asan-constructor-kind", cl::desc("Sets the ASan constructor kind"), cl::values(clEnumValN(AsanCtorKind::None, "none", "No constructors"), clEnumValN(AsanCtorKind::Global, "global", "Use global constructors")), cl::init(AsanCtorKind::Global), cl::Hidden)
static const int kMaxAsanStackMallocSizeClass
static const uint64_t kMIPS32_ShadowOffset32
static cl::opt< bool > ClAlwaysSlowPath("asan-always-slow-path", cl::desc("use instrumentation with slow path for all accesses"), cl::Hidden, cl::init(false))
static const uint64_t kNetBSD_ShadowOffset32
static const uint64_t kFreeBSDAArch64_ShadowOffset64
static const uint64_t kSmallX86_64ShadowOffsetBase
static cl::opt< bool > ClInitializers("asan-initialization-order", cl::desc("Handle C++ initializer order"), cl::Hidden, cl::init(true))
static const uint64_t kNetBSD_ShadowOffset64
static cl::opt< unsigned > ClRealignStack("asan-realign-stack", cl::desc("Realign stack to the value of this flag (power of two)"), cl::Hidden, cl::init(32))
static const uint64_t kWindowsShadowOffset32
static cl::opt< bool > ClInstrumentReads("asan-instrument-reads", cl::desc("instrument read instructions"), cl::Hidden, cl::init(true))
const char kAsanAllocaPoison[]
constexpr size_t kCompileKernelShift
static cl::opt< bool > ClWithIfunc("asan-with-ifunc", cl::desc("Access dynamic shadow through an ifunc global on " "platforms that support this"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClKasanMemIntrinCallbackPrefix("asan-kernel-mem-intrinsic-prefix", cl::desc("Use prefix for memory intrinsics in KASAN mode"), cl::Hidden, cl::init(false))
const char kAsanVersionCheckNamePrefix[]
const char kAMDGPUAddressPrivateName[]
static const uint64_t kNetBSDKasan_ShadowOffset64
static void instrumentMaskedLoadOrStore(AddressSanitizer *Pass, const DataLayout &DL, Type *IntptrTy, Value *Mask, Instruction *I, Value *Addr, MaybeAlign Alignment, unsigned Granularity, Type *OpType, bool IsWrite, Value *SizeArgument, bool UseCalls, uint32_t Exp)
const char kAsanRegisterElfGlobalsName[]
static cl::opt< uint64_t > ClMappingOffset("asan-mapping-offset", cl::desc("offset of asan shadow mapping [EXPERIMENTAL]"), cl::Hidden, cl::init(0))
const char kAsanReportErrorTemplate[]
static cl::opt< bool > ClWithComdat("asan-with-comdat", cl::desc("Place ASan constructors in comdat sections"), cl::Hidden, cl::init(true))
static cl::opt< bool > ClSkipPromotableAllocas("asan-skip-promotable-allocas", cl::desc("Do not instrument promotable allocas"), cl::Hidden, cl::init(true))
static cl::opt< int > ClMaxInsnsToInstrumentPerBB("asan-max-ins-per-bb", cl::init(10000), cl::desc("maximal number of instructions to instrument in any given BB"), cl::Hidden)
static const uintptr_t kRetiredStackFrameMagic
const char kAsanPoisonGlobalsName[]
const char kAsanHandleNoReturnName[]
static const size_t kMinStackMallocSize
static cl::opt< int > ClDebug("asan-debug", cl::desc("debug"), cl::Hidden, cl::init(0))
const char kAsanAllocasUnpoison[]
static const uint64_t kAArch64_ShadowOffset64
static cl::opt< bool > ClInvalidPointerPairs("asan-detect-invalid-pointer-pair", cl::desc("Instrument <, <=, >, >=, - with pointer operands"), cl::Hidden, cl::init(false))
This file contains the simple types necessary to represent the attributes associated with functions a...
static const Function * getParent(const Value *V)
SmallVector< MachineOperand, 4 > Cond
static GCRegistry::Add< StatepointGC > D("statepoint-example", "an example strategy for statepoint")
#define clEnumValN(ENUMVAL, FLAGNAME, DESC)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
Returns the sub type a function will return at a given Idx Should correspond to the result type of an ExtractValue instruction executed with just that one unsigned Idx
This file defines the DenseMap class.
This file builds on the ADT/GraphTraits.h file to build generic depth first graph iterator.
static bool runOnFunction(Function &F, bool PostInlining)
This is the interface for a simple mod/ref and alias analysis over globals.
static M68kRelType getType(unsigned Kind, MCSymbolRefExpr::VariantKind &Modifier, bool &IsPCRel)
Module.h This file contains the declarations for the Module class.
FunctionAnalysisManager FAM
ModuleAnalysisManager MAM
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
This defines the Use class.
AddressSanitizerPass(const AddressSanitizerOptions &Options, bool UseGlobalGC=true, bool UseOdrIndicator=true, AsanDtorKind DestructorKind=AsanDtorKind::Global, AsanCtorKind ConstructorKind=AsanCtorKind::Global)
PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM)
void printPipeline(raw_ostream &OS, function_ref< StringRef(StringRef)> MapClassName2PassName)
an instruction to allocate memory on the stack
bool isSwiftError() const
Return true if this alloca is used as a swifterror argument to a call.
bool isStaticAlloca() const
Return true if this alloca is in the entry block of the function and is a constant size.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
PointerType * getType() const
Overload to return most specific pointer type.
Type * getAllocatedType() const
Return the type that is being allocated by the instruction.
bool isUsedWithInAlloca() const
Return true if this alloca is used as an inalloca argument to a call.
bool isArrayAllocation() const
Return true if there is an allocation size parameter to the allocation instruction that is not 1.
void setAlignment(Align Align)
const Value * getArraySize() const
Get the number of elements allocated.
A container for analyses that lazily runs them and caches their results.
PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)
Get the result of an analysis pass for a given IR unit.
This class represents an incoming formal argument to a Function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
size_t size() const
size - Get the array size.
static ArrayType * get(Type *ElementType, uint64_t NumElements)
This static method is the primary way to construct an ArrayType.
An instruction that atomically checks whether a specified value is in a memory location,...
an instruction that atomically reads a memory location, combines it with another value,...
AttributeList addParamAttribute(LLVMContext &C, unsigned ArgNo, Attribute::AttrKind Kind) const
Add an argument attribute to the list.
LLVM Basic Block Representation.
iterator begin()
Instruction iterator methods.
const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Function * getParent() const
Return the enclosing method, or null if none.
const CallInst * getTerminatingMustTailCall() const
Returns the call instruction marked 'musttail' prior to the terminating return instruction of this ba...
Conditional or Unconditional Branch instruction.
static BranchInst * Create(BasicBlock *IfTrue, Instruction *InsertBefore=nullptr)
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
bool isInlineAsm() const
Check if this call is an inline asm statement.
Value * getArgOperand(unsigned i) const
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
bool doesNotReturn() const
Determine if the call cannot return.
unsigned arg_size() const
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", Instruction *InsertBefore=nullptr)
@ Largest
The linker will choose the largest COMDAT.
@ SameSize
The data referenced by the COMDAT must be the same size.
@ Any
The linker may choose any COMDAT.
@ NoDeduplicate
No deduplication is performed.
@ ExactMatch
The data referenced by the COMDAT must be the same.
ConstantArray - Constant Array Declarations.
static Constant * get(ArrayType *T, ArrayRef< Constant * > V)
static Constant * getIntToPtr(Constant *C, Type *Ty, bool OnlyIfReduced=false)
static Constant * getPointerCast(Constant *C, Type *Ty)
Create a BitCast, AddrSpaceCast, or a PtrToInt cast constant expression.
static Constant * getGetElementPtr(Type *Ty, Constant *C, ArrayRef< Constant * > IdxList, bool InBounds=false, std::optional< unsigned > InRangeIndex=std::nullopt, Type *OnlyIfReducedTy=nullptr)
Getelementptr form.
This is the shared class of boolean and integer constants.
IntegerType * getType() const
getType - Specialize the getType() method to always return an IntegerType, which reduces the amount o...
static bool isValueValidForType(Type *Ty, uint64_t V)
This static method returns true if the type Ty is big enough to represent the value V.
static Constant * get(Type *Ty, uint64_t V, bool IsSigned=false)
If Ty is a vector type, return a Constant with a splat of the given value.
uint64_t getZExtValue() const
Return the constant as a 64-bit unsigned integer value after it has been zero extended as appropriate...
static Constant * get(StructType *T, ArrayRef< Constant * > V)
This is an important base class in LLVM.
static Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
Constant * getAggregateElement(unsigned Elt) const
For aggregates (struct/array/vector) return the constant that corresponds to the specified element if...
A parsed version of the target data layout string in and methods for querying it.
TypeSize getTypeAllocSize(Type *Ty) const
Returns the offset in bytes between successive objects of the specified type, including alignment pad...
DILocation * get() const
Get the underlying DILocation.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
static FunctionType * get(Type *Result, ArrayRef< Type * > Params, bool isVarArg)
This static method is the primary way of constructing a FunctionType.
const BasicBlock & front() const
static Function * createWithDefaultAttr(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
Creates a function with some attributes recorded in llvm.module.flags applied.
const Constant * getAliasee() const
static GlobalAlias * create(Type *Ty, unsigned AddressSpace, LinkageTypes Linkage, const Twine &Name, Constant *Aliasee, Module *Parent)
If a parent module is specified, the alias is automatically inserted into the end of the specified mo...
void setAlignment(Align Align)
Sets the alignment attribute of the GlobalObject.
void copyMetadata(const GlobalObject *Src, unsigned Offset)
Copy metadata from Src, adjusting offsets by Offset.
void setComdat(Comdat *C)
void setSection(StringRef S)
Change the section for this global.
VisibilityTypes getVisibility() const
void setUnnamedAddr(UnnamedAddr Val)
bool hasLocalLinkage() const
static StringRef dropLLVMManglingEscape(StringRef Name)
If the given string begins with the GlobalValue name mangling escape character '\1',...
ThreadLocalMode getThreadLocalMode() const
Module * getParent()
Get the module that this global value is contained inside of...
@ HiddenVisibility
The GV is hidden.
void setVisibility(VisibilityTypes V)
LinkageTypes
An enumeration for the kinds of linkage for global values.
@ PrivateLinkage
Like Internal, but omit from symbol table.
@ CommonLinkage
Tentative definitions.
@ InternalLinkage
Rename collisions when linking (static functions).
@ AvailableExternallyLinkage
Available for inspection, not emission.
@ ExternalWeakLinkage
ExternalWeak linkage description.
DLLStorageClassTypes getDLLStorageClass() const
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
void copyAttributesFrom(const GlobalVariable *Src)
copyAttributesFrom - copy all additional attributes (those not needed to create a GlobalVariable) fro...
Analysis pass providing a never-invalidated alias analysis result.
This instruction compares its operands according to the predicate given to the constructor.
AllocaInst * CreateAlloca(Type *Ty, unsigned AddrSpace, Value *ArraySize=nullptr, const Twine &Name="")
IntegerType * getInt1Ty()
Fetch the type representing a single bit.
Value * CreateExtractElement(Value *Vec, Value *Idx, const Twine &Name="")
Value * CreatePointerCast(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateICmpSGE(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateSelect(Value *C, Value *True, Value *False, const Twine &Name="", Instruction *MDFrom=nullptr)
Value * CreateIntToPtr(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateLShr(Value *LHS, Value *RHS, const Twine &Name="", bool isExact=false)
IntegerType * getInt32Ty()
Fetch the type representing a 32-bit integer.
BasicBlock * GetInsertBlock() const
IntegerType * getInt64Ty()
Fetch the type representing a 64-bit integer.
Value * CreateICmpNE(Value *LHS, Value *RHS, const Twine &Name="")
ConstantInt * getInt32(uint32_t C)
Get a constant 32-bit value.
PHINode * CreatePHI(Type *Ty, unsigned NumReservedValues, const Twine &Name="")
Value * CreateNot(Value *V, const Twine &Name="")
Value * CreateICmpEQ(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateSub(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
ConstantInt * getIntN(unsigned N, uint64_t C)
Get a constant N-bit value, zero extended or truncated from a 64-bit value.
LoadInst * CreateLoad(Type *Ty, Value *Ptr, const char *Name)
Provided to resolve 'CreateLoad(Ty, Ptr, "...")' correctly, instead of converting the string to 'bool...
PointerType * getInt8PtrTy(unsigned AddrSpace=0)
Fetch the type representing a pointer to an 8-bit integer value.
Value * CreateAnd(Value *LHS, Value *RHS, const Twine &Name="")
StoreInst * CreateStore(Value *Val, Value *Ptr, bool isVolatile=false)
Value * CreateAdd(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
Value * CreatePtrToInt(Value *V, Type *DestTy, const Twine &Name="")
Value * CreateIsNotNull(Value *Arg, const Twine &Name="")
Return a boolean value testing if Arg != 0.
Value * CreateOr(Value *LHS, Value *RHS, const Twine &Name="")
Value * CreateIntCast(Value *V, Type *DestTy, bool isSigned, const Twine &Name="")
void SetInsertPoint(BasicBlock *TheBB)
This specifies that created instructions should be appended to the end of the specified block.
Type * getVoidTy()
Fetch the type representing void.
StoreInst * CreateAlignedStore(Value *Val, Value *Ptr, MaybeAlign Align, bool isVolatile=false)
CallInst * CreateCall(FunctionType *FTy, Value *Callee, ArrayRef< Value * > Args=std::nullopt, const Twine &Name="", MDNode *FPMathTag=nullptr)
Value * CreateGEP(Type *Ty, Value *Ptr, ArrayRef< Value * > IdxList, const Twine &Name="", bool IsInBounds=false)
IntegerType * getInt8Ty()
Fetch the type representing an 8-bit integer.
CallInst * CreateMemCpy(Value *Dst, MaybeAlign DstAlign, Value *Src, MaybeAlign SrcAlign, uint64_t Size, bool isVolatile=false, MDNode *TBAATag=nullptr, MDNode *TBAAStructTag=nullptr, MDNode *ScopeTag=nullptr, MDNode *NoAliasTag=nullptr)
Create and insert a memcpy between the specified pointers.
Value * CreateMul(Value *LHS, Value *RHS, const Twine &Name="", bool HasNUW=false, bool HasNSW=false)
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
static InlineAsm * get(FunctionType *Ty, StringRef AsmString, StringRef Constraints, bool hasSideEffects, bool isAlignStack=false, AsmDialect asmDialect=AD_ATT, bool canThrow=false)
InlineAsm::get - Return the specified uniqued inline asm string.
An analysis over an "outer" IR unit that provides access to an analysis manager over an "inner" IR un...
Base class for instruction visitors.
RetTy visitCallBase(CallBase &I)
RetTy visitCleanupReturnInst(CleanupReturnInst &I)
RetTy visitIntrinsicInst(IntrinsicInst &I)
void visit(Iterator Start, Iterator End)
RetTy visitReturnInst(ReturnInst &I)
RetTy visitAllocaInst(AllocaInst &I)
RetTy visitResumeInst(ResumeInst &I)
bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
const DebugLoc & getDebugLoc() const
Return the debug location for this node as a DebugLoc.
const Module * getModule() const
Return the module owning the function this instruction belongs to, or nullptr if the function does not...
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
const BasicBlock * getParent() const
BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
const Instruction * getNextNonDebugInstruction(bool SkipPseudoOp=false) const
Return a pointer to the next non-debug instruction in the same basic block as 'this',...
SymbolTableList< Instruction >::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
void moveBefore(Instruction *MovePos)
Unlink this instruction from its current basic block and insert it into the basic block that MovePos ...
static IntegerType * get(LLVMContext &C, unsigned NumBits)
This static method is the primary way of constructing an IntegerType.
A wrapper class for inspecting calls to intrinsic functions.
Intrinsic::ID getIntrinsicID() const
Return the intrinsic ID of this intrinsic.
This is an important class for using LLVM in a threaded context.
An instruction for reading from memory.
static Error ParseSectionSpecifier(StringRef Spec, StringRef &Segment, StringRef &Section, unsigned &TAA, bool &TAAParsed, unsigned &StubSize)
Parse the section specifier indicated by "Spec".
MDNode * createBranchWeights(uint32_t TrueWeight, uint32_t FalseWeight)
Return metadata containing two branch weights.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
This is the common base class for memset/memcpy/memmove.
A Module instance is used to store all the information related to an LLVM module.
const DataLayout & getDataLayout() const
Get the data layout for the module's target platform.
Evaluate the size and offset of an object pointed to by a Value* statically.
static bool bothKnown(const SizeOffsetType &SizeOffset)
SizeOffsetType compute(Value *V)
Pass interface - Implemented by all 'passes'.
static PointerType * get(Type *ElementType, unsigned AddressSpace)
This constructs a pointer to an object of the specified type in a numbered address space.
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses none()
Convenience factory function for the empty preserved set.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
void abandon()
Mark an analysis as abandoned.
Resume the propagation of an exception.
Return a value (possibly void), from a function.
static ReturnInst * Create(LLVMContext &C, Value *retVal=nullptr, Instruction *InsertBefore=nullptr)
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void reserve(size_type N)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
This pass performs the global (interprocedural) stack safety analysis (new pass manager).
bool stackAccessIsSafe(const Instruction &I) const
bool isSafe(const AllocaInst &AI) const
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
constexpr bool empty() const
empty - Check if the string is empty.
bool startswith(StringRef Prefix) const
const char * data() const
data - Get a pointer to the start of the string (which may not be null terminated).
Class to represent struct types.
static StructType * get(LLVMContext &Context, ArrayRef< Type * > Elements, bool isPacked=false)
This static method is the primary way to create a literal StructType.
Analysis pass providing the TargetLibraryInfo.
Provides information about what library functions are available for the current target.
AttributeList getAttrList(LLVMContext *C, ArrayRef< unsigned > ArgNos, bool Signed, bool Ret=false, AttributeList AL=AttributeList()) const
Triple - Helper class for working with autoconf configuration names.
bool isAndroidVersionLT(unsigned Major) const
bool isThumb() const
Tests whether the target is Thumb (little and big endian).
bool isDriverKit() const
Is this an Apple DriverKit triple.
bool isAndroid() const
Tests whether the target is Android.
bool isMIPS64() const
Tests whether the target is MIPS 64-bit (little and big endian).
ArchType getArch() const
Get the parsed architecture type of this triple.
EnvironmentType getEnvironment() const
Get the parsed environment type of this triple.
bool isMIPS32() const
Tests whether the target is MIPS 32-bit (little and big endian).
bool isOSWindows() const
Tests whether the OS is Windows.
bool isARM() const
Tests whether the target is ARM (little and big endian).
bool isOSLinux() const
Tests whether the OS is Linux.
bool isMacOSX() const
Is this a Mac OS X triple.
bool isOSEmscripten() const
Tests whether the OS is Emscripten.
bool isWatchOS() const
Is this an Apple watchOS triple.
bool isiOS() const
Is this an iOS triple.
bool isPS() const
Tests whether the target is the PS4 or PS5 platform.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
PointerType * getPointerTo(unsigned AddrSpace=0) const
Return a pointer to the current type.
bool isPointerTy() const
True if this is an instance of PointerType.
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
static IntegerType * getIntNTy(LLVMContext &C, unsigned N)
static Type * getVoidTy(LLVMContext &C)
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
static PointerType * getInt8PtrTy(LLVMContext &C, unsigned AS=0)
static IntegerType * getInt32Ty(LLVMContext &C)
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
Value * getOperand(unsigned i) const
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
StringRef getName() const
Return a constant reference to the value's name.
void takeName(Value *V)
Transfer the name from V to this value.
An efficient, type-erasing, non-owning reference to a callable.
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
This file contains the declaration of the Comdat class, which represents a single COMDAT in LLVM.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr std::underlying_type_t< E > Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
@ C
The default llvm calling convention, compatible with C.
Function * getDeclaration(Module *M, ID id, ArrayRef< Type * > Tys=std::nullopt)
Create or insert an LLVM Function declaration for an intrinsic, and return it.
@ S_CSTRING_LITERALS
S_CSTRING_LITERALS - Section with literal C strings.
ValuesClass values(OptsTy... Options)
Helper to build a ValuesClass by forwarding a variable number of arguments as an initializer list to ...
initializer< Ty > init(const Ty &Val)
Linkage
Describes symbol linkage.
uint64_t getAllocaSizeInBytes(const AllocaInst &AI)
This is an optimization pass for GlobalISel generic memory operations.
void ReplaceInstWithInst(BasicBlock *BB, BasicBlock::iterator &BI, Instruction *I)
Replace the instruction specified by BI with the instruction specified by I.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
SmallVector< uint8_t, 64 > GetShadowBytesAfterScope(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
std::string demangle(const std::string &MangledName)
Attempt to demangle a string using different demangling schemes.
std::pair< APInt, APInt > SizeOffsetType
AllocaInst * findAllocaForValue(Value *V, bool OffsetZero=false)
Returns unique alloca where the value comes from, or nullptr.
Function * createSanitizerCtor(Module &M, StringRef CtorName)
Creates sanitizer constructor function.
AsanDetectStackUseAfterReturnMode
Mode of ASan detect stack use after return.
@ Always
Always detect stack use after return.
@ Never
Never detect stack use after return.
@ Runtime
Detect stack use after return if not disabled at runtime with (ASAN_OPTIONS=detect_stack_use_after_retur...
GlobalVariable * createPrivateGlobalForString(Module &M, StringRef Str, bool AllowMerging, const char *NamePrefix="")
const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=6)
This method strips off any GEP address adjustments and pointer casts from the specified value,...
void SplitBlockAndInsertIfThenElse(Value *Cond, Instruction *SplitBefore, Instruction **ThenTerm, Instruction **ElseTerm, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr)
SplitBlockAndInsertIfThenElse is similar to SplitBlockAndInsertIfThen, but also creates the ElseBlock...
bool isAllocaPromotable(const AllocaInst *AI)
Return true if this alloca is legal for promotion.
SmallString< 64 > ComputeASanStackFrameDescription(const SmallVectorImpl< ASanStackVariableDescription > &Vars)
SmallVector< uint8_t, 64 > GetShadowBytes(const SmallVectorImpl< ASanStackVariableDescription > &Vars, const ASanStackFrameLayout &Layout)
int countr_zero(T Val)
Count number of 0's from the least significant bit to the most stopping at the first 1.
FunctionCallee declareSanitizerInitFunction(Module &M, StringRef InitName, ArrayRef< Type * > InitArgTypes, bool Weak=false)
std::string getUniqueModuleId(Module *M)
Produce a unique identifier for this module by taking the MD5 sum of the names of the module's strong...
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
std::pair< Function *, FunctionCallee > createSanitizerCtorAndInitFunctions(Module &M, StringRef CtorName, StringRef InitName, ArrayRef< Type * > InitArgTypes, ArrayRef< Value * > InitArgs, StringRef VersionCheckName=StringRef(), bool Weak=false)
Creates sanitizer constructor function, and calls sanitizer's init function from it.
decltype(auto) get(const PointerIntPair< PointerTy, IntBits, IntType, PtrTraits, Info > &Pair)
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
void report_fatal_error(Error Err, bool gen_crash_diag=true)
Report a serious error, calling any installed error handler.
AsanDtorKind
Types of ASan module destructors supported.
@ None
Do not emit any destructors for ASan.
ASanStackFrameLayout ComputeASanStackFrameLayout(SmallVectorImpl< ASanStackVariableDescription > &Vars, uint64_t Granularity, uint64_t MinHeaderSize)
void cantFail(Error Err, const char *Msg=nullptr)
Report a fatal error if Err is a failure value.
void appendToCompilerUsed(Module &M, ArrayRef< GlobalValue * > Values)
Adds global values to the llvm.compiler.used list.
static const int kAsanStackUseAfterReturnMagic
@ Dynamic
Denotes mode unknown at compile time.
void appendToGlobalCtors(Module &M, Function *F, int Priority, Constant *Data=nullptr)
Append F to the list of global ctors of module M with the given Priority.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
iterator_range< df_iterator< T > > depth_first(const T &G)
Instruction * SplitBlockAndInsertIfThen(Value *Cond, Instruction *SplitBefore, bool Unreachable, MDNode *BranchWeights, DominatorTree *DT, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
AsanCtorKind
Types of ASan module constructors supported.
void maybeMarkSanitizerLibraryCallNoBuiltin(CallInst *CI, const TargetLibraryInfo *TLI)
Given a CallInst, check if it calls a string function known to CodeGen, and mark it with NoBuiltin if...
void appendToUsed(Module &M, ArrayRef< GlobalValue * > Values)
Adds global values to the llvm.used list.
void appendToGlobalDtors(Module &M, Function *F, int Priority, Constant *Data=nullptr)
Same as appendToGlobalCtors(), but for global dtors.
void getAddressSanitizerParams(const Triple &TargetTriple, int LongSize, bool IsKasan, uint64_t *ShadowBase, int *MappingScale, bool *OrShadowOffset)
bool replaceDbgDeclare(Value *Address, Value *NewAddress, DIBuilder &Builder, uint8_t DIExprFlags, int Offset)
Replaces llvm.dbg.declare instruction when the address it describes is replaced with a new value.
ASanAccessInfo(int32_t Packed)
AsanDetectStackUseAfterReturnMode UseAfterReturn
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
This struct is a compact representation of a valid (power of two) or undefined (0) alignment.
Various options to control the behavior of getObjectSize.
bool RoundToAlign
Whether to round the result up to the alignment of allocas, byval arguments, and global variables.
A CRTP mix-in to automatically provide informational APIs needed for passes.