LLVM 23.0.0git
StackProtector.cpp
//===- StackProtector.cpp - Stack Protector Insertion ---------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass inserts stack protectors into functions which need them. A variable
// with a random value in it is stored onto the stack before the local variables
// are allocated. Upon exiting the block, the stored value is checked. If it's
// changed, then there was some sort of violation and the program aborts.
//
//===----------------------------------------------------------------------===//

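// For reference, stack protection is requested per-function through IR
// attributes; the following is an illustrative sketch (not taken from a real
// module, @foo and %buf are made-up names):
//
//   define void @foo() sspstrong "stack-protector-buffer-size"="8" {
//     %buf = alloca [16 x i8]
//     ...
//   }
//
// The ssp, sspstrong and sspreq attributes select which heuristic
// requiresStackProtector() below applies.
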
#include "llvm/CodeGen/StackProtector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/Analysis/DomTreeUpdater.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/EHPersonalities.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/User.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include <optional>

using namespace llvm;

#define DEBUG_TYPE "stack-protector"

STATISTIC(NumFunProtected, "Number of functions protected");
STATISTIC(NumAddrTaken, "Number of local variables that have their address"
                        " taken.");

static cl::opt<bool> EnableSelectionDAGSP("enable-selectiondag-sp",
                                          cl::init(true), cl::Hidden);
static cl::opt<bool> DisableCheckNoReturn("disable-check-noreturn-call",
                                          cl::init(false), cl::Hidden);

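// These hidden flags are ordinary cl::opt switches, so they can be flipped
// from any tool that runs this pass; an illustrative (hypothetical) invocation:
//
//   llc -enable-selectiondag-sp=false -disable-check-noreturn-call foo.ll
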
/// InsertStackProtectors - Insert code into the prologue and epilogue of the
/// function.
///
/// - The prologue code loads and stores the stack guard onto the stack.
/// - The epilogue checks the value stored in the prologue against the original
///   value. It calls __stack_chk_fail if they differ.
static bool InsertStackProtectors(const TargetLowering &TLI,
                                  const LibcallLoweringInfo &Libcalls,
                                  Function *F, DomTreeUpdater *DTU,
                                  bool &HasPrologue, bool &HasIRCheck);

/// CreateFailBB - Create a basic block to jump to when the stack protector
/// check fails.
static BasicBlock *CreateFailBB(Function *F,
                                const LibcallLoweringInfo &Libcalls);

bool SSPLayoutInfo::shouldEmitSDCheck(const BasicBlock &BB) const {
  return HasPrologue && !HasIRCheck && isa<ReturnInst>(BB.getTerminator());
}

void SSPLayoutInfo::copyToMachineFrameInfo(MachineFrameInfo &MFI) const {
  if (Layout.empty())
    return;

  for (int I = 0, E = MFI.getObjectIndexEnd(); I != E; ++I) {
    if (MFI.isDeadObjectIndex(I))
      continue;

    const AllocaInst *AI = MFI.getObjectAllocation(I);
    if (!AI)
      continue;

    SSPLayoutMap::const_iterator LI = Layout.find(AI);
    if (LI == Layout.end())
      continue;

    MFI.setObjectSSPLayout(I, LI->second);
  }
}

SSPLayoutInfo SSPLayoutAnalysis::run(Function &F,
                                     FunctionAnalysisManager &FAM) {
  SSPLayoutInfo Info;
  Info.RequireStackProtector =
      StackProtector::requiresStackProtector(&F, &Info.Layout);
  Info.SSPBufferSize = F.getFnAttributeAsParsedInteger(
      "stack-protector-buffer-size", SSPLayoutInfo::DefaultSSPBufferSize);
  return Info;
}

AnalysisKey SSPLayoutAnalysis::Key;

PreservedAnalyses StackProtectorPass::run(Function &F,
                                          FunctionAnalysisManager &FAM) {
  auto &Info = FAM.getResult<SSPLayoutAnalysis>(F);
  auto *DT = FAM.getCachedResult<DominatorTreeAnalysis>(F);
  DomTreeUpdater DTU(DT, DomTreeUpdater::UpdateStrategy::Lazy);

  if (!Info.RequireStackProtector)
    return PreservedAnalyses::all();

  // TODO(etienneb): Functions with funclets are not correctly supported now.
  // Do nothing if this is a funclet-based personality.
  if (F.hasPersonalityFn()) {
    EHPersonality Personality = classifyEHPersonality(F.getPersonalityFn());
    if (isFuncletEHPersonality(Personality))
      return PreservedAnalyses::all();
  }

  auto &MAMProxy = FAM.getResult<ModuleAnalysisManagerFunctionProxy>(F);
  const LibcallLoweringModuleAnalysisResult *LibcallLowering =
      MAMProxy.getCachedResult<LibcallLoweringModuleAnalysis>(*F.getParent());

  if (!LibcallLowering) {
    F.getContext().emitError("'" + LibcallLoweringModuleAnalysis::name() +
                             "' analysis required");
    return PreservedAnalyses::all();
  }

  const TargetSubtargetInfo *STI = TM->getSubtargetImpl(F);
  const TargetLowering *TLI = STI->getTargetLowering();
  const LibcallLoweringInfo &Libcalls =
      LibcallLowering->getLibcallLowering(*STI);

  ++NumFunProtected;
  bool Changed = InsertStackProtectors(*TLI, Libcalls, &F, DT ? &DTU : nullptr,
                                       Info.HasPrologue, Info.HasIRCheck);
#ifdef EXPENSIVE_CHECKS
  assert((!DT ||
          DTU.getDomTree().verify(DominatorTree::VerificationLevel::Full)) &&
         "Failed to maintain validity of domtree!");
#endif

  if (!Changed)
    return PreservedAnalyses::all();
  PreservedAnalyses PA;
  PA.preserve<SSPLayoutAnalysis>();
  PA.preserve<DominatorTreeAnalysis>();
  return PA;
}

char StackProtector::ID = 0;

StackProtector::StackProtector() : FunctionPass(ID) {}

INITIALIZE_PASS_BEGIN(StackProtector, DEBUG_TYPE,
                      "Insert stack protectors", false, true)
INITIALIZE_PASS_DEPENDENCY(TargetPassConfig)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(LibcallLoweringInfoWrapper)
INITIALIZE_PASS_END(StackProtector, DEBUG_TYPE,
                    "Insert stack protectors", false, true)

FunctionPass *llvm::createStackProtectorPass() { return new StackProtector(); }

void StackProtector::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<TargetPassConfig>();
  AU.addRequired<LibcallLoweringInfoWrapper>();
  AU.addPreserved<DominatorTreeWrapperPass>();
}

bool StackProtector::runOnFunction(Function &Fn) {
  F = &Fn;
  M = F->getParent();
  if (auto *DTWP = getAnalysisIfAvailable<DominatorTreeWrapperPass>())
    DTU.emplace(DTWP->getDomTree(), DomTreeUpdater::UpdateStrategy::Lazy);
  TM = &getAnalysis<TargetPassConfig>().getTM<TargetMachine>();
  LayoutInfo.HasPrologue = false;
  LayoutInfo.HasIRCheck = false;

  LayoutInfo.SSPBufferSize = Fn.getFnAttributeAsParsedInteger(
      "stack-protector-buffer-size", SSPLayoutInfo::DefaultSSPBufferSize);
  if (!requiresStackProtector(F, &LayoutInfo.Layout))
    return false;

  // TODO(etienneb): Functions with funclets are not correctly supported now.
  // Do nothing if this is a funclet-based personality.
  if (Fn.hasPersonalityFn()) {
    EHPersonality Personality = classifyEHPersonality(Fn.getPersonalityFn());
    if (isFuncletEHPersonality(Personality))
      return false;
  }

  const TargetSubtargetInfo *Subtarget = TM->getSubtargetImpl(Fn);
  const LibcallLoweringInfo &Libcalls =
      getAnalysis<LibcallLoweringInfoWrapper>().getLibcallLowering(*M,
                                                                   *Subtarget);

  const TargetLowering *TLI = Subtarget->getTargetLowering();

  ++NumFunProtected;
  bool Changed =
      InsertStackProtectors(*TLI, Libcalls, F, DTU ? &*DTU : nullptr,
                            LayoutInfo.HasPrologue, LayoutInfo.HasIRCheck);
#ifdef EXPENSIVE_CHECKS
  assert((!DTU ||
          DTU->getDomTree().verify(DominatorTree::VerificationLevel::Full)) &&
         "Failed to maintain validity of domtree!");
#endif
  DTU.reset();
  return Changed;
}

/// \param [out] IsLarge is set to true if a protectable array is found and
/// it is "large" ( >= ssp-buffer-size). In the case of a structure with
/// multiple arrays, this gets set if any of them is large.
static bool ContainsProtectableArray(Type *Ty, Module *M, unsigned SSPBufferSize,
                                     bool &IsLarge, bool Strong,
                                     bool InStruct) {
  if (!Ty)
    return false;
  if (ArrayType *AT = dyn_cast<ArrayType>(Ty)) {
    if (!AT->getElementType()->isIntegerTy(8)) {
      // If we're on a non-Darwin platform or we're inside of a structure, don't
      // add stack protectors unless the array is a character array.
      // However, in strong mode any array, regardless of type and size,
      // triggers a protector.
      if (!Strong && (InStruct || !M->getTargetTriple().isOSDarwin()))
        return false;
    }

    // If an array has more than SSPBufferSize bytes of allocated space, then we
    // emit stack protectors.
    if (SSPBufferSize <= M->getDataLayout().getTypeAllocSize(AT)) {
      IsLarge = true;
      return true;
    }

    if (Strong)
      // Require a protector for all arrays in strong mode
      return true;
  }

  const StructType *ST = dyn_cast<StructType>(Ty);
  if (!ST)
    return false;

  bool NeedsProtector = false;
  for (Type *ET : ST->elements())
    if (ContainsProtectableArray(ET, M, SSPBufferSize, IsLarge, Strong, true)) {
      // If the element is a protectable array and is large (>= SSPBufferSize)
      // then we are done. If the protectable array is not large, then
      // keep looking in case a subsequent element is a large array.
      if (IsLarge)
        return true;
      NeedsProtector = true;
    }

  return NeedsProtector;
}

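// A few illustrative cases for ContainsProtectableArray, assuming an
// ssp-buffer-size of 8 bytes (the usual default):
//
//   [16 x i8]           -> protectable, IsLarge = true
//   [4 x i8]            -> small char array: protector only in strong mode
//   { i32, [12 x i8] }  -> protectable via the nested char array
//   [8 x i32]           -> non-char array: protector only in strong mode, or
//                          on Darwin when not nested inside a struct
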
/// Maximum remaining allocation size observed for a phi node, and how often
/// the allocation size has already been decreased. We only allow a limited
/// number of decreases.
struct PhiInfo {
  TypeSize AllocSize;
  unsigned NumDecreased = 0;
  static constexpr unsigned MaxNumDecreased = 3;
  PhiInfo(TypeSize AllocSize) : AllocSize(AllocSize) {}
};
using PhiMap = SmallDenseMap<const PHINode *, PhiInfo, 16>;

/// Check whether a stack allocation has its address taken.
static bool HasAddressTaken(const Instruction *AI, TypeSize AllocSize,
                            Module *M,
                            PhiMap &VisitedPHIs) {
  const DataLayout &DL = M->getDataLayout();
  for (const User *U : AI->users()) {
    const auto *I = cast<Instruction>(U);
    // If this instruction accesses memory make sure it doesn't access beyond
    // the bounds of the allocated object.
    std::optional<MemoryLocation> MemLoc = MemoryLocation::getOrNone(I);
    if (MemLoc && MemLoc->Size.hasValue() &&
        !TypeSize::isKnownGE(AllocSize, MemLoc->Size.getValue()))
      return true;
    switch (I->getOpcode()) {
    case Instruction::Store:
      if (AI == cast<StoreInst>(I)->getValueOperand())
        return true;
      break;
    case Instruction::AtomicCmpXchg:
      // cmpxchg conceptually includes both a load and store from the same
      // location. So, like store, the value being stored is what matters.
      if (AI == cast<AtomicCmpXchgInst>(I)->getNewValOperand())
        return true;
      break;
    case Instruction::AtomicRMW:
      if (AI == cast<AtomicRMWInst>(I)->getValOperand())
        return true;
      break;
    case Instruction::PtrToInt:
      if (AI == cast<PtrToIntInst>(I)->getOperand(0))
        return true;
      break;
    case Instruction::Call: {
      // Ignore intrinsics that do not become real instructions.
      // TODO: Narrow this to intrinsics that have store-like effects.
      const auto *CI = cast<CallInst>(I);
      if (!CI->isDebugOrPseudoInst() && !CI->isLifetimeStartOrEnd())
        return true;
      break;
    }
    case Instruction::Invoke:
      return true;
    case Instruction::GetElementPtr: {
      // If the GEP offset is out-of-bounds, or is non-constant and so has to be
      // assumed to be potentially out-of-bounds, then any memory access that
      // would use it could also be out-of-bounds meaning stack protection is
      // required.
      const auto *GEP = cast<GetElementPtrInst>(I);
      unsigned IndexSize = DL.getIndexTypeSizeInBits(I->getType());
      APInt Offset(IndexSize, 0);
      if (!GEP->accumulateConstantOffset(DL, Offset))
        return true;
      TypeSize OffsetSize = TypeSize::getFixed(Offset.getLimitedValue());
      if (!TypeSize::isKnownGT(AllocSize, OffsetSize))
        return true;
      // Adjust AllocSize to be the space remaining after this offset.
      // We can't subtract a fixed size from a scalable one, so in that case
      // assume the scalable value is of minimum size.
      TypeSize NewAllocSize =
          TypeSize::getFixed(AllocSize.getKnownMinValue()) - OffsetSize;
      if (HasAddressTaken(I, NewAllocSize, M, VisitedPHIs))
        return true;
      break;
    }
    case Instruction::BitCast:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      if (HasAddressTaken(I, AllocSize, M, VisitedPHIs))
        return true;
      break;
    case Instruction::PHI: {
      // Keep track of what PHI nodes we have already visited to ensure
      // they are only visited once.
      const auto *PN = cast<PHINode>(I);
      auto [It, Inserted] = VisitedPHIs.try_emplace(PN, AllocSize);
      if (!Inserted) {
        if (TypeSize::isKnownGE(AllocSize, It->second.AllocSize))
          break;

        // Check again with smaller size.
        if (It->second.NumDecreased == PhiInfo::MaxNumDecreased)
          return true;

        It->second.AllocSize = AllocSize;
        ++It->second.NumDecreased;
      }
      if (HasAddressTaken(PN, AllocSize, M, VisitedPHIs))
        return true;
      break;
    }
    case Instruction::Load:
    case Instruction::Ret:
      // These instructions take an address operand, but have load-like or
      // other innocuous behavior that should not trigger a stack protector.
      break;
    default:
      // Conservatively return true for any instruction that takes an address
      // operand, but is not handled above.
      return true;
    }
  }
  return false;
}
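
// An illustrative escape that HasAddressTaken reports (the names are made up):
//
//   %a = alloca i32
//   store ptr %a, ptr @some_global   ; the alloca itself is the stored value,
//                                    ; so its address escapes -> protect
//
// whereas plain loads and stores *through* %a that stay within the allocated
// four bytes do not, by themselves, mark the address as taken.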

/// Search for the first call to the llvm.stackprotector intrinsic and return it
/// if present.
static const CallInst *findStackProtectorIntrinsic(Function &F) {
  for (const BasicBlock &BB : F)
    for (const Instruction &I : BB)
      if (const auto *II = dyn_cast<IntrinsicInst>(&I))
        if (II->getIntrinsicID() == Intrinsic::stackprotector)
          return II;
  return nullptr;
}

403
404/// Check whether or not this function needs a stack protector based
405/// upon the stack protector level.
406///
407/// We use two heuristics: a standard (ssp) and strong (sspstrong).
408/// The standard heuristic which will add a guard variable to functions that
409/// call alloca with a either a variable size or a size >= SSPBufferSize,
410/// functions with character buffers larger than SSPBufferSize, and functions
411/// with aggregates containing character buffers larger than SSPBufferSize. The
412/// strong heuristic will add a guard variables to functions that call alloca
413/// regardless of size, functions with any buffer regardless of type and size,
414/// functions with aggregates that contain any buffer regardless of type and
415/// size, and functions that contain stack-based variables that have had their
416/// address taken.
418 SSPLayoutMap *Layout) {
419 Module *M = F->getParent();
420 bool Strong = false;
421 bool NeedsProtector = false;
422
423 // The set of PHI nodes visited when determining if a variable's reference has
424 // been taken. This set is maintained to ensure we don't visit the same PHI
425 // node multiple times.
426 PhiMap VisitedPHIs;
427
428 unsigned SSPBufferSize = F->getFnAttributeAsParsedInteger(
429 "stack-protector-buffer-size", SSPLayoutInfo::DefaultSSPBufferSize);
430
431 if (F->hasFnAttribute(Attribute::SafeStack))
432 return false;
433
434 // We are constructing the OptimizationRemarkEmitter on the fly rather than
435 // using the analysis pass to avoid building DominatorTree and LoopInfo which
436 // are not available this late in the IR pipeline.
438
439 if (F->hasFnAttribute(Attribute::StackProtectReq)) {
440 if (!Layout)
441 return true;
442 ORE.emit([&]() {
443 return OptimizationRemark(DEBUG_TYPE, "StackProtectorRequested", F)
444 << "Stack protection applied to function "
445 << ore::NV("Function", F)
446 << " due to a function attribute or command-line switch";
447 });
448 NeedsProtector = true;
449 Strong = true; // Use the same heuristic as strong to determine SSPLayout
450 } else if (F->hasFnAttribute(Attribute::StackProtectStrong))
451 Strong = true;
452 else if (!F->hasFnAttribute(Attribute::StackProtect))
453 return false;
454
455 for (const BasicBlock &BB : *F) {
456 for (const Instruction &I : BB) {
457 if (const AllocaInst *AI = dyn_cast<AllocaInst>(&I)) {
458 if (const MDNode *MD = AI->getMetadata("stack-protector")) {
459 const auto *CI = mdconst::dyn_extract<ConstantInt>(MD->getOperand(0));
460 if (CI->isZero())
461 continue;
462 }
463 if (AI->isArrayAllocation()) {
464 auto RemarkBuilder = [&]() {
465 return OptimizationRemark(DEBUG_TYPE, "StackProtectorAllocaOrArray",
466 &I)
467 << "Stack protection applied to function "
468 << ore::NV("Function", F)
469 << " due to a call to alloca or use of a variable length "
470 "array";
471 };
472 if (const auto *CI = dyn_cast<ConstantInt>(AI->getArraySize())) {
473 if (CI->getLimitedValue(SSPBufferSize) >= SSPBufferSize) {
474 // A call to alloca with size >= SSPBufferSize requires
475 // stack protectors.
476 if (!Layout)
477 return true;
478 Layout->insert(
479 std::make_pair(AI, MachineFrameInfo::SSPLK_LargeArray));
480 ORE.emit(RemarkBuilder);
481 NeedsProtector = true;
482 } else if (Strong) {
483 // Require protectors for all alloca calls in strong mode.
484 if (!Layout)
485 return true;
486 Layout->insert(
487 std::make_pair(AI, MachineFrameInfo::SSPLK_SmallArray));
488 ORE.emit(RemarkBuilder);
489 NeedsProtector = true;
490 }
491 } else {
492 // A call to alloca with a variable size requires protectors.
493 if (!Layout)
494 return true;
495 Layout->insert(
496 std::make_pair(AI, MachineFrameInfo::SSPLK_LargeArray));
497 ORE.emit(RemarkBuilder);
498 NeedsProtector = true;
499 }
500 continue;
501 }
502
503 bool IsLarge = false;
504 if (ContainsProtectableArray(AI->getAllocatedType(), M, SSPBufferSize,
505 IsLarge, Strong, false)) {
506 if (!Layout)
507 return true;
508 Layout->insert(std::make_pair(
511 ORE.emit([&]() {
512 return OptimizationRemark(DEBUG_TYPE, "StackProtectorBuffer", &I)
513 << "Stack protection applied to function "
514 << ore::NV("Function", F)
515 << " due to a stack allocated buffer or struct containing a "
516 "buffer";
517 });
518 NeedsProtector = true;
519 continue;
520 }
521
522 if (Strong) {
523 std::optional<TypeSize> AllocSize =
524 AI->getAllocationSize(M->getDataLayout());
525 if (!AllocSize || HasAddressTaken(AI, *AllocSize, M, VisitedPHIs)) {
526 ++NumAddrTaken;
527 if (!Layout)
528 return true;
529 Layout->insert(std::make_pair(AI, MachineFrameInfo::SSPLK_AddrOf));
530 ORE.emit([&]() {
532 "StackProtectorAddressTaken", &I)
533 << "Stack protection applied to function "
534 << ore::NV("Function", F)
535 << " due to the address of a local variable being taken";
536 });
537 NeedsProtector = true;
538 }
539 }
540 // Clear any PHIs that we visited, to make sure we examine all uses of
541 // any subsequent allocas that we look at.
542 VisitedPHIs.clear();
543 }
544 }
545 }
546
547 return NeedsProtector;
548}
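
// Note the per-alloca opt-out checked above: an alloca whose "stack-protector"
// metadata holds the constant 0 is skipped. An illustrative IR fragment (names
// made up):
//
//   %scratch = alloca [64 x i8], !stack-protector !0
//   ...
//   !0 = !{i32 0}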

/// Create a stack guard loading and populate whether SelectionDAG SSP is
/// supported.
static Value *getStackGuard(const TargetLoweringBase &TLI,
                            const LibcallLoweringInfo &Libcalls, Module *M,
                            IRBuilder<> &B,
                            bool *SupportsSelectionDAGSP = nullptr) {
  Value *Guard = TLI.getIRStackGuard(B, Libcalls);
  StringRef GuardMode = M->getStackProtectorGuard();
  if ((GuardMode == "tls" || GuardMode.empty()) && Guard)
    return B.CreateLoad(B.getPtrTy(), Guard, true, "StackGuard");

  // Use SelectionDAG SSP handling, since there isn't an IR guard.
  //
  // This is more or less weird, since we optionally output whether we
  // should perform a SelectionDAG SP here. The reason is that it's strictly
  // defined as !TLI->getIRStackGuard(B), where getIRStackGuard is also
  // mutating. There is no way to get this bit without mutating the IR, so
  // getting this bit has to happen at this point.
  //
  // We could have defined a new function TLI::supportsSelectionDAGSP(), but
  // that would put more burden on the backends' overriding work, especially
  // when it conveys the same information getIRStackGuard() already gives.
  if (SupportsSelectionDAGSP)
    *SupportsSelectionDAGSP = true;
  TLI.insertSSPDeclarations(*M, Libcalls);
  return B.CreateIntrinsic(Intrinsic::stackguard, {});
}

/// Insert code into the entry block that stores the stack guard
/// variable onto the stack:
///
///   entry:
///     StackGuardSlot = alloca i8*
///     StackGuard = <stack guard>
///     call void @llvm.stackprotector(StackGuard, StackGuardSlot)
///
/// Returns true if the platform/triple supports the stackprotectorcreate pseudo
/// node.
static bool CreatePrologue(Function *F, Module *M, Instruction *CheckLoc,
                           const TargetLoweringBase *TLI,
                           const LibcallLoweringInfo &Libcalls,
                           AllocaInst *&AI) {
  bool SupportsSelectionDAGSP = false;
  IRBuilder<> B(&F->getEntryBlock().front());
  PointerType *PtrTy = PointerType::getUnqual(CheckLoc->getContext());
  AI = B.CreateAlloca(PtrTy, nullptr, "StackGuardSlot");

  Value *GuardSlot =
      getStackGuard(*TLI, Libcalls, M, B, &SupportsSelectionDAGSP);
  B.CreateIntrinsic(Intrinsic::stackprotector, {GuardSlot, AI});
  return SupportsSelectionDAGSP;
}

static bool InsertStackProtectors(const TargetLowering &TLI,
                                  const LibcallLoweringInfo &Libcalls, Function *F,
                                  DomTreeUpdater *DTU, bool &HasPrologue,
                                  bool &HasIRCheck) {
  auto *M = F->getParent();

  // If the target wants to XOR the frame pointer into the guard value, it's
  // impossible to emit the check in IR, so the target *must* support stack
  // protection in SDAG.
  bool SupportsSelectionDAGSP =
      TLI.useStackGuardXorFP() ||
      (EnableSelectionDAGSP && !TLI.getTargetMachine().Options.EnableFastISel);
  AllocaInst *AI = nullptr; // Place on stack that stores the stack guard.
  BasicBlock *FailBB = nullptr;

  for (BasicBlock &BB : llvm::make_early_inc_range(*F)) {
    // This is a stack protector auto generated check BB, skip it.
    if (&BB == FailBB)
      continue;
    Instruction *CheckLoc = dyn_cast<ReturnInst>(BB.getTerminator());
    if (!CheckLoc && !DisableCheckNoReturn)
      for (auto &Inst : BB) {
        if (IntrinsicInst *IB = dyn_cast<IntrinsicInst>(&Inst);
            IB && (IB->getIntrinsicID() == Intrinsic::eh_sjlj_callsite)) {
          // eh_sjlj_callsite has to be in the same BB as the
          // BB terminator. Don't insert within this range.
          CheckLoc = IB;
          break;
        }
        if (auto *CB = dyn_cast<CallBase>(&Inst))
          // Do stack check before noreturn calls that aren't nounwind (e.g:
          // __cxa_throw).
          if (CB->doesNotReturn() && !CB->doesNotThrow()) {
            CheckLoc = CB;
            break;
          }
      }

    if (!CheckLoc)
      continue;

    // Generate prologue instrumentation if not already generated.
    if (!HasPrologue) {
      HasPrologue = true;
      SupportsSelectionDAGSP &=
          CreatePrologue(F, M, CheckLoc, &TLI, Libcalls, AI);
    }

    // SelectionDAG based code generation. Nothing else needs to be done here.
    // The epilogue instrumentation is postponed to SelectionDAG.
    if (SupportsSelectionDAGSP)
      break;

    // Find the stack guard slot if the prologue was not created by this pass
    // itself via a previous call to CreatePrologue().
    if (!AI) {
      const CallInst *SPCall = findStackProtectorIntrinsic(*F);
      assert(SPCall && "Call to llvm.stackprotector is missing");
      AI = cast<AllocaInst>(SPCall->getArgOperand(1));
    }

    // Set HasIRCheck to true, so that SelectionDAG will not generate its own
    // version. SelectionDAG calls 'shouldEmitSDCheck' to check whether
    // instrumentation has already been generated.
    HasIRCheck = true;

    // If we're instrumenting a block with a tail call, the check has to be
    // inserted before the call rather than between it and the return.
    Instruction *Prev = CheckLoc->getPrevNode();
    if (auto *CI = dyn_cast_if_present<CallInst>(Prev))
      if (CI->isTailCall() && isInTailCallPosition(*CI, TLI.getTargetMachine()))
        CheckLoc = Prev;

    // Generate epilogue instrumentation. The epilogue instrumentation can be
    // function-based or inlined depending on which mechanism the target is
    // providing.
    if (Function *GuardCheck = TLI.getSSPStackGuardCheck(*M, Libcalls)) {
      // Generate the function-based epilogue instrumentation.
      // The target provides a guard check function, generate a call to it.
      IRBuilder<> B(CheckLoc);
      LoadInst *Guard = B.CreateLoad(B.getPtrTy(), AI, true, "Guard");
      CallInst *Call = B.CreateCall(GuardCheck, {Guard});
      Call->setAttributes(GuardCheck->getAttributes());
      Call->setCallingConv(GuardCheck->getCallingConv());
    } else {
      // Generate the epilogue with inline instrumentation.
      // If we do not support SelectionDAG based calls, generate IR level
      // calls.
      //
      // For each block with a return instruction, convert this:
      //
      //   return:
      //     ...
      //     ret ...
      //
      // into this:
      //
      //   return:
      //     ...
      //     %1 = <stack guard>
      //     %2 = load StackGuardSlot
      //     %3 = icmp ne i1 %1, %2
      //     br i1 %3, label %CallStackCheckFailBlk, label %SP_return
      //
      //   SP_return:
      //     ret ...
      //
      //   CallStackCheckFailBlk:
      //     call void @__stack_chk_fail()
      //     unreachable

      // Create the FailBB. We duplicate the BB every time since the MI tail
      // merge pass will merge together all of the various BB into one including
      // fail BB generated by the stack protector pseudo instruction.
      if (!FailBB)
        FailBB = CreateFailBB(F, Libcalls);

      IRBuilder<> B(CheckLoc);
      Value *Guard = getStackGuard(TLI, Libcalls, M, B);
      LoadInst *LI2 = B.CreateLoad(B.getPtrTy(), AI, true);
      auto *Cmp = cast<ICmpInst>(B.CreateICmpNE(Guard, LI2));
      auto SuccessProb =
          BranchProbabilityInfo::getBranchProbStackProtector(true);
      auto FailureProb =
          BranchProbabilityInfo::getBranchProbStackProtector(false);
      MDNode *Weights = MDBuilder(F->getContext())
                            .createBranchWeights(FailureProb.getNumerator(),
                                                 SuccessProb.getNumerator());

      SplitBlockAndInsertIfThen(Cmp, CheckLoc,
                                /*Unreachable=*/false, Weights, DTU,
                                /*LI=*/nullptr, /*ThenBlock=*/FailBB);

      auto *BI = cast<BranchInst>(Cmp->getParent()->getTerminator());
      BasicBlock *NewBB = BI->getSuccessor(1);
      NewBB->setName("SP_return");
      NewBB->moveAfter(&BB);

      Cmp->setPredicate(Cmp->getInversePredicate());
      BI->swapSuccessors();
    }
  }

  // Return if we didn't modify any basic blocks. i.e., there are no return
  // statements in the function.
  return HasPrologue;
}

static BasicBlock *CreateFailBB(Function *F,
                                const LibcallLoweringInfo &Libcalls) {
  auto *M = F->getParent();
  LLVMContext &Context = F->getContext();
  BasicBlock *FailBB = BasicBlock::Create(Context, "CallStackCheckFailBlk", F);
  IRBuilder<> B(FailBB);
  if (F->getSubprogram())
    B.SetCurrentDebugLocation(
        DILocation::get(Context, 0, 0, F->getSubprogram()));
  FunctionCallee StackChkFail;
  SmallVector<Value *, 1> Args;

  if (RTLIB::LibcallImpl ChkFailImpl =
          Libcalls.getLibcallImpl(RTLIB::STACKPROTECTOR_CHECK_FAIL)) {
    StackChkFail = M->getOrInsertFunction(
        RTLIB::RuntimeLibcallsInfo::getLibcallImplName(ChkFailImpl),
        Type::getVoidTy(Context));
  } else if (RTLIB::LibcallImpl SSHImpl =
                 Libcalls.getLibcallImpl(RTLIB::STACK_SMASH_HANDLER)) {
    StackChkFail = M->getOrInsertFunction(
        RTLIB::RuntimeLibcallsInfo::getLibcallImplName(SSHImpl),
        Type::getVoidTy(Context), PointerType::getUnqual(Context));
    Args.push_back(B.CreateGlobalString(F->getName(), "SSH"));
  } else {
    Context.emitError("no libcall available for stack protector");
  }

  if (StackChkFail) {
    CallInst *Call = B.CreateCall(StackChkFail, Args);
    Call->addFnAttr(Attribute::NoReturn);
  }

  B.CreateUnreachable();
  return FailBB;
}
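
// The generated block typically takes one of two shapes, depending on which
// libcall the target provides (illustrative sketch; the exact symbol names
// come from the target's libcall configuration):
//
//   CallStackCheckFailBlk:                         ; e.g. glibc-style targets
//     call void @__stack_chk_fail()
//     unreachable
//
//   CallStackCheckFailBlk:                         ; e.g. targets with a smash
//     call void @__stack_smash_handler(ptr @SSH)   ; handler taking the
//     unreachable                                  ; function name string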