StackProtector.cpp
//===- StackProtector.cpp - Stack Protector Insertion ---------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass inserts stack protectors into functions which need them. A variable
// with a random value in it is stored onto the stack before the local variables
// are allocated. Upon exiting the block, the stored value is checked. If it's
// changed, then there was some sort of violation and the program aborts.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/StackProtector.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/EHPersonalities.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/User.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include <optional>

using namespace llvm;

#define DEBUG_TYPE "stack-protector"

STATISTIC(NumFunProtected, "Number of functions protected");
STATISTIC(NumAddrTaken, "Number of local variables that have their address"
                        " taken.");

static cl::opt<bool> EnableSelectionDAGSP("enable-selectiondag-sp",
                                          cl::init(true), cl::Hidden);
static cl::opt<bool> DisableCheckNoReturn("disable-check-noreturn-call",
                                          cl::init(false), cl::Hidden);

/// InsertStackProtectors - Insert code into the prologue and epilogue of the
/// function.
///
///  - The prologue code loads and stores the stack guard onto the stack.
///  - The epilogue checks the value stored in the prologue against the original
///    value. It calls __stack_chk_fail if they differ.
static bool InsertStackProtectors(const TargetLowering &TLI,
                                  const LibcallLoweringInfo &Libcalls,
                                  Function *F, DomTreeUpdater *DTU,
                                  bool &HasPrologue, bool &HasIRCheck);

/// CreateFailBB - Create a basic block to jump to when the stack protector
/// check fails.
static BasicBlock *CreateFailBB(Function *F,
                                const LibcallLoweringInfo &Libcalls);

bool SSPLayoutInfo::shouldEmitSDCheck(const BasicBlock &BB) const {
  return HasPrologue && !HasIRCheck && isa<ReturnInst>(BB.getTerminator());
}

void SSPLayoutInfo::copyToMachineFrameInfo(MachineFrameInfo &MFI) const {
  if (Layout.empty())
    return;

  for (int I = 0, E = MFI.getObjectIndexEnd(); I != E; ++I) {
    if (MFI.isDeadObjectIndex(I))
      continue;

    const AllocaInst *AI = MFI.getObjectAllocation(I);
    if (!AI)
      continue;

    SSPLayoutMap::const_iterator LI = Layout.find(AI);
    if (LI == Layout.end())
      continue;

    MFI.setObjectSSPLayout(I, LI->second);
  }
}

SSPLayoutInfo SSPLayoutAnalysis::run(Function &F,
                                     FunctionAnalysisManager &FAM) {
  SSPLayoutInfo Info;
  Info.RequireStackProtector =
      SSPLayoutAnalysis::requiresStackProtector(&F, &Info.Layout);
  Info.SSPBufferSize = F.getFnAttributeAsParsedInteger(
      "stack-protector-buffer-size", SSPLayoutInfo::DefaultSSPBufferSize);
  return Info;
}

AnalysisKey SSPLayoutAnalysis::Key;

PreservedAnalyses StackProtectorPass::run(Function &F,
                                          FunctionAnalysisManager &FAM) {
  auto &Info = FAM.getResult<SSPLayoutAnalysis>(F);
  auto *DT = FAM.getCachedResult<DominatorTreeAnalysis>(F);
  DomTreeUpdater DTU(DT, DomTreeUpdater::UpdateStrategy::Lazy);

  if (!Info.RequireStackProtector)
    return PreservedAnalyses::all();

  // TODO(etienneb): Functions with funclets are not correctly supported now.
  // Do nothing if this is a funclet-based personality.
  if (F.hasPersonalityFn()) {
    EHPersonality Personality = classifyEHPersonality(F.getPersonalityFn());
    if (isFuncletEHPersonality(Personality))
      return PreservedAnalyses::all();
  }

  auto &MAMProxy = FAM.getResult<ModuleAnalysisManagerFunctionProxy>(F);
  const LibcallLoweringModuleAnalysisResult *LibcallLowering =
      MAMProxy.getCachedResult<LibcallLoweringModuleAnalysis>(*F.getParent());

  if (!LibcallLowering) {
    F.getContext().emitError("'" + LibcallLoweringModuleAnalysis::name() +
                             "' analysis required");
    return PreservedAnalyses::all();
  }

  const TargetSubtargetInfo *STI = TM->getSubtargetImpl(F);
  const TargetLowering *TLI = STI->getTargetLowering();
  const LibcallLoweringInfo &Libcalls =
      LibcallLowering->getLibcallLowering(*STI);

  ++NumFunProtected;
  bool Changed = InsertStackProtectors(*TLI, Libcalls, &F, DT ? &DTU : nullptr,
                                       Info.HasPrologue, Info.HasIRCheck);
#ifdef EXPENSIVE_CHECKS
  assert((!DT ||
          DTU.getDomTree().verify(DominatorTree::VerificationLevel::Full)) &&
         "Failed to maintain validity of domtree!");
#endif

  if (!Changed)
    return PreservedAnalyses::all();
  PreservedAnalyses PA;
  PA.preserve<SSPLayoutAnalysis>();
  PA.preserve<DominatorTreeAnalysis>();
  return PA;
}

char StackProtector::ID = 0;

StackProtector::StackProtector() : FunctionPass(ID) {
  initializeStackProtectorPass(*PassRegistry::getPassRegistry());
}

INITIALIZE_PASS_BEGIN(StackProtector, DEBUG_TYPE,
                      "Insert stack protectors", false, true)
INITIALIZE_PASS_DEPENDENCY(TargetPassConfig)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(LibcallLoweringInfoWrapper)
INITIALIZE_PASS_END(StackProtector, DEBUG_TYPE,
                    "Insert stack protectors", false, true)

FunctionPass *llvm::createStackProtectorPass() { return new StackProtector(); }

void StackProtector::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<TargetPassConfig>();
  AU.addRequired<LibcallLoweringInfoWrapper>();
  AU.addPreserved<DominatorTreeWrapperPass>();
}

bool StackProtector::runOnFunction(Function &Fn) {
  F = &Fn;
  M = F->getParent();
  if (auto *DTWP = getAnalysisIfAvailable<DominatorTreeWrapperPass>())
    DTU.emplace(DTWP->getDomTree(), DomTreeUpdater::UpdateStrategy::Lazy);
  LayoutInfo.HasPrologue = false;
  LayoutInfo.HasIRCheck = false;

  LayoutInfo.SSPBufferSize = Fn.getFnAttributeAsParsedInteger(
      "stack-protector-buffer-size", SSPLayoutInfo::DefaultSSPBufferSize);
  if (!requiresStackProtector(F, &LayoutInfo.Layout))
    return false;

  // TODO(etienneb): Functions with funclets are not correctly supported now.
  // Do nothing if this is a funclet-based personality.
  if (Fn.hasPersonalityFn()) {
    EHPersonality Personality = classifyEHPersonality(Fn.getPersonalityFn());
    if (isFuncletEHPersonality(Personality))
      return false;
  }

  const TargetSubtargetInfo *Subtarget = TM->getSubtargetImpl(Fn);
  const LibcallLoweringInfo &Libcalls =
      getAnalysis<LibcallLoweringInfoWrapper>().getLibcallLowering(*M,
                                                                   *Subtarget);

  const TargetLowering *TLI = Subtarget->getTargetLowering();

  ++NumFunProtected;
  bool Changed =
      InsertStackProtectors(*TLI, Libcalls, F, DTU ? &*DTU : nullptr,
                            LayoutInfo.HasPrologue, LayoutInfo.HasIRCheck);
#ifdef EXPENSIVE_CHECKS
  assert((!DTU ||
          DTU->getDomTree().verify(DominatorTree::VerificationLevel::Full)) &&
         "Failed to maintain validity of domtree!");
#endif
  DTU.reset();
  return Changed;
}

/// \param [out] IsLarge is set to true if a protectable array is found and
/// it is "large" ( >= ssp-buffer-size). In the case of a structure with
/// multiple arrays, this gets set if any of them is large.
static bool ContainsProtectableArray(Type *Ty, Module *M, unsigned SSPBufferSize,
                                     bool &IsLarge, bool Strong,
                                     bool InStruct) {
  if (!Ty)
    return false;
  if (ArrayType *AT = dyn_cast<ArrayType>(Ty)) {
    if (!AT->getElementType()->isIntegerTy(8)) {
      // If we're on a non-Darwin platform or we're inside of a structure, don't
      // add stack protectors unless the array is a character array.
      // However, in strong mode any array, regardless of type and size,
      // triggers a protector.
      if (!Strong && (InStruct || !M->getTargetTriple().isOSDarwin()))
        return false;
    }

    // If an array has at least SSPBufferSize bytes of allocated space, then we
    // emit stack protectors.
    if (SSPBufferSize <= M->getDataLayout().getTypeAllocSize(AT)) {
      IsLarge = true;
      return true;
    }

    if (Strong)
      // Require a protector for all arrays in strong mode
      return true;
  }

  const StructType *ST = dyn_cast<StructType>(Ty);
  if (!ST)
    return false;

  bool NeedsProtector = false;
  for (Type *ET : ST->elements())
    if (ContainsProtectableArray(ET, M, SSPBufferSize, IsLarge, Strong, true)) {
      // If the element is a protectable array and is large (>= SSPBufferSize)
      // then we are done. If the protectable array is not large, then
      // keep looking in case a subsequent element is a large array.
      if (IsLarge)
        return true;
      NeedsProtector = true;
    }

  return NeedsProtector;
}

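// A minimal illustration (not from the upstream sources) of what the
// classification above means in practice; the type names are hypothetical and
// the default "stack-protector-buffer-size" of 8 is assumed.
#if 0
struct SmallTag {
  char Tag[4];   // character array below the buffer size: forces a protector
                 // only under sspstrong, where any array qualifies.
};
struct LargeName {
  int N;
  char Name[32]; // nested character array >= 8 bytes: sets IsLarge and forces
                 // a protector under both ssp and sspstrong.
};
#endif
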
/// Maximum remaining allocation size observed for a phi node, and how often
/// the allocation size has already been decreased. We only allow a limited
/// number of decreases.
struct PhiInfo {
  TypeSize AllocSize;
  unsigned NumDecreased = 0;
  static constexpr unsigned MaxNumDecreased = 3;
  PhiInfo(TypeSize AllocSize) : AllocSize(AllocSize) {}
};
using PhiMap = SmallDenseMap<const PHINode *, PhiInfo, 16>;

/// Check whether a stack allocation has its address taken.
static bool HasAddressTaken(const Instruction *AI, TypeSize AllocSize,
                            Module *M,
                            PhiMap &VisitedPHIs) {
  const DataLayout &DL = M->getDataLayout();
  for (const User *U : AI->users()) {
    const auto *I = cast<Instruction>(U);
    // If this instruction accesses memory make sure it doesn't access beyond
    // the bounds of the allocated object.
    std::optional<MemoryLocation> MemLoc = MemoryLocation::getOrNone(I);
    if (MemLoc && MemLoc->Size.hasValue() &&
        !TypeSize::isKnownGE(AllocSize, MemLoc->Size.getValue()))
      return true;
    switch (I->getOpcode()) {
    case Instruction::Store:
      if (AI == cast<StoreInst>(I)->getValueOperand())
        return true;
      break;
    case Instruction::AtomicCmpXchg:
      // cmpxchg conceptually includes both a load and store from the same
      // location. So, like store, the value being stored is what matters.
      if (AI == cast<AtomicCmpXchgInst>(I)->getNewValOperand())
        return true;
      break;
    case Instruction::AtomicRMW:
      if (AI == cast<AtomicRMWInst>(I)->getValOperand())
        return true;
      break;
    case Instruction::PtrToInt:
      if (AI == cast<PtrToIntInst>(I)->getOperand(0))
        return true;
      break;
    case Instruction::Call: {
      // Ignore intrinsics that do not become real instructions.
      // TODO: Narrow this to intrinsics that have store-like effects.
      const auto *CI = cast<CallInst>(I);
      if (!CI->isDebugOrPseudoInst() && !CI->isLifetimeStartOrEnd())
        return true;
      break;
    }
    case Instruction::Invoke:
      return true;
    case Instruction::GetElementPtr: {
      // If the GEP offset is out-of-bounds, or is non-constant and so has to be
      // assumed to be potentially out-of-bounds, then any memory access that
      // would use it could also be out-of-bounds meaning stack protection is
      // required.
      const auto *GEP = cast<GetElementPtrInst>(I);
      unsigned IndexSize = DL.getIndexTypeSizeInBits(I->getType());
      APInt Offset(IndexSize, 0);
      if (!GEP->accumulateConstantOffset(DL, Offset))
        return true;
      TypeSize OffsetSize = TypeSize::getFixed(Offset.getLimitedValue());
      if (!TypeSize::isKnownGT(AllocSize, OffsetSize))
        return true;
      // Adjust AllocSize to be the space remaining after this offset.
      // We can't subtract a fixed size from a scalable one, so in that case
      // assume the scalable value is of minimum size.
      TypeSize NewAllocSize =
          TypeSize::getFixed(AllocSize.getKnownMinValue()) - OffsetSize;
      if (HasAddressTaken(I, NewAllocSize, M, VisitedPHIs))
        return true;
      break;
    }
    case Instruction::BitCast:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      if (HasAddressTaken(I, AllocSize, M, VisitedPHIs))
        return true;
      break;
    case Instruction::PHI: {
      // Keep track of what PHI nodes we have already visited to ensure
      // they are only visited once.
      const auto *PN = cast<PHINode>(I);
      auto [It, Inserted] = VisitedPHIs.try_emplace(PN, AllocSize);
      if (!Inserted) {
        if (TypeSize::isKnownGE(AllocSize, It->second.AllocSize))
          break;

        // Check again with smaller size.
        if (It->second.NumDecreased == PhiInfo::MaxNumDecreased)
          return true;

        It->second.AllocSize = AllocSize;
        ++It->second.NumDecreased;
      }
      if (HasAddressTaken(PN, AllocSize, M, VisitedPHIs))
        return true;
      break;
    }
    case Instruction::Load:
    case Instruction::Ret:
      // These instructions take an address operand, but have load-like or
      // other innocuous behavior that should not trigger a stack protector.
      break;
    default:
      // Conservatively return true for any instruction that takes an address
      // operand, but is not handled above.
      return true;
    }
  }
  return false;
}

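// A minimal illustration (not from the upstream sources) of the source-level
// patterns HasAddressTaken flags when the strong heuristic walks an alloca's
// users; the names are hypothetical.
#if 0
int *EscapedPtr; // imagine a global that a local's address is stored into

void escapes() {
  int Local = 0;
  EscapedPtr = &Local; // the alloca is the *value* operand of a store, so its
                       // address escapes and a protector is required
}

void contained() {
  int Local = 0;
  Local += 1;          // plain loads/stores through the alloca's pointer do
                       // not count as taking its address
}
#endif
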
/// Search for the first call to the llvm.stackprotector intrinsic and return it
/// if present.
static const CallInst *findStackProtectorIntrinsic(Function &F) {
  for (const BasicBlock &BB : F)
    for (const Instruction &I : BB)
      if (const auto *II = dyn_cast<IntrinsicInst>(&I))
        if (II->getIntrinsicID() == Intrinsic::stackprotector)
          return II;
  return nullptr;
}

/// Check whether or not this function needs a stack protector based
/// upon the stack protector level.
///
/// We use two heuristics: a standard (ssp) and strong (sspstrong).
/// The standard heuristic will add a guard variable to functions that
/// call alloca with either a variable size or a size >= SSPBufferSize,
/// functions with character buffers larger than SSPBufferSize, and functions
/// with aggregates containing character buffers larger than SSPBufferSize. The
/// strong heuristic will add a guard variable to functions that call alloca
/// regardless of size, functions with any buffer regardless of type and size,
/// functions with aggregates that contain any buffer regardless of type and
/// size, and functions that contain stack-based variables that have had their
/// address taken.
bool SSPLayoutAnalysis::requiresStackProtector(Function *F,
                                               SSPLayoutMap *Layout) {
  Module *M = F->getParent();
  bool Strong = false;
  bool NeedsProtector = false;

  // The set of PHI nodes visited when determining if a variable's reference has
  // been taken. This set is maintained to ensure we don't visit the same PHI
  // node multiple times.
  PhiMap VisitedPHIs;

  unsigned SSPBufferSize = F->getFnAttributeAsParsedInteger(
      "stack-protector-buffer-size", SSPLayoutInfo::DefaultSSPBufferSize);

  if (F->hasFnAttribute(Attribute::SafeStack))
    return false;

  // We are constructing the OptimizationRemarkEmitter on the fly rather than
  // using the analysis pass to avoid building DominatorTree and LoopInfo which
  // are not available this late in the IR pipeline.
  OptimizationRemarkEmitter ORE(F);

  if (F->hasFnAttribute(Attribute::StackProtectReq)) {
    if (!Layout)
      return true;
    ORE.emit([&]() {
      return OptimizationRemark(DEBUG_TYPE, "StackProtectorRequested", F)
             << "Stack protection applied to function "
             << ore::NV("Function", F)
             << " due to a function attribute or command-line switch";
    });
    NeedsProtector = true;
    Strong = true; // Use the same heuristic as strong to determine SSPLayout
  } else if (F->hasFnAttribute(Attribute::StackProtectStrong))
    Strong = true;
  else if (!F->hasFnAttribute(Attribute::StackProtect))
    return false;

  for (const BasicBlock &BB : *F) {
    for (const Instruction &I : BB) {
      if (const AllocaInst *AI = dyn_cast<AllocaInst>(&I)) {
        if (const MDNode *MD = AI->getMetadata("stack-protector")) {
          const auto *CI = mdconst::dyn_extract<ConstantInt>(MD->getOperand(0));
          if (CI->isZero())
            continue;
        }
        if (AI->isArrayAllocation()) {
          auto RemarkBuilder = [&]() {
            return OptimizationRemark(DEBUG_TYPE, "StackProtectorAllocaOrArray",
                                      &I)
                   << "Stack protection applied to function "
                   << ore::NV("Function", F)
                   << " due to a call to alloca or use of a variable length "
                      "array";
          };
          if (const auto *CI = dyn_cast<ConstantInt>(AI->getArraySize())) {
            if (CI->getLimitedValue(SSPBufferSize) >= SSPBufferSize) {
              // A call to alloca with size >= SSPBufferSize requires
              // stack protectors.
              if (!Layout)
                return true;
              Layout->insert(
                  std::make_pair(AI, MachineFrameInfo::SSPLK_LargeArray));
              ORE.emit(RemarkBuilder);
              NeedsProtector = true;
            } else if (Strong) {
              // Require protectors for all alloca calls in strong mode.
              if (!Layout)
                return true;
              Layout->insert(
                  std::make_pair(AI, MachineFrameInfo::SSPLK_SmallArray));
              ORE.emit(RemarkBuilder);
              NeedsProtector = true;
            }
          } else {
            // A call to alloca with a variable size requires protectors.
            if (!Layout)
              return true;
            Layout->insert(
                std::make_pair(AI, MachineFrameInfo::SSPLK_LargeArray));
            ORE.emit(RemarkBuilder);
            NeedsProtector = true;
          }
          continue;
        }

        bool IsLarge = false;
        if (ContainsProtectableArray(AI->getAllocatedType(), M, SSPBufferSize,
                                     IsLarge, Strong, false)) {
          if (!Layout)
            return true;
          Layout->insert(std::make_pair(
              AI, IsLarge ? MachineFrameInfo::SSPLK_LargeArray
                          : MachineFrameInfo::SSPLK_SmallArray));
          ORE.emit([&]() {
            return OptimizationRemark(DEBUG_TYPE, "StackProtectorBuffer", &I)
                   << "Stack protection applied to function "
                   << ore::NV("Function", F)
                   << " due to a stack allocated buffer or struct containing a "
                      "buffer";
          });
          NeedsProtector = true;
          continue;
        }

        if (Strong) {
          std::optional<TypeSize> AllocSize =
              AI->getAllocationSize(M->getDataLayout());
          if (!AllocSize || HasAddressTaken(AI, *AllocSize, M, VisitedPHIs)) {
            ++NumAddrTaken;
            if (!Layout)
              return true;
            Layout->insert(std::make_pair(AI, MachineFrameInfo::SSPLK_AddrOf));
            ORE.emit([&]() {
              return OptimizationRemark(DEBUG_TYPE,
                                        "StackProtectorAddressTaken", &I)
                     << "Stack protection applied to function "
                     << ore::NV("Function", F)
                     << " due to the address of a local variable being taken";
            });
            NeedsProtector = true;
          }
        }
        // Clear any PHIs that we visited, to make sure we examine all uses of
        // any subsequent allocas that we look at.
        VisitedPHIs.clear();
      }
    }
  }

  return NeedsProtector;
}

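// A minimal illustration (not from the upstream sources) of functions the
// heuristic above would instrument; the names are hypothetical and the
// default "stack-protector-buffer-size" of 8 is assumed.
#if 0
void copyName(const char *Src) {
  char Buf[16];               // protectable char buffer >= 8 bytes:
  __builtin_strcpy(Buf, Src); // SSPLK_LargeArray even under plain ssp
}

void useScratch(unsigned N) {
  char *Scratch =
      static_cast<char *>(__builtin_alloca(N)); // variable-sized alloca:
  Scratch[0] = 0;                               // always SSPLK_LargeArray
}
#endif
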
/// Create a load of the stack guard value and report whether SelectionDAG-based
/// SSP is supported.
static Value *getStackGuard(const TargetLoweringBase &TLI,
                            const LibcallLoweringInfo &Libcalls, Module *M,
                            IRBuilder<> &B,
                            bool *SupportsSelectionDAGSP = nullptr) {
  Value *Guard = TLI.getIRStackGuard(B, Libcalls);
  StringRef GuardMode = M->getStackProtectorGuard();
  if ((GuardMode == "tls" || GuardMode.empty()) && Guard)
    return B.CreateLoad(B.getPtrTy(), Guard, true, "StackGuard");

  // Use SelectionDAG SSP handling, since there isn't an IR guard.
  //
  // This is somewhat awkward: we optionally report here whether SelectionDAG
  // SSP should be used. The reason is that the answer is strictly defined as
  // !TLI->getIRStackGuard(B), and getIRStackGuard may mutate the IR, so there
  // is no way to obtain this bit without mutating the IR; it therefore has to
  // be determined at exactly this point.
  //
  // We could define a new hook such as TLI::supportsSelectionDAGSP(), but that
  // would put more overriding burden on the backends while conveying the same
  // information getIRStackGuard() already gives.
  if (SupportsSelectionDAGSP)
    *SupportsSelectionDAGSP = true;
  TLI.insertSSPDeclarations(*M, Libcalls);
  return B.CreateIntrinsic(Intrinsic::stackguard, {});
}

/// Insert code into the entry block that stores the stack guard
/// variable onto the stack:
///
///   entry:
///     StackGuardSlot = alloca i8*
///     StackGuard = <stack guard>
///     call void @llvm.stackprotector(StackGuard, StackGuardSlot)
///
/// Returns true if the platform/triple supports the stackprotectorcreate pseudo
/// node.
static bool CreatePrologue(Function *F, Module *M, Instruction *CheckLoc,
                           const TargetLoweringBase *TLI,
                           const LibcallLoweringInfo &Libcalls,
                           AllocaInst *&AI) {
  bool SupportsSelectionDAGSP = false;
  IRBuilder<> B(&F->getEntryBlock().front());
  PointerType *PtrTy = PointerType::getUnqual(CheckLoc->getContext());
  AI = B.CreateAlloca(PtrTy, nullptr, "StackGuardSlot");

  Value *GuardSlot =
      getStackGuard(*TLI, Libcalls, M, B, &SupportsSelectionDAGSP);
  B.CreateIntrinsic(Intrinsic::stackprotector, {GuardSlot, AI});
  return SupportsSelectionDAGSP;
}

static bool InsertStackProtectors(const TargetLowering &TLI,
                                  const LibcallLoweringInfo &Libcalls, Function *F,
                                  DomTreeUpdater *DTU, bool &HasPrologue,
                                  bool &HasIRCheck) {
  auto *M = F->getParent();

  // If the target wants to XOR the frame pointer into the guard value, it's
  // impossible to emit the check in IR, so the target *must* support stack
  // protection in SDAG.
  bool SupportsSelectionDAGSP =
      TLI.useStackGuardXorFP() ||
      (EnableSelectionDAGSP && !TLI.getTargetMachine().Options.EnableFastISel);
  AllocaInst *AI = nullptr; // Place on stack that stores the stack guard.
  BasicBlock *FailBB = nullptr;

  for (BasicBlock &BB : llvm::make_early_inc_range(*F)) {
    // This is stack protector auto generated check BB, skip it.
    if (&BB == FailBB)
      continue;
    Instruction *CheckLoc = dyn_cast<ReturnInst>(BB.getTerminator());
    if (!CheckLoc && !DisableCheckNoReturn)
      for (auto &Inst : BB) {
        if (IntrinsicInst *IB = dyn_cast<IntrinsicInst>(&Inst);
            IB && (IB->getIntrinsicID() == Intrinsic::eh_sjlj_callsite)) {
          // eh_sjlj_callsite has to be in same BB as the
          // bb terminator. Don't insert within this range.
          CheckLoc = IB;
          break;
        }
        if (auto *CB = dyn_cast<CallBase>(&Inst))
          // Do stack check before noreturn calls that aren't nounwind (e.g:
          // __cxa_throw).
          if (CB->doesNotReturn() && !CB->doesNotThrow()) {
            CheckLoc = CB;
            break;
          }
      }

    if (!CheckLoc)
      continue;

    // Generate prologue instrumentation if not already generated.
    if (!HasPrologue) {
      HasPrologue = true;
      SupportsSelectionDAGSP &=
          CreatePrologue(F, M, CheckLoc, &TLI, Libcalls, AI);
    }

    // SelectionDAG based code generation. Nothing else needs to be done here.
    // The epilogue instrumentation is postponed to SelectionDAG.
    if (SupportsSelectionDAGSP)
      break;

    // Find the stack guard slot if the prologue was not created by this pass
    // itself via a previous call to CreatePrologue().
    if (!AI) {
      const CallInst *SPCall = findStackProtectorIntrinsic(*F);
      assert(SPCall && "Call to llvm.stackprotector is missing");
      AI = cast<AllocaInst>(SPCall->getArgOperand(1));
    }

    // Set HasIRCheck to true, so that SelectionDAG will not generate its own
    // version. SelectionDAG calls 'shouldEmitSDCheck' to check whether
    // instrumentation has already been generated.
    HasIRCheck = true;

    // If we're instrumenting a block with a tail call, the check has to be
    // inserted before the call rather than between it and the return.
    Instruction *Prev = CheckLoc->getPrevNode();
    if (auto *CI = dyn_cast_if_present<CallInst>(Prev))
      if (CI->isTailCall() && isInTailCallPosition(*CI, TLI.getTargetMachine()))
        CheckLoc = Prev;

    // Generate epilogue instrumentation. The epilogue instrumentation can be
    // function-based or inlined depending on which mechanism the target is
    // providing.
    if (Function *GuardCheck = TLI.getSSPStackGuardCheck(*M, Libcalls)) {
      // Generate the function-based epilogue instrumentation.
      // The target provides a guard check function, generate a call to it.
      IRBuilder<> B(CheckLoc);
      LoadInst *Guard = B.CreateLoad(B.getPtrTy(), AI, true, "Guard");
      CallInst *Call = B.CreateCall(GuardCheck, {Guard});
      Call->setAttributes(GuardCheck->getAttributes());
      Call->setCallingConv(GuardCheck->getCallingConv());
    } else {
      // Generate the epilogue with inline instrumentation.
      // If we do not support SelectionDAG based calls, generate IR level
      // calls.
      //
      // For each block with a return instruction, convert this:
      //
      //   return:
      //     ...
      //     ret ...
      //
      // into this:
      //
      //   return:
      //     ...
      //     %1 = <stack guard>
      //     %2 = load StackGuardSlot
      //     %3 = icmp ne i1 %1, %2
      //     br i1 %3, label %CallStackCheckFailBlk, label %SP_return
      //
      //   SP_return:
      //     ret ...
      //
      //   CallStackCheckFailBlk:
      //     call void @__stack_chk_fail()
      //     unreachable

      // Create the FailBB. We duplicate the BB every time since the MI tail
      // merge pass will merge together all of the various BBs into one,
      // including the fail BB generated by the stack protector pseudo
      // instruction.
      if (!FailBB)
        FailBB = CreateFailBB(F, Libcalls);

      IRBuilder<> B(CheckLoc);
      Value *Guard = getStackGuard(TLI, Libcalls, M, B);
      LoadInst *LI2 = B.CreateLoad(B.getPtrTy(), AI, true);
      auto *Cmp = cast<ICmpInst>(B.CreateICmpNE(Guard, LI2));
      auto SuccessProb =
          BranchProbabilityInfo::getBranchProbStackProtector(/*IsLikely=*/true);
      auto FailureProb =
          BranchProbabilityInfo::getBranchProbStackProtector(/*IsLikely=*/false);
      MDNode *Weights = MDBuilder(F->getContext())
                            .createBranchWeights(FailureProb.getNumerator(),
                                                 SuccessProb.getNumerator());

      SplitBlockAndInsertIfThen(Cmp, CheckLoc,
                                /*Unreachable=*/false, Weights, DTU,
                                /*LI=*/nullptr, /*ThenBlock=*/FailBB);

      auto *BI = cast<BranchInst>(Cmp->getParent()->getTerminator());
      BasicBlock *NewBB = BI->getSuccessor(1);
      NewBB->setName("SP_return");
      NewBB->moveAfter(&BB);

      Cmp->setPredicate(Cmp->getInversePredicate());
      BI->swapSuccessors();
    }
  }

  // Report whether we modified any basic blocks; HasPrologue stays false only
  // when the function contains no return statements, in which case nothing was
  // instrumented.
  return HasPrologue;
}

static BasicBlock *CreateFailBB(Function *F,
                                const LibcallLoweringInfo &Libcalls) {
  auto *M = F->getParent();
  LLVMContext &Context = F->getContext();
  BasicBlock *FailBB = BasicBlock::Create(Context, "CallStackCheckFailBlk", F);
  IRBuilder<> B(FailBB);
  if (F->getSubprogram())
    B.SetCurrentDebugLocation(
        DILocation::get(Context, 0, 0, F->getSubprogram()));
  FunctionCallee StackChkFail;
  SmallVector<Value *, 1> Args;

  if (RTLIB::LibcallImpl ChkFailImpl =
          Libcalls.getLibcallImpl(RTLIB::STACKPROTECTOR_CHECK_FAIL)) {
    StackChkFail = M->getOrInsertFunction(
        RTLIB::RuntimeLibcallsInfo::getLibcallImplName(ChkFailImpl),
        Type::getVoidTy(Context));
  } else if (RTLIB::LibcallImpl SSHImpl =
                 Libcalls.getLibcallImpl(RTLIB::STACK_SMASH_HANDLER)) {
    StackChkFail = M->getOrInsertFunction(
        RTLIB::RuntimeLibcallsInfo::getLibcallImplName(SSHImpl),
        Type::getVoidTy(Context), PointerType::getUnqual(Context));
    Args.push_back(B.CreateGlobalString(F->getName(), "SSH"));
  } else {
    Context.emitError("no libcall available for stack protector");
  }

  if (StackChkFail) {
    CallInst *Call = B.CreateCall(StackChkFail, Args);
    Call->addFnAttr(Attribute::NoReturn);
  }

  B.CreateUnreachable();
  return FailBB;
}
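
// A minimal illustration (not from the upstream sources) of the runtime hook
// the generated fail block ends up calling; in practice libc (or a sanitizer
// runtime) provides __stack_chk_fail, which reports the corruption and aborts.
#if 0
extern "C" void __stack_chk_fail() {
  // A real implementation would emit a diagnostic before terminating.
  __builtin_trap();
}
#endif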