#define DEBUG_TYPE "aarch64-speculation-hardening"

#define AARCH64_SPECULATION_HARDENING_NAME "AArch64 speculation hardening pass"

static cl::opt<bool> HardenLoads("aarch64-slh-loads", cl::Hidden,
                                 cl::desc("Sanitize loads from memory."),
                                 cl::init(true));
class AArch64SpeculationHardening : public MachineFunctionPass {
public:
  const TargetInstrInfo *TII;
  const TargetRegisterInfo *TRI;

  static char ID;

  // (constructor elided)

  bool runOnMachineFunction(MachineFunction &Fn) override;

  StringRef getPassName() const override {
    return AARCH64_SPECULATION_HARDENING_NAME;
  }

private:
  unsigned MisspeculatingTaintReg;
  unsigned MisspeculatingTaintReg32Bit;
  bool UseControlFlowSpeculationBarrier;
  BitVector RegsNeedingCSDBBeforeUse;
  BitVector RegsAlreadyMasked;

  bool functionUsesHardeningRegister(MachineFunction &MF) const;
  bool instrumentControlFlow(MachineBasicBlock &MBB,
                             bool &UsesFullSpeculationBarrier);
  bool endsWithCondControlFlow(MachineBasicBlock &MBB, MachineBasicBlock *&TBB,
                               MachineBasicBlock *&FBB,
                               AArch64CC::CondCode &CondCode) const;
  void insertTrackingCode(MachineBasicBlock &SplitEdgeBB,
                          AArch64CC::CondCode &CondCode, DebugLoc DL) const;
  void insertSPToRegTaintPropagation(MachineBasicBlock &MBB,
                                     MachineBasicBlock::iterator MBBI) const;
  void insertRegToSPTaintPropagation(MachineBasicBlock &MBB,
                                     MachineBasicBlock::iterator MBBI,
                                     unsigned TmpReg) const;
  void insertFullSpeculationBarrier(MachineBasicBlock &MBB,
                                    MachineBasicBlock::iterator MBBI,
                                    DebugLoc DL) const;

  bool slhLoads(MachineBasicBlock &MBB);
  bool makeGPRSpeculationSafe(MachineBasicBlock &MBB,
                              MachineBasicBlock::iterator MBBI,
                              MachineInstr &MI, unsigned Reg);
  bool lowerSpeculationSafeValuePseudos(MachineBasicBlock &MBB,
                                        bool UsesFullSpeculationBarrier);
  bool expandSpeculationSafeValue(MachineBasicBlock &MBB,
                                  MachineBasicBlock::iterator MBBI,
                                  bool UsesFullSpeculationBarrier);
  bool insertCSDB(MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
                  DebugLoc DL);
};

char AArch64SpeculationHardening::ID = 0;
bool AArch64SpeculationHardening::endsWithCondControlFlow(
    MachineBasicBlock &MBB, MachineBasicBlock *&TBB, MachineBasicBlock *&FBB,
    AArch64CC::CondCode &CondCode) const {
  SmallVector<MachineOperand, 1> analyzeBranchCondCode;
  if (TII->analyzeBranch(MBB, TBB, FBB, analyzeBranchCondCode, false))
    return false;

  // Ignore if the BB ends in an unconditional branch/fall-through.
  if (analyzeBranchCondCode.empty())
    return false;

  // If the BB ends with a single conditional branch, analyzeBranch leaves FBB
  // as nullptr; make it point at the fall-through block so both edges are
  // always known.
  if (FBB == nullptr)
    FBB = MBB.getFallThrough();

  // Translate analyzeBranchCondCode to CondCode.
  assert(analyzeBranchCondCode.size() == 1 && "unknown Cond array format");
  CondCode = AArch64CC::CondCode(analyzeBranchCondCode[0].getImm());
  return true;
}
void AArch64SpeculationHardening::insertFullSpeculationBarrier(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
    DebugLoc DL) const {
  // A full control flow speculation barrier: DSB followed by ISB.
  BuildMI(MBB, MBBI, DL, TII->get(AArch64::DSB)).addImm(0xf);
  BuildMI(MBB, MBBI, DL, TII->get(AArch64::ISB)).addImm(0xf);
}

void AArch64SpeculationHardening::insertTrackingCode(
    MachineBasicBlock &SplitEdgeBB, AArch64CC::CondCode &CondCode,
    DebugLoc DL) const {
  if (UseControlFlowSpeculationBarrier) {
    insertFullSpeculationBarrier(SplitEdgeBB, SplitEdgeBB.begin(), DL);
  } else {
    // CSEL TaintReg, TaintReg, XZR, CondCode: keep the taint when the edge's
    // condition holds architecturally, zero it under branch miss-speculation.
    BuildMI(SplitEdgeBB, SplitEdgeBB.begin(), DL, TII->get(AArch64::CSELXr))
        .addDef(MisspeculatingTaintReg)
        .addUse(MisspeculatingTaintReg)
        .addUse(AArch64::XZR)
        .addImm(CondCode);
    SplitEdgeBB.addLiveIn(AArch64::NZCV);
  }
}
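// Conceptual sketch (illustrative only): a minimal C++ model of the taint
// update that the CSEL above performs on every outgoing CFG edge. The helper
// name trackEdge is hypothetical.
//
//   #include <cstdint>
//
//   static inline uint64_t trackEdge(uint64_t Taint, bool EdgeConditionHolds) {
//     // Architecturally, the condition of the edge actually taken holds, so
//     // the taint is preserved; under branch miss-speculation it becomes 0.
//     return EdgeConditionHolds ? Taint : 0; // models CSEL X16, X16, XZR, cond
//   }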
bool AArch64SpeculationHardening::instrumentControlFlow(
    MachineBasicBlock &MBB, bool &UsesFullSpeculationBarrier) {
  bool Modified = false;
  MachineBasicBlock *TBB = nullptr;
  MachineBasicBlock *FBB = nullptr;
  AArch64CC::CondCode CondCode;

  if (!endsWithCondControlFlow(MBB, TBB, FBB, CondCode)) {
    LLVM_DEBUG(dbgs() << "... doesn't end with CondControlFlow\n");
  } else {
    // Insert a CSEL on each outgoing edge: with CondCode on the true edge and
    // with the inverted condition on the false edge.
    AArch64CC::CondCode InvCondCode = AArch64CC::getInvertedCondCode(CondCode);

    MachineBasicBlock *SplitEdgeTBB = MBB.SplitCriticalEdge(TBB, *this);
    MachineBasicBlock *SplitEdgeFBB = MBB.SplitCriticalEdge(FBB, *this);

    assert(SplitEdgeTBB != nullptr);
    assert(SplitEdgeFBB != nullptr);

    DebugLoc DL;
    if (MBB.instr_end() != MBB.instr_begin())
      DL = (--MBB.instr_end())->getDebugLoc();

    insertTrackingCode(*SplitEdgeTBB, CondCode, DL);
    insertTrackingCode(*SplitEdgeFBB, InvCondCode, DL);
    Modified = true;
  }

  // Record return and call instructions together with a temporary register
  // that is free just before them; the taint must be moved to/from SP there.
  SmallVector<std::pair<MachineInstr *, unsigned>, 4> ReturnInstructions;
  SmallVector<std::pair<MachineInstr *, unsigned>, 4> CallInstructions;
  // If no temporary register is available at one of those points, fall back
  // to a full speculation barrier for this block.
  bool TmpRegisterNotAvailableEverywhere = false;

  RegScavenger RS;
  RS.enterBasicBlockEnd(MBB);

  for (MachineBasicBlock::iterator I = MBB.end(); I != MBB.begin();) {
    MachineInstr &MI = *--I;
    if (!MI.isReturn() && !MI.isCall())
      continue;

    // Position the scavenger so it knows which registers are free just before
    // MI executes.
    if (I == MBB.begin())
      RS.enterBasicBlock(MBB);
    else
      RS.backward(std::prev(I));
    unsigned TmpReg = RS.FindUnusedReg(&AArch64::GPR64commonRegClass);
    LLVM_DEBUG(dbgs() << "RS finds "
                      << ((TmpReg == 0) ? "no register " : "register ");
               if (TmpReg != 0) dbgs() << printReg(TmpReg, TRI) << " ";
               dbgs() << "to be available at MI " << MI);
    if (TmpReg == 0)
      TmpRegisterNotAvailableEverywhere = true;
    if (MI.isReturn())
      ReturnInstructions.push_back({&MI, TmpReg});
    else if (MI.isCall())
      CallInstructions.push_back({&MI, TmpReg});
  }

  if (TmpRegisterNotAvailableEverywhere) {
    // Emit a full speculation barrier at the start of the block instead, which
    // makes the taint tracking within this block unnecessary.
    insertFullSpeculationBarrier(MBB, MBB.begin(),
                                 (MBB.begin())->getDebugLoc());
    UsesFullSpeculationBarrier = true;
    Modified = true;
  } else {
    for (auto MI_Reg : ReturnInstructions) {
      assert(MI_Reg.second != 0);
      LLVM_DEBUG(
          dbgs()
          << " About to insert Reg to SP taint propagation with temp register "
          << printReg(MI_Reg.second, TRI)
          << " on instruction: " << *MI_Reg.first);
      insertRegToSPTaintPropagation(MBB, MI_Reg.first, MI_Reg.second);
      Modified = true;
    }

    for (auto MI_Reg : CallInstructions) {
      assert(MI_Reg.second != 0);
      LLVM_DEBUG(dbgs() << " About to insert Reg to SP and back taint "
                           "propagation with temp register "
                        << printReg(MI_Reg.second, TRI)
                        << " around instruction: " << *MI_Reg.first);
      // Just after the call:
      insertSPToRegTaintPropagation(
          MBB, std::next((MachineBasicBlock::iterator)MI_Reg.first));
      // Just before the call:
      insertRegToSPTaintPropagation(MBB, MI_Reg.first, MI_Reg.second);
      Modified = true;
    }
  }
  return Modified;
}
void AArch64SpeculationHardening::insertSPToRegTaintPropagation(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI) const {
  // A full barrier already blocks miss-speculation coming into the function.
  if (UseControlFlowSpeculationBarrier) {
    insertFullSpeculationBarrier(MBB, MBBI, DebugLoc());
    return;
  }
  // Recover the taint from SP: CMP SP, #0 ; CSETM MisspeculatingTaintReg, NE.
  // (BuildMI sequence elided)
}

void AArch64SpeculationHardening::insertRegToSPTaintPropagation(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
    unsigned TmpReg) const {
  // With full barriers there is no miss-speculation to encode into SP.
  if (UseControlFlowSpeculationBarrier)
    return;
  // mov TmpReg, SP ; and TmpReg, TmpReg, TaintReg ; mov SP, TmpReg  (elided)
}
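// Conceptual sketch (illustrative only): a minimal C++ model of how the taint
// survives calls and returns by being folded into SP and later recovered from
// it. The helper names are hypothetical.
//
//   #include <cstdint>
//
//   static inline uint64_t foldTaintIntoSP(uint64_t SP, uint64_t Taint) {
//     // Taint is all-ones on the architectural path (SP unchanged) and zero
//     // under miss-speculation (SP becomes 0).
//     return SP & Taint; // models: mov tmp, sp ; and tmp, tmp, x16 ; mov sp, tmp
//   }
//
//   static inline uint64_t recoverTaintFromSP(uint64_t SP) {
//     return (SP == 0) ? 0 : ~0ULL; // models: cmp sp, #0 ; csetm x16, ne
//   }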
bool AArch64SpeculationHardening::functionUsesHardeningRegister(
    MachineFunction &MF) const {
  for (MachineBasicBlock &MBB : MF) {
    for (MachineInstr &MI : MBB) {
      // The hardening register does not need to remain live across calls, so
      // calls do not count as a use of it.
      if (MI.isCall())
        continue;
      if (MI.readsRegister(MisspeculatingTaintReg, TRI) ||
          MI.modifiesRegister(MisspeculatingTaintReg, TRI))
        return true;
    }
  }
  return false;
}
bool AArch64SpeculationHardening::makeGPRSpeculationSafe(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, MachineInstr &MI,
    unsigned Reg) {
  // Loads cannot load a value directly into SP (nor WSP); if Reg is SP or WSP
  // the instruction merely loads from the stack through the stack pointer.
  if (Reg == AArch64::SP || Reg == AArch64::WSP)
    return false;

  // Do not harden the register again if it was already hardened before.
  if (RegsAlreadyMasked[Reg])
    return false;

  const bool Is64Bit = AArch64::GPR64allRegClass.contains(Reg);
  BuildMI(MBB, MBBI, MI.getDebugLoc(),
          TII->get(Is64Bit ? AArch64::SpeculationSafeValueX
                           : AArch64::SpeculationSafeValueW))
      .addDef(Reg)
      .addUse(Reg);
  RegsAlreadyMasked.set(Reg);
  return true;
}
bool AArch64SpeculationHardening::slhLoads(MachineBasicBlock &MBB) {
  bool Modified = false;

  RegsAlreadyMasked.reset();

  MachineBasicBlock::iterator MBBI = MBB.begin(), E = MBB.end();
  MachineBasicBlock::iterator NextMBBI;
  for (; MBBI != E; MBBI = NextMBBI) {
    MachineInstr &MI = *MBBI;
    NextMBBI = std::next(MBBI);
    // Only harden loaded values or addresses used in loads.
    if (!MI.mayLoad())
      continue;

    // Harden the loaded value for GPR loads (cheaper); for other loads (e.g.
    // floating point), harden the address loaded from instead.
    bool AllDefsAreGPR = llvm::all_of(MI.defs(), [&](MachineOperand &Op) {
      return Op.isReg() && (AArch64::GPR32allRegClass.contains(Op.getReg()) ||
                            AArch64::GPR64allRegClass.contains(Op.getReg()));
    });
    bool HardenLoadedData = AllDefsAreGPR;
    bool HardenAddressLoadedFrom = !HardenLoadedData;

    // Registers defined by this instruction no longer hold an already-masked
    // value.
    for (MachineOperand Op : MI.defs())
      for (MCRegAliasIterator AI(Op.getReg(), TRI, true); AI.isValid(); ++AI)
        RegsAlreadyMasked.reset(*AI);

    if (HardenLoadedData)
      for (auto Def : MI.defs()) {
        if (Def.isDead())
          continue;
        Modified |= makeGPRSpeculationSafe(MBB, NextMBBI, MI, Def.getReg());
      }

    if (HardenAddressLoadedFrom)
      for (auto Use : MI.uses()) {
        if (!Use.isReg())
          continue;
        Register Reg = Use.getReg();
        // Only GPR uses participate in the address computation for these
        // loads; skip everything else.
        if (!(AArch64::GPR32allRegClass.contains(Reg) ||
              AArch64::GPR64allRegClass.contains(Reg)))
          continue;
        Modified |= makeGPRSpeculationSafe(MBB, MBBI, MI, Reg);
      }
  }
  return Modified;
}
bool AArch64SpeculationHardening::expandSpeculationSafeValue(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
    bool UsesFullSpeculationBarrier) {
  MachineInstr &MI = *MBBI;
  unsigned Opcode = MI.getOpcode();
  bool Is64Bit = true;

  switch (Opcode) {
  default:
    break;
  case AArch64::SpeculationSafeValueW:
    Is64Bit = false;
    [[fallthrough]];
  case AArch64::SpeculationSafeValueX:
    // Only mask when a full control flow barrier is not already blocking
    // miss-speculation.
    if (!UseControlFlowSpeculationBarrier && !UsesFullSpeculationBarrier) {
      Register DstReg = MI.getOperand(0).getReg();
      Register SrcReg = MI.getOperand(1).getReg();
      // Mark the defined register (and its aliases) as needing a CSDB before
      // its next use.
      for (MachineOperand Op : MI.defs())
        for (MCRegAliasIterator AI(Op.getReg(), TRI, true); AI.isValid(); ++AI)
          RegsNeedingCSDBBeforeUse.set(*AI);

      // Mask off with the taint state.
      BuildMI(MBB, MBBI, MI.getDebugLoc(),
              Is64Bit ? TII->get(AArch64::ANDXrs) : TII->get(AArch64::ANDWrs))
          .addDef(DstReg)
          .addUse(SrcReg, RegState::Kill)
          .addUse(Is64Bit ? MisspeculatingTaintReg
                          : MisspeculatingTaintReg32Bit)
          .addImm(0);
    }
    MI.eraseFromParent();
    return true;
  }
  return false;
}
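// Conceptual sketch (illustrative only): a minimal C++ model of the masking
// that the AND above performs; the CSDB inserted by insertCSDB below keeps the
// masked value from being forwarded speculatively. The helper name
// maskUnderMisspeculation is hypothetical.
//
//   #include <cstdint>
//
//   static inline uint64_t maskUnderMisspeculation(uint64_t LoadedValue,
//                                                  uint64_t Taint) {
//     // Taint is all-ones architecturally and zero under miss-speculation, so
//     // the loaded value is zeroed exactly when the CPU is miss-speculating.
//     return LoadedValue & Taint; // models AND Xd, Xn, X16
//   }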
bool AArch64SpeculationHardening::insertCSDB(MachineBasicBlock &MBB,
                                             MachineBasicBlock::iterator MBBI,
                                             DebugLoc DL) {
  assert(!UseControlFlowSpeculationBarrier && "No need to insert CSDBs when "
                                              "control flow miss-speculation "
                                              "is already blocked");
  // Insert a data value speculation barrier (CSDB).
  BuildMI(MBB, MBBI, DL, TII->get(AArch64::HINT)).addImm(0x14);
  RegsNeedingCSDBBeforeUse.reset();
  return true;
}
bool AArch64SpeculationHardening::lowerSpeculationSafeValuePseudos(
    MachineBasicBlock &MBB, bool UsesFullSpeculationBarrier) {
  bool Modified = false;

  RegsNeedingCSDBBeforeUse.reset();

  // Walk the block, expanding SpeculationSafeValue pseudos and inserting a
  // CSDB as late as possible, i.e. just before a masked register is used
  // again, so that one CSDB can cover several masked registers.
  MachineBasicBlock::iterator MBBI = MBB.begin(), E = MBB.end();
  DebugLoc DL;
  while (MBBI != E) {
    MachineInstr &MI = *MBBI;
    DL = MI.getDebugLoc();
    MachineBasicBlock::iterator NMBBI = std::next(MBBI);

    // A CSDB is needed before any use of a masked register, and also on any
    // potential change of control flow.
    bool NeedToEmitBarrier = false;
    if (RegsNeedingCSDBBeforeUse.any() && (MI.isCall() || MI.isTerminator()))
      NeedToEmitBarrier = true;
    if (!NeedToEmitBarrier)
      for (MachineOperand Op : MI.uses())
        if (Op.isReg() && RegsNeedingCSDBBeforeUse[Op.getReg()]) {
          NeedToEmitBarrier = true;
          break;
        }

    if (NeedToEmitBarrier && !UsesFullSpeculationBarrier)
      Modified |= insertCSDB(MBB, MBBI, DL);

    Modified |=
        expandSpeculationSafeValue(MBB, MBBI, UsesFullSpeculationBarrier);

    MBBI = NMBBI;
  }

  if (RegsNeedingCSDBBeforeUse.any() && !UsesFullSpeculationBarrier)
    Modified |= insertCSDB(MBB, MBBI, DL);

  return Modified;
}
bool AArch64SpeculationHardening::runOnMachineFunction(MachineFunction &MF) {
  if (!MF.getFunction().hasFnAttribute(Attribute::SpeculativeLoadHardening))
    return false;

  MisspeculatingTaintReg = AArch64::X16;
  MisspeculatingTaintReg32Bit = AArch64::W16;
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getSubtarget().getRegisterInfo();
  RegsNeedingCSDBBeforeUse.resize(TRI->getNumRegs());
  RegsAlreadyMasked.resize(TRI->getNumRegs());
  UseControlFlowSpeculationBarrier = functionUsesHardeningRegister(MF);

  bool Modified = false;

  // Step 1: Enable automatic insertion of SpeculationSafeValue.
  if (HardenLoads) {
    LLVM_DEBUG(
        dbgs() << "***** AArch64SpeculationHardening - automatic insertion of "
                  "SpeculationSafeValue intrinsics *****\n");
    for (auto &MBB : MF)
      Modified |= slhLoads(MBB);
  }

  // Step 2: Add instrumentation code to function entry and exits.
  LLVM_DEBUG(
      dbgs()
      << "***** AArch64SpeculationHardening - track control flow *****\n");

  SmallVector<MachineBasicBlock *, 2> EntryBlocks;
  EntryBlocks.push_back(&MF.front());
  for (const LandingPadInfo &LPI : MF.getLandingPads())
    EntryBlocks.push_back(LPI.LandingPadBlock);
  for (auto *Entry : EntryBlocks)
    insertSPToRegTaintPropagation(
        *Entry, Entry->SkipPHIsLabelsAndDebug(Entry->begin()));

  // Step 3: Instrument control flow and lower the pseudos in every block.
  for (auto &MBB : MF) {
    bool UsesFullSpeculationBarrier = false;
    Modified |= instrumentControlFlow(MBB, UsesFullSpeculationBarrier);
    Modified |=
        lowerSpeculationSafeValuePseudos(MBB, UsesFullSpeculationBarrier);
  }

  return Modified;
}

FunctionPass *llvm::createAArch64SpeculationHardeningPass() {
  return new AArch64SpeculationHardening();
}