13#ifndef LLVM_LIB_TARGET_AARCH64_AARCH64MACHINEFUNCTIONINFO_H
14#define LLVM_LIB_TARGET_AARCH64_AARCH64MACHINEFUNCTIONINFO_H
66 unsigned BytesInStackArgArea = 0;
71 unsigned ArgumentStackToRestore = 0;
77 unsigned TailCallReservedStack = 0;
81 bool HasStackFrame =
false;
87 unsigned CalleeSavedStackSize = 0;
88 unsigned ZPRCalleeSavedStackSize = 0;
89 unsigned PPRCalleeSavedStackSize = 0;
90 bool HasCalleeSavedStackSize =
false;
91 bool HasSVECalleeSavedStackSize =
false;
95 unsigned NumLocalDynamicTLSAccesses = 0;
99 int VarArgsStackIndex = 0;
102 unsigned VarArgsStackOffset = 0;
106 int VarArgsGPRIndex = 0;
110 unsigned VarArgsGPRSize = 0;
114 int VarArgsFPRIndex = 0;
118 unsigned VarArgsFPRSize = 0;
123 int StackHazardSlotIndex = std::numeric_limits<int>::max();
124 int StackHazardCSRSlotIndex = std::numeric_limits<int>::max();
128 bool IsSplitCSR =
false;
132 bool StackRealigned =
false;
136 bool CalleeSaveStackHasFreeSpace =
false;
151 bool SplitSVEObjects =
false;
154 bool HasCalculatedStackSizeSVE =
false;
160 std::optional<bool> HasRedZone;
167 std::optional<int> TaggedBasePointerIndex;
172 unsigned TaggedBasePointerOffset;
176 std::optional<std::string> OutliningStyle;
180 int CalleeSaveBaseToFrameRecordOffset = 0;
186 bool SignWithBKey =
false;
191 bool HasELFSignedGOT =
false;
200 bool BranchTargetEnforcement =
false;
206 bool BranchProtectionPAuthLR =
false;
211 bool HasSwiftAsyncContext =
false;
214 int SwiftAsyncContextFrameIdx = std::numeric_limits<int>::max();
216 bool IsMTETagged =
false;
220 bool IsSVECC =
false;
223 bool HasStreamingModeChanges =
false;
226 mutable std::optional<bool> NeedsDwarfUnwindInfo;
229 mutable std::optional<bool> NeedsAsyncDwarfUnwindInfo;
231 int64_t StackProbeSize = 0;
239 unsigned PredicateRegForFillSpill = 0;
246 Register EarlyAllocSMESaveBuffer = AArch64::NoRegister;
249 int ZT0SpillSlotIndex = std::numeric_limits<int>::max();
259 bool SMESaveBufferUsed =
false;
270 EarlyAllocSMESaveBuffer = Ptr;
274 return EarlyAllocSMESaveBuffer;
280 return ZT0SpillSlotIndex;
283 return ZT0SpillSlotIndex != std::numeric_limits<int>::max();
294 PredicateRegForFillSpill =
Reg;
297 return PredicateRegForFillSpill;
313 ArgumentStackToRestore = bytes;
318 TailCallReservedStack = bytes;
323 "expected SVE stack sizes to be aligned to 16-bytes");
326 HasCalculatedStackSizeSVE =
true;
350 return CalleeSaveStackHasFreeSpace;
353 CalleeSaveStackHasFreeSpace = s;
363 return OutliningStyle;
367 CalleeSavedStackSize =
Size;
368 HasCalleeSavedStackSize =
true;
377 bool ValidateCalleeSavedStackSize =
false;
383 ValidateCalleeSavedStackSize = HasCalleeSavedStackSize;
386 if (!HasCalleeSavedStackSize || ValidateCalleeSavedStackSize) {
391 int64_t MinOffset = std::numeric_limits<int64_t>::max();
392 int64_t MaxOffset = std::numeric_limits<int64_t>::min();
394 int FrameIdx =
Info.getFrameIdx();
399 MinOffset = std::min<int64_t>(
Offset, MinOffset);
400 MaxOffset = std::max<int64_t>(
Offset + ObjSize, MaxOffset);
403 if (SwiftAsyncContextFrameIdx != std::numeric_limits<int>::max()) {
406 MinOffset = std::min<int64_t>(
Offset, MinOffset);
407 MaxOffset = std::max<int64_t>(
Offset + ObjSize, MaxOffset);
410 if (StackHazardCSRSlotIndex != std::numeric_limits<int>::max()) {
412 int64_t ObjSize = MFI.
getObjectSize(StackHazardCSRSlotIndex);
413 MinOffset = std::min<int64_t>(
Offset, MinOffset);
414 MaxOffset = std::max<int64_t>(
Offset + ObjSize, MaxOffset);
417 unsigned Size =
alignTo(MaxOffset - MinOffset, 16);
419 "Invalid size calculated for callee saves");
427 assert(HasCalleeSavedStackSize &&
428 "CalleeSavedStackSize has not been calculated");
429 return CalleeSavedStackSize;
435 "expected SVE callee-save sizes to be aligned to 16-bytes");
436 ZPRCalleeSavedStackSize = ZPR;
437 PPRCalleeSavedStackSize = PPR;
438 HasSVECalleeSavedStackSize =
true;
441 assert(HasSVECalleeSavedStackSize &&
442 "ZPRCalleeSavedStackSize has not been calculated");
443 return ZPRCalleeSavedStackSize;
446 assert(HasSVECalleeSavedStackSize &&
447 "PPRCalleeSavedStackSize has not been calculated");
448 return PPRCalleeSavedStackSize;
453 "ZPRs and PPRs are split. Use get[ZPR|PPR]CalleeSavedStackSize()");
459 return NumLocalDynamicTLSAccesses;
466 std::optional<bool>
hasRedZone()
const {
return HasRedZone; }
488 return StackHazardSlotIndex != std::numeric_limits<int>::max();
492 assert(StackHazardSlotIndex == std::numeric_limits<int>::max());
493 StackHazardSlotIndex = Index;
497 assert(StackHazardCSRSlotIndex == std::numeric_limits<int>::max());
498 StackHazardCSRSlotIndex = Index;
516 return JumpTableEntryInfo[Idx].first;
519 return JumpTableEntryInfo[Idx].second;
522 if ((
unsigned)Idx >= JumpTableEntryInfo.size())
523 JumpTableEntryInfo.resize(Idx+1);
524 JumpTableEntryInfo[Idx] = std::make_pair(
Size, PCRelSym);
542 : Kind(Kind), Args(Args.begin(), Args.end()) {
558 LOHRelated.insert_range(Args);
563 size_t InitialSize = LOHContainerSet.size();
564 erase_if(LOHContainerSet, [&](
const auto &
D) {
565 return any_of(
D.getArgs(), [&](
auto *Arg) { return MIs.contains(Arg); });
571 LOHRelated.remove_if([&](
auto *
MI) {
return MIs.
contains(
MI); });
572 return InitialSize - LOHContainerSet.size();
576 return ForwardedMustTailRegParms;
580 return TaggedBasePointerIndex;
585 return TaggedBasePointerOffset;
588 TaggedBasePointerOffset =
Offset;
592 return CalleeSaveBaseToFrameRecordOffset;
595 CalleeSaveBaseToFrameRecordOffset =
Offset;
604 return SignCondition;
623 HasSwiftAsyncContext = HasContext;
628 SwiftAsyncContextFrameIdx = FI;
637 HasStreamingModeChanges = HasChanges;
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
static GCRegistry::Add< StatepointGC > D("statepoint-example", "an example strategy for statepoint")
Analysis containing CSE Info
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
MILOHDirective(MCLOHType Kind, LOHArgs Args)
MCLOHType getKind() const
ArrayRef< const MachineInstr * > LOHArgs
AArch64FunctionInfo - This class is derived from MachineFunctionInfo and contains private AArch64-spe...
void addLOHDirective(MCLOHType Kind, MILOHArgs Args)
Add a LOH directive of this Kind with these Args.
bool needsShadowCallStackPrologueEpilogue(MachineFunction &MF) const
bool branchTargetEnforcement() const
unsigned getPPRCalleeSavedStackSize() const
void setHasStackFrame(bool s)
void setSwiftAsyncContextFrameIdx(int FI)
unsigned getTailCallReservedStack() const
unsigned getCalleeSavedStackSize(const MachineFrameInfo &MFI) const
Register getEarlyAllocSMESaveBuffer() const
unsigned getVarArgsFPRSize() const
void setCalleeSaveBaseToFrameRecordOffset(int Offset)
void setVarArgsStackOffset(unsigned Offset)
bool hasStackProbing() const
void setVarArgsStackIndex(int Index)
void setEarlyAllocSMESaveBuffer(Register Ptr)
int getZT0SpillSlotIndex() const
unsigned getArgumentStackToRestore() const
TPIDR2Object & getTPIDR2Obj()
void setTailCallReservedStack(unsigned bytes)
SmallVector< MILOHDirective, 32 > MILOHContainer
void setLocalStackSize(uint64_t Size)
void setCalleeSaveStackHasFreeSpace(bool s)
int getCalleeSaveBaseToFrameRecordOffset() const
bool hasELFSignedGOT() const
SmallVectorImpl< ForwardedRegister > & getForwardedMustTailRegParms()
void setIsSplitCSR(bool s)
int getVarArgsFPRIndex() const
SignReturnAddress getSignReturnAddressCondition() const
bool hasStreamingModeChanges() const
void setPredicateRegForFillSpill(unsigned Reg)
void setOutliningStyle(const std::string &Style)
void incNumLocalDynamicTLSAccesses()
const SetOfInstructions & getLOHRelated() const
void setBytesInStackArgArea(unsigned bytes)
int getVarArgsStackIndex() const
void setVarArgsGPRIndex(int Index)
int getStackHazardSlotIndex() const
unsigned getTaggedBasePointerOffset() const
void setCalleeSavedStackSize(unsigned Size)
int getVarArgsGPRIndex() const
int64_t getStackProbeSize() const
void setSigningInstrLabel(MCSymbol *Label)
void setHasSwiftAsyncContext(bool HasContext)
void setSplitSVEObjects(bool s)
bool branchProtectionPAuthLR() const
void setPStateSMReg(Register Reg)
void setHasRedZone(bool s)
MILOHDirective::LOHArgs MILOHArgs
bool hasStackFrame() const
int getStackHazardCSRSlotIndex() const
void setStackSizeSVE(uint64_t ZPR, uint64_t PPR)
void setVarArgsFPRSize(unsigned Size)
std::optional< int > getTaggedBasePointerIndex() const
unsigned getVarArgsStackOffset() const
SMEAttrs getSMEFnAttrs() const
AArch64FunctionInfo(const Function &F, const AArch64Subtarget *STI)
uint64_t getLocalStackSize() const
void setStackRealigned(bool s)
unsigned getJumpTableEntrySize(int Idx) const
bool needsDwarfUnwindInfo(const MachineFunction &MF) const
size_t clearLinkerOptimizationHints(const SmallPtrSetImpl< MachineInstr * > &MIs)
unsigned getVarArgsGPRSize() const
void setZT0SpillSlotIndex(int FI)
unsigned getSRetReturnReg() const
MCSymbol * getJumpTableEntryPCRelSymbol(int Idx) const
bool isStackRealigned() const
Register getPStateSMReg() const
uint64_t getStackSizePPR() const
unsigned getNumLocalDynamicTLSAccesses() const
bool hasZT0SpillSlotIndex() const
SmallPtrSet< const MachineInstr *, 16 > SetOfInstructions
bool hasSwiftAsyncContext() const
bool hasStackHazardSlotIndex() const
void setTaggedBasePointerOffset(unsigned Offset)
void setStackHazardSlotIndex(int Index)
std::optional< bool > hasRedZone() const
unsigned getZPRCalleeSavedStackSize() const
void setSMESaveBufferUsed(bool Used=true)
void setSRetReturnReg(unsigned Reg)
static bool shouldSignReturnAddress(SignReturnAddress Condition, bool IsLRSpilled)
void setStackHazardCSRSlotIndex(int Index)
void setSMESaveBufferAddr(Register Reg)
int getSwiftAsyncContextFrameIdx() const
unsigned getPredicateRegForFillSpill() const
void setSVECalleeSavedStackSize(unsigned ZPR, unsigned PPR)
bool hasCalculatedStackSizeSVE() const
std::optional< std::string > getOutliningStyle() const
unsigned getBytesInStackArgArea() const
Register getSMESaveBufferAddr() const
uint64_t getStackSizeZPR() const
void initializeBaseYamlFields(const yaml::AArch64FunctionInfo &YamlMFI)
const MILOHContainer & getLOHContainer() const
void setJumpTableEntryInfo(int Idx, unsigned Size, MCSymbol *PCRelSym)
bool hasSVEStackSize() const
bool isStackHazardIncludedInCalleeSaveArea() const
unsigned isSMESaveBufferUsed() const
unsigned getSVECalleeSavedStackSize() const
bool hasSplitSVEObjects() const
bool needsAsyncDwarfUnwindInfo(const MachineFunction &MF) const
void setVarArgsFPRIndex(int Index)
MachineFunctionInfo * clone(BumpPtrAllocator &Allocator, MachineFunction &DestMF, const DenseMap< MachineBasicBlock *, MachineBasicBlock * > &Src2DstMBB) const override
Make a functionally equivalent copy of this MachineFunctionInfo in MF.
void setVarArgsGPRSize(unsigned Size)
void setTaggedBasePointerIndex(int Index)
MCSymbol * getSigningInstrLabel() const
bool hasSVE_AAPCS(const MachineFunction &MF) const
void setArgumentStackToRestore(unsigned bytes)
void setHasStreamingModeChanges(bool HasChanges)
unsigned getCalleeSavedStackSize() const
bool hasCalleeSaveStackFreeSpace() const
bool shouldSignWithBKey() const
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
CallingConv::ID getCallingConv() const
getCallingConv()/setCallingConv(CC) - These methods get and set the calling convention of this functio...
static constexpr unsigned NoRegister
MCSymbol - Instances of this class represent a symbol name in the MC file, and MCSymbols are created ...
The MachineFrameInfo class represents an abstract stack frame until prolog/epilog code is inserted.
bool isCalleeSavedInfoValid() const
Has the callee saved info been calculated yet?
int64_t getObjectSize(int ObjectIdx) const
Return the size of the specified object.
const std::vector< CalleeSavedInfo > & getCalleeSavedInfo() const
Returns a reference to the callee saved info vector for the current function.
uint8_t getStackID(int ObjectIdx) const
int64_t getObjectOffset(int ObjectIdx) const
Return the assigned stack offset of the specified object from the incoming stack pointer.
Function & getFunction()
Return the LLVM function that this machine code represents.
Representation of each machine instruction.
Wrapper class representing virtual and physical registers.
SMEAttrs is a utility class to parse the SME ACLE attributes on functions.
A templated base class for SmallPtrSet which provides the typesafe interface that is common across al...
bool contains(ConstPtrType Ptr) const
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
void mapOptional(StringRef Key, T &Val)
@ AArch64_SVE_VectorCall
Used between AArch64 SVE functions.
This is an optimization pass for GlobalISel generic memory operations.
SignReturnAddress
Condition of signing the return address in a function.
bool isAligned(Align Lhs, uint64_t SizeInBytes)
Returns true if SizeInBytes is a multiple of the alignment.
static bool isValidMCLOHType(unsigned Kind)
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
MCLOHType
Linker Optimization Hint Type.
uint64_t alignTo(uint64_t Size, Align A)
Returns a multiple of A needed to store Size bytes.
void erase_if(Container &C, UnaryPredicate P)
Provide a container algorithm similar to C++ Library Fundamentals v2's erase_if which is equivalent t...
BumpPtrAllocatorImpl<> BumpPtrAllocator
The standard BumpPtrAllocator which just uses the default template parameters.
This struct is a compact representation of a valid (non-zero power of two) alignment.
MachineFunctionInfo - This class can be derived from and used by targets to hold private target-speci...
std::optional< bool > HasRedZone
std::optional< uint64_t > StackSizePPR
AArch64FunctionInfo()=default
std::optional< bool > HasStreamingModeChanges
std::optional< bool > HasStackFrame
std::optional< uint64_t > StackSizeZPR
~AArch64FunctionInfo() override=default
void mappingImpl(yaml::IO &YamlIO) override
Targets should override this in a way that mirrors the implementation of llvm::MachineFunctionInfo.
static void mapping(IO &YamlIO, AArch64FunctionInfo &MFI)
This class should be specialized by any type that needs to be converted to/from a YAML mapping.