LLVM 19.0.0git
Classes | Namespaces | Enumerations | Functions | Variables
AArch64ISelLowering.h File Reference
#include "AArch64.h"
#include "Utils/AArch64SMEAttributes.h"
#include "llvm/CodeGen/CallingConvLower.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/SelectionDAG.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/Instruction.h"

Go to the source code of this file.

Classes

class  llvm::AArch64TargetLowering
 

Namespaces

namespace  llvm
 The umbrella namespace containing all LLVM APIs. (Note: the original page carried a stray brief from an unrelated GlobalISel pass here — a Doxygen extraction artifact.)
 
namespace  llvm::AArch64ISD
 
namespace  llvm::AArch64
 

Enumerations

enum  llvm::AArch64ISD::NodeType : unsigned {
  llvm::AArch64ISD::FIRST_NUMBER = ISD::BUILTIN_OP_END , llvm::AArch64ISD::WrapperLarge , llvm::AArch64ISD::CALL , llvm::AArch64ISD::CALL_RVMARKER ,
  llvm::AArch64ISD::CALL_BTI , llvm::AArch64ISD::AUTH_CALL , llvm::AArch64ISD::AUTH_TC_RETURN , llvm::AArch64ISD::AUTH_CALL_RVMARKER ,
  llvm::AArch64ISD::COALESCER_BARRIER , llvm::AArch64ISD::SMSTART , llvm::AArch64ISD::SMSTOP , llvm::AArch64ISD::RESTORE_ZA ,
  llvm::AArch64ISD::RESTORE_ZT , llvm::AArch64ISD::SAVE_ZT , llvm::AArch64ISD::CALL_ARM64EC_TO_X64 , llvm::AArch64ISD::TLSDESC_CALLSEQ ,
  llvm::AArch64ISD::ADRP , llvm::AArch64ISD::ADR , llvm::AArch64ISD::ADDlow , llvm::AArch64ISD::LOADgot ,
  llvm::AArch64ISD::RET_GLUE , llvm::AArch64ISD::BRCOND , llvm::AArch64ISD::CSEL , llvm::AArch64ISD::CSINV ,
  llvm::AArch64ISD::CSNEG , llvm::AArch64ISD::CSINC , llvm::AArch64ISD::THREAD_POINTER , llvm::AArch64ISD::ADC ,
  llvm::AArch64ISD::SBC , llvm::AArch64ISD::PROBED_ALLOCA , llvm::AArch64ISD::ABDS_PRED , llvm::AArch64ISD::ABDU_PRED ,
  llvm::AArch64ISD::FADD_PRED , llvm::AArch64ISD::FDIV_PRED , llvm::AArch64ISD::FMA_PRED , llvm::AArch64ISD::FMAX_PRED ,
  llvm::AArch64ISD::FMAXNM_PRED , llvm::AArch64ISD::FMIN_PRED , llvm::AArch64ISD::FMINNM_PRED , llvm::AArch64ISD::FMUL_PRED ,
  llvm::AArch64ISD::FSUB_PRED , llvm::AArch64ISD::HADDS_PRED , llvm::AArch64ISD::HADDU_PRED , llvm::AArch64ISD::MUL_PRED ,
  llvm::AArch64ISD::MULHS_PRED , llvm::AArch64ISD::MULHU_PRED , llvm::AArch64ISD::RHADDS_PRED , llvm::AArch64ISD::RHADDU_PRED ,
  llvm::AArch64ISD::SDIV_PRED , llvm::AArch64ISD::SHL_PRED , llvm::AArch64ISD::SMAX_PRED , llvm::AArch64ISD::SMIN_PRED ,
  llvm::AArch64ISD::SRA_PRED , llvm::AArch64ISD::SRL_PRED , llvm::AArch64ISD::UDIV_PRED , llvm::AArch64ISD::UMAX_PRED ,
  llvm::AArch64ISD::UMIN_PRED , llvm::AArch64ISD::BIC , llvm::AArch64ISD::SRAD_MERGE_OP1 , llvm::AArch64ISD::FABS_MERGE_PASSTHRU ,
  llvm::AArch64ISD::FCEIL_MERGE_PASSTHRU , llvm::AArch64ISD::FFLOOR_MERGE_PASSTHRU , llvm::AArch64ISD::FNEARBYINT_MERGE_PASSTHRU , llvm::AArch64ISD::FNEG_MERGE_PASSTHRU ,
  llvm::AArch64ISD::FRECPX_MERGE_PASSTHRU , llvm::AArch64ISD::FRINT_MERGE_PASSTHRU , llvm::AArch64ISD::FROUND_MERGE_PASSTHRU , llvm::AArch64ISD::FROUNDEVEN_MERGE_PASSTHRU ,
  llvm::AArch64ISD::FSQRT_MERGE_PASSTHRU , llvm::AArch64ISD::FTRUNC_MERGE_PASSTHRU , llvm::AArch64ISD::FP_ROUND_MERGE_PASSTHRU , llvm::AArch64ISD::FP_EXTEND_MERGE_PASSTHRU ,
  llvm::AArch64ISD::UINT_TO_FP_MERGE_PASSTHRU , llvm::AArch64ISD::SINT_TO_FP_MERGE_PASSTHRU , llvm::AArch64ISD::FCVTZU_MERGE_PASSTHRU , llvm::AArch64ISD::FCVTZS_MERGE_PASSTHRU ,
  llvm::AArch64ISD::SIGN_EXTEND_INREG_MERGE_PASSTHRU , llvm::AArch64ISD::ZERO_EXTEND_INREG_MERGE_PASSTHRU , llvm::AArch64ISD::ABS_MERGE_PASSTHRU , llvm::AArch64ISD::NEG_MERGE_PASSTHRU ,
  llvm::AArch64ISD::SETCC_MERGE_ZERO , llvm::AArch64ISD::ADDS , llvm::AArch64ISD::SUBS , llvm::AArch64ISD::ADCS ,
  llvm::AArch64ISD::SBCS , llvm::AArch64ISD::ANDS , llvm::AArch64ISD::CCMP , llvm::AArch64ISD::CCMN ,
  llvm::AArch64ISD::FCCMP , llvm::AArch64ISD::FCMP , llvm::AArch64ISD::DUP , llvm::AArch64ISD::DUPLANE8 ,
  llvm::AArch64ISD::DUPLANE16 , llvm::AArch64ISD::DUPLANE32 , llvm::AArch64ISD::DUPLANE64 , llvm::AArch64ISD::DUPLANE128 ,
  llvm::AArch64ISD::MOVI , llvm::AArch64ISD::MOVIshift , llvm::AArch64ISD::MOVIedit , llvm::AArch64ISD::MOVImsl ,
  llvm::AArch64ISD::FMOV , llvm::AArch64ISD::MVNIshift , llvm::AArch64ISD::MVNImsl , llvm::AArch64ISD::BICi ,
  llvm::AArch64ISD::ORRi , llvm::AArch64ISD::BSP , llvm::AArch64ISD::ZIP1 , llvm::AArch64ISD::ZIP2 ,
  llvm::AArch64ISD::UZP1 , llvm::AArch64ISD::UZP2 , llvm::AArch64ISD::TRN1 , llvm::AArch64ISD::TRN2 ,
  llvm::AArch64ISD::REV16 , llvm::AArch64ISD::REV32 , llvm::AArch64ISD::REV64 , llvm::AArch64ISD::EXT ,
  llvm::AArch64ISD::SPLICE , llvm::AArch64ISD::VSHL , llvm::AArch64ISD::VLSHR , llvm::AArch64ISD::VASHR ,
  llvm::AArch64ISD::SQSHL_I , llvm::AArch64ISD::UQSHL_I , llvm::AArch64ISD::SQSHLU_I , llvm::AArch64ISD::SRSHR_I ,
  llvm::AArch64ISD::URSHR_I , llvm::AArch64ISD::URSHR_I_PRED , llvm::AArch64ISD::RSHRNB_I , llvm::AArch64ISD::VSLI ,
  llvm::AArch64ISD::VSRI , llvm::AArch64ISD::CMEQ , llvm::AArch64ISD::CMGE , llvm::AArch64ISD::CMGT ,
  llvm::AArch64ISD::CMHI , llvm::AArch64ISD::CMHS , llvm::AArch64ISD::FCMEQ , llvm::AArch64ISD::FCMGE ,
  llvm::AArch64ISD::FCMGT , llvm::AArch64ISD::CMEQz , llvm::AArch64ISD::CMGEz , llvm::AArch64ISD::CMGTz ,
  llvm::AArch64ISD::CMLEz , llvm::AArch64ISD::CMLTz , llvm::AArch64ISD::FCMEQz , llvm::AArch64ISD::FCMGEz ,
  llvm::AArch64ISD::FCMGTz , llvm::AArch64ISD::FCMLEz , llvm::AArch64ISD::FCMLTz , llvm::AArch64ISD::FCVTXN ,
  llvm::AArch64ISD::SADDV , llvm::AArch64ISD::UADDV , llvm::AArch64ISD::UADDLV , llvm::AArch64ISD::SADDLV ,
  llvm::AArch64ISD::ADDP , llvm::AArch64ISD::SADDLP , llvm::AArch64ISD::UADDLP , llvm::AArch64ISD::UDOT ,
  llvm::AArch64ISD::SDOT , llvm::AArch64ISD::SMINV , llvm::AArch64ISD::UMINV , llvm::AArch64ISD::SMAXV ,
  llvm::AArch64ISD::UMAXV , llvm::AArch64ISD::SADDV_PRED , llvm::AArch64ISD::UADDV_PRED , llvm::AArch64ISD::SMAXV_PRED ,
  llvm::AArch64ISD::UMAXV_PRED , llvm::AArch64ISD::SMINV_PRED , llvm::AArch64ISD::UMINV_PRED , llvm::AArch64ISD::ORV_PRED ,
  llvm::AArch64ISD::EORV_PRED , llvm::AArch64ISD::ANDV_PRED , llvm::AArch64ISD::CBZ , llvm::AArch64ISD::CBNZ ,
  llvm::AArch64ISD::TBZ , llvm::AArch64ISD::TBNZ , llvm::AArch64ISD::TC_RETURN , llvm::AArch64ISD::PREFETCH ,
  llvm::AArch64ISD::SITOF , llvm::AArch64ISD::UITOF , llvm::AArch64ISD::NVCAST , llvm::AArch64ISD::MRS ,
  llvm::AArch64ISD::SMULL , llvm::AArch64ISD::UMULL , llvm::AArch64ISD::PMULL , llvm::AArch64ISD::FRECPE ,
  llvm::AArch64ISD::FRECPS , llvm::AArch64ISD::FRSQRTE , llvm::AArch64ISD::FRSQRTS , llvm::AArch64ISD::SUNPKHI ,
  llvm::AArch64ISD::SUNPKLO , llvm::AArch64ISD::UUNPKHI , llvm::AArch64ISD::UUNPKLO , llvm::AArch64ISD::CLASTA_N ,
  llvm::AArch64ISD::CLASTB_N , llvm::AArch64ISD::LASTA , llvm::AArch64ISD::LASTB , llvm::AArch64ISD::TBL ,
  llvm::AArch64ISD::FADDA_PRED , llvm::AArch64ISD::FADDV_PRED , llvm::AArch64ISD::FMAXV_PRED , llvm::AArch64ISD::FMAXNMV_PRED ,
  llvm::AArch64ISD::FMINV_PRED , llvm::AArch64ISD::FMINNMV_PRED , llvm::AArch64ISD::INSR , llvm::AArch64ISD::PTEST ,
  llvm::AArch64ISD::PTEST_ANY , llvm::AArch64ISD::PTRUE , llvm::AArch64ISD::CTTZ_ELTS , llvm::AArch64ISD::BITREVERSE_MERGE_PASSTHRU ,
  llvm::AArch64ISD::BSWAP_MERGE_PASSTHRU , llvm::AArch64ISD::REVH_MERGE_PASSTHRU , llvm::AArch64ISD::REVW_MERGE_PASSTHRU , llvm::AArch64ISD::CTLZ_MERGE_PASSTHRU ,
  llvm::AArch64ISD::CTPOP_MERGE_PASSTHRU , llvm::AArch64ISD::DUP_MERGE_PASSTHRU , llvm::AArch64ISD::INDEX_VECTOR , llvm::AArch64ISD::REINTERPRET_CAST ,
  llvm::AArch64ISD::LS64_BUILD , llvm::AArch64ISD::LS64_EXTRACT , llvm::AArch64ISD::LD1_MERGE_ZERO , llvm::AArch64ISD::LD1S_MERGE_ZERO ,
  llvm::AArch64ISD::LDNF1_MERGE_ZERO , llvm::AArch64ISD::LDNF1S_MERGE_ZERO , llvm::AArch64ISD::LDFF1_MERGE_ZERO , llvm::AArch64ISD::LDFF1S_MERGE_ZERO ,
  llvm::AArch64ISD::LD1RQ_MERGE_ZERO , llvm::AArch64ISD::LD1RO_MERGE_ZERO , llvm::AArch64ISD::SVE_LD2_MERGE_ZERO , llvm::AArch64ISD::SVE_LD3_MERGE_ZERO ,
  llvm::AArch64ISD::SVE_LD4_MERGE_ZERO , llvm::AArch64ISD::GLD1_MERGE_ZERO , llvm::AArch64ISD::GLD1_SCALED_MERGE_ZERO , llvm::AArch64ISD::GLD1_UXTW_MERGE_ZERO ,
  llvm::AArch64ISD::GLD1_SXTW_MERGE_ZERO , llvm::AArch64ISD::GLD1_UXTW_SCALED_MERGE_ZERO , llvm::AArch64ISD::GLD1_SXTW_SCALED_MERGE_ZERO , llvm::AArch64ISD::GLD1_IMM_MERGE_ZERO ,
  llvm::AArch64ISD::GLD1Q_MERGE_ZERO , llvm::AArch64ISD::GLD1Q_INDEX_MERGE_ZERO , llvm::AArch64ISD::GLD1S_MERGE_ZERO , llvm::AArch64ISD::GLD1S_SCALED_MERGE_ZERO ,
  llvm::AArch64ISD::GLD1S_UXTW_MERGE_ZERO , llvm::AArch64ISD::GLD1S_SXTW_MERGE_ZERO , llvm::AArch64ISD::GLD1S_UXTW_SCALED_MERGE_ZERO , llvm::AArch64ISD::GLD1S_SXTW_SCALED_MERGE_ZERO ,
  llvm::AArch64ISD::GLD1S_IMM_MERGE_ZERO , llvm::AArch64ISD::GLDFF1_MERGE_ZERO , llvm::AArch64ISD::GLDFF1_SCALED_MERGE_ZERO , llvm::AArch64ISD::GLDFF1_UXTW_MERGE_ZERO ,
  llvm::AArch64ISD::GLDFF1_SXTW_MERGE_ZERO , llvm::AArch64ISD::GLDFF1_UXTW_SCALED_MERGE_ZERO , llvm::AArch64ISD::GLDFF1_SXTW_SCALED_MERGE_ZERO , llvm::AArch64ISD::GLDFF1_IMM_MERGE_ZERO ,
  llvm::AArch64ISD::GLDFF1S_MERGE_ZERO , llvm::AArch64ISD::GLDFF1S_SCALED_MERGE_ZERO , llvm::AArch64ISD::GLDFF1S_UXTW_MERGE_ZERO , llvm::AArch64ISD::GLDFF1S_SXTW_MERGE_ZERO ,
  llvm::AArch64ISD::GLDFF1S_UXTW_SCALED_MERGE_ZERO , llvm::AArch64ISD::GLDFF1S_SXTW_SCALED_MERGE_ZERO , llvm::AArch64ISD::GLDFF1S_IMM_MERGE_ZERO , llvm::AArch64ISD::GLDNT1_MERGE_ZERO ,
  llvm::AArch64ISD::GLDNT1_INDEX_MERGE_ZERO , llvm::AArch64ISD::GLDNT1S_MERGE_ZERO , llvm::AArch64ISD::ST1_PRED , llvm::AArch64ISD::SST1_PRED ,
  llvm::AArch64ISD::SST1_SCALED_PRED , llvm::AArch64ISD::SST1_UXTW_PRED , llvm::AArch64ISD::SST1_SXTW_PRED , llvm::AArch64ISD::SST1_UXTW_SCALED_PRED ,
  llvm::AArch64ISD::SST1_SXTW_SCALED_PRED , llvm::AArch64ISD::SST1_IMM_PRED , llvm::AArch64ISD::SST1Q_PRED , llvm::AArch64ISD::SST1Q_INDEX_PRED ,
  llvm::AArch64ISD::SSTNT1_PRED , llvm::AArch64ISD::SSTNT1_INDEX_PRED , llvm::AArch64ISD::RDSVL , llvm::AArch64ISD::REVD_MERGE_PASSTHRU ,
  llvm::AArch64ISD::ASSERT_ZEXT_BOOL , llvm::AArch64ISD::MRRS , llvm::AArch64ISD::MSRR , llvm::AArch64ISD::STRICT_FCMP = ISD::FIRST_TARGET_STRICTFP_OPCODE ,
  llvm::AArch64ISD::STRICT_FCMPE , llvm::AArch64ISD::SME_ZA_LDR , llvm::AArch64ISD::SME_ZA_STR , llvm::AArch64ISD::LD2post = ISD::FIRST_TARGET_MEMORY_OPCODE ,
  llvm::AArch64ISD::LD3post , llvm::AArch64ISD::LD4post , llvm::AArch64ISD::ST2post , llvm::AArch64ISD::ST3post ,
  llvm::AArch64ISD::ST4post , llvm::AArch64ISD::LD1x2post , llvm::AArch64ISD::LD1x3post , llvm::AArch64ISD::LD1x4post ,
  llvm::AArch64ISD::ST1x2post , llvm::AArch64ISD::ST1x3post , llvm::AArch64ISD::ST1x4post , llvm::AArch64ISD::LD1DUPpost ,
  llvm::AArch64ISD::LD2DUPpost , llvm::AArch64ISD::LD3DUPpost , llvm::AArch64ISD::LD4DUPpost , llvm::AArch64ISD::LD1LANEpost ,
  llvm::AArch64ISD::LD2LANEpost , llvm::AArch64ISD::LD3LANEpost , llvm::AArch64ISD::LD4LANEpost , llvm::AArch64ISD::ST2LANEpost ,
  llvm::AArch64ISD::ST3LANEpost , llvm::AArch64ISD::ST4LANEpost , llvm::AArch64ISD::STG , llvm::AArch64ISD::STZG ,
  llvm::AArch64ISD::ST2G , llvm::AArch64ISD::STZ2G , llvm::AArch64ISD::LDP , llvm::AArch64ISD::LDIAPP ,
  llvm::AArch64ISD::LDNP , llvm::AArch64ISD::STP , llvm::AArch64ISD::STILP , llvm::AArch64ISD::STNP ,
  llvm::AArch64ISD::MOPS_MEMSET , llvm::AArch64ISD::MOPS_MEMSET_TAGGING , llvm::AArch64ISD::MOPS_MEMCOPY , llvm::AArch64ISD::MOPS_MEMMOVE
}
 
enum  llvm::AArch64::Rounding {
  llvm::AArch64::RN = 0 , llvm::AArch64::RP = 1 , llvm::AArch64::RM = 2 , llvm::AArch64::RZ = 3 ,
  llvm::AArch64::rmMask = 3
}
 Possible values of current rounding mode, which is specified in bits 23:22 of FPCR. More...
 

Functions

ArrayRef< MCPhysReg > llvm::AArch64::getGPRArgRegs ()
 
ArrayRef< MCPhysReg > llvm::AArch64::getFPRArgRegs ()
 
FastISel * llvm::AArch64::createFastISel (FunctionLoweringInfo &funcInfo, const TargetLibraryInfo *libInfo)
 

Variables

const unsigned llvm::AArch64::RoundingBitsPos = 22
 
const uint64_t llvm::AArch64::ReservedFPControlBits = 0xfffffffff80040f8
 
const unsigned llvm::AArch64::StackProbeMaxUnprobedStack = 1024
 Maximum allowed number of unprobed bytes above SP at an ABI boundary.
 
const unsigned llvm::AArch64::StackProbeMaxLoopUnroll = 4
 Maximum number of iterations to unroll for a constant size probing loop.