// Registers used to pass half/float/double arguments (fa0-fa7).
static const MCPhysReg ArgFPR16s[] = {RISCV::F10_H, RISCV::F11_H, RISCV::F12_H,
                                      RISCV::F13_H, RISCV::F14_H, RISCV::F15_H,
                                      RISCV::F16_H, RISCV::F17_H};
static const MCPhysReg ArgFPR32s[] = {RISCV::F10_F, RISCV::F11_F, RISCV::F12_F,
                                      RISCV::F13_F, RISCV::F14_F, RISCV::F15_F,
                                      RISCV::F16_F, RISCV::F17_F};
static const MCPhysReg ArgFPR64s[] = {RISCV::F10_D, RISCV::F11_D, RISCV::F12_D,
                                      RISCV::F13_D, RISCV::F14_D, RISCV::F15_D,
                                      RISCV::F16_D, RISCV::F17_D};
 
// Registers used to pass RVV arguments: v8-v23, as single registers and as
// LMUL=2/4/8 register groups.
static const MCPhysReg ArgVRs[] = {
    RISCV::V8,  RISCV::V9,  RISCV::V10, RISCV::V11, RISCV::V12, RISCV::V13,
    RISCV::V14, RISCV::V15, RISCV::V16, RISCV::V17, RISCV::V18, RISCV::V19,
    RISCV::V20, RISCV::V21, RISCV::V22, RISCV::V23};
static const MCPhysReg ArgVRM2s[] = {RISCV::V8M2,  RISCV::V10M2, RISCV::V12M2,
                                     RISCV::V14M2, RISCV::V16M2, RISCV::V18M2,
                                     RISCV::V20M2, RISCV::V22M2};
static const MCPhysReg ArgVRM4s[] = {RISCV::V8M4, RISCV::V12M4, RISCV::V16M4,
                                     RISCV::V20M4};
static const MCPhysReg ArgVRM8s[] = {RISCV::V8M8, RISCV::V16M8};
 
 
static const MCPhysReg ArgVRN2M1s[] = {
    RISCV::V8_V9,   RISCV::V9_V10,  RISCV::V10_V11, RISCV::V11_V12,
    RISCV::V12_V13, RISCV::V13_V14, RISCV::V14_V15, RISCV::V15_V16,
    RISCV::V16_V17, RISCV::V17_V18, RISCV::V18_V19, RISCV::V19_V20,
    RISCV::V20_V21, RISCV::V21_V22, RISCV::V22_V23};
 
 
static const MCPhysReg ArgVRN3M1s[] = {
    RISCV::V8_V9_V10,   RISCV::V9_V10_V11,  RISCV::V10_V11_V12,
    RISCV::V11_V12_V13, RISCV::V12_V13_V14, RISCV::V13_V14_V15,
    RISCV::V14_V15_V16, RISCV::V15_V16_V17, RISCV::V16_V17_V18,
    RISCV::V17_V18_V19, RISCV::V18_V19_V20, RISCV::V19_V20_V21,
    RISCV::V20_V21_V22, RISCV::V21_V22_V23};
 
 
static const MCPhysReg ArgVRN4M1s[] = {
    RISCV::V8_V9_V10_V11,   RISCV::V9_V10_V11_V12,  RISCV::V10_V11_V12_V13,
    RISCV::V11_V12_V13_V14, RISCV::V12_V13_V14_V15, RISCV::V13_V14_V15_V16,
    RISCV::V14_V15_V16_V17, RISCV::V15_V16_V17_V18, RISCV::V16_V17_V18_V19,
    RISCV::V17_V18_V19_V20, RISCV::V18_V19_V20_V21, RISCV::V19_V20_V21_V22,
    RISCV::V20_V21_V22_V23};
 
 
static const MCPhysReg ArgVRN5M1s[] = {
    RISCV::V8_V9_V10_V11_V12,   RISCV::V9_V10_V11_V12_V13,
    RISCV::V10_V11_V12_V13_V14, RISCV::V11_V12_V13_V14_V15,
    RISCV::V12_V13_V14_V15_V16, RISCV::V13_V14_V15_V16_V17,
    RISCV::V14_V15_V16_V17_V18, RISCV::V15_V16_V17_V18_V19,
    RISCV::V16_V17_V18_V19_V20, RISCV::V17_V18_V19_V20_V21,
    RISCV::V18_V19_V20_V21_V22, RISCV::V19_V20_V21_V22_V23};
 
 
static const MCPhysReg ArgVRN6M1s[] = {
    RISCV::V8_V9_V10_V11_V12_V13,   RISCV::V9_V10_V11_V12_V13_V14,
    RISCV::V10_V11_V12_V13_V14_V15, RISCV::V11_V12_V13_V14_V15_V16,
    RISCV::V12_V13_V14_V15_V16_V17, RISCV::V13_V14_V15_V16_V17_V18,
    RISCV::V14_V15_V16_V17_V18_V19, RISCV::V15_V16_V17_V18_V19_V20,
    RISCV::V16_V17_V18_V19_V20_V21, RISCV::V17_V18_V19_V20_V21_V22,
    RISCV::V18_V19_V20_V21_V22_V23};
 
 
static const MCPhysReg ArgVRN7M1s[] = {
    RISCV::V8_V9_V10_V11_V12_V13_V14,   RISCV::V9_V10_V11_V12_V13_V14_V15,
    RISCV::V10_V11_V12_V13_V14_V15_V16, RISCV::V11_V12_V13_V14_V15_V16_V17,
    RISCV::V12_V13_V14_V15_V16_V17_V18, RISCV::V13_V14_V15_V16_V17_V18_V19,
    RISCV::V14_V15_V16_V17_V18_V19_V20, RISCV::V15_V16_V17_V18_V19_V20_V21,
    RISCV::V16_V17_V18_V19_V20_V21_V22, RISCV::V17_V18_V19_V20_V21_V22_V23};
 
 
static const MCPhysReg ArgVRN8M1s[] = {RISCV::V8_V9_V10_V11_V12_V13_V14_V15,
                                       RISCV::V9_V10_V11_V12_V13_V14_V15_V16,
                                       RISCV::V10_V11_V12_V13_V14_V15_V16_V17,
                                       RISCV::V11_V12_V13_V14_V15_V16_V17_V18,
                                       RISCV::V12_V13_V14_V15_V16_V17_V18_V19,
                                       RISCV::V13_V14_V15_V16_V17_V18_V19_V20,
                                       RISCV::V14_V15_V16_V17_V18_V19_V20_V21,
                                       RISCV::V15_V16_V17_V18_V19_V20_V21_V22,
                                       RISCV::V16_V17_V18_V19_V20_V21_V22_V23};
 
 
static const MCPhysReg ArgVRN2M2s[] = {RISCV::V8M2_V10M2,  RISCV::V10M2_V12M2,
                                       RISCV::V12M2_V14M2, RISCV::V14M2_V16M2,
                                       RISCV::V16M2_V18M2, RISCV::V18M2_V20M2,
                                       RISCV::V20M2_V22M2};
 
 
static const MCPhysReg ArgVRN3M2s[] = {
    RISCV::V8M2_V10M2_V12M2,  RISCV::V10M2_V12M2_V14M2,
    RISCV::V12M2_V14M2_V16M2, RISCV::V14M2_V16M2_V18M2,
    RISCV::V16M2_V18M2_V20M2, RISCV::V18M2_V20M2_V22M2};
 
 
static const MCPhysReg ArgVRN4M2s[] = {
    RISCV::V8M2_V10M2_V12M2_V14M2,  RISCV::V10M2_V12M2_V14M2_V16M2,
    RISCV::V12M2_V14M2_V16M2_V18M2, RISCV::V14M2_V16M2_V18M2_V20M2,
    RISCV::V16M2_V18M2_V20M2_V22M2};
static const MCPhysReg ArgVRN2M4s[] = {RISCV::V8M4_V12M4, RISCV::V12M4_V16M4,
                                       RISCV::V16M4_V20M4};
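// Illustrative note (not in the original source): CCState::AllocateReg marks
// every alias of an allocated register as used, so the overlapping tuples in
// these lists never collide. For example, two consecutive VRN2M1 (segment
// pair) arguments are assigned v8_v9 and then v10_v11, because v9_v10
// aliases the already-allocated v9 and is skipped.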
 
 
ArrayRef<MCPhysReg> RISCV::getArgGPRs(const RISCVABI::ABI ABI) {
  // The GPRs used for passing arguments in the ILP32* and LP64* ABIs, except
  // the ILP32E/LP64E ABIs.
  static const MCPhysReg ArgIGPRs[] = {RISCV::X10, RISCV::X11, RISCV::X12,
                                       RISCV::X13, RISCV::X14, RISCV::X15,
                                       RISCV::X16, RISCV::X17};
  // The GPRs used for passing arguments in the ILP32E/LP64E ABIs.
  static const MCPhysReg ArgEGPRs[] = {RISCV::X10, RISCV::X11, RISCV::X12,
                                       RISCV::X13, RISCV::X14, RISCV::X15};

  if (ABI == RISCVABI::ABI_ILP32E || ABI == RISCVABI::ABI_LP64E)
    return ArrayRef(ArgEGPRs);
  return ArrayRef(ArgIGPRs);
}
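// Illustrative only (not in the original source): a minimal sketch of how the
// pool above could be queried; countArgGPRs is a hypothetical helper. With
// ILP32/LP64 it returns 8 (a0-a7); with ILP32E/LP64E it returns 6 (a0-a5),
// so a seventh integer argument goes to the stack under the E ABIs.
static unsigned countArgGPRs(const RISCVABI::ABI ABI) {
  return RISCV::getArgGPRs(ABI).size();
}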
 
 
static ArrayRef<MCPhysReg> getArgGPR16s(const RISCVABI::ABI ABI) {
  // The 16-bit GPR halves (used with Zhinxmin) for passing f16/bf16 arguments.
  static const MCPhysReg ArgIGPRs[] = {RISCV::X10_H, RISCV::X11_H, RISCV::X12_H,
                                       RISCV::X13_H, RISCV::X14_H, RISCV::X15_H,
                                       RISCV::X16_H, RISCV::X17_H};
  static const MCPhysReg ArgEGPRs[] = {RISCV::X10_H, RISCV::X11_H,
                                       RISCV::X12_H, RISCV::X13_H,
                                       RISCV::X14_H, RISCV::X15_H};

  if (ABI == RISCVABI::ABI_ILP32E || ABI == RISCVABI::ABI_LP64E)
    return ArrayRef(ArgEGPRs);
  return ArrayRef(ArgIGPRs);
}
 
 
static ArrayRef<MCPhysReg> getArgGPR32s(const RISCVABI::ABI ABI) {
  // The 32-bit GPR halves (used with Zfinx) for passing f32 arguments.
  static const MCPhysReg ArgIGPRs[] = {RISCV::X10_W, RISCV::X11_W, RISCV::X12_W,
                                       RISCV::X13_W, RISCV::X14_W, RISCV::X15_W,
                                       RISCV::X16_W, RISCV::X17_W};
  static const MCPhysReg ArgEGPRs[] = {RISCV::X10_W, RISCV::X11_W,
                                       RISCV::X12_W, RISCV::X13_W,
                                       RISCV::X14_W, RISCV::X15_W};

  if (ABI == RISCVABI::ABI_ILP32E || ABI == RISCVABI::ABI_LP64E)
    return ArrayRef(ArgEGPRs);
  return ArrayRef(ArgIGPRs);
}
 
 
static ArrayRef<MCPhysReg> getFastCCArgGPRs(const RISCVABI::ABI ABI) {
  // The GPRs used for passing arguments in the FastCC: the a-registers plus
  // t3-t6. X5/X6 may be clobbered by save/restore libcalls and X7 is the
  // Zicfilp landing-pad label register, so they are not used.
  static const MCPhysReg FastCCIGPRs[] = {
      RISCV::X10, RISCV::X11, RISCV::X12, RISCV::X13, RISCV::X14, RISCV::X15,
      RISCV::X16, RISCV::X17, RISCV::X28, RISCV::X29, RISCV::X30, RISCV::X31};
  // The GPRs used for passing arguments in the FastCC with ILP32E/LP64E.
  static const MCPhysReg FastCCEGPRs[] = {RISCV::X10, RISCV::X11, RISCV::X12,
                                          RISCV::X13, RISCV::X14, RISCV::X15};

  if (ABI == RISCVABI::ABI_ILP32E || ABI == RISCVABI::ABI_LP64E)
    return ArrayRef(FastCCEGPRs);
  return ArrayRef(FastCCIGPRs);
}
 
 
static ArrayRef<MCPhysReg> getFastCCArgGPRF16s(const RISCVABI::ABI ABI) {
  // The 16-bit GPR halves used for passing f16/bf16 arguments in the FastCC
  // with Zhinxmin.
  static const MCPhysReg FastCCIGPRs[] = {
      RISCV::X10_H, RISCV::X11_H, RISCV::X12_H, RISCV::X13_H,
      RISCV::X14_H, RISCV::X15_H, RISCV::X16_H, RISCV::X17_H,
      RISCV::X28_H, RISCV::X29_H, RISCV::X30_H, RISCV::X31_H};
  static const MCPhysReg FastCCEGPRs[] = {RISCV::X10_H, RISCV::X11_H,
                                          RISCV::X12_H, RISCV::X13_H,
                                          RISCV::X14_H, RISCV::X15_H};

  if (ABI == RISCVABI::ABI_ILP32E || ABI == RISCVABI::ABI_LP64E)
    return ArrayRef(FastCCEGPRs);
  return ArrayRef(FastCCIGPRs);
}
 
 
static ArrayRef<MCPhysReg> getFastCCArgGPRF32s(const RISCVABI::ABI ABI) {
  // The 32-bit GPR halves used for passing f32 arguments in the FastCC with
  // Zfinx.
  static const MCPhysReg FastCCIGPRs[] = {
      RISCV::X10_W, RISCV::X11_W, RISCV::X12_W, RISCV::X13_W,
      RISCV::X14_W, RISCV::X15_W, RISCV::X16_W, RISCV::X17_W,
      RISCV::X28_W, RISCV::X29_W, RISCV::X30_W, RISCV::X31_W};
  static const MCPhysReg FastCCEGPRs[] = {RISCV::X10_W, RISCV::X11_W,
                                          RISCV::X12_W, RISCV::X13_W,
                                          RISCV::X14_W, RISCV::X15_W};

  if (ABI == RISCVABI::ABI_ILP32E || ABI == RISCVABI::ABI_LP64E)
    return ArrayRef(FastCCEGPRs);
  return ArrayRef(FastCCIGPRs);
}
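// Illustrative note (not in the original source): relative to the standard
// convention, FastCC widens the integer argument pool from eight a-registers
// to twelve by adding the temporaries t3-t6 (x28-x31), and widens the FPR
// pool in CC_RISCV_FastCC below from fa0-fa7 to twenty registers.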
 
 
// Pass a 2*XLEN argument that has been split into two XLEN values through
// registers or the stack as necessary.
static bool CC_RISCVAssign2XLen(unsigned XLen, CCState &State, CCValAssign VA1,
                                ISD::ArgFlagsTy ArgFlags1, unsigned ValNo2,
                                MVT ValVT2, MVT LocVT2,
                                ISD::ArgFlagsTy ArgFlags2, bool EABI) {
  unsigned XLenInBytes = XLen / 8;
  // ... (if a register is free, the first half is assigned to it; otherwise
  // both halves go on the stack, where the first slot keeps the original
  // type's alignment, except on 32-bit E ABIs which only guarantee XLEN
  // alignment)
  Align StackAlign(XLenInBytes);
  if (!EABI || XLen != 32)
    StackAlign = std::max(StackAlign, ArgFlags1.getNonZeroOrigAlign());
  State.addLoc(
      CCValAssign::getMem(VA1.getValNo(), VA1.getValVT(),
                          State.AllocateStack(XLenInBytes, StackAlign),
                          VA1.getLocVT(), CCValAssign::Full));
  State.addLoc(CCValAssign::getMem(
      ValNo2, ValVT2, State.AllocateStack(XLenInBytes, Align(XLenInBytes)),
      LocVT2, CCValAssign::Full));
  // ... (if only the second half misses a register, it alone is assigned a
  // stack slot using the same getMem pattern)
  return false;
}
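// Illustrative note (not in the original source): this routine implements the
// psABI rule for scalars of 2*XLEN bits. Under ILP32, a named `long long`
// following one `int` takes the next two registers (a1/a2); only *variadic*
// 2*XLEN-aligned values are forced into an even/odd pair, by the
// register-skipping logic in CC_RISCV below. If only a7 is free, the low half
// goes in a7 and the high half on the stack.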
 
 
// Allocate an RVV argument to a register from the list matching its register
// class (LMUL and tuple count), or return no register on failure.
static MCRegister allocateRVVReg(MVT ValVT, unsigned ValNo, CCState &State,
                                 const RISCVTargetLowering &TLI) {
  const TargetRegisterClass *RC = TLI.getRegClassFor(ValVT);
  if (RC == &RISCV::VRRegClass) {
    // Assign the first mask argument to V0.
    if (ValVT.getVectorElementType() == MVT::i1)
      if (MCRegister Reg = State.AllocateReg(RISCV::V0))
        return Reg;
    return State.AllocateReg(ArgVRs);
  }
  if (RC == &RISCV::VRM2RegClass)
    return State.AllocateReg(ArgVRM2s);
  if (RC == &RISCV::VRM4RegClass)
    return State.AllocateReg(ArgVRM4s);
  if (RC == &RISCV::VRM8RegClass)
    return State.AllocateReg(ArgVRM8s);
  if (RC == &RISCV::VRN2M1RegClass)
    return State.AllocateReg(ArgVRN2M1s);
  if (RC == &RISCV::VRN3M1RegClass)
    return State.AllocateReg(ArgVRN3M1s);
  if (RC == &RISCV::VRN4M1RegClass)
    return State.AllocateReg(ArgVRN4M1s);
  if (RC == &RISCV::VRN5M1RegClass)
    return State.AllocateReg(ArgVRN5M1s);
  if (RC == &RISCV::VRN6M1RegClass)
    return State.AllocateReg(ArgVRN6M1s);
  if (RC == &RISCV::VRN7M1RegClass)
    return State.AllocateReg(ArgVRN7M1s);
  if (RC == &RISCV::VRN8M1RegClass)
    return State.AllocateReg(ArgVRN8M1s);
  if (RC == &RISCV::VRN2M2RegClass)
    return State.AllocateReg(ArgVRN2M2s);
  if (RC == &RISCV::VRN3M2RegClass)
    return State.AllocateReg(ArgVRN3M2s);
  if (RC == &RISCV::VRN4M2RegClass)
    return State.AllocateReg(ArgVRN4M2s);
  if (RC == &RISCV::VRN2M4RegClass)
    return State.AllocateReg(ArgVRN2M4s);
  llvm_unreachable("Unhandled register class for ValueType");
}
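// Illustrative note (not in the original source): for a scalable vector
// argument such as nxv2i32 (LMUL=1), getRegClassFor returns VRRegClass and
// the first free entry of ArgVRs (v8) is chosen; a mask argument (element
// type i1) is steered to v0 first, matching the RVV convention that the mask
// operand lives in v0.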
 
 
// Implements the RISC-V calling convention. Returns true upon failure.
bool llvm::CC_RISCV(unsigned ValNo, MVT ValVT, MVT LocVT,
                    CCValAssign::LocInfo LocInfo, ISD::ArgFlagsTy ArgFlags,
                    CCState &State, bool IsRet, Type *OrigTy) {
  const MachineFunction &MF = State.getMachineFunction();
  const DataLayout &DL = MF.getDataLayout();
  const RISCVSubtarget &Subtarget = MF.getSubtarget<RISCVSubtarget>();
  const RISCVTargetLowering &TLI = *Subtarget.getTargetLowering();
  RISCVABI::ABI ABI = Subtarget.getTargetABI();
  unsigned XLen = Subtarget.getXLen();
  MVT XLenVT = Subtarget.getXLenVT();

  if (ArgFlags.isNest()) {
    // The static chain goes in t2 (x7); with Zicfilp control-flow protection
    // x7 is the landing-pad label register, so t3 (x28) is used instead.
    bool HasCFBranch =
        Subtarget.hasStdExtZicfilp() &&
        MF.getFunction().getParent()->getModuleFlag("cf-protection-branch");
    const auto StaticChainReg = HasCFBranch ? RISCV::X28 : RISCV::X7;
    if (HasCFBranch &&
        (ABI == RISCVABI::ABI_ILP32E || ABI == RISCVABI::ABI_LP64E))
      reportFatalUsageError(
          "Nested functions with control flow protection are not "
          "usable with ILP32E or LP64E ABI.");
    if (MCRegister Reg = State.AllocateReg(StaticChainReg)) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }

  // Any return value split into more than two values can't be returned
  // directly.
  if (!LocVT.isVector() && IsRet && ValNo > 1)
    return true;
 
  // Pass f16/bf16/f32 (and f64 without FLEN=64) in GPRs unless the ABI
  // provides FP argument registers and the argument is not variadic.
  bool UseGPRForF16_F32 = true;
  bool UseGPRForF64 = true;

  switch (ABI) {
  default:
    llvm_unreachable("Unexpected ABI");
  case RISCVABI::ABI_ILP32:
  case RISCVABI::ABI_ILP32E:
  case RISCVABI::ABI_LP64:
  case RISCVABI::ABI_LP64E:
    break;
  case RISCVABI::ABI_ILP32F:
  case RISCVABI::ABI_LP64F:
    UseGPRForF16_F32 = ArgFlags.isVarArg();
    break;
  case RISCVABI::ABI_ILP32D:
  case RISCVABI::ABI_LP64D:
    UseGPRForF16_F32 = ArgFlags.isVarArg();
    UseGPRForF64 = ArgFlags.isVarArg();
    break;
  }

  // Try the hardware FP argument registers (fa0-fa7) first.
  if ((LocVT == MVT::f16 || LocVT == MVT::bf16) && !UseGPRForF16_F32) {
    if (MCRegister Reg = State.AllocateReg(ArgFPR16s)) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }
  if (LocVT == MVT::f32 && !UseGPRForF16_F32) {
    // ... (same pattern with ArgFPR32s)
  }
  if (LocVT == MVT::f64 && !UseGPRForF64) {
    // ... (same pattern with ArgFPR64s)
  }

  // With Zhinxmin/Zfinx/Zdinx, FP values are passed in GPR subregisters.
  if ((ValVT == MVT::f16 && Subtarget.hasStdExtZhinxmin())) {
    // ... (allocate from getArgGPR16s(ABI))
  }
  if (ValVT == MVT::f32 && Subtarget.hasStdExtZfinx()) {
    // ... (allocate from getArgGPR32s(ABI))
  }

  ArrayRef<MCPhysReg> ArgGPRs = RISCV::getArgGPRs(ABI);

  if (LocVT == MVT::f64 && XLen == 64 && Subtarget.hasStdExtZdinx()) {
    // ... (allocate a full GPR from ArgGPRs)
  }

  // FP values that fall back to GPRs are bitcast to XLEN-sized integers.
  if (LocVT == MVT::f16 || LocVT == MVT::bf16 ||
      (LocVT == MVT::f32 && XLen == 64)) {
    LocVT = XLenVT;
    LocInfo = CCValAssign::BCvt;
  }
  if ((XLen == 32 && LocVT == MVT::f32) || (XLen == 64 && LocVT == MVT::f64)) {
    LocVT = XLenVT;
    LocInfo = CCValAssign::BCvt;
  }
 
  // Variadic arguments with 2*XLEN alignment and 2*XLEN size must go in an
  // 'even' (aligned) register pair, so skip the odd register if needed.
  unsigned TwoXLenInBytes = (2 * XLen) / 8;
  if (ArgFlags.isVarArg() &&
      ArgFlags.getNonZeroOrigAlign() == TwoXLenInBytes &&
      DL.getTypeAllocSize(OrigTy) == TwoXLenInBytes &&
      ValVT.isScalarInteger()) {
    unsigned RegIdx = State.getFirstUnallocated(ArgGPRs);
    if (RegIdx != std::size(ArgGPRs) && RegIdx % 2 == 1)
      State.AllocateReg(ArgGPRs);
  }

  SmallVectorImpl<CCValAssign> &PendingLocs = State.getPendingLocs();
  SmallVectorImpl<ISD::ArgFlagsTy> &PendingArgFlags =
      State.getPendingArgFlags();

  assert(PendingLocs.size() == PendingArgFlags.size() &&
         "PendingLocs and PendingArgFlags out of sync");

  // Handle passing f64 on RV32D with a soft float ABI or when floating point
  // registers are exhausted.
  if (XLen == 32 && LocVT == MVT::f64) {
    assert(PendingLocs.empty() && "Can't lower f64 if it is split");
    // ... (custom-assign the value to a GPR pair, a GPR plus a stack slot,
    // or two stack slots, and return)
  }

  // Split arguments might be passed indirectly, so keep track of the pending
  // values.
  if (ValVT.isScalarInteger() && (ArgFlags.isSplit() || !PendingLocs.empty())) {
    LocVT = XLenVT;
    LocInfo = CCValAssign::Indirect;
    PendingLocs.push_back(CCValAssign::getPending(ValNo, ValVT, LocVT, LocInfo));
    PendingArgFlags.push_back(ArgFlags);
    if (!ArgFlags.isSplitEnd())
      return false;
  }

  // If the split argument only had two elements, it should be passed directly
  // in registers or on the stack.
  if (ValVT.isScalarInteger() && ArgFlags.isSplitEnd() &&
      PendingLocs.size() <= 2) {
    assert(PendingLocs.size() == 2 && "Unexpected PendingLocs.size()");
    // Apply the normal calling convention rules to the first half of the
    // split argument.
    CCValAssign VA = PendingLocs[0];
    ISD::ArgFlagsTy AF = PendingArgFlags[0];
    PendingLocs.clear();
    PendingArgFlags.clear();
    return CC_RISCVAssign2XLen(
        XLen, State, VA, AF, ValNo, ValVT, LocVT, ArgFlags,
        ABI == RISCVABI::ABI_ILP32E || ABI == RISCVABI::ABI_LP64E);
  }
 
  // Allocate to a register if possible, or else a stack slot.
  MCRegister Reg;
  unsigned StoreSizeBytes = XLen / 8;
  Align StackAlign = Align(XLen / 8);

  if ((ValVT == MVT::f16 || ValVT == MVT::bf16) && !UseGPRForF16_F32)
    Reg = State.AllocateReg(ArgFPR16s);
  else if (ValVT == MVT::f32 && !UseGPRForF16_F32)
    Reg = State.AllocateReg(ArgFPR32s);
  else if (ValVT == MVT::f64 && !UseGPRForF64)
    Reg = State.AllocateReg(ArgFPR64s);
  else if (ValVT.isVector() || ValVT.isRISCVVectorTuple()) {
    Reg = allocateRVVReg(ValVT, ValNo, State, TLI);
    if (Reg) {
      // Fixed-length vectors are located in the corresponding scalable-vector
      // container type.
      if (ValVT.isFixedLengthVector())
        LocVT = TLI.getContainerForFixedLengthVector(LocVT);
    } else {
      // Pass the vector indirectly if a GPR is free for the address.
      if ((Reg = State.AllocateReg(ArgGPRs))) {
        LocVT = XLenVT;
        LocInfo = CCValAssign::Indirect;
      } else {
        // ... (otherwise the vector goes on the stack by value)
      }
    }
  } else {
    Reg = State.AllocateReg(ArgGPRs);
  }

  int64_t StackOffset =
      Reg ? 0 : State.AllocateStack(StoreSizeBytes, StackAlign);

  // If we reach this point and PendingLocs is non-empty, we must be at the
  // end of a split argument that must be passed indirectly.
  if (!PendingLocs.empty()) {
    assert(PendingLocs.size() > 2 && "Unexpected PendingLocs.size()");
    for (auto &It : PendingLocs) {
      if (Reg)
        It.convertToReg(Reg);
      else
        It.convertToMem(StackOffset);
      State.addLoc(It);
    }
    PendingLocs.clear();
    PendingArgFlags.clear();
    return false;
  }

  assert((!UseGPRForF16_F32 || !UseGPRForF64 || LocVT == XLenVT ||
          (TLI.getSubtarget().hasVInstructions() &&
           (LocVT.isVector() || LocVT.isRISCVVectorTuple()))) &&
         "Expected an XLenVT or vector types at this stage");

  if (Reg) {
    State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
    return false;
  }
  State.addLoc(CCValAssign::getMem(ValNo, ValVT, StackOffset, LocVT, LocInfo));
  return false;
}
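// Illustrative example (not in the original source): under ILP32D,
// `double f(double x, long long y)` is lowered as x -> fa0 via ArgFPR64s,
// while y (an illegal i64 on RV32) is split by the legalizer into two i32
// pieces that flow through PendingLocs and CC_RISCVAssign2XLen, landing in
// a0/a1.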
 
 
bool llvm::CC_RISCV_FastCC(unsigned ValNo, MVT ValVT, MVT LocVT,
                           CCValAssign::LocInfo LocInfo,
                           ISD::ArgFlagsTy ArgFlags, CCState &State,
                           bool IsRet, Type *OrigTy) {
  const RISCVSubtarget &Subtarget =
      State.getMachineFunction().getSubtarget<RISCVSubtarget>();
  const RISCVTargetLowering &TLI = *Subtarget.getTargetLowering();
  RISCVABI::ABI ABI = Subtarget.getTargetABI();
  MVT XLenVT = Subtarget.getXLenVT();

  // FastCC widens the FPR argument pool beyond fa0-fa7 with the temporary
  // and saved FP registers.
  if ((LocVT == MVT::f16 && Subtarget.hasStdExtZfhmin()) ||
      (LocVT == MVT::bf16 && Subtarget.hasStdExtZfbfmin())) {
    static const MCPhysReg FPR16List[] = {
        RISCV::F10_H, RISCV::F11_H, RISCV::F12_H, RISCV::F13_H, RISCV::F14_H,
        RISCV::F15_H, RISCV::F16_H, RISCV::F17_H, RISCV::F0_H,  RISCV::F1_H,
        RISCV::F2_H,  RISCV::F3_H,  RISCV::F4_H,  RISCV::F5_H,  RISCV::F6_H,
        RISCV::F7_H,  RISCV::F28_H, RISCV::F29_H, RISCV::F30_H, RISCV::F31_H};
    if (MCRegister Reg = State.AllocateReg(FPR16List)) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }

  if (LocVT == MVT::f32 && Subtarget.hasStdExtF()) {
    static const MCPhysReg FPR32List[] = {
        RISCV::F10_F, RISCV::F11_F, RISCV::F12_F, RISCV::F13_F, RISCV::F14_F,
        RISCV::F15_F, RISCV::F16_F, RISCV::F17_F, RISCV::F0_F,  RISCV::F1_F,
        RISCV::F2_F,  RISCV::F3_F,  RISCV::F4_F,  RISCV::F5_F,  RISCV::F6_F,
        RISCV::F7_F,  RISCV::F28_F, RISCV::F29_F, RISCV::F30_F, RISCV::F31_F};
    if (MCRegister Reg = State.AllocateReg(FPR32List)) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }

  if (LocVT == MVT::f64 && Subtarget.hasStdExtD()) {
    static const MCPhysReg FPR64List[] = {
        RISCV::F10_D, RISCV::F11_D, RISCV::F12_D, RISCV::F13_D, RISCV::F14_D,
        RISCV::F15_D, RISCV::F16_D, RISCV::F17_D, RISCV::F0_D,  RISCV::F1_D,
        RISCV::F2_D,  RISCV::F3_D,  RISCV::F4_D,  RISCV::F5_D,  RISCV::F6_D,
        RISCV::F7_D,  RISCV::F28_D, RISCV::F29_D, RISCV::F30_D, RISCV::F31_D};
    if (MCRegister Reg = State.AllocateReg(FPR64List)) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }

  // With Zhinxmin/Zfinx/Zdinx, check for an available GPR before hitting the
  // stack.
  if ((LocVT == MVT::f16 && Subtarget.hasStdExtZhinxmin())) {
    // ... (allocate from getFastCCArgGPRF16s(ABI))
  }
  if (LocVT == MVT::f32 && Subtarget.hasStdExtZfinx()) {
    // ... (allocate from getFastCCArgGPRF32s(ABI))
  }
  if (LocVT == MVT::f64 && Subtarget.is64Bit() && Subtarget.hasStdExtZdinx()) {
    // ... (allocate from getFastCCArgGPRs(ABI))
  }

  if (LocVT.isVector()) {
    if (MCRegister Reg = allocateRVVReg(ValVT, ValNo, State, TLI)) {
      // Fixed-length vectors are located in the corresponding scalable-vector
      // container type.
      if (LocVT.isFixedLengthVector())
        LocVT = TLI.getContainerForFixedLengthVector(LocVT);
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
    // ... (otherwise pass the vector indirectly via a GPR or the stack)
  }

  if (LocVT == XLenVT) {
    if (MCRegister Reg = State.AllocateReg(getFastCCArgGPRs(ABI))) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }

  if (LocVT == XLenVT || LocVT == MVT::f16 || LocVT == MVT::bf16 ||
      LocVT == MVT::f32 || LocVT == MVT::f64) {
    // Pass on the stack.
    // ... (allocate a naturally aligned stack slot and record it via getMem)
  }

  return true; // CC didn't match.
}
 
 
bool llvm::CC_RISCV_GHC(unsigned ValNo, MVT ValVT, MVT LocVT,
                        CCValAssign::LocInfo LocInfo, ISD::ArgFlagsTy ArgFlags,
                        Type *OrigTy, CCState &State) {
  if (ArgFlags.isNest())
    report_fatal_error(
        "Attribute 'nest' is not supported in GHC calling convention");

  // GHC pins its STG machine registers to the callee-saved registers s1-s11.
  static const MCPhysReg GPRList[] = {
      RISCV::X9,  RISCV::X18, RISCV::X19, RISCV::X20, RISCV::X21, RISCV::X22,
      RISCV::X23, RISCV::X24, RISCV::X25, RISCV::X26, RISCV::X27};

  if (LocVT == MVT::i32 || LocVT == MVT::i64) {
    if (MCRegister Reg = State.AllocateReg(GPRList)) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }

  const RISCVSubtarget &Subtarget =
      State.getMachineFunction().getSubtarget<RISCVSubtarget>();

  if (LocVT == MVT::f32 && Subtarget.hasStdExtF()) {
    // Pass in the callee-saved FPRs fs0-fs5.
    static const MCPhysReg FPR32List[] = {RISCV::F8_F,  RISCV::F9_F,
                                          RISCV::F18_F, RISCV::F19_F,
                                          RISCV::F20_F, RISCV::F21_F};
    if (MCRegister Reg = State.AllocateReg(FPR32List)) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }

  if (LocVT == MVT::f64 && Subtarget.hasStdExtD()) {
    // Pass in the callee-saved FPRs fs6-fs11.
    static const MCPhysReg FPR64List[] = {RISCV::F22_D, RISCV::F23_D,
                                          RISCV::F24_D, RISCV::F25_D,
                                          RISCV::F26_D, RISCV::F27_D};
    if (MCRegister Reg = State.AllocateReg(FPR64List)) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }

  if (LocVT == MVT::f32 && Subtarget.hasStdExtZfinx()) {
    static const MCPhysReg GPR32List[] = {
        RISCV::X9_W,  RISCV::X18_W, RISCV::X19_W, RISCV::X20_W,
        RISCV::X21_W, RISCV::X22_W, RISCV::X23_W, RISCV::X24_W,
        RISCV::X25_W, RISCV::X26_W, RISCV::X27_W};
    if (MCRegister Reg = State.AllocateReg(GPR32List)) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }

  if (LocVT == MVT::f64 && Subtarget.hasStdExtZdinx() && Subtarget.is64Bit()) {
    if (MCRegister Reg = State.AllocateReg(GPRList)) {
      State.addLoc(CCValAssign::getReg(ValNo, ValVT, Reg, LocVT, LocInfo));
      return false;
    }
  }

  report_fatal_error("No registers left in GHC calling convention");
  return true;
}
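// Illustrative note (not in the original source): CC_RISCV_GHC never falls
// back to the stack. GHC maps the STG machine's virtual registers onto fixed
// callee-saved registers (s1-s11 above, plus fs0-fs11 for floats), so running
// out of them is a fatal error rather than a spill.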
 
 