30 State.PCSections = nullptr;
33 State.Observer = nullptr;
60 "Expected inlined-at fields to agree");
63 false, Reg, Variable, Expr));
73 "Expected inlined-at fields to agree");
76 true, Reg, Variable, Expr));
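// --- Illustrative usage sketch (not part of MachineIRBuilder.cpp) ---
// Assumes an initialized MachineIRBuilder `B`, a virtual register `VReg`,
// and DILocalVariable/DIExpression nodes `DIVar`/`DIExpr` whose inlined-at
// fields match the builder's current DebugLoc, as the asserts above require.
B.buildDirectDbgValue(VReg, DIVar, DIExpr);   // the variable's value lives in VReg
B.buildIndirectDbgValue(VReg, DIVar, DIExpr); // VReg holds the variable's address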
86 "Expected inlined-at fields to agree");
90 .addMetadata(Variable)
101 "Expected inlined-at fields to agree");
104 auto *NumericConstant = [&] () -> const Constant* {
106 if (CE->getOpcode() == Instruction::IntToPtr)
107 return CE->getOperand(0);
112 if (CI->getBitWidth() > 64)
114 else if (CI->getBitWidth() == 1)
115 MIB.addImm(CI->getZExtValue());
117 MIB.addImm(CI->getSExtValue());
127 MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
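// --- Illustrative sketch (assumed `MIB`/`CI` names), mirroring the width checks above ---
// A ConstantInt wider than 64 bits cannot be encoded as a plain immediate,
// so it is attached as a ConstantInt operand instead.
if (CI->getBitWidth() > 64)
  MIB.addCImm(CI);                 // keep the full-width constant
else if (CI->getBitWidth() == 1)
  MIB.addImm(CI->getZExtValue());  // i1 booleans are zero-extended
else
  MIB.addImm(CI->getSExtValue());  // everything else is sign-extended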
134 "Expected inlined-at fields to agree");
135 auto MIB = buildInstr(TargetOpcode::DBG_LABEL);
137 return MIB.addMetadata(Label);
144 auto MIB = buildInstr(TargetOpcode::G_DYN_STACKALLOC);
146 Size.addSrcToMIB(MIB);
147 MIB.addImm(Alignment.value());
154 auto MIB = buildInstr(TargetOpcode::G_FRAME_INDEX);
156 MIB.addFrameIndex(Idx);
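// --- Illustrative usage sketch (assumed names, not from this file) ---
// A dynamic stack allocation takes a size operand and an alignment, while a
// frame-index address is formed directly from the frame object's index.
LLT P0 = LLT::pointer(0, 64);
auto Size = B.buildConstant(LLT::scalar(64), 128);
auto Dyn = B.buildDynStackAlloc(P0, Size, Align(16));
auto FI = B.buildFrameIndex(P0, /*Idx=*/0);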
165 "address space mismatch");
167 auto MIB = buildInstr(TargetOpcode::G_GLOBAL_VALUE);
169 MIB.addGlobalAddress(GV);
176 auto MIB = buildInstr(TargetOpcode::G_CONSTANT_POOL);
178 MIB.addConstantPoolIndex(Idx);
184 return buildInstr(TargetOpcode::G_JUMP_TABLE, {PtrTy}, {})
185 .addJumpTableIndex(JTI);
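// --- Illustrative usage sketch (assumed names, not from this file) ---
// The result pointer type is expected to use the global's address space
// (the "address space mismatch" assert above); jump tables are addressed
// by their index.
LLT PtrTy = LLT::pointer(GV->getAddressSpace(), 64);
auto Addr = B.buildGlobalValue(PtrTy, GV);
auto Table = B.buildJumpTable(PtrTy, /*JTI=*/0);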
190 assert((Res == Op0) && "type mismatch");
196 assert((Res == Op0 && Res == Op1) && "type mismatch");
202 assert((Res == Op0) && "type mismatch");
207 const SrcOp &Op1, std::optional<unsigned> Flags) {
212 return buildInstr(TargetOpcode::G_PTR_ADD, {Res}, {Op0, Op1}, Flags);
223 std::optional<MachineInstrBuilder>
226 std::optional<unsigned> Flags) {
227 assert(Res == 0 && "Res is a result argument");
228 assert(ValueTy.isScalar() && "invalid offset type");
237 return buildPtrAdd(Res, Op0, Cst.getReg(0), Flags);
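// --- Illustrative usage sketch (assumed names, not from this file) ---
// materializePtrAdd only emits instructions for a non-zero offset.
Register Res;
auto MaybeAdd = B.materializePtrAdd(Res, BasePtr, LLT::scalar(64), Offset);
// For Offset == 0, MaybeAdd is std::nullopt and Res is simply BasePtr;
// otherwise Res holds the result of the newly built G_PTR_ADD.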
268 "Different vector element types");
270 "Op0 has more elements");
273 for (auto Op : Unmerge.getInstr()->defs())
277 "Op0 has more size");
283 for (unsigned i = 0; i < NumberOfPadElts; ++i)
298 "Different vector element types");
301 "Op0 has fewer elements");
325 "Table reg must be a pointer");
343 "creating constant with the wrong size");
345 assert(!Ty.isScalableVector() &&
346 "unexpected scalable vector in buildConstant");
348 if (Ty.isFixedVector()) {
349 auto Const = buildInstr(TargetOpcode::G_CONSTANT)
355 auto Const = buildInstr(TargetOpcode::G_CONSTANT);
366 ConstantInt *CI = ConstantInt::get(IntN, Val, true);
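// --- Illustrative usage sketch (assumed names, not from this file) ---
// Given a fixed-vector destination type, buildConstant emits one scalar
// G_CONSTANT and splats it across the lanes with G_BUILD_VECTOR.
LLT S32 = LLT::scalar(32);
auto ScalarC = B.buildConstant(S32, 42);
auto VectorC = B.buildConstant(LLT::fixed_vector(4, S32), 42);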
378 "creating fconstant with the wrong size");
380 assert(!Ty.isPointer() && "invalid operand type");
382 assert(!Ty.isScalableVector() &&
383 "unexpected scalable vector in buildFConstant");
385 if (Ty.isFixedVector()) {
386 auto Const = buildInstr(TargetOpcode::G_FCONSTANT)
393 auto Const = buildInstr(TargetOpcode::G_FCONSTANT);
396 Const.addFPImm(&Val);
418 auto *CFP = ConstantFP::get(Ctx, Val);
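// --- Illustrative usage sketch (assumed names, not from this file) ---
// The host-double overload converts the value to an APFloat of the
// destination's size (getAPFloatFromSize) before emitting G_FCONSTANT.
auto One = B.buildFConstant(LLT::scalar(32), 1.0);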
426 auto MIB = buildInstr(TargetOpcode::G_PTRAUTH_GLOBAL_VALUE);
430 MIB.addUse(AddrDisc);
439 auto MIB = buildInstr(TargetOpcode::G_BRCOND);
469 MIB.addMemOperand(&MMO);
481 return buildLoad(Dst, BasePtr, *OffsetMMO);
486 auto Ptr = buildPtrAdd(PtrTy, BasePtr, ConstOffset);
499 MIB.addMemOperand(&MMO);
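// --- Illustrative usage sketch (assumed names, not from this file) ---
// buildLoadFromOffset derives an offset MMO from the base MMO and, for a
// non-zero offset, first inserts a G_PTR_ADD; stores take the value, the
// address, and a MachineMemOperand directly (`BaseMMO`/`StoreMMO` assumed).
auto Loaded = B.buildLoadFromOffset(LLT::scalar(32), BasePtr, *BaseMMO, /*Offset=*/8);
B.buildStore(Loaded, BasePtr, *StoreMMO);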
529 std::optional<unsigned> Flags) {
530 return buildInstr(TargetOpcode::G_ZEXT, Res, Op, Flags);
535 switch (TLI->getBooleanContents(IsVec, IsFP)) {
537 return TargetOpcode::G_SEXT;
539 return TargetOpcode::G_ZEXT;
541 return TargetOpcode::G_ANYEXT;
557 switch (TLI->getBooleanContents(IsVector, IsFP)) {
572 assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
573 TargetOpcode::G_SEXT == ExtOpc) &&
574 "Expecting Extending Opc");
580 unsigned Opcode = TargetOpcode::COPY;
582 Op.getLLTTy(*getMRI()).getSizeInBits())
585 Op.getLLTTy(*getMRI()).getSizeInBits())
586 Opcode = TargetOpcode::G_TRUNC;
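// --- Illustrative usage sketch (assumed names, not from this file) ---
// The *OrTrunc helpers compare destination and source widths and emit a
// COPY, the requested extension, or a G_TRUNC accordingly.
auto Widened = B.buildSExtOrTrunc(LLT::scalar(64), NarrowReg); // wider dst  -> G_SEXT
auto Narrowed = B.buildZExtOrTrunc(LLT::scalar(16), WideReg);  // narrower dst -> G_TRUNC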
625 if (SrcTy.isPointerOrPointerVector())
626 Opcode = TargetOpcode::G_PTRTOINT;
628 Opcode = TargetOpcode::G_INTTOPTR;
630 assert(!SrcTy.isPointerOrPointerVector() &&
632 Opcode = TargetOpcode::G_BITCAST;
645 assert(SrcTy.isValid() && "invalid operand type");
648 "extracting off end of register");
652 assert(Index == 0 && "insertion past the end of a register");
656 auto Extract = buildInstr(TargetOpcode::G_EXTRACT);
657 Dst.addDefToMIB(*getMRI(), Extract);
658 Src.addSrcToMIB(Extract);
659 Extract.addImm(Index);
664 return buildInstr(TargetOpcode::G_IMPLICIT_DEF, {Res}, {});
674 return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, TmpVec);
685 return buildInstr(getOpcodeForMerge(Res, TmpVec), Res, TmpVec);
690 std::initializer_list<SrcOp> Ops) {
695 unsigned MachineIRBuilder::getOpcodeForMerge(const DstOp &DstOp,
698 if (SrcOps[0].getLLTTy(*getMRI()).isVector())
699 return TargetOpcode::G_CONCAT_VECTORS;
700 return TargetOpcode::G_BUILD_VECTOR;
703 return TargetOpcode::G_MERGE_VALUES;
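// --- Illustrative usage sketch (assumed names, not from this file) ---
// buildMergeLikeInstr defers to getOpcodeForMerge above: scalar destinations
// get G_MERGE_VALUES, vector destinations built from scalars get
// G_BUILD_VECTOR, and vector destinations built from vectors get
// G_CONCAT_VECTORS.
SmallVector<Register, 2> Parts = {Lo32, Hi32};
auto Merged = B.buildMergeLikeInstr(LLT::scalar(64), Parts);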
713 return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
720 return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
727 unsigned NumRegs = OpTy.getSizeInBits() / Attrs.Ty.getSizeInBits();
729 return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
739 return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
748 return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
757 for (const auto &Op : Ops)
759 return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
765 return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
775 if (TmpVec[0].getLLTTy(*getMRI()).getSizeInBits() ==
777 return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
778 return buildInstr(TargetOpcode::G_BUILD_VECTOR_TRUNC, Res, TmpVec);
785 "Expected Src to match Dst elt ty");
796 "Expected Src to match Dst elt ty");
797 return buildInstr(TargetOpcode::G_SPLAT_VECTOR, Res, Src);
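// --- Illustrative usage sketch (assumed names, not from this file) ---
// Fixed-length splats are lowered to a G_BUILD_VECTOR of the repeated scalar,
// while buildSplatVector emits a single G_SPLAT_VECTOR (used for scalable types).
auto Fixed = B.buildSplatBuildVector(LLT::fixed_vector(4, 32), ScalarReg);
auto Scalable = B.buildSplatVector(LLT::scalable_vector(4, 32), ScalarReg);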
810 assert(DstElemTy == ElemTy1 && DstElemTy == ElemTy2);
811 assert(Mask.size() > 1 && "Scalar G_SHUFFLE_VECTOR are not supported");
816 return buildInstr(TargetOpcode::G_SHUFFLE_VECTOR, {Res}, {Src1, Src2})
817 .addShuffleMask(MaskAlloc);
826 return buildInstr(TargetOpcode::G_CONCAT_VECTORS, Res, TmpVec);
835 "insertion past the end of a register");
838 Op.getLLTTy(*getMRI()).getSizeInBits()) {
849 APInt(Bitwidth, Step));
850 auto StepVector = buildInstr(TargetOpcode::G_STEP_VECTOR);
851 StepVector->setDebugLoc(DebugLoc());
853 StepVector.addCImm(CI);
868 auto VScale = buildInstr(TargetOpcode::G_VSCALE);
871 VScale.addCImm(&MinElts);
876 const APInt &MinElts) {
883 if (HasSideEffects && IsConvergent)
884 return TargetOpcode::G_INTRINSIC_CONVERGENT_W_SIDE_EFFECTS;
886 return TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS;
888 return TargetOpcode::G_INTRINSIC_CONVERGENT;
889 return TargetOpcode::G_INTRINSIC;
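// --- Illustrative usage sketch (assumed names, not from this file) ---
// The Intrinsic::ID overloads pick one of the four opcodes above from the
// intrinsic's memory effects and convergence; extra uses are appended by hand.
auto Fabs = B.buildIntrinsic(Intrinsic::fabs, {DstReg},
                             /*HasSideEffects=*/false, /*isConvergent=*/false);
Fabs.addUse(SrcReg);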
895 bool HasSideEffects, bool isConvergent) {
897 for (Register ResultReg : ResultRegs)
898 MIB.addDef(ResultReg);
899 MIB.addIntrinsicID(ID);
907 bool HasSideEffects = !Attrs.getMemoryEffects().doesNotAccessMemory();
908 bool isConvergent = Attrs.hasAttribute(Attribute::Convergent);
918 Result.addDefToMIB(*getMRI(), MIB);
919 MIB.addIntrinsicID(ID);
926 bool HasSideEffects = !Attrs.getMemoryEffects().doesNotAccessMemory();
927 bool isConvergent = Attrs.hasAttribute(Attribute::Convergent);
933 std::optional<unsigned> Flags) {
934 return buildInstr(TargetOpcode::G_TRUNC, Res, Op, Flags);
939 std::optional<unsigned> Flags) {
940 return buildInstr(TargetOpcode::G_FPTRUNC, Res, Op, Flags);
947 std::optional<unsigned> Flags) {
948 return buildInstr(TargetOpcode::G_ICMP, Res, {Pred, Op0, Op1}, Flags);
955 std::optional<unsigned> Flags) {
957 return buildInstr(TargetOpcode::G_FCMP, Res, {Pred, Op0, Op1}, Flags);
963 return buildInstr(TargetOpcode::G_SCMP, Res, {Op0, Op1});
969 return buildInstr(TargetOpcode::G_UCMP, Res, {Op0, Op1});
975 std::optional<unsigned> Flags) {
977 return buildInstr(TargetOpcode::G_SELECT, {Res}, {Tst, Op0, Op1}, Flags);
984 return buildInstr(TargetOpcode::G_INSERT_SUBVECTOR, Res,
991 return buildInstr(TargetOpcode::G_EXTRACT_SUBVECTOR, Res,
998 return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT, Res, {Val, Elt, Idx});
1004 return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT, Res, {Val, Idx});
1021 assert(OldValResTy == CmpValTy && "type mismatch");
1022 assert(OldValResTy == NewValTy && "type mismatch");
1025 auto MIB = buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS);
1031 MIB.addMemOperand(&MMO);
1048 assert(OldValResTy == CmpValTy && "type mismatch");
1049 assert(OldValResTy == NewValTy && "type mismatch");
1052 auto MIB = buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG);
1057 MIB.addMemOperand(&MMO);
1062 unsigned Opcode, const DstOp &OldValRes,
1071 assert(ValTy.isValid() && "invalid operand type");
1072 assert(OldValResTy == ValTy && "type mismatch");
1080 MIB.addMemOperand(&MMO);
1087 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
1093 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
1099 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
1105 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
1111 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
1118 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
1124 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
1130 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
1136 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
1142 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
1148 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
1156 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FADD, OldValRes, Addr, Val,
1163 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FSUB, OldValRes, Addr, Val,
1170 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FMAX, OldValRes, Addr, Val,
1177 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FMIN, OldValRes, Addr, Val,
1185 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FMAXIMUM, OldValRes, Addr,
1193 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FMINIMUM, OldValRes, Addr,
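// --- Illustrative usage sketch (assumed names, not from this file) ---
// Every buildAtomicRMW* wrapper above funnels into buildAtomicRMW with the
// matching opcode; the memory operand carries the atomic ordering.
MachineFunction &MF = B.getMF();
MachineMemOperand *MMO = MF.getMachineMemOperand(
    MachinePointerInfo(), MachineMemOperand::MOLoad | MachineMemOperand::MOStore,
    LLT::scalar(32), Align(4), AAMDNodes(), nullptr, SyncScope::System,
    AtomicOrdering::SequentiallyConsistent);
auto Old = B.buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, LLT::scalar(32),
                            AddrReg, ValReg, *MMO);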
1209 auto MIB = buildInstr(TargetOpcode::G_PREFETCH);
1211 MIB.addImm(RW).addImm(Locality).addImm(CacheType);
1212 MIB.addMemOperand(&MMO);
1229 assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
1231 "different number of elements in a trunc/ext");
1233 assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");
1237 "invalid narrowing extend");
1240 "invalid widening trunc");
1245 const LLT Op0Ty, const LLT Op1Ty) {
1248 "invalid operand type");
1249 assert((ResTy == Op0Ty && ResTy == Op1Ty) && "type mismatch");
1263 std::optional<unsigned> Flags) {
1267 case TargetOpcode::G_SELECT: {
1268 assert(DstOps.size() == 1 && "Invalid select");
1269 assert(SrcOps.size() == 3 && "Invalid select");
1271 DstOps[0].getLLTTy(*getMRI()), SrcOps[0].getLLTTy(*getMRI()),
1272 SrcOps[1].getLLTTy(*getMRI()), SrcOps[2].getLLTTy(*getMRI()));
1275 case TargetOpcode::G_FNEG:
1276 case TargetOpcode::G_ABS:
1279 assert(SrcOps.size() == 1 && "Invalid Srcs");
1281 SrcOps[0].getLLTTy(*getMRI()));
1283 case TargetOpcode::G_ADD:
1284 case TargetOpcode::G_AND:
1285 case TargetOpcode::G_MUL:
1286 case TargetOpcode::G_OR:
1287 case TargetOpcode::G_SUB:
1288 case TargetOpcode::G_XOR:
1289 case TargetOpcode::G_UDIV:
1290 case TargetOpcode::G_SDIV:
1291 case TargetOpcode::G_UREM:
1292 case TargetOpcode::G_SREM:
1293 case TargetOpcode::G_SMIN:
1294 case TargetOpcode::G_SMAX:
1295 case TargetOpcode::G_UMIN:
1296 case TargetOpcode::G_UMAX:
1297 case TargetOpcode::G_UADDSAT:
1298 case TargetOpcode::G_SADDSAT:
1299 case TargetOpcode::G_USUBSAT:
1300 case TargetOpcode::G_SSUBSAT: {
1303 assert(SrcOps.size() == 2 && "Invalid Srcs");
1305 SrcOps[0].getLLTTy(*getMRI()),
1306 SrcOps[1].getLLTTy(*getMRI()));
1309 case TargetOpcode::G_SHL:
1310 case TargetOpcode::G_ASHR:
1311 case TargetOpcode::G_LSHR:
1312 case TargetOpcode::G_USHLSAT:
1313 case TargetOpcode::G_SSHLSAT: {
1315 assert(SrcOps.size() == 2 && "Invalid Srcs");
1317 SrcOps[0].getLLTTy(*getMRI()),
1318 SrcOps[1].getLLTTy(*getMRI()));
1321 case TargetOpcode::G_SEXT:
1322 case TargetOpcode::G_ZEXT:
1323 case TargetOpcode::G_ANYEXT:
1325 assert(SrcOps.size() == 1 && "Invalid Srcs");
1327 SrcOps[0].getLLTTy(*getMRI()), true);
1329 case TargetOpcode::G_TRUNC:
1330 case TargetOpcode::G_FPTRUNC: {
1332 assert(SrcOps.size() == 1 && "Invalid Srcs");
1334 SrcOps[0].getLLTTy(*getMRI()), false);
1337 case TargetOpcode::G_BITCAST: {
1339 assert(SrcOps.size() == 1 && "Invalid Srcs");
1340 assert(DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1341 SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() && "invalid bitcast");
1344 case TargetOpcode::COPY:
1349 case TargetOpcode::G_FCMP:
1350 case TargetOpcode::G_ICMP: {
1351 assert(DstOps.size() == 1 && "Invalid Dst Operands");
1352 assert(SrcOps.size() == 3 && "Invalid Src Operands");
1356 "Expecting predicate");
1361 }() && "Invalid predicate");
1365 LLT Op0Ty = SrcOps[1].getLLTTy(*getMRI());
1366 LLT DstTy = DstOps[0].getLLTTy(*getMRI());
1372 }() && "Type Mismatch");
1375 case TargetOpcode::G_UNMERGE_VALUES: {
1376 assert(!DstOps.empty() && "Invalid trivial sequence");
1377 assert(SrcOps.size() == 1 && "Invalid src for Unmerge");
1381 DstOps[0].getLLTTy(*getMRI());
1383 "type mismatch in output list");
1385 DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1386 SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1387 "input operands do not cover output register");
1390 case TargetOpcode::G_MERGE_VALUES: {
1391 assert(SrcOps.size() >= 2 && "invalid trivial sequence");
1396 SrcOps[0].getLLTTy(*getMRI());
1398 "type mismatch in input list");
1400 SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1401 DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1402 "input operands do not cover output register");
1404 "vectors should be built with G_CONCAT_VECTOR or G_BUILD_VECTOR");
1407 case TargetOpcode::G_EXTRACT_VECTOR_ELT: {
1408 assert(DstOps.size() == 1 && "Invalid Dst size");
1409 assert(SrcOps.size() == 2 && "Invalid Src size");
1410 assert(SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
1412 DstOps[0].getLLTTy(*getMRI()).isPointer()) &&
1413 "Invalid operand type");
1414 assert(SrcOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand type");
1415 assert(SrcOps[0].getLLTTy(*getMRI()).getElementType() ==
1416 DstOps[0].getLLTTy(*getMRI()) &&
1420 case TargetOpcode::G_INSERT_VECTOR_ELT: {
1421 assert(DstOps.size() == 1 && "Invalid dst size");
1422 assert(SrcOps.size() == 3 && "Invalid src size");
1424 SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
1425 assert(DstOps[0].getLLTTy(*getMRI()).getElementType() ==
1426 SrcOps[1].getLLTTy(*getMRI()) &&
1428 assert(SrcOps[2].getLLTTy(*getMRI()).isScalar() && "Invalid index");
1429 assert(DstOps[0].getLLTTy(*getMRI()).getElementCount() ==
1430 SrcOps[0].getLLTTy(*getMRI()).getElementCount() &&
1434 case TargetOpcode::G_BUILD_VECTOR: {
1436 "Must have at least 2 operands");
1437 assert(DstOps.size() == 1 && "Invalid DstOps");
1439 "Res type must be a vector");
1443 SrcOps[0].getLLTTy(*getMRI());
1445 "type mismatch in input list");
1447 SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1448 DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1449 "input scalars do not exactly cover the output vector register");
1452 case TargetOpcode::G_BUILD_VECTOR_TRUNC: {
1454 "Must have at least 2 operands");
1455 assert(DstOps.size() == 1 && "Invalid DstOps");
1457 "Res type must be a vector");
1461 SrcOps[0].getLLTTy(*getMRI());
1463 "type mismatch in input list");
1466 case TargetOpcode::G_CONCAT_VECTORS: {
1467 assert(DstOps.size() == 1 && "Invalid DstOps");
1469 "Must have at least 2 operands");
1472 return (Op.getLLTTy(*getMRI()).isVector() &&
1474 SrcOps[0].getLLTTy(*getMRI()));
1476 "type mismatch in input list");
1478 SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
1479 DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
1480 "input vectors do not exactly cover the output vector register");
1483 case TargetOpcode::G_UADDE: {
1484 assert(DstOps.size() == 2 && "Invalid no of dst operands");
1485 assert(SrcOps.size() == 3 && "Invalid no of src operands");
1486 assert(DstOps[0].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
1488 (DstOps[0].getLLTTy(*getMRI()) == SrcOps[1].getLLTTy(*getMRI())) &&
1490 assert(DstOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
1498 for (const DstOp &Op : DstOps)
1500 for (const SrcOp &Op : SrcOps)
1501 Op.addSrcToMIB(MIB);
1503 MIB->setFlags(*Flags);
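// --- Illustrative usage sketch (assumed names, not from this file) ---
// The generic entry point takes destination and source operand lists plus
// optional MachineInstr flags; the switch above validates each opcode's
// operand counts and types before the operands are added to the MIB.
auto Add = B.buildInstr(TargetOpcode::G_ADD, {LLT::scalar(32)}, {LHSReg, RHSReg},
                        MachineInstr::NoSWrap);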