Line data Source code
1 : //===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
2 : //
3 : // The LLVM Compiler Infrastructure
4 : //
5 : // This file is distributed under the University of Illinois Open Source
6 : // License. See LICENSE.TXT for details.
7 : //
8 : //===----------------------------------------------------------------------===//
9 : /// \file
10 : /// This file implements the MachineIRBuilder class.
11 : //===----------------------------------------------------------------------===//
12 : #include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
13 :
14 : #include "llvm/CodeGen/MachineFunction.h"
15 : #include "llvm/CodeGen/MachineInstr.h"
16 : #include "llvm/CodeGen/MachineInstrBuilder.h"
17 : #include "llvm/CodeGen/MachineRegisterInfo.h"
18 : #include "llvm/CodeGen/TargetInstrInfo.h"
19 : #include "llvm/CodeGen/TargetOpcodes.h"
20 : #include "llvm/CodeGen/TargetSubtargetInfo.h"
21 : #include "llvm/IR/DebugInfo.h"
22 :
23 : using namespace llvm;
24 :
25 5528 : void MachineIRBuilderBase::setMF(MachineFunction &MF) {
26 5528 : State.MF = &MF;
27 5528 : State.MBB = nullptr;
28 5528 : State.MRI = &MF.getRegInfo();
29 5528 : State.TII = MF.getSubtarget().getInstrInfo();
30 5528 : State.DL = DebugLoc();
31 5528 : State.II = MachineBasicBlock::iterator();
32 : State.InsertedInstr = nullptr;
33 5528 : }
34 :
35 6390 : void MachineIRBuilderBase::setMBB(MachineBasicBlock &MBB) {
36 6390 : State.MBB = &MBB;
37 6390 : State.II = MBB.end();
38 : assert(&getMF() == MBB.getParent() &&
39 : "Basic block is in a different function");
40 6390 : }
41 :
42 1448 : void MachineIRBuilderBase::setInstr(MachineInstr &MI) {
43 : assert(MI.getParent() && "Instruction is not part of a basic block");
44 1448 : setMBB(*MI.getParent());
45 1448 : State.II = MI.getIterator();
46 1448 : }
47 :
48 375 : void MachineIRBuilderBase::setInsertPt(MachineBasicBlock &MBB,
49 : MachineBasicBlock::iterator II) {
50 : assert(MBB.getParent() == &getMF() &&
51 : "Basic block is in a different function");
52 375 : State.MBB = &MBB;
53 375 : State.II = II;
54 375 : }
55 :
56 16188 : void MachineIRBuilderBase::recordInsertion(MachineInstr *InsertedInstr) const {
57 16188 : if (State.InsertedInstr)
58 4732 : State.InsertedInstr(InsertedInstr);
59 16188 : }
60 :
61 1512 : void MachineIRBuilderBase::recordInsertions(
62 : std::function<void(MachineInstr *)> Inserted) {
63 1512 : State.InsertedInstr = std::move(Inserted);
64 1512 : }
65 :
66 30 : void MachineIRBuilderBase::stopRecordingInsertions() {
67 : State.InsertedInstr = nullptr;
68 30 : }
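// Example (minimal sketch, assuming a MachineIRBuilder `MIRBuilder`, a
// MachineFunction `MF` and a MachineBasicBlock `MBB` obtained elsewhere):
// typical use of the insertion-state API above.
//
//   SmallVector<MachineInstr *, 4> NewInstrs;
//   MIRBuilder.setMF(MF);                 // bind function-wide state
//   MIRBuilder.setMBB(MBB);               // insert at the end of MBB
//   MIRBuilder.recordInsertions(
//       [&](MachineInstr *MI) { NewInstrs.push_back(MI); });
//   // ... build instructions; each one is reported to the callback ...
//   MIRBuilder.stopRecordingInsertions();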
69 :
70 : //------------------------------------------------------------------------------
71 : // Build instruction variants.
72 : //------------------------------------------------------------------------------
73 :
74 14339 : MachineInstrBuilder MachineIRBuilderBase::buildInstr(unsigned Opcode) {
75 14339 : return insertInstr(buildInstrNoInsert(Opcode));
76 : }
77 :
78 16029 : MachineInstrBuilder MachineIRBuilderBase::buildInstrNoInsert(unsigned Opcode) {
79 32058 : MachineInstrBuilder MIB = BuildMI(getMF(), getDL(), getTII().get(Opcode));
80 16029 : return MIB;
81 : }
82 :
83 15840 : MachineInstrBuilder MachineIRBuilderBase::insertInstr(MachineInstrBuilder MIB) {
84 15840 : getMBB().insert(getInsertPt(), MIB);
85 15840 : recordInsertion(MIB);
86 15840 : return MIB;
87 : }
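// Example (sketch; `DstReg`, `LHSReg`, `RHSReg` are assumed virtual registers
// of the same type created elsewhere): the deferred form is useful when the
// operands should be attached before the instruction is placed in a block.
//
//   auto MIB = MIRBuilder.buildInstrNoInsert(TargetOpcode::G_ADD)
//                  .addDef(DstReg)
//                  .addUse(LHSReg)
//                  .addUse(RHSReg);
//   MIRBuilder.insertInstr(MIB);   // same result as buildInstr(G_ADD) + operands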
88 :
89 : MachineInstrBuilder
90 9 : MachineIRBuilderBase::buildDirectDbgValue(unsigned Reg, const MDNode *Variable,
91 : const MDNode *Expr) {
92 : assert(isa<DILocalVariable>(Variable) && "not a variable");
93 : assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
94 : assert(
95 : cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
96 : "Expected inlined-at fields to agree");
97 : return insertInstr(BuildMI(getMF(), getDL(),
98 9 : getTII().get(TargetOpcode::DBG_VALUE),
99 18 : /*IsIndirect*/ false, Reg, Variable, Expr));
100 : }
101 :
102 2 : MachineInstrBuilder MachineIRBuilderBase::buildIndirectDbgValue(
103 : unsigned Reg, const MDNode *Variable, const MDNode *Expr) {
104 : assert(isa<DILocalVariable>(Variable) && "not a variable");
105 : assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
106 : assert(
107 : cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
108 : "Expected inlined-at fields to agree");
109 : return insertInstr(BuildMI(getMF(), getDL(),
110 2 : getTII().get(TargetOpcode::DBG_VALUE),
111 4 : /*IsIndirect*/ true, Reg, Variable, Expr));
112 : }
113 :
114 : MachineInstrBuilder
115 0 : MachineIRBuilderBase::buildFIDbgValue(int FI, const MDNode *Variable,
116 : const MDNode *Expr) {
117 : assert(isa<DILocalVariable>(Variable) && "not a variable");
118 : assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
119 : assert(
120 : cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
121 : "Expected inlined-at fields to agree");
122 0 : return buildInstr(TargetOpcode::DBG_VALUE)
123 : .addFrameIndex(FI)
124 : .addImm(0)
125 : .addMetadata(Variable)
126 0 : .addMetadata(Expr);
127 : }
128 :
129 5 : MachineInstrBuilder MachineIRBuilderBase::buildConstDbgValue(
130 : const Constant &C, const MDNode *Variable, const MDNode *Expr) {
131 : assert(isa<DILocalVariable>(Variable) && "not a variable");
132 : assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
133 : assert(
134 : cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
135 : "Expected inlined-at fields to agree");
136 5 : auto MIB = buildInstr(TargetOpcode::DBG_VALUE);
137 : if (auto *CI = dyn_cast<ConstantInt>(&C)) {
138 3 : if (CI->getBitWidth() > 64)
139 : MIB.addCImm(CI);
140 : else
141 3 : MIB.addImm(CI->getZExtValue());
142 : } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
143 : MIB.addFPImm(CFP);
144 : } else {
145 : // Insert %noreg if we didn't find a usable constant and had to drop it.
146 1 : MIB.addReg(0U);
147 : }
148 :
149 5 : return MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
150 : }
151 :
152 0 : MachineInstrBuilder MachineIRBuilderBase::buildDbgLabel(const MDNode *Label) {
153 : assert(isa<DILabel>(Label) && "not a label");
154 : assert(cast<DILabel>(Label)->isValidLocationForIntrinsic(State.DL) &&
155 : "Expected inlined-at fields to agree");
156 0 : auto MIB = buildInstr(TargetOpcode::DBG_LABEL);
157 :
158 0 : return MIB.addMetadata(Label);
159 : }
160 :
161 358 : MachineInstrBuilder MachineIRBuilderBase::buildFrameIndex(unsigned Res,
162 : int Idx) {
163 : assert(getMRI()->getType(Res).isPointer() && "invalid operand type");
164 358 : return buildInstr(TargetOpcode::G_FRAME_INDEX)
165 : .addDef(Res)
166 716 : .addFrameIndex(Idx);
167 : }
168 :
169 : MachineInstrBuilder
170 159 : MachineIRBuilderBase::buildGlobalValue(unsigned Res, const GlobalValue *GV) {
171 : assert(getMRI()->getType(Res).isPointer() && "invalid operand type");
172 : assert(getMRI()->getType(Res).getAddressSpace() ==
173 : GV->getType()->getAddressSpace() &&
174 : "address space mismatch");
175 :
176 159 : return buildInstr(TargetOpcode::G_GLOBAL_VALUE)
177 : .addDef(Res)
178 318 : .addGlobalAddress(GV);
179 : }
180 :
181 415 : void MachineIRBuilderBase::validateBinaryOp(unsigned Res, unsigned Op0,
182 : unsigned Op1) {
183 : assert((getMRI()->getType(Res).isScalar() ||
184 : getMRI()->getType(Res).isVector()) &&
185 : "invalid operand type");
186 : assert(getMRI()->getType(Res) == getMRI()->getType(Op0) &&
187 : getMRI()->getType(Res) == getMRI()->getType(Op1) && "type mismatch");
188 415 : }
189 :
190 596 : MachineInstrBuilder MachineIRBuilderBase::buildGEP(unsigned Res, unsigned Op0,
191 : unsigned Op1) {
192 : assert(getMRI()->getType(Res).isPointer() &&
193 : getMRI()->getType(Res) == getMRI()->getType(Op0) && "type mismatch");
194 : assert(getMRI()->getType(Op1).isScalar() && "invalid offset type");
195 :
196 596 : return buildInstr(TargetOpcode::G_GEP)
197 : .addDef(Res)
198 : .addUse(Op0)
199 1192 : .addUse(Op1);
200 : }
201 :
202 : Optional<MachineInstrBuilder>
203 812 : MachineIRBuilderBase::materializeGEP(unsigned &Res, unsigned Op0,
204 : const LLT &ValueTy, uint64_t Value) {
205 : assert(Res == 0 && "Res is a result argument");
206 : assert(ValueTy.isScalar() && "invalid offset type");
207 :
208 812 : if (Value == 0) {
209 549 : Res = Op0;
210 : return None;
211 : }
212 :
213 526 : Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
214 526 : unsigned TmpReg = getMRI()->createGenericVirtualRegister(ValueTy);
215 :
216 263 : buildConstant(TmpReg, Value);
217 263 : return buildGEP(Res, Op0, TmpReg);
218 : }
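// Example (sketch; `BaseReg` and `Offset` are assumed to come from the caller):
// materializeGEP only emits instructions for a non-zero offset, so callers must
// use the returned register rather than assuming new instructions exist.
//
//   unsigned AddrReg = 0;
//   if (MIRBuilder.materializeGEP(AddrReg, BaseReg, LLT::scalar(64), Offset)) {
//     // Offset != 0: a G_CONSTANT and a G_GEP were emitted into AddrReg.
//   } else {
//     // Offset == 0: nothing was emitted and AddrReg == BaseReg.
//   }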
219 :
220 4 : MachineInstrBuilder MachineIRBuilderBase::buildPtrMask(unsigned Res,
221 : unsigned Op0,
222 : uint32_t NumBits) {
223 : assert(getMRI()->getType(Res).isPointer() &&
224 : getMRI()->getType(Res) == getMRI()->getType(Op0) && "type mismatch");
225 :
226 4 : return buildInstr(TargetOpcode::G_PTR_MASK)
227 : .addDef(Res)
228 : .addUse(Op0)
229 4 : .addImm(NumBits);
230 : }
231 :
232 69 : MachineInstrBuilder MachineIRBuilderBase::buildBr(MachineBasicBlock &Dest) {
233 69 : return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
234 : }
235 :
236 3 : MachineInstrBuilder MachineIRBuilderBase::buildBrIndirect(unsigned Tgt) {
237 : assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
238 3 : return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
239 : }
240 :
241 4238 : MachineInstrBuilder MachineIRBuilderBase::buildCopy(unsigned Res, unsigned Op) {
242 : assert(getMRI()->getType(Res) == LLT() || getMRI()->getType(Op) == LLT() ||
243 : getMRI()->getType(Res) == getMRI()->getType(Op));
244 4238 : return buildInstr(TargetOpcode::COPY).addDef(Res).addUse(Op);
245 : }
246 :
247 : MachineInstrBuilder
248 1436 : MachineIRBuilderBase::buildConstant(unsigned Res, const ConstantInt &Val) {
249 1436 : LLT Ty = getMRI()->getType(Res);
250 :
251 : assert((Ty.isScalar() || Ty.isPointer()) && "invalid operand type");
252 :
253 : const ConstantInt *NewVal = &Val;
254 2872 : if (Ty.getSizeInBits() != Val.getBitWidth())
255 0 : NewVal = ConstantInt::get(getMF().getFunction().getContext(),
256 0 : Val.getValue().sextOrTrunc(Ty.getSizeInBits()));
257 :
258 1436 : return buildInstr(TargetOpcode::G_CONSTANT).addDef(Res).addCImm(NewVal);
259 : }
260 :
261 984 : MachineInstrBuilder MachineIRBuilderBase::buildConstant(unsigned Res,
262 : int64_t Val) {
263 984 : auto IntN = IntegerType::get(getMF().getFunction().getContext(),
264 1968 : getMRI()->getType(Res).getSizeInBits());
265 984 : ConstantInt *CI = ConstantInt::get(IntN, Val, true);
266 984 : return buildConstant(Res, *CI);
267 : }
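// Example (sketch; `MRI` is the function's MachineRegisterInfo): the int64_t
// overload wraps the value in a ConstantInt of the register's width, while the
// ConstantInt overload sign-extends or truncates a mismatched width first.
//
//   unsigned C0 = MRI.createGenericVirtualRegister(LLT::scalar(32));
//   MIRBuilder.buildConstant(C0, 42);   // emits G_CONSTANT i32 42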
268 :
269 : MachineInstrBuilder
270 56 : MachineIRBuilderBase::buildFConstant(unsigned Res, const ConstantFP &Val) {
271 : assert(getMRI()->getType(Res).isScalar() && "invalid operand type");
272 :
273 56 : return buildInstr(TargetOpcode::G_FCONSTANT).addDef(Res).addFPImm(&Val);
274 : }
275 :
276 3 : MachineInstrBuilder MachineIRBuilderBase::buildFConstant(unsigned Res,
277 : double Val) {
278 3 : LLT DstTy = getMRI()->getType(Res);
279 3 : auto &Ctx = getMF().getFunction().getContext();
280 : auto *CFP =
281 3 : ConstantFP::get(Ctx, getAPFloatFromSize(Val, DstTy.getSizeInBits()));
282 3 : return buildFConstant(Res, *CFP);
283 : }
284 :
285 35 : MachineInstrBuilder MachineIRBuilderBase::buildBrCond(unsigned Tst,
286 : MachineBasicBlock &Dest) {
287 : assert(getMRI()->getType(Tst).isScalar() && "invalid operand type");
288 :
289 35 : return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest);
290 : }
291 :
292 808 : MachineInstrBuilder MachineIRBuilderBase::buildLoad(unsigned Res, unsigned Addr,
293 : MachineMemOperand &MMO) {
294 808 : return buildLoadInstr(TargetOpcode::G_LOAD, Res, Addr, MMO);
295 : }
296 :
297 : MachineInstrBuilder
298 808 : MachineIRBuilderBase::buildLoadInstr(unsigned Opcode, unsigned Res,
299 : unsigned Addr, MachineMemOperand &MMO) {
300 : assert(getMRI()->getType(Res).isValid() && "invalid operand type");
301 : assert(getMRI()->getType(Addr).isPointer() && "invalid operand type");
302 :
303 808 : return buildInstr(Opcode)
304 : .addDef(Res)
305 : .addUse(Addr)
306 1616 : .addMemOperand(&MMO);
307 : }
308 :
309 585 : MachineInstrBuilder MachineIRBuilderBase::buildStore(unsigned Val,
310 : unsigned Addr,
311 : MachineMemOperand &MMO) {
312 : assert(getMRI()->getType(Val).isValid() && "invalid operand type");
313 : assert(getMRI()->getType(Addr).isPointer() && "invalid operand type");
314 :
315 585 : return buildInstr(TargetOpcode::G_STORE)
316 : .addUse(Val)
317 : .addUse(Addr)
318 1170 : .addMemOperand(&MMO);
319 : }
320 :
321 9 : MachineInstrBuilder MachineIRBuilderBase::buildUAdde(unsigned Res,
322 : unsigned CarryOut,
323 : unsigned Op0, unsigned Op1,
324 : unsigned CarryIn) {
325 : assert(getMRI()->getType(Res).isScalar() && "invalid operand type");
326 : assert(getMRI()->getType(Res) == getMRI()->getType(Op0) &&
327 : getMRI()->getType(Res) == getMRI()->getType(Op1) && "type mismatch");
328 : assert(getMRI()->getType(CarryOut).isScalar() && "invalid operand type");
329 : assert(getMRI()->getType(CarryOut) == getMRI()->getType(CarryIn) &&
330 : "type mismatch");
331 :
332 9 : return buildInstr(TargetOpcode::G_UADDE)
333 : .addDef(Res)
334 : .addDef(CarryOut)
335 : .addUse(Op0)
336 : .addUse(Op1)
337 18 : .addUse(CarryIn);
338 : }
339 :
340 150 : MachineInstrBuilder MachineIRBuilderBase::buildAnyExt(unsigned Res,
341 : unsigned Op) {
342 150 : validateTruncExt(Res, Op, true);
343 150 : return buildInstr(TargetOpcode::G_ANYEXT).addDef(Res).addUse(Op);
344 : }
345 :
346 35 : MachineInstrBuilder MachineIRBuilderBase::buildSExt(unsigned Res, unsigned Op) {
347 35 : validateTruncExt(Res, Op, true);
348 35 : return buildInstr(TargetOpcode::G_SEXT).addDef(Res).addUse(Op);
349 : }
350 :
351 44 : MachineInstrBuilder MachineIRBuilderBase::buildZExt(unsigned Res, unsigned Op) {
352 44 : validateTruncExt(Res, Op, true);
353 44 : return buildInstr(TargetOpcode::G_ZEXT).addDef(Res).addUse(Op);
354 : }
355 :
356 650 : MachineInstrBuilder MachineIRBuilderBase::buildExtOrTrunc(unsigned ExtOpc,
357 : unsigned Res,
358 : unsigned Op) {
359 : assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
360 : TargetOpcode::G_SEXT == ExtOpc) &&
361 : "Expecting Extending Opc");
362 : assert(getMRI()->getType(Res).isScalar() ||
363 : getMRI()->getType(Res).isVector());
364 : assert(getMRI()->getType(Res).isScalar() == getMRI()->getType(Op).isScalar());
365 :
366 : unsigned Opcode = TargetOpcode::COPY;
367 1300 : if (getMRI()->getType(Res).getSizeInBits() >
368 650 : getMRI()->getType(Op).getSizeInBits())
369 : Opcode = ExtOpc;
370 596 : else if (getMRI()->getType(Res).getSizeInBits() <
371 596 : getMRI()->getType(Op).getSizeInBits())
372 : Opcode = TargetOpcode::G_TRUNC;
373 : else
374 : assert(getMRI()->getType(Res) == getMRI()->getType(Op));
375 :
376 650 : return buildInstr(Opcode).addDef(Res).addUse(Op);
377 : }
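// Example (sketch; `Dst32`, `Src16`, `Src64` are assumed registers of types
// s32, s16 and s64): buildExtOrTrunc compares the two widths and picks the
// requested extension, G_TRUNC, or a plain COPY when the sizes already match.
//
//   MIRBuilder.buildExtOrTrunc(TargetOpcode::G_ZEXT, Dst32, Src16); // G_ZEXT
//   MIRBuilder.buildExtOrTrunc(TargetOpcode::G_ZEXT, Dst32, Src64); // G_TRUNC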
378 :
379 5 : MachineInstrBuilder MachineIRBuilderBase::buildSExtOrTrunc(unsigned Res,
380 : unsigned Op) {
381 5 : return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
382 : }
383 :
384 4 : MachineInstrBuilder MachineIRBuilderBase::buildZExtOrTrunc(unsigned Res,
385 : unsigned Op) {
386 4 : return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
387 : }
388 :
389 641 : MachineInstrBuilder MachineIRBuilderBase::buildAnyExtOrTrunc(unsigned Res,
390 : unsigned Op) {
391 641 : return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
392 : }
393 :
394 28 : MachineInstrBuilder MachineIRBuilderBase::buildCast(unsigned Dst,
395 : unsigned Src) {
396 28 : LLT SrcTy = getMRI()->getType(Src);
397 : LLT DstTy = getMRI()->getType(Dst);
398 : if (SrcTy == DstTy)
399 9 : return buildCopy(Dst, Src);
400 :
401 : unsigned Opcode;
402 : if (SrcTy.isPointer() && DstTy.isScalar())
403 : Opcode = TargetOpcode::G_PTRTOINT;
404 : else if (DstTy.isPointer() && SrcTy.isScalar())
405 : Opcode = TargetOpcode::G_INTTOPTR;
406 : else {
407 : assert(!SrcTy.isPointer() && !DstTy.isPointer() && "no G_ADDRCAST yet");
408 : Opcode = TargetOpcode::G_BITCAST;
409 : }
410 :
411 38 : return buildInstr(Opcode).addDef(Dst).addUse(Src);
412 : }
413 :
414 : MachineInstrBuilder
415 372 : MachineIRBuilderBase::buildExtract(unsigned Res, unsigned Src, uint64_t Index) {
416 : #ifndef NDEBUG
417 : assert(getMRI()->getType(Src).isValid() && "invalid operand type");
418 : assert(getMRI()->getType(Res).isValid() && "invalid operand type");
419 : assert(Index + getMRI()->getType(Res).getSizeInBits() <=
420 : getMRI()->getType(Src).getSizeInBits() &&
421 : "extracting off end of register");
422 : #endif
423 :
424 744 : if (getMRI()->getType(Res).getSizeInBits() ==
425 372 : getMRI()->getType(Src).getSizeInBits()) {
426 : assert(Index == 0 && "insertion past the end of a register");
427 2 : return buildCast(Res, Src);
428 : }
429 :
430 370 : return buildInstr(TargetOpcode::G_EXTRACT)
431 : .addDef(Res)
432 : .addUse(Src)
433 370 : .addImm(Index);
434 : }
435 :
436 1 : void MachineIRBuilderBase::buildSequence(unsigned Res, ArrayRef<unsigned> Ops,
437 : ArrayRef<uint64_t> Indices) {
438 : #ifndef NDEBUG
439 : assert(Ops.size() == Indices.size() && "incompatible args");
440 : assert(!Ops.empty() && "invalid trivial sequence");
441 : assert(std::is_sorted(Indices.begin(), Indices.end()) &&
442 : "sequence offsets must be in ascending order");
443 :
444 : assert(getMRI()->getType(Res).isValid() && "invalid operand type");
445 : for (auto Op : Ops)
446 : assert(getMRI()->getType(Op).isValid() && "invalid operand type");
447 : #endif
448 :
449 1 : LLT ResTy = getMRI()->getType(Res);
450 1 : LLT OpTy = getMRI()->getType(Ops[0]);
451 1 : unsigned OpSize = OpTy.getSizeInBits();
452 : bool MaybeMerge = true;
453 5 : for (unsigned i = 0; i < Ops.size(); ++i) {
454 12 : if (getMRI()->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
455 : MaybeMerge = false;
456 : break;
457 : }
458 : }
459 :
460 1 : if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
461 1 : buildMerge(Res, Ops);
462 1 : return;
463 : }
464 :
465 0 : unsigned ResIn = getMRI()->createGenericVirtualRegister(ResTy);
466 0 : buildUndef(ResIn);
467 :
468 0 : for (unsigned i = 0; i < Ops.size(); ++i) {
469 0 : unsigned ResOut = i + 1 == Ops.size()
470 0 : ? Res
471 0 : : getMRI()->createGenericVirtualRegister(ResTy);
472 0 : buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
473 : ResIn = ResOut;
474 : }
475 : }
476 :
477 127 : MachineInstrBuilder MachineIRBuilderBase::buildUndef(unsigned Res) {
478 127 : return buildInstr(TargetOpcode::G_IMPLICIT_DEF).addDef(Res);
479 : }
480 :
481 290 : MachineInstrBuilder MachineIRBuilderBase::buildMerge(unsigned Res,
482 : ArrayRef<unsigned> Ops) {
483 :
484 : #ifndef NDEBUG
485 : assert(!Ops.empty() && "invalid trivial sequence");
486 : LLT Ty = getMRI()->getType(Ops[0]);
487 : for (auto Reg : Ops)
488 : assert(getMRI()->getType(Reg) == Ty && "type mismatch in input list");
489 : assert(Ops.size() * getMRI()->getType(Ops[0]).getSizeInBits() ==
490 : getMRI()->getType(Res).getSizeInBits() &&
491 : "input operands do not cover output register");
492 : #endif
493 :
494 290 : if (Ops.size() == 1)
495 7 : return buildCast(Res, Ops[0]);
496 :
497 283 : MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_MERGE_VALUES);
498 : MIB.addDef(Res);
499 997 : for (unsigned i = 0; i < Ops.size(); ++i)
500 714 : MIB.addUse(Ops[i]);
501 283 : return MIB;
502 : }
503 :
504 335 : MachineInstrBuilder MachineIRBuilderBase::buildUnmerge(ArrayRef<unsigned> Res,
505 : unsigned Op) {
506 :
507 : #ifndef NDEBUG
508 : assert(!Res.empty() && "invalid trivial sequence");
509 : LLT Ty = getMRI()->getType(Res[0]);
510 : for (auto Reg : Res)
511 : assert(getMRI()->getType(Reg) == Ty && "type mismatch in input list");
512 : assert(Res.size() * getMRI()->getType(Res[0]).getSizeInBits() ==
513 : getMRI()->getType(Op).getSizeInBits() &&
514 : "input operands do not cover output register");
515 : #endif
516 :
517 335 : MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_UNMERGE_VALUES);
518 1136 : for (unsigned i = 0; i < Res.size(); ++i)
519 1602 : MIB.addDef(Res[i]);
520 : MIB.addUse(Op);
521 335 : return MIB;
522 : }
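// Example (sketch; `Reg64` is an s64 register and `Lo32`/`Hi32`/`Lo32b`/`Hi32b`
// are s32 registers created elsewhere): buildMerge and buildUnmerge are
// inverses, packing narrow registers into a wide one and splitting it again.
//
//   MIRBuilder.buildMerge(Reg64, {Lo32, Hi32});       // G_MERGE_VALUES
//   MIRBuilder.buildUnmerge({Lo32b, Hi32b}, Reg64);   // G_UNMERGE_VALUES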
523 :
524 226 : MachineInstrBuilder MachineIRBuilderBase::buildInsert(unsigned Res,
525 : unsigned Src, unsigned Op,
526 : unsigned Index) {
527 : assert(Index + getMRI()->getType(Op).getSizeInBits() <=
528 : getMRI()->getType(Res).getSizeInBits() &&
529 : "insertion past the end of a register");
530 :
531 452 : if (getMRI()->getType(Res).getSizeInBits() ==
532 226 : getMRI()->getType(Op).getSizeInBits()) {
533 1 : return buildCast(Res, Op);
534 : }
535 :
536 225 : return buildInstr(TargetOpcode::G_INSERT)
537 : .addDef(Res)
538 : .addUse(Src)
539 : .addUse(Op)
540 225 : .addImm(Index);
541 : }
542 :
543 25 : MachineInstrBuilder MachineIRBuilderBase::buildIntrinsic(Intrinsic::ID ID,
544 : unsigned Res,
545 : bool HasSideEffects) {
546 : auto MIB =
547 : buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
548 40 : : TargetOpcode::G_INTRINSIC);
549 25 : if (Res)
550 : MIB.addDef(Res);
551 : MIB.addIntrinsicID(ID);
552 25 : return MIB;
553 : }
554 :
555 438 : MachineInstrBuilder MachineIRBuilderBase::buildTrunc(unsigned Res,
556 : unsigned Op) {
557 438 : validateTruncExt(Res, Op, false);
558 438 : return buildInstr(TargetOpcode::G_TRUNC).addDef(Res).addUse(Op);
559 : }
560 :
561 1 : MachineInstrBuilder MachineIRBuilderBase::buildFPTrunc(unsigned Res,
562 : unsigned Op) {
563 1 : validateTruncExt(Res, Op, false);
564 1 : return buildInstr(TargetOpcode::G_FPTRUNC).addDef(Res).addUse(Op);
565 : }
566 :
567 112 : MachineInstrBuilder MachineIRBuilderBase::buildICmp(CmpInst::Predicate Pred,
568 : unsigned Res, unsigned Op0,
569 : unsigned Op1) {
570 : #ifndef NDEBUG
571 : assert(getMRI()->getType(Op0) == getMRI()->getType(Op1) && "type mismatch");
572 : assert(CmpInst::isIntPredicate(Pred) && "invalid predicate");
573 : if (getMRI()->getType(Op0).isScalar() || getMRI()->getType(Op0).isPointer())
574 : assert(getMRI()->getType(Res).isScalar() && "type mismatch");
575 : else
576 : assert(getMRI()->getType(Res).isVector() &&
577 : getMRI()->getType(Res).getNumElements() ==
578 : getMRI()->getType(Op0).getNumElements() &&
579 : "type mismatch");
580 : #endif
581 :
582 112 : return buildInstr(TargetOpcode::G_ICMP)
583 : .addDef(Res)
584 : .addPredicate(Pred)
585 : .addUse(Op0)
586 224 : .addUse(Op1);
587 : }
588 :
589 7 : MachineInstrBuilder MachineIRBuilderBase::buildFCmp(CmpInst::Predicate Pred,
590 : unsigned Res, unsigned Op0,
591 : unsigned Op1) {
592 : #ifndef NDEBUG
593 : assert((getMRI()->getType(Op0).isScalar() ||
594 : getMRI()->getType(Op0).isVector()) &&
595 : "invalid operand type");
596 : assert(getMRI()->getType(Op0) == getMRI()->getType(Op1) && "type mismatch");
597 : assert(CmpInst::isFPPredicate(Pred) && "invalid predicate");
598 : if (getMRI()->getType(Op0).isScalar())
599 : assert(getMRI()->getType(Res).isScalar() && "type mismatch");
600 : else
601 : assert(getMRI()->getType(Res).isVector() &&
602 : getMRI()->getType(Res).getNumElements() ==
603 : getMRI()->getType(Op0).getNumElements() &&
604 : "type mismatch");
605 : #endif
606 :
607 7 : return buildInstr(TargetOpcode::G_FCMP)
608 : .addDef(Res)
609 : .addPredicate(Pred)
610 : .addUse(Op0)
611 14 : .addUse(Op1);
612 : }
613 :
614 8 : MachineInstrBuilder MachineIRBuilderBase::buildSelect(unsigned Res,
615 : unsigned Tst,
616 : unsigned Op0,
617 : unsigned Op1) {
618 : #ifndef NDEBUG
619 : LLT ResTy = getMRI()->getType(Res);
620 : assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
621 : "invalid operand type");
622 : assert(ResTy == getMRI()->getType(Op0) && ResTy == getMRI()->getType(Op1) &&
623 : "type mismatch");
624 : if (ResTy.isScalar() || ResTy.isPointer())
625 : assert(getMRI()->getType(Tst).isScalar() && "type mismatch");
626 : else
627 : assert((getMRI()->getType(Tst).isScalar() ||
628 : (getMRI()->getType(Tst).isVector() &&
629 : getMRI()->getType(Tst).getNumElements() ==
630 : getMRI()->getType(Op0).getNumElements())) &&
631 : "type mismatch");
632 : #endif
633 :
634 8 : return buildInstr(TargetOpcode::G_SELECT)
635 : .addDef(Res)
636 : .addUse(Tst)
637 : .addUse(Op0)
638 16 : .addUse(Op1);
639 : }
640 :
641 : MachineInstrBuilder
642 19 : MachineIRBuilderBase::buildInsertVectorElement(unsigned Res, unsigned Val,
643 : unsigned Elt, unsigned Idx) {
644 : #ifndef NDEBUG
645 : LLT ResTy = getMRI()->getType(Res);
646 : LLT ValTy = getMRI()->getType(Val);
647 : LLT EltTy = getMRI()->getType(Elt);
648 : LLT IdxTy = getMRI()->getType(Idx);
649 : assert(ResTy.isVector() && ValTy.isVector() && "invalid operand type");
650 : assert(IdxTy.isScalar() && "invalid operand type");
651 : assert(ResTy.getNumElements() == ValTy.getNumElements() && "type mismatch");
652 : assert(ResTy.getElementType() == EltTy && "type mismatch");
653 : #endif
654 :
655 19 : return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT)
656 : .addDef(Res)
657 : .addUse(Val)
658 : .addUse(Elt)
659 38 : .addUse(Idx);
660 : }
661 :
662 : MachineInstrBuilder
663 22 : MachineIRBuilderBase::buildExtractVectorElement(unsigned Res, unsigned Val,
664 : unsigned Idx) {
665 : #ifndef NDEBUG
666 : LLT ResTy = getMRI()->getType(Res);
667 : LLT ValTy = getMRI()->getType(Val);
668 : LLT IdxTy = getMRI()->getType(Idx);
669 : assert(ValTy.isVector() && "invalid operand type");
670 : assert((ResTy.isScalar() || ResTy.isPointer()) && "invalid operand type");
671 : assert(IdxTy.isScalar() && "invalid operand type");
672 : assert(ValTy.getElementType() == ResTy && "type mismatch");
673 : #endif
674 :
675 22 : return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT)
676 : .addDef(Res)
677 : .addUse(Val)
678 44 : .addUse(Idx);
679 : }
680 :
681 3 : MachineInstrBuilder MachineIRBuilderBase::buildAtomicCmpXchgWithSuccess(
682 : unsigned OldValRes, unsigned SuccessRes, unsigned Addr, unsigned CmpVal,
683 : unsigned NewVal, MachineMemOperand &MMO) {
684 : #ifndef NDEBUG
685 : LLT OldValResTy = getMRI()->getType(OldValRes);
686 : LLT SuccessResTy = getMRI()->getType(SuccessRes);
687 : LLT AddrTy = getMRI()->getType(Addr);
688 : LLT CmpValTy = getMRI()->getType(CmpVal);
689 : LLT NewValTy = getMRI()->getType(NewVal);
690 : assert(OldValResTy.isScalar() && "invalid operand type");
691 : assert(SuccessResTy.isScalar() && "invalid operand type");
692 : assert(AddrTy.isPointer() && "invalid operand type");
693 : assert(CmpValTy.isValid() && "invalid operand type");
694 : assert(NewValTy.isValid() && "invalid operand type");
695 : assert(OldValResTy == CmpValTy && "type mismatch");
696 : assert(OldValResTy == NewValTy && "type mismatch");
697 : #endif
698 :
699 3 : return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS)
700 : .addDef(OldValRes)
701 : .addDef(SuccessRes)
702 : .addUse(Addr)
703 : .addUse(CmpVal)
704 : .addUse(NewVal)
705 6 : .addMemOperand(&MMO);
706 : }
707 :
708 : MachineInstrBuilder
709 2 : MachineIRBuilderBase::buildAtomicCmpXchg(unsigned OldValRes, unsigned Addr,
710 : unsigned CmpVal, unsigned NewVal,
711 : MachineMemOperand &MMO) {
712 : #ifndef NDEBUG
713 : LLT OldValResTy = getMRI()->getType(OldValRes);
714 : LLT AddrTy = getMRI()->getType(Addr);
715 : LLT CmpValTy = getMRI()->getType(CmpVal);
716 : LLT NewValTy = getMRI()->getType(NewVal);
717 : assert(OldValResTy.isScalar() && "invalid operand type");
718 : assert(AddrTy.isPointer() && "invalid operand type");
719 : assert(CmpValTy.isValid() && "invalid operand type");
720 : assert(NewValTy.isValid() && "invalid operand type");
721 : assert(OldValResTy == CmpValTy && "type mismatch");
722 : assert(OldValResTy == NewValTy && "type mismatch");
723 : #endif
724 :
725 2 : return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
726 : .addDef(OldValRes)
727 : .addUse(Addr)
728 : .addUse(CmpVal)
729 : .addUse(NewVal)
730 4 : .addMemOperand(&MMO);
731 : }
732 :
733 : MachineInstrBuilder
734 11 : MachineIRBuilderBase::buildAtomicRMW(unsigned Opcode, unsigned OldValRes,
735 : unsigned Addr, unsigned Val,
736 : MachineMemOperand &MMO) {
737 : #ifndef NDEBUG
738 : LLT OldValResTy = getMRI()->getType(OldValRes);
739 : LLT AddrTy = getMRI()->getType(Addr);
740 : LLT ValTy = getMRI()->getType(Val);
741 : assert(OldValResTy.isScalar() && "invalid operand type");
742 : assert(AddrTy.isPointer() && "invalid operand type");
743 : assert(ValTy.isValid() && "invalid operand type");
744 : assert(OldValResTy == ValTy && "type mismatch");
745 : #endif
746 :
747 11 : return buildInstr(Opcode)
748 : .addDef(OldValRes)
749 : .addUse(Addr)
750 : .addUse(Val)
751 22 : .addMemOperand(&MMO);
752 : }
753 :
754 : MachineInstrBuilder
755 0 : MachineIRBuilderBase::buildAtomicRMWXchg(unsigned OldValRes, unsigned Addr,
756 : unsigned Val, MachineMemOperand &MMO) {
757 : return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
758 0 : MMO);
759 : }
760 : MachineInstrBuilder
761 0 : MachineIRBuilderBase::buildAtomicRMWAdd(unsigned OldValRes, unsigned Addr,
762 : unsigned Val, MachineMemOperand &MMO) {
763 : return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
764 0 : MMO);
765 : }
766 : MachineInstrBuilder
767 0 : MachineIRBuilderBase::buildAtomicRMWSub(unsigned OldValRes, unsigned Addr,
768 : unsigned Val, MachineMemOperand &MMO) {
769 : return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
770 0 : MMO);
771 : }
772 : MachineInstrBuilder
773 0 : MachineIRBuilderBase::buildAtomicRMWAnd(unsigned OldValRes, unsigned Addr,
774 : unsigned Val, MachineMemOperand &MMO) {
775 : return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
776 0 : MMO);
777 : }
778 : MachineInstrBuilder
779 0 : MachineIRBuilderBase::buildAtomicRMWNand(unsigned OldValRes, unsigned Addr,
780 : unsigned Val, MachineMemOperand &MMO) {
781 : return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
782 0 : MMO);
783 : }
784 : MachineInstrBuilder
785 0 : MachineIRBuilderBase::buildAtomicRMWOr(unsigned OldValRes, unsigned Addr,
786 : unsigned Val, MachineMemOperand &MMO) {
787 : return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
788 0 : MMO);
789 : }
790 : MachineInstrBuilder
791 0 : MachineIRBuilderBase::buildAtomicRMWXor(unsigned OldValRes, unsigned Addr,
792 : unsigned Val, MachineMemOperand &MMO) {
793 : return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
794 0 : MMO);
795 : }
796 : MachineInstrBuilder
797 0 : MachineIRBuilderBase::buildAtomicRMWMax(unsigned OldValRes, unsigned Addr,
798 : unsigned Val, MachineMemOperand &MMO) {
799 : return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
800 0 : MMO);
801 : }
802 : MachineInstrBuilder
803 0 : MachineIRBuilderBase::buildAtomicRMWMin(unsigned OldValRes, unsigned Addr,
804 : unsigned Val, MachineMemOperand &MMO) {
805 : return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
806 0 : MMO);
807 : }
808 : MachineInstrBuilder
809 0 : MachineIRBuilderBase::buildAtomicRMWUmax(unsigned OldValRes, unsigned Addr,
810 : unsigned Val, MachineMemOperand &MMO) {
811 : return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
812 0 : MMO);
813 : }
814 : MachineInstrBuilder
815 0 : MachineIRBuilderBase::buildAtomicRMWUmin(unsigned OldValRes, unsigned Addr,
816 : unsigned Val, MachineMemOperand &MMO) {
817 : return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
818 0 : MMO);
819 : }
820 :
821 : MachineInstrBuilder
822 1 : MachineIRBuilderBase::buildBlockAddress(unsigned Res, const BlockAddress *BA) {
823 : #ifndef NDEBUG
824 : assert(getMRI()->getType(Res).isPointer() && "invalid res type");
825 : #endif
826 :
827 1 : return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
828 : }
829 :
830 668 : void MachineIRBuilderBase::validateTruncExt(unsigned Dst, unsigned Src,
831 : bool IsExtend) {
832 : #ifndef NDEBUG
833 : LLT SrcTy = getMRI()->getType(Src);
834 : LLT DstTy = getMRI()->getType(Dst);
835 :
836 : if (DstTy.isVector()) {
837 : assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
838 : assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
839 : "different number of elements in a trunc/ext");
840 : } else
841 : assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");
842 :
843 : if (IsExtend)
844 : assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
845 : "invalid narrowing extend");
846 : else
847 : assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
848 : "invalid widening trunc");
849 : #endif
850 668 : }
|