Bug Summary

File:build/source/bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp
Warning:line 708, column 17
Assigned value is garbage or undefined

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -cc1 -triple x86_64-pc-linux-gnu -analyze -disable-free -clear-ast-before-backend -disable-llvm-verifier -discard-value-names -main-file-name AArch64MCPlusBuilder.cpp -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model pic -pic-level 2 -mframe-pointer=none -fmath-errno -ffp-contract=on -fno-rounding-math -mconstructor-aliases -funwind-tables=2 -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -ffunction-sections -fdata-sections -fcoverage-compilation-dir=/build/source/build-llvm/tools/clang/stage2-bins -resource-dir /usr/lib/llvm-16/lib/clang/16.0.0 -D _DEBUG -D _GNU_SOURCE -D __STDC_CONSTANT_MACROS -D __STDC_FORMAT_MACROS -D __STDC_LIMIT_MACROS -I tools/bolt/lib/Target/AArch64 -I /build/source/bolt/lib/Target/AArch64 -I include -I /build/source/llvm/include -I /build/source/bolt/include -I tools/bolt/include -I /build/source/llvm/lib/Target/AArch64 -I lib/Target/AArch64 -D _FORTIFY_SOURCE=2 -D NDEBUG -U NDEBUG -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/10/../../../../include/c++/10 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/10/../../../../include/x86_64-linux-gnu/c++/10 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/10/../../../../include/c++/10/backward -internal-isystem /usr/lib/llvm-16/lib/clang/16.0.0/include -internal-isystem /usr/local/include -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/10/../../../../x86_64-linux-gnu/include -internal-externc-isystem 
/usr/include/x86_64-linux-gnu -internal-externc-isystem /include -internal-externc-isystem /usr/include -fmacro-prefix-map=/build/source/build-llvm/tools/clang/stage2-bins=build-llvm/tools/clang/stage2-bins -fmacro-prefix-map=/build/source/= -fcoverage-prefix-map=/build/source/build-llvm/tools/clang/stage2-bins=build-llvm/tools/clang/stage2-bins -fcoverage-prefix-map=/build/source/= -source-date-epoch 1668078801 -O2 -Wno-unused-command-line-argument -Wno-unused-parameter -Wwrite-strings -Wno-missing-field-initializers -Wno-long-long -Wno-maybe-uninitialized -Wno-class-memaccess -Wno-redundant-move -Wno-pessimizing-move -Wno-noexcept-type -Wno-comment -Wno-misleading-indentation -std=c++17 -fdeprecated-macro -fdebug-compilation-dir=/build/source/build-llvm/tools/clang/stage2-bins -fdebug-prefix-map=/build/source/build-llvm/tools/clang/stage2-bins=build-llvm/tools/clang/stage2-bins -fdebug-prefix-map=/build/source/= -ferror-limit 19 -fvisibility-inlines-hidden -stack-protector 2 -fgnuc-version=4.2.1 -fcolor-diagnostics -vectorize-loops -vectorize-slp -analyzer-output=html -analyzer-config stable-report-filename=true -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /tmp/scan-build-2022-11-10-135928-647445-1 -x c++ /build/source/bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp
1//===- bolt/Target/AArch64/AArch64MCPlusBuilder.cpp -----------------------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file provides AArch64-specific MCPlus builder.
10//
11//===----------------------------------------------------------------------===//
12
13#include "MCTargetDesc/AArch64AddressingModes.h"
14#include "MCTargetDesc/AArch64MCExpr.h"
15#include "MCTargetDesc/AArch64MCTargetDesc.h"
16#include "Utils/AArch64BaseInfo.h"
17#include "bolt/Core/MCPlusBuilder.h"
18#include "llvm/BinaryFormat/ELF.h"
19#include "llvm/MC/MCInstrInfo.h"
20#include "llvm/MC/MCRegisterInfo.h"
21#include "llvm/Support/Debug.h"
22#include "llvm/Support/ErrorHandling.h"
23
24#define DEBUG_TYPE "mcplus"
25
26using namespace llvm;
27using namespace bolt;
28
29namespace {
30
31class AArch64MCPlusBuilder : public MCPlusBuilder {
32public:
33 AArch64MCPlusBuilder(const MCInstrAnalysis *Analysis, const MCInstrInfo *Info,
34 const MCRegisterInfo *RegInfo)
35 : MCPlusBuilder(Analysis, Info, RegInfo) {}
36
37 bool equals(const MCTargetExpr &A, const MCTargetExpr &B,
38 CompFuncTy Comp) const override {
39 const auto &AArch64ExprA = cast<AArch64MCExpr>(A);
40 const auto &AArch64ExprB = cast<AArch64MCExpr>(B);
41 if (AArch64ExprA.getKind() != AArch64ExprB.getKind())
42 return false;
43
44 return MCPlusBuilder::equals(*AArch64ExprA.getSubExpr(),
45 *AArch64ExprB.getSubExpr(), Comp);
46 }
47
48 bool hasEVEXEncoding(const MCInst &) const override { return false; }
49
50 bool isMacroOpFusionPair(ArrayRef<MCInst> Insts) const override {
51 return false;
52 }
53
54 bool shortenInstruction(MCInst &, const MCSubtargetInfo &) const override {
55 return false;
56 }
57
58 bool isADRP(const MCInst &Inst) const override {
59 return Inst.getOpcode() == AArch64::ADRP;
60 }
61
62 bool isADR(const MCInst &Inst) const override {
63 return Inst.getOpcode() == AArch64::ADR;
64 }
65
66 void getADRReg(const MCInst &Inst, MCPhysReg &RegName) const override {
67 assert((isADR(Inst) || isADRP(Inst)) && "Not an ADR instruction")(static_cast <bool> ((isADR(Inst) || isADRP(Inst)) &&
"Not an ADR instruction") ? void (0) : __assert_fail ("(isADR(Inst) || isADRP(Inst)) && \"Not an ADR instruction\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 67, __extension__
__PRETTY_FUNCTION__))
;
68 assert(MCPlus::getNumPrimeOperands(Inst) != 0 &&(static_cast <bool> (MCPlus::getNumPrimeOperands(Inst) !=
0 && "No operands for ADR instruction") ? void (0) :
__assert_fail ("MCPlus::getNumPrimeOperands(Inst) != 0 && \"No operands for ADR instruction\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 69, __extension__
__PRETTY_FUNCTION__))
69 "No operands for ADR instruction")(static_cast <bool> (MCPlus::getNumPrimeOperands(Inst) !=
0 && "No operands for ADR instruction") ? void (0) :
__assert_fail ("MCPlus::getNumPrimeOperands(Inst) != 0 && \"No operands for ADR instruction\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 69, __extension__
__PRETTY_FUNCTION__))
;
70 assert(Inst.getOperand(0).isReg() &&(static_cast <bool> (Inst.getOperand(0).isReg() &&
"Unexpected operand in ADR instruction") ? void (0) : __assert_fail
("Inst.getOperand(0).isReg() && \"Unexpected operand in ADR instruction\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 71, __extension__
__PRETTY_FUNCTION__))
71 "Unexpected operand in ADR instruction")(static_cast <bool> (Inst.getOperand(0).isReg() &&
"Unexpected operand in ADR instruction") ? void (0) : __assert_fail
("Inst.getOperand(0).isReg() && \"Unexpected operand in ADR instruction\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 71, __extension__
__PRETTY_FUNCTION__))
;
72 RegName = Inst.getOperand(0).getReg();
73 }
74
75 bool isTB(const MCInst &Inst) const {
76 return (Inst.getOpcode() == AArch64::TBNZW ||
77 Inst.getOpcode() == AArch64::TBNZX ||
78 Inst.getOpcode() == AArch64::TBZW ||
79 Inst.getOpcode() == AArch64::TBZX);
80 }
81
82 bool isCB(const MCInst &Inst) const {
83 return (Inst.getOpcode() == AArch64::CBNZW ||
84 Inst.getOpcode() == AArch64::CBNZX ||
85 Inst.getOpcode() == AArch64::CBZW ||
86 Inst.getOpcode() == AArch64::CBZX);
87 }
88
89 bool isMOVW(const MCInst &Inst) const {
90 return (Inst.getOpcode() == AArch64::MOVKWi ||
91 Inst.getOpcode() == AArch64::MOVKXi ||
92 Inst.getOpcode() == AArch64::MOVNWi ||
93 Inst.getOpcode() == AArch64::MOVNXi ||
94 Inst.getOpcode() == AArch64::MOVZXi ||
95 Inst.getOpcode() == AArch64::MOVZWi);
96 }
97
98 bool isADD(const MCInst &Inst) const {
99 return (Inst.getOpcode() == AArch64::ADDSWri ||
100 Inst.getOpcode() == AArch64::ADDSWrr ||
101 Inst.getOpcode() == AArch64::ADDSWrs ||
102 Inst.getOpcode() == AArch64::ADDSWrx ||
103 Inst.getOpcode() == AArch64::ADDSXri ||
104 Inst.getOpcode() == AArch64::ADDSXrr ||
105 Inst.getOpcode() == AArch64::ADDSXrs ||
106 Inst.getOpcode() == AArch64::ADDSXrx ||
107 Inst.getOpcode() == AArch64::ADDSXrx64 ||
108 Inst.getOpcode() == AArch64::ADDWri ||
109 Inst.getOpcode() == AArch64::ADDWrr ||
110 Inst.getOpcode() == AArch64::ADDWrs ||
111 Inst.getOpcode() == AArch64::ADDWrx ||
112 Inst.getOpcode() == AArch64::ADDXri ||
113 Inst.getOpcode() == AArch64::ADDXrr ||
114 Inst.getOpcode() == AArch64::ADDXrs ||
115 Inst.getOpcode() == AArch64::ADDXrx ||
116 Inst.getOpcode() == AArch64::ADDXrx64);
117 }
118
119 bool isLDRB(const MCInst &Inst) const {
120 return (Inst.getOpcode() == AArch64::LDRBBpost ||
121 Inst.getOpcode() == AArch64::LDRBBpre ||
122 Inst.getOpcode() == AArch64::LDRBBroW ||
123 Inst.getOpcode() == AArch64::LDRBBroX ||
124 Inst.getOpcode() == AArch64::LDRBBui ||
125 Inst.getOpcode() == AArch64::LDRSBWpost ||
126 Inst.getOpcode() == AArch64::LDRSBWpre ||
127 Inst.getOpcode() == AArch64::LDRSBWroW ||
128 Inst.getOpcode() == AArch64::LDRSBWroX ||
129 Inst.getOpcode() == AArch64::LDRSBWui ||
130 Inst.getOpcode() == AArch64::LDRSBXpost ||
131 Inst.getOpcode() == AArch64::LDRSBXpre ||
132 Inst.getOpcode() == AArch64::LDRSBXroW ||
133 Inst.getOpcode() == AArch64::LDRSBXroX ||
134 Inst.getOpcode() == AArch64::LDRSBXui);
135 }
136
137 bool isLDRH(const MCInst &Inst) const {
138 return (Inst.getOpcode() == AArch64::LDRHHpost ||
139 Inst.getOpcode() == AArch64::LDRHHpre ||
140 Inst.getOpcode() == AArch64::LDRHHroW ||
141 Inst.getOpcode() == AArch64::LDRHHroX ||
142 Inst.getOpcode() == AArch64::LDRHHui ||
143 Inst.getOpcode() == AArch64::LDRSHWpost ||
144 Inst.getOpcode() == AArch64::LDRSHWpre ||
145 Inst.getOpcode() == AArch64::LDRSHWroW ||
146 Inst.getOpcode() == AArch64::LDRSHWroX ||
147 Inst.getOpcode() == AArch64::LDRSHWui ||
148 Inst.getOpcode() == AArch64::LDRSHXpost ||
149 Inst.getOpcode() == AArch64::LDRSHXpre ||
150 Inst.getOpcode() == AArch64::LDRSHXroW ||
151 Inst.getOpcode() == AArch64::LDRSHXroX ||
152 Inst.getOpcode() == AArch64::LDRSHXui);
153 }
154
155 bool isLDRW(const MCInst &Inst) const {
156 return (Inst.getOpcode() == AArch64::LDRWpost ||
157 Inst.getOpcode() == AArch64::LDRWpre ||
158 Inst.getOpcode() == AArch64::LDRWroW ||
159 Inst.getOpcode() == AArch64::LDRWroX ||
160 Inst.getOpcode() == AArch64::LDRWui);
161 }
162
163 bool isLDRX(const MCInst &Inst) const {
164 return (Inst.getOpcode() == AArch64::LDRXpost ||
165 Inst.getOpcode() == AArch64::LDRXpre ||
166 Inst.getOpcode() == AArch64::LDRXroW ||
167 Inst.getOpcode() == AArch64::LDRXroX ||
168 Inst.getOpcode() == AArch64::LDRXui);
169 }
170
171 bool isLoad(const MCInst &Inst) const override {
172 return isLDRB(Inst) || isLDRH(Inst) || isLDRW(Inst) || isLDRX(Inst);
173 }
174
175 bool isLoadFromStack(const MCInst &Inst) const {
176 if (!isLoad(Inst))
177 return false;
178 const MCInstrDesc &InstInfo = Info->get(Inst.getOpcode());
179 unsigned NumDefs = InstInfo.getNumDefs();
180 for (unsigned I = NumDefs, E = InstInfo.getNumOperands(); I < E; ++I) {
181 const MCOperand &Operand = Inst.getOperand(I);
182 if (!Operand.isReg())
183 continue;
184 unsigned Reg = Operand.getReg();
185 if (Reg == AArch64::SP || Reg == AArch64::WSP || Reg == AArch64::FP ||
186 Reg == AArch64::W29)
187 return true;
188 }
189 return false;
190 }
191
192 bool isRegToRegMove(const MCInst &Inst, MCPhysReg &From,
193 MCPhysReg &To) const override {
194 if (Inst.getOpcode() != AArch64::ORRXrs)
195 return false;
196 if (Inst.getOperand(1).getReg() != AArch64::XZR)
197 return false;
198 if (Inst.getOperand(3).getImm() != 0)
199 return false;
200 From = Inst.getOperand(2).getReg();
201 To = Inst.getOperand(0).getReg();
202 return true;
203 }
204
205 bool isIndirectCall(const MCInst &Inst) const override {
206 return Inst.getOpcode() == AArch64::BLR;
207 }
208
209 bool hasPCRelOperand(const MCInst &Inst) const override {
210 // ADRP is blacklisted and is an exception. Even though it has a
211 // PC-relative operand, this operand is not a complete symbol reference
212 // and BOLT shouldn't try to process it in isolation.
213 if (isADRP(Inst))
214 return false;
215
216 if (isADR(Inst))
217 return true;
218
219 // Look for literal addressing mode (see C1-143 ARM DDI 0487B.a)
220 const MCInstrDesc &MCII = Info->get(Inst.getOpcode());
221 for (unsigned I = 0, E = MCII.getNumOperands(); I != E; ++I)
222 if (MCII.OpInfo[I].OperandType == MCOI::OPERAND_PCREL)
223 return true;
224
225 return false;
226 }
227
228 bool evaluateADR(const MCInst &Inst, int64_t &Imm,
229 const MCExpr **DispExpr) const {
230 assert((isADR(Inst) || isADRP(Inst)) && "Not an ADR instruction")(static_cast <bool> ((isADR(Inst) || isADRP(Inst)) &&
"Not an ADR instruction") ? void (0) : __assert_fail ("(isADR(Inst) || isADRP(Inst)) && \"Not an ADR instruction\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 230, __extension__
__PRETTY_FUNCTION__))
;
231
232 const MCOperand &Label = Inst.getOperand(1);
233 if (!Label.isImm()) {
234 assert(Label.isExpr() && "Unexpected ADR operand")(static_cast <bool> (Label.isExpr() && "Unexpected ADR operand"
) ? void (0) : __assert_fail ("Label.isExpr() && \"Unexpected ADR operand\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 234, __extension__
__PRETTY_FUNCTION__))
;
235 assert(DispExpr && "DispExpr must be set")(static_cast <bool> (DispExpr && "DispExpr must be set"
) ? void (0) : __assert_fail ("DispExpr && \"DispExpr must be set\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 235, __extension__
__PRETTY_FUNCTION__))
;
236 *DispExpr = Label.getExpr();
237 return false;
238 }
239
240 if (Inst.getOpcode() == AArch64::ADR) {
241 Imm = Label.getImm();
242 return true;
243 }
244 Imm = Label.getImm() << 12;
245 return true;
246 }
247
248 bool evaluateAArch64MemoryOperand(const MCInst &Inst, int64_t &DispImm,
249 const MCExpr **DispExpr = nullptr) const {
250 if (isADR(Inst) || isADRP(Inst))
251 return evaluateADR(Inst, DispImm, DispExpr);
252
253 // Literal addressing mode
254 const MCInstrDesc &MCII = Info->get(Inst.getOpcode());
255 for (unsigned I = 0, E = MCII.getNumOperands(); I != E; ++I) {
256 if (MCII.OpInfo[I].OperandType != MCOI::OPERAND_PCREL)
257 continue;
258
259 if (!Inst.getOperand(I).isImm()) {
260 assert(Inst.getOperand(I).isExpr() && "Unexpected PCREL operand")(static_cast <bool> (Inst.getOperand(I).isExpr() &&
"Unexpected PCREL operand") ? void (0) : __assert_fail ("Inst.getOperand(I).isExpr() && \"Unexpected PCREL operand\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 260, __extension__
__PRETTY_FUNCTION__))
;
261 assert(DispExpr && "DispExpr must be set")(static_cast <bool> (DispExpr && "DispExpr must be set"
) ? void (0) : __assert_fail ("DispExpr && \"DispExpr must be set\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 261, __extension__
__PRETTY_FUNCTION__))
;
262 *DispExpr = Inst.getOperand(I).getExpr();
263 return true;
264 }
265
266 DispImm = Inst.getOperand(I).getImm() << 2;
267 return true;
268 }
269 return false;
270 }
271
272 bool evaluateMemOperandTarget(const MCInst &Inst, uint64_t &Target,
273 uint64_t Address,
274 uint64_t Size) const override {
275 int64_t DispValue;
276 const MCExpr *DispExpr = nullptr;
277 if (!evaluateAArch64MemoryOperand(Inst, DispValue, &DispExpr))
278 return false;
279
280 // Make sure it's a well-formed addressing we can statically evaluate.
281 if (DispExpr)
282 return false;
283
284 Target = DispValue;
285 if (Inst.getOpcode() == AArch64::ADRP)
286 Target += Address & ~0xFFFULL;
287 else
288 Target += Address;
289 return true;
290 }
291
292 MCInst::iterator getMemOperandDisp(MCInst &Inst) const override {
293 MCInst::iterator OI = Inst.begin();
294 if (isADR(Inst) || isADRP(Inst)) {
295 assert(MCPlus::getNumPrimeOperands(Inst) >= 2 &&(static_cast <bool> (MCPlus::getNumPrimeOperands(Inst) >=
2 && "Unexpected number of operands") ? void (0) : __assert_fail
("MCPlus::getNumPrimeOperands(Inst) >= 2 && \"Unexpected number of operands\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 296, __extension__
__PRETTY_FUNCTION__))
296 "Unexpected number of operands")(static_cast <bool> (MCPlus::getNumPrimeOperands(Inst) >=
2 && "Unexpected number of operands") ? void (0) : __assert_fail
("MCPlus::getNumPrimeOperands(Inst) >= 2 && \"Unexpected number of operands\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 296, __extension__
__PRETTY_FUNCTION__))
;
297 return ++OI;
298 }
299 const MCInstrDesc &MCII = Info->get(Inst.getOpcode());
300 for (unsigned I = 0, E = MCII.getNumOperands(); I != E; ++I) {
301 if (MCII.OpInfo[I].OperandType == MCOI::OPERAND_PCREL)
302 break;
303 ++OI;
304 }
305 assert(OI != Inst.end() && "Literal operand not found")(static_cast <bool> (OI != Inst.end() && "Literal operand not found"
) ? void (0) : __assert_fail ("OI != Inst.end() && \"Literal operand not found\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 305, __extension__
__PRETTY_FUNCTION__))
;
306 return OI;
307 }
308
309 bool replaceMemOperandDisp(MCInst &Inst, MCOperand Operand) const override {
310 MCInst::iterator OI = getMemOperandDisp(Inst);
311 *OI = Operand;
312 return true;
313 }
314
315 const MCExpr *getTargetExprFor(MCInst &Inst, const MCExpr *Expr,
316 MCContext &Ctx,
317 uint64_t RelType) const override {
318
319 if (isADR(Inst) || RelType == ELF::R_AARCH64_ADR_PREL_LO21 ||
320 RelType == ELF::R_AARCH64_TLSDESC_ADR_PREL21) {
321 return AArch64MCExpr::create(Expr, AArch64MCExpr::VK_ABS, Ctx);
322 } else if (isADRP(Inst) || RelType == ELF::R_AARCH64_ADR_PREL_PG_HI21 ||
323 RelType == ELF::R_AARCH64_ADR_PREL_PG_HI21_NC ||
324 RelType == ELF::R_AARCH64_TLSDESC_ADR_PAGE21 ||
325 RelType == ELF::R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21 ||
326 RelType == ELF::R_AARCH64_ADR_GOT_PAGE) {
327 // Never emit a GOT reloc, we handled this in
328 // RewriteInstance::readRelocations().
329 return AArch64MCExpr::create(Expr, AArch64MCExpr::VK_ABS_PAGE, Ctx);
330 } else {
331 switch (RelType) {
332 case ELF::R_AARCH64_ADD_ABS_LO12_NC:
333 case ELF::R_AARCH64_LD64_GOT_LO12_NC:
334 case ELF::R_AARCH64_LDST8_ABS_LO12_NC:
335 case ELF::R_AARCH64_LDST16_ABS_LO12_NC:
336 case ELF::R_AARCH64_LDST32_ABS_LO12_NC:
337 case ELF::R_AARCH64_LDST64_ABS_LO12_NC:
338 case ELF::R_AARCH64_LDST128_ABS_LO12_NC:
339 case ELF::R_AARCH64_TLSDESC_ADD_LO12:
340 case ELF::R_AARCH64_TLSDESC_LD64_LO12:
341 case ELF::R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC:
342 case ELF::R_AARCH64_TLSLE_ADD_TPREL_LO12_NC:
343 return AArch64MCExpr::create(Expr, AArch64MCExpr::VK_LO12, Ctx);
344 case ELF::R_AARCH64_MOVW_UABS_G3:
345 return AArch64MCExpr::create(Expr, AArch64MCExpr::VK_ABS_G3, Ctx);
346 case ELF::R_AARCH64_MOVW_UABS_G2:
347 case ELF::R_AARCH64_MOVW_UABS_G2_NC:
348 return AArch64MCExpr::create(Expr, AArch64MCExpr::VK_ABS_G2_NC, Ctx);
349 case ELF::R_AARCH64_MOVW_UABS_G1:
350 case ELF::R_AARCH64_MOVW_UABS_G1_NC:
351 return AArch64MCExpr::create(Expr, AArch64MCExpr::VK_ABS_G1_NC, Ctx);
352 case ELF::R_AARCH64_MOVW_UABS_G0:
353 case ELF::R_AARCH64_MOVW_UABS_G0_NC:
354 return AArch64MCExpr::create(Expr, AArch64MCExpr::VK_ABS_G0_NC, Ctx);
355 default:
356 break;
357 }
358 }
359 return Expr;
360 }
361
362 bool getSymbolRefOperandNum(const MCInst &Inst, unsigned &OpNum) const {
363 if (OpNum >= MCPlus::getNumPrimeOperands(Inst))
364 return false;
365
366 // Auto-select correct operand number
367 if (OpNum == 0) {
368 if (isConditionalBranch(Inst) || isADR(Inst) || isADRP(Inst))
369 OpNum = 1;
370 if (isTB(Inst))
371 OpNum = 2;
372 if (isMOVW(Inst))
373 OpNum = 1;
374 }
375
376 return true;
377 }
378
379 const MCSymbol *getTargetSymbol(const MCExpr *Expr) const override {
380 auto *AArchExpr = dyn_cast<AArch64MCExpr>(Expr);
381 if (AArchExpr && AArchExpr->getSubExpr())
382 return getTargetSymbol(AArchExpr->getSubExpr());
383
384 auto *BinExpr = dyn_cast<MCBinaryExpr>(Expr);
385 if (BinExpr)
386 return getTargetSymbol(BinExpr->getLHS());
387
388 auto *SymExpr = dyn_cast<MCSymbolRefExpr>(Expr);
389 if (SymExpr && SymExpr->getKind() == MCSymbolRefExpr::VK_None)
390 return &SymExpr->getSymbol();
391
392 return nullptr;
393 }
394
395 const MCSymbol *getTargetSymbol(const MCInst &Inst,
396 unsigned OpNum = 0) const override {
397 if (!getSymbolRefOperandNum(Inst, OpNum))
398 return nullptr;
399
400 const MCOperand &Op = Inst.getOperand(OpNum);
401 if (!Op.isExpr())
402 return nullptr;
403
404 return getTargetSymbol(Op.getExpr());
405 }
406
407 int64_t getTargetAddend(const MCExpr *Expr) const override {
408 auto *AArchExpr = dyn_cast<AArch64MCExpr>(Expr);
409 if (AArchExpr && AArchExpr->getSubExpr())
410 return getTargetAddend(AArchExpr->getSubExpr());
411
412 auto *BinExpr = dyn_cast<MCBinaryExpr>(Expr);
413 if (BinExpr && BinExpr->getOpcode() == MCBinaryExpr::Add)
414 return getTargetAddend(BinExpr->getRHS());
415
416 auto *ConstExpr = dyn_cast<MCConstantExpr>(Expr);
417 if (ConstExpr)
418 return ConstExpr->getValue();
419
420 return 0;
421 }
422
423 int64_t getTargetAddend(const MCInst &Inst,
424 unsigned OpNum = 0) const override {
425 if (!getSymbolRefOperandNum(Inst, OpNum))
426 return 0;
427
428 const MCOperand &Op = Inst.getOperand(OpNum);
429 if (!Op.isExpr())
430 return 0;
431
432 return getTargetAddend(Op.getExpr());
433 }
434
435 bool evaluateBranch(const MCInst &Inst, uint64_t Addr, uint64_t Size,
436 uint64_t &Target) const override {
437 size_t OpNum = 0;
438
439 if (isConditionalBranch(Inst)) {
440 assert(MCPlus::getNumPrimeOperands(Inst) >= 2 &&(static_cast <bool> (MCPlus::getNumPrimeOperands(Inst) >=
2 && "Invalid number of operands") ? void (0) : __assert_fail
("MCPlus::getNumPrimeOperands(Inst) >= 2 && \"Invalid number of operands\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 441, __extension__
__PRETTY_FUNCTION__))
441 "Invalid number of operands")(static_cast <bool> (MCPlus::getNumPrimeOperands(Inst) >=
2 && "Invalid number of operands") ? void (0) : __assert_fail
("MCPlus::getNumPrimeOperands(Inst) >= 2 && \"Invalid number of operands\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 441, __extension__
__PRETTY_FUNCTION__))
;
442 OpNum = 1;
443 }
444
445 if (isTB(Inst)) {
446 assert(MCPlus::getNumPrimeOperands(Inst) >= 3 &&(static_cast <bool> (MCPlus::getNumPrimeOperands(Inst) >=
3 && "Invalid number of operands") ? void (0) : __assert_fail
("MCPlus::getNumPrimeOperands(Inst) >= 3 && \"Invalid number of operands\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 447, __extension__
__PRETTY_FUNCTION__))
447 "Invalid number of operands")(static_cast <bool> (MCPlus::getNumPrimeOperands(Inst) >=
3 && "Invalid number of operands") ? void (0) : __assert_fail
("MCPlus::getNumPrimeOperands(Inst) >= 3 && \"Invalid number of operands\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 447, __extension__
__PRETTY_FUNCTION__))
;
448 OpNum = 2;
449 }
450
451 if (Info->get(Inst.getOpcode()).OpInfo[OpNum].OperandType !=
452 MCOI::OPERAND_PCREL) {
453 assert((isIndirectBranch(Inst) || isIndirectCall(Inst)) &&(static_cast <bool> ((isIndirectBranch(Inst) || isIndirectCall
(Inst)) && "FAILED evaluateBranch") ? void (0) : __assert_fail
("(isIndirectBranch(Inst) || isIndirectCall(Inst)) && \"FAILED evaluateBranch\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 454, __extension__
__PRETTY_FUNCTION__))
454 "FAILED evaluateBranch")(static_cast <bool> ((isIndirectBranch(Inst) || isIndirectCall
(Inst)) && "FAILED evaluateBranch") ? void (0) : __assert_fail
("(isIndirectBranch(Inst) || isIndirectCall(Inst)) && \"FAILED evaluateBranch\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 454, __extension__
__PRETTY_FUNCTION__))
;
455 return false;
456 }
457
458 int64_t Imm = Inst.getOperand(OpNum).getImm() << 2;
459 Target = Addr + Imm;
460 return true;
461 }
462
463 bool replaceBranchTarget(MCInst &Inst, const MCSymbol *TBB,
464 MCContext *Ctx) const override {
465 assert((isCall(Inst) || isBranch(Inst)) && !isIndirectBranch(Inst) &&(static_cast <bool> ((isCall(Inst) || isBranch(Inst)) &&
!isIndirectBranch(Inst) && "Invalid instruction") ? void
(0) : __assert_fail ("(isCall(Inst) || isBranch(Inst)) && !isIndirectBranch(Inst) && \"Invalid instruction\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 466, __extension__
__PRETTY_FUNCTION__))
466 "Invalid instruction")(static_cast <bool> ((isCall(Inst) || isBranch(Inst)) &&
!isIndirectBranch(Inst) && "Invalid instruction") ? void
(0) : __assert_fail ("(isCall(Inst) || isBranch(Inst)) && !isIndirectBranch(Inst) && \"Invalid instruction\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 466, __extension__
__PRETTY_FUNCTION__))
;
467 assert(MCPlus::getNumPrimeOperands(Inst) >= 1 &&(static_cast <bool> (MCPlus::getNumPrimeOperands(Inst) >=
1 && "Invalid number of operands") ? void (0) : __assert_fail
("MCPlus::getNumPrimeOperands(Inst) >= 1 && \"Invalid number of operands\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 468, __extension__
__PRETTY_FUNCTION__))
468 "Invalid number of operands")(static_cast <bool> (MCPlus::getNumPrimeOperands(Inst) >=
1 && "Invalid number of operands") ? void (0) : __assert_fail
("MCPlus::getNumPrimeOperands(Inst) >= 1 && \"Invalid number of operands\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 468, __extension__
__PRETTY_FUNCTION__))
;
469 MCInst::iterator OI = Inst.begin();
470
471 if (isConditionalBranch(Inst)) {
472 assert(MCPlus::getNumPrimeOperands(Inst) >= 2 &&(static_cast <bool> (MCPlus::getNumPrimeOperands(Inst) >=
2 && "Invalid number of operands") ? void (0) : __assert_fail
("MCPlus::getNumPrimeOperands(Inst) >= 2 && \"Invalid number of operands\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 473, __extension__
__PRETTY_FUNCTION__))
473 "Invalid number of operands")(static_cast <bool> (MCPlus::getNumPrimeOperands(Inst) >=
2 && "Invalid number of operands") ? void (0) : __assert_fail
("MCPlus::getNumPrimeOperands(Inst) >= 2 && \"Invalid number of operands\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 473, __extension__
__PRETTY_FUNCTION__))
;
474 ++OI;
475 }
476
477 if (isTB(Inst)) {
478 assert(MCPlus::getNumPrimeOperands(Inst) >= 3 &&(static_cast <bool> (MCPlus::getNumPrimeOperands(Inst) >=
3 && "Invalid number of operands") ? void (0) : __assert_fail
("MCPlus::getNumPrimeOperands(Inst) >= 3 && \"Invalid number of operands\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 479, __extension__
__PRETTY_FUNCTION__))
479 "Invalid number of operands")(static_cast <bool> (MCPlus::getNumPrimeOperands(Inst) >=
3 && "Invalid number of operands") ? void (0) : __assert_fail
("MCPlus::getNumPrimeOperands(Inst) >= 3 && \"Invalid number of operands\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 479, __extension__
__PRETTY_FUNCTION__))
;
480 OI = Inst.begin() + 2;
481 }
482
483 *OI = MCOperand::createExpr(
484 MCSymbolRefExpr::create(TBB, MCSymbolRefExpr::VK_None, *Ctx));
485 return true;
486 }
487
488 /// Matches indirect branch patterns in AArch64 related to a jump table (JT),
489 /// helping us to build the complete CFG. A typical indirect branch to
490 /// a jump table entry in AArch64 looks like the following:
491 ///
492 /// adrp x1, #-7585792 # Get JT Page location
493 /// add x1, x1, #692 # Complement with JT Page offset
494 /// ldrh w0, [x1, w0, uxtw #1] # Loads JT entry
495 /// adr x1, #12 # Get PC + 12 (end of this BB) used next
496 /// add x0, x1, w0, sxth #2 # Finish building branch target
497 /// # (entries in JT are relative to the end
498 /// # of this BB)
499 /// br x0 # Indirect jump instruction
500 ///
501 bool analyzeIndirectBranchFragment(
502 const MCInst &Inst,
503 DenseMap<const MCInst *, SmallVector<MCInst *, 4>> &UDChain,
504 const MCExpr *&JumpTable, int64_t &Offset, int64_t &ScaleValue,
505 MCInst *&PCRelBase) const {
506 // Expect AArch64 BR
507 assert(Inst.getOpcode() == AArch64::BR && "Unexpected opcode")(static_cast <bool> (Inst.getOpcode() == AArch64::BR &&
"Unexpected opcode") ? void (0) : __assert_fail ("Inst.getOpcode() == AArch64::BR && \"Unexpected opcode\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 507, __extension__
__PRETTY_FUNCTION__))
;
3
Assuming the condition is true
4
'?' condition is true
508
509 // Match the indirect branch pattern for aarch64
510 SmallVector<MCInst *, 4> &UsesRoot = UDChain[&Inst];
511 if (UsesRoot.size() == 0 || UsesRoot[0] == nullptr)
5
Assuming the condition is false
6
Assuming the condition is false
7
Taking false branch
512 return false;
513
514 const MCInst *DefAdd = UsesRoot[0];
515
516 // Now we match an ADD
517 if (!isADD(*DefAdd)) {
8
Taking false branch
518 // If the address is not broken up in two parts, this is not branching
519 // according to a jump table entry. Fail.
520 return false;
521 }
522 if (DefAdd->getOpcode() == AArch64::ADDXri) {
9
Assuming the condition is false
10
Taking false branch
523 // This can happen when there is no offset, but a direct jump that was
524 // transformed into an indirect one (indirect tail call) :
525 // ADRP x2, Perl_re_compiler
526 // ADD x2, x2, :lo12:Perl_re_compiler
527 // BR x2
528 return false;
529 }
530 if (DefAdd->getOpcode() == AArch64::ADDXrs) {
11
Assuming the condition is false
531 // Covers the less common pattern where JT entries are relative to
532 // the JT itself (like x86). Seems less efficient since we can't
533 // assume the JT is aligned at 4B boundary and thus drop 2 bits from
534 // JT values.
535 // cde264:
536 // adrp x12, #21544960 ; 216a000
537 // add x12, x12, #1696 ; 216a6a0 (JT object in .rodata)
538 // ldrsw x8, [x12, x8, lsl #2] --> loads e.g. 0xfeb73bd8
539 // * add x8, x8, x12 --> = cde278, next block
540 // br x8
541 // cde278:
542 //
543 // Parsed as ADDXrs reg:x8 reg:x8 reg:x12 imm:0
544 return false;
545 }
546 assert(DefAdd->getOpcode() == AArch64::ADDXrx &&(static_cast <bool> (DefAdd->getOpcode() == AArch64::
ADDXrx && "Failed to match indirect branch!") ? void (
0) : __assert_fail ("DefAdd->getOpcode() == AArch64::ADDXrx && \"Failed to match indirect branch!\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 547, __extension__
__PRETTY_FUNCTION__))
12
Taking false branch
13
Assuming the condition is true
14
'?' condition is true
547 "Failed to match indirect branch!")(static_cast <bool> (DefAdd->getOpcode() == AArch64::
ADDXrx && "Failed to match indirect branch!") ? void (
0) : __assert_fail ("DefAdd->getOpcode() == AArch64::ADDXrx && \"Failed to match indirect branch!\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 547, __extension__
__PRETTY_FUNCTION__))
;
548
549 // Validate ADD operands
550 int64_t OperandExtension = DefAdd->getOperand(3).getImm();
551 unsigned ShiftVal = AArch64_AM::getArithShiftValue(OperandExtension);
552 AArch64_AM::ShiftExtendType ExtendType =
553 AArch64_AM::getArithExtendType(OperandExtension);
554 if (ShiftVal != 2)
15
Assuming 'ShiftVal' is equal to 2
16
Taking false branch
555 llvm_unreachable("Failed to match indirect branch! (fragment 2)")::llvm::llvm_unreachable_internal("Failed to match indirect branch! (fragment 2)"
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 555)
;
556
557 if (ExtendType
16.1
'ExtendType' is equal to SXTB
== AArch64_AM::SXTB)
17
Taking true branch
558 ScaleValue = 1LL;
559 else if (ExtendType == AArch64_AM::SXTH)
560 ScaleValue = 2LL;
561 else if (ExtendType == AArch64_AM::SXTW)
562 ScaleValue = 4LL;
563 else
564 llvm_unreachable("Failed to match indirect branch! (fragment 3)")::llvm::llvm_unreachable_internal("Failed to match indirect branch! (fragment 3)"
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 564)
;
565
566 // Match an ADR to load base address to be used when addressing JT targets
567 SmallVector<MCInst *, 4> &UsesAdd = UDChain[DefAdd];
568 if (UsesAdd.size() <= 1 || UsesAdd[1] == nullptr || UsesAdd[2] == nullptr) {
18
Assuming the condition is false
19
Assuming the condition is false
20
Assuming the condition is false
21
Taking false branch
569 // This happens when we don't have enough context about this jump table
570 // because the jumping code sequence was split in multiple basic blocks.
571 // This was observed in the wild in HHVM code (dispatchImpl).
572 return false;
573 }
574 MCInst *DefBaseAddr = UsesAdd[1];
575 assert(DefBaseAddr->getOpcode() == AArch64::ADR &&(static_cast <bool> (DefBaseAddr->getOpcode() == AArch64
::ADR && "Failed to match indirect branch pattern! (fragment 3)"
) ? void (0) : __assert_fail ("DefBaseAddr->getOpcode() == AArch64::ADR && \"Failed to match indirect branch pattern! (fragment 3)\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 576, __extension__
__PRETTY_FUNCTION__))
22
Assuming the condition is true
23
'?' condition is true
576 "Failed to match indirect branch pattern! (fragment 3)")(static_cast <bool> (DefBaseAddr->getOpcode() == AArch64
::ADR && "Failed to match indirect branch pattern! (fragment 3)"
) ? void (0) : __assert_fail ("DefBaseAddr->getOpcode() == AArch64::ADR && \"Failed to match indirect branch pattern! (fragment 3)\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 576, __extension__
__PRETTY_FUNCTION__))
;
577
578 PCRelBase = DefBaseAddr;
579 // Match LOAD to load the jump table (relative) target
580 const MCInst *DefLoad = UsesAdd[2];
581 assert(isLoad(*DefLoad) &&(static_cast <bool> (isLoad(*DefLoad) && "Failed to match indirect branch load pattern! (1)"
) ? void (0) : __assert_fail ("isLoad(*DefLoad) && \"Failed to match indirect branch load pattern! (1)\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 582, __extension__
__PRETTY_FUNCTION__))
24
Assuming the condition is true
25
'?' condition is true
582 "Failed to match indirect branch load pattern! (1)")(static_cast <bool> (isLoad(*DefLoad) && "Failed to match indirect branch load pattern! (1)"
) ? void (0) : __assert_fail ("isLoad(*DefLoad) && \"Failed to match indirect branch load pattern! (1)\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 582, __extension__
__PRETTY_FUNCTION__))
;
583 assert((ScaleValue != 1LL || isLDRB(*DefLoad)) &&(static_cast <bool> ((ScaleValue != 1LL || isLDRB(*DefLoad
)) && "Failed to match indirect branch load pattern! (2)"
) ? void (0) : __assert_fail ("(ScaleValue != 1LL || isLDRB(*DefLoad)) && \"Failed to match indirect branch load pattern! (2)\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 584, __extension__
__PRETTY_FUNCTION__))
26
'?' condition is true
584 "Failed to match indirect branch load pattern! (2)")(static_cast <bool> ((ScaleValue != 1LL || isLDRB(*DefLoad
)) && "Failed to match indirect branch load pattern! (2)"
) ? void (0) : __assert_fail ("(ScaleValue != 1LL || isLDRB(*DefLoad)) && \"Failed to match indirect branch load pattern! (2)\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 584, __extension__
__PRETTY_FUNCTION__))
;
585 assert((ScaleValue != 2LL || isLDRH(*DefLoad)) &&(static_cast <bool> ((ScaleValue != 2LL || isLDRH(*DefLoad
)) && "Failed to match indirect branch load pattern! (3)"
) ? void (0) : __assert_fail ("(ScaleValue != 2LL || isLDRH(*DefLoad)) && \"Failed to match indirect branch load pattern! (3)\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 586, __extension__
__PRETTY_FUNCTION__))
27
'?' condition is true
586 "Failed to match indirect branch load pattern! (3)")(static_cast <bool> ((ScaleValue != 2LL || isLDRH(*DefLoad
)) && "Failed to match indirect branch load pattern! (3)"
) ? void (0) : __assert_fail ("(ScaleValue != 2LL || isLDRH(*DefLoad)) && \"Failed to match indirect branch load pattern! (3)\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 586, __extension__
__PRETTY_FUNCTION__))
;
587
588 // Match ADD that calculates the JumpTable Base Address (not the offset)
589 SmallVector<MCInst *, 4> &UsesLoad = UDChain[DefLoad];
590 const MCInst *DefJTBaseAdd = UsesLoad[1];
591 MCPhysReg From, To;
592 if (DefJTBaseAdd == nullptr || isLoadFromStack(*DefJTBaseAdd) ||
28
Assuming the condition is false
29
Assuming the condition is false
593 isRegToRegMove(*DefJTBaseAdd, From, To)) {
30
Assuming the condition is false
594 // Sometimes base address may have been defined in another basic block
595 // (hoisted). Return with no jump table info.
596 JumpTable = nullptr;
597 return true;
598 }
599
600 assert(DefJTBaseAdd->getOpcode() == AArch64::ADDXri &&(static_cast <bool> (DefJTBaseAdd->getOpcode() == AArch64
::ADDXri && "Failed to match jump table base address pattern! (1)"
) ? void (0) : __assert_fail ("DefJTBaseAdd->getOpcode() == AArch64::ADDXri && \"Failed to match jump table base address pattern! (1)\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 601, __extension__
__PRETTY_FUNCTION__))
31
Taking false branch
32
Assuming the condition is true
33
'?' condition is true
601 "Failed to match jump table base address pattern! (1)")(static_cast <bool> (DefJTBaseAdd->getOpcode() == AArch64
::ADDXri && "Failed to match jump table base address pattern! (1)"
) ? void (0) : __assert_fail ("DefJTBaseAdd->getOpcode() == AArch64::ADDXri && \"Failed to match jump table base address pattern! (1)\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 601, __extension__
__PRETTY_FUNCTION__))
;
602
603 if (DefJTBaseAdd->getOperand(2).isImm())
34
Taking true branch
604 Offset = DefJTBaseAdd->getOperand(2).getImm();
605 SmallVector<MCInst *, 4> &UsesJTBaseAdd = UDChain[DefJTBaseAdd];
606 const MCInst *DefJTBasePage = UsesJTBaseAdd[1];
607 if (DefJTBasePage == nullptr || isLoadFromStack(*DefJTBasePage)) {
35
Assuming the condition is false
36
Assuming the condition is false
608 JumpTable = nullptr;
609 return true;
610 }
611 assert(DefJTBasePage->getOpcode() == AArch64::ADRP &&(static_cast <bool> (DefJTBasePage->getOpcode() == AArch64
::ADRP && "Failed to match jump table base page pattern! (2)"
) ? void (0) : __assert_fail ("DefJTBasePage->getOpcode() == AArch64::ADRP && \"Failed to match jump table base page pattern! (2)\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 612, __extension__
__PRETTY_FUNCTION__))
37
Taking false branch
38
Assuming the condition is true
39
'?' condition is true
612 "Failed to match jump table base page pattern! (2)")(static_cast <bool> (DefJTBasePage->getOpcode() == AArch64
::ADRP && "Failed to match jump table base page pattern! (2)"
) ? void (0) : __assert_fail ("DefJTBasePage->getOpcode() == AArch64::ADRP && \"Failed to match jump table base page pattern! (2)\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 612, __extension__
__PRETTY_FUNCTION__))
;
613 if (DefJTBasePage->getOperand(1).isExpr())
40
Taking false branch
614 JumpTable = DefJTBasePage->getOperand(1).getExpr();
615 return true;
41
Returning without writing to 'JumpTable'
616 }
617
618 DenseMap<const MCInst *, SmallVector<MCInst *, 4>>
619 computeLocalUDChain(const MCInst *CurInstr, InstructionIterator Begin,
620 InstructionIterator End) const {
621 DenseMap<int, MCInst *> RegAliasTable;
622 DenseMap<const MCInst *, SmallVector<MCInst *, 4>> Uses;
623
624 auto addInstrOperands = [&](const MCInst &Instr) {
625 // Update Uses table
626 for (const MCOperand &Operand : MCPlus::primeOperands(Instr)) {
627 if (!Operand.isReg())
628 continue;
629 unsigned Reg = Operand.getReg();
630 MCInst *AliasInst = RegAliasTable[Reg];
631 Uses[&Instr].push_back(AliasInst);
632 LLVM_DEBUG({do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("mcplus")) { { dbgs() << "Adding reg operand " <<
Reg << " refs "; if (AliasInst != nullptr) AliasInst->
dump(); else dbgs() << "\n"; }; } } while (false)
633 dbgs() << "Adding reg operand " << Reg << " refs ";do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("mcplus")) { { dbgs() << "Adding reg operand " <<
Reg << " refs "; if (AliasInst != nullptr) AliasInst->
dump(); else dbgs() << "\n"; }; } } while (false)
634 if (AliasInst != nullptr)do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("mcplus")) { { dbgs() << "Adding reg operand " <<
Reg << " refs "; if (AliasInst != nullptr) AliasInst->
dump(); else dbgs() << "\n"; }; } } while (false)
635 AliasInst->dump();do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("mcplus")) { { dbgs() << "Adding reg operand " <<
Reg << " refs "; if (AliasInst != nullptr) AliasInst->
dump(); else dbgs() << "\n"; }; } } while (false)
636 elsedo { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("mcplus")) { { dbgs() << "Adding reg operand " <<
Reg << " refs "; if (AliasInst != nullptr) AliasInst->
dump(); else dbgs() << "\n"; }; } } while (false)
637 dbgs() << "\n";do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("mcplus")) { { dbgs() << "Adding reg operand " <<
Reg << " refs "; if (AliasInst != nullptr) AliasInst->
dump(); else dbgs() << "\n"; }; } } while (false)
638 })do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("mcplus")) { { dbgs() << "Adding reg operand " <<
Reg << " refs "; if (AliasInst != nullptr) AliasInst->
dump(); else dbgs() << "\n"; }; } } while (false)
;
639 }
640 };
641
642 LLVM_DEBUG(dbgs() << "computeLocalUDChain\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("mcplus")) { dbgs() << "computeLocalUDChain\n"; } } while
(false)
;
643 bool TerminatorSeen = false;
644 for (auto II = Begin; II != End; ++II) {
645 MCInst &Instr = *II;
646 // Ignore nops and CFIs
647 if (isPseudo(Instr) || isNoop(Instr))
648 continue;
649 if (TerminatorSeen) {
650 RegAliasTable.clear();
651 Uses.clear();
652 }
653
654 LLVM_DEBUG(dbgs() << "Now updating for:\n ")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("mcplus")) { dbgs() << "Now updating for:\n "; } } while
(false)
;
655 LLVM_DEBUG(Instr.dump())do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("mcplus")) { Instr.dump(); } } while (false)
;
656 addInstrOperands(Instr);
657
658 BitVector Regs = BitVector(RegInfo->getNumRegs(), false);
659 getWrittenRegs(Instr, Regs);
660
661 // Update register definitions after this point
662 for (int Idx : Regs.set_bits()) {
663 RegAliasTable[Idx] = &Instr;
664 LLVM_DEBUG(dbgs() << "Setting reg " << Idxdo { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("mcplus")) { dbgs() << "Setting reg " << Idx <<
" def to current instr.\n"; } } while (false)
665 << " def to current instr.\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("mcplus")) { dbgs() << "Setting reg " << Idx <<
" def to current instr.\n"; } } while (false)
;
666 }
667
668 TerminatorSeen = isTerminator(Instr);
669 }
670
671 // Process the last instruction, which is not currently added into the
672 // instruction stream
673 if (CurInstr)
674 addInstrOperands(*CurInstr);
675
676 return Uses;
677 }
678
679 IndirectBranchType analyzeIndirectBranch(
680 MCInst &Instruction, InstructionIterator Begin, InstructionIterator End,
681 const unsigned PtrSize, MCInst *&MemLocInstrOut, unsigned &BaseRegNumOut,
682 unsigned &IndexRegNumOut, int64_t &DispValueOut,
683 const MCExpr *&DispExprOut, MCInst *&PCRelBaseOut) const override {
684 MemLocInstrOut = nullptr;
685 BaseRegNumOut = AArch64::NoRegister;
686 IndexRegNumOut = AArch64::NoRegister;
687 DispValueOut = 0;
688 DispExprOut = nullptr;
689
690 // An instruction referencing memory used by jump instruction (directly or
691 // via register). This location could be an array of function pointers
692 // in case of indirect tail call, or a jump table.
693 MCInst *MemLocInstr = nullptr;
694
695 // Analyze the memory location.
696 int64_t ScaleValue, DispValue;
697 const MCExpr *DispExpr;
1
'DispExpr' declared without an initial value
698
699 DenseMap<const MCInst *, SmallVector<llvm::MCInst *, 4>> UDChain =
700 computeLocalUDChain(&Instruction, Begin, End);
701 MCInst *PCRelBase;
702 if (!analyzeIndirectBranchFragment(Instruction, UDChain, DispExpr,
2
Calling 'AArch64MCPlusBuilder::analyzeIndirectBranchFragment'
42
Returning from 'AArch64MCPlusBuilder::analyzeIndirectBranchFragment'
43
Taking false branch
703 DispValue, ScaleValue, PCRelBase))
704 return IndirectBranchType::UNKNOWN;
705
706 MemLocInstrOut = MemLocInstr;
707 DispValueOut = DispValue;
708 DispExprOut = DispExpr;
44
Assigned value is garbage or undefined
709 PCRelBaseOut = PCRelBase;
710 return IndirectBranchType::POSSIBLE_PIC_JUMP_TABLE;
711 }
712
713 /// Matches PLT entry pattern and returns the associated GOT entry address.
714 /// Typical PLT entry looks like the following:
715 ///
716 /// adrp x16, 230000
717 /// ldr x17, [x16, #3040]
718 /// add x16, x16, #0xbe0
719 /// br x17
720 ///
721 uint64_t analyzePLTEntry(MCInst &Instruction, InstructionIterator Begin,
722 InstructionIterator End,
723 uint64_t BeginPC) const override {
724 // Check branch instruction
725 MCInst *Branch = &Instruction;
726 assert(Branch->getOpcode() == AArch64::BR && "Unexpected opcode")(static_cast <bool> (Branch->getOpcode() == AArch64::
BR && "Unexpected opcode") ? void (0) : __assert_fail
("Branch->getOpcode() == AArch64::BR && \"Unexpected opcode\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 726, __extension__
__PRETTY_FUNCTION__))
;
727
728 DenseMap<const MCInst *, SmallVector<llvm::MCInst *, 4>> UDChain =
729 computeLocalUDChain(Branch, Begin, End);
730
731 // Match ldr instruction
732 SmallVector<MCInst *, 4> &BranchUses = UDChain[Branch];
733 if (BranchUses.size() < 1 || BranchUses[0] == nullptr)
734 return 0;
735
736 // Check ldr instruction
737 const MCInst *Ldr = BranchUses[0];
738 if (Ldr->getOpcode() != AArch64::LDRXui)
739 return 0;
740
741 // Get ldr value
742 const unsigned ScaleLdr = 8; // LDRX operates on 8 bytes segments
743 assert(Ldr->getOperand(2).isImm() && "Unexpected ldr operand")(static_cast <bool> (Ldr->getOperand(2).isImm() &&
"Unexpected ldr operand") ? void (0) : __assert_fail ("Ldr->getOperand(2).isImm() && \"Unexpected ldr operand\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 743, __extension__
__PRETTY_FUNCTION__))
;
744 const uint64_t Offset = Ldr->getOperand(2).getImm() * ScaleLdr;
745
746 // Match adrp instruction
747 SmallVector<MCInst *, 4> &LdrUses = UDChain[Ldr];
748 if (LdrUses.size() < 2 || LdrUses[1] == nullptr)
749 return 0;
750
751 // Check adrp instruction
752 MCInst *Adrp = LdrUses[1];
753 if (Adrp->getOpcode() != AArch64::ADRP)
754 return 0;
755
756 // Get adrp instruction PC
757 const unsigned InstSize = 4;
758 uint64_t AdrpPC = BeginPC;
759 for (InstructionIterator It = Begin; It != End; ++It) {
760 if (&(*It) == Adrp)
761 break;
762 AdrpPC += InstSize;
763 }
764
765 // Get adrp value
766 uint64_t Base;
767 assert(Adrp->getOperand(1).isImm() && "Unexpected adrp operand")(static_cast <bool> (Adrp->getOperand(1).isImm() &&
"Unexpected adrp operand") ? void (0) : __assert_fail ("Adrp->getOperand(1).isImm() && \"Unexpected adrp operand\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 767, __extension__
__PRETTY_FUNCTION__))
;
768 bool Ret = evaluateMemOperandTarget(*Adrp, Base, AdrpPC, InstSize);
769 assert(Ret && "Failed to evaluate adrp")(static_cast <bool> (Ret && "Failed to evaluate adrp"
) ? void (0) : __assert_fail ("Ret && \"Failed to evaluate adrp\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 769, __extension__
__PRETTY_FUNCTION__))
;
770 (void)Ret;
771
772 return Base + Offset;
773 }
774
775 unsigned getInvertedBranchOpcode(unsigned Opcode) const {
776 switch (Opcode) {
777 default:
778 llvm_unreachable("Failed to invert branch opcode")::llvm::llvm_unreachable_internal("Failed to invert branch opcode"
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 778)
;
779 return Opcode;
780 case AArch64::TBZW: return AArch64::TBNZW;
781 case AArch64::TBZX: return AArch64::TBNZX;
782 case AArch64::TBNZW: return AArch64::TBZW;
783 case AArch64::TBNZX: return AArch64::TBZX;
784 case AArch64::CBZW: return AArch64::CBNZW;
785 case AArch64::CBZX: return AArch64::CBNZX;
786 case AArch64::CBNZW: return AArch64::CBZW;
787 case AArch64::CBNZX: return AArch64::CBZX;
788 }
789 }
790
791 unsigned getCondCode(const MCInst &Inst) const override {
792 // AArch64 does not use conditional codes, so we just return the opcode
793 // of the conditional branch here.
794 return Inst.getOpcode();
795 }
796
797 unsigned getCanonicalBranchCondCode(unsigned Opcode) const override {
798 switch (Opcode) {
799 default:
800 return Opcode;
801 case AArch64::TBNZW: return AArch64::TBZW;
802 case AArch64::TBNZX: return AArch64::TBZX;
803 case AArch64::CBNZW: return AArch64::CBZW;
804 case AArch64::CBNZX: return AArch64::CBZX;
805 }
806 }
807
808 bool reverseBranchCondition(MCInst &Inst, const MCSymbol *TBB,
809 MCContext *Ctx) const override {
810 if (isTB(Inst) || isCB(Inst)) {
811 Inst.setOpcode(getInvertedBranchOpcode(Inst.getOpcode()));
812 assert(Inst.getOpcode() != 0 && "Invalid branch instruction")(static_cast <bool> (Inst.getOpcode() != 0 && "Invalid branch instruction"
) ? void (0) : __assert_fail ("Inst.getOpcode() != 0 && \"Invalid branch instruction\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 812, __extension__
__PRETTY_FUNCTION__))
;
813 } else if (Inst.getOpcode() == AArch64::Bcc) {
814 Inst.getOperand(0).setImm(AArch64CC::getInvertedCondCode(
815 static_cast<AArch64CC::CondCode>(Inst.getOperand(0).getImm())));
816 assert(Inst.getOperand(0).getImm() != AArch64CC::AL &&(static_cast <bool> (Inst.getOperand(0).getImm() != AArch64CC
::AL && Inst.getOperand(0).getImm() != AArch64CC::NV &&
"Can't reverse ALWAYS cond code") ? void (0) : __assert_fail
("Inst.getOperand(0).getImm() != AArch64CC::AL && Inst.getOperand(0).getImm() != AArch64CC::NV && \"Can't reverse ALWAYS cond code\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 818, __extension__
__PRETTY_FUNCTION__))
817 Inst.getOperand(0).getImm() != AArch64CC::NV &&(static_cast <bool> (Inst.getOperand(0).getImm() != AArch64CC
::AL && Inst.getOperand(0).getImm() != AArch64CC::NV &&
"Can't reverse ALWAYS cond code") ? void (0) : __assert_fail
("Inst.getOperand(0).getImm() != AArch64CC::AL && Inst.getOperand(0).getImm() != AArch64CC::NV && \"Can't reverse ALWAYS cond code\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 818, __extension__
__PRETTY_FUNCTION__))
818 "Can't reverse ALWAYS cond code")(static_cast <bool> (Inst.getOperand(0).getImm() != AArch64CC
::AL && Inst.getOperand(0).getImm() != AArch64CC::NV &&
"Can't reverse ALWAYS cond code") ? void (0) : __assert_fail
("Inst.getOperand(0).getImm() != AArch64CC::AL && Inst.getOperand(0).getImm() != AArch64CC::NV && \"Can't reverse ALWAYS cond code\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 818, __extension__
__PRETTY_FUNCTION__))
;
819 } else {
820 LLVM_DEBUG(Inst.dump())do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("mcplus")) { Inst.dump(); } } while (false)
;
821 llvm_unreachable("Unrecognized branch instruction")::llvm::llvm_unreachable_internal("Unrecognized branch instruction"
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 821)
;
822 }
823 return replaceBranchTarget(Inst, TBB, Ctx);
824 }
825
826 int getPCRelEncodingSize(const MCInst &Inst) const override {
827 switch (Inst.getOpcode()) {
828 default:
829 llvm_unreachable("Failed to get pcrel encoding size")::llvm::llvm_unreachable_internal("Failed to get pcrel encoding size"
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 829)
;
830 return 0;
831 case AArch64::TBZW: return 16;
832 case AArch64::TBZX: return 16;
833 case AArch64::TBNZW: return 16;
834 case AArch64::TBNZX: return 16;
835 case AArch64::CBZW: return 21;
836 case AArch64::CBZX: return 21;
837 case AArch64::CBNZW: return 21;
838 case AArch64::CBNZX: return 21;
839 case AArch64::B: return 28;
840 case AArch64::BL: return 28;
841 case AArch64::Bcc: return 21;
842 }
843 }
844
845 int getShortJmpEncodingSize() const override { return 33; }
846
847 int getUncondBranchEncodingSize() const override { return 28; }
848
849 bool createTailCall(MCInst &Inst, const MCSymbol *Target,
850 MCContext *Ctx) override {
851 Inst.setOpcode(AArch64::B);
852 Inst.addOperand(MCOperand::createExpr(getTargetExprFor(
853 Inst, MCSymbolRefExpr::create(Target, MCSymbolRefExpr::VK_None, *Ctx),
854 *Ctx, 0)));
855 setTailCall(Inst);
856 return true;
857 }
858
859 void createLongTailCall(InstructionListType &Seq, const MCSymbol *Target,
860 MCContext *Ctx) override {
861 createShortJmp(Seq, Target, Ctx, /*IsTailCall*/ true);
862 }
863
864 bool createTrap(MCInst &Inst) const override {
865 Inst.clear();
866 Inst.setOpcode(AArch64::BRK);
867 Inst.addOperand(MCOperand::createImm(1));
868 return true;
869 }
870
871 bool convertJmpToTailCall(MCInst &Inst) override {
872 setTailCall(Inst);
873 return true;
874 }
875
876 bool convertTailCallToJmp(MCInst &Inst) override {
877 removeAnnotation(Inst, MCPlus::MCAnnotation::kTailCall);
878 clearOffset(Inst);
879 if (getConditionalTailCall(Inst))
880 unsetConditionalTailCall(Inst);
881 return true;
882 }
883
884 bool lowerTailCall(MCInst &Inst) override {
885 removeAnnotation(Inst, MCPlus::MCAnnotation::kTailCall);
886 if (getConditionalTailCall(Inst))
887 unsetConditionalTailCall(Inst);
888 return true;
889 }
890
891 bool isNoop(const MCInst &Inst) const override {
892 return Inst.getOpcode() == AArch64::HINT &&
893 Inst.getOperand(0).getImm() == 0;
894 }
895
896 bool createNoop(MCInst &Inst) const override {
897 Inst.setOpcode(AArch64::HINT);
898 Inst.clear();
899 Inst.addOperand(MCOperand::createImm(0));
900 return true;
901 }
902
903 bool isStore(const MCInst &Inst) const override { return false; }
904
905 bool analyzeBranch(InstructionIterator Begin, InstructionIterator End,
906 const MCSymbol *&TBB, const MCSymbol *&FBB,
907 MCInst *&CondBranch,
908 MCInst *&UncondBranch) const override {
909 auto I = End;
910
911 while (I != Begin) {
912 --I;
913
914 // Ignore nops and CFIs
915 if (isPseudo(*I) || isNoop(*I))
916 continue;
917
918 // Stop when we find the first non-terminator
919 if (!isTerminator(*I) || isTailCall(*I) || !isBranch(*I))
920 break;
921
922 // Handle unconditional branches.
923 if (isUnconditionalBranch(*I)) {
924 // If any code was seen after this unconditional branch, we've seen
925 // unreachable code. Ignore them.
926 CondBranch = nullptr;
927 UncondBranch = &*I;
928 const MCSymbol *Sym = getTargetSymbol(*I);
929 assert(Sym != nullptr &&(static_cast <bool> (Sym != nullptr && "Couldn't extract BB symbol from jump operand"
) ? void (0) : __assert_fail ("Sym != nullptr && \"Couldn't extract BB symbol from jump operand\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 930, __extension__
__PRETTY_FUNCTION__))
930 "Couldn't extract BB symbol from jump operand")(static_cast <bool> (Sym != nullptr && "Couldn't extract BB symbol from jump operand"
) ? void (0) : __assert_fail ("Sym != nullptr && \"Couldn't extract BB symbol from jump operand\""
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 930, __extension__
__PRETTY_FUNCTION__))
;
931 TBB = Sym;
932 continue;
933 }
934
935 // Handle conditional branches and ignore indirect branches
936 if (isIndirectBranch(*I))
937 return false;
938
939 if (CondBranch == nullptr) {
940 const MCSymbol *TargetBB = getTargetSymbol(*I);
941 if (TargetBB == nullptr) {
942 // Unrecognized branch target
943 return false;
944 }
945 FBB = TBB;
946 TBB = TargetBB;
947 CondBranch = &*I;
948 continue;
949 }
950
951 llvm_unreachable("multiple conditional branches in one BB")::llvm::llvm_unreachable_internal("multiple conditional branches in one BB"
, "bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp", 951)
;
952 }
953 return true;
954 }
955
956 void createLongJmp(InstructionListType &Seq, const MCSymbol *Target,
957 MCContext *Ctx, bool IsTailCall) override {
958 // ip0 (r16) is reserved to the linker (refer to 5.3.1.1 of "Procedure Call
959 // Standard for the ARM 64-bit Architecture (AArch64)".
960 // The sequence of instructions we create here is the following:
961 // movz ip0, #:abs_g3:<addr>
962 // movk ip0, #:abs_g2_nc:<addr>
963 // movk ip0, #:abs_g1_nc:<addr>
964 // movk ip0, #:abs_g0_nc:<addr>
965 // br ip0
966 MCInst Inst;
967 Inst.setOpcode(AArch64::MOVZXi);
968 Inst.addOperand(MCOperand::createReg(AArch64::X16));
969 Inst.addOperand(MCOperand::createExpr(AArch64MCExpr::create(
970 MCSymbolRefExpr::create(Target, MCSymbolRefExpr::VK_None, *Ctx),
971 AArch64MCExpr::VK_ABS_G3, *Ctx)));
972 Inst.addOperand(MCOperand::createImm(0x30));
973 Seq.emplace_back(Inst);
974
975 Inst.clear();
976 Inst.setOpcode(AArch64::MOVKXi);
977 Inst.addOperand(MCOperand::createReg(AArch64::X16));
978 Inst.addOperand(MCOperand::createReg(AArch64::X16));
979 Inst.addOperand(MCOperand::createExpr(AArch64MCExpr::create(
980 MCSymbolRefExpr::create(Target, MCSymbolRefExpr::VK_None, *Ctx),
981 AArch64MCExpr::VK_ABS_G2_NC, *Ctx)));
982 Inst.addOperand(MCOperand::createImm(0x20));
983 Seq.emplace_back(Inst);
984
985 Inst.clear();
986 Inst.setOpcode(AArch64::MOVKXi);
987 Inst.addOperand(MCOperand::createReg(AArch64::X16));
988 Inst.addOperand(MCOperand::createReg(AArch64::X16));
989 Inst.addOperand(MCOperand::createExpr(AArch64MCExpr::create(
990 MCSymbolRefExpr::create(Target, MCSymbolRefExpr::VK_None, *Ctx),
991 AArch64MCExpr::VK_ABS_G1_NC, *Ctx)));
992 Inst.addOperand(MCOperand::createImm(0x10));
993 Seq.emplace_back(Inst);
994
995 Inst.clear();
996 Inst.setOpcode(AArch64::MOVKXi);
997 Inst.addOperand(MCOperand::createReg(AArch64::X16));
998 Inst.addOperand(MCOperand::createReg(AArch64::X16));
999 Inst.addOperand(MCOperand::createExpr(AArch64MCExpr::create(
1000 MCSymbolRefExpr::create(Target, MCSymbolRefExpr::VK_None, *Ctx),
1001 AArch64MCExpr::VK_ABS_G0_NC, *Ctx)));
1002 Inst.addOperand(MCOperand::createImm(0));
1003 Seq.emplace_back(Inst);
1004
1005 Inst.clear();
1006 Inst.setOpcode(AArch64::BR);
1007 Inst.addOperand(MCOperand::createReg(AArch64::X16));
1008 if (IsTailCall)
1009 setTailCall(Inst);
1010 Seq.emplace_back(Inst);
1011 }
1012
1013 void createShortJmp(InstructionListType &Seq, const MCSymbol *Target,
1014 MCContext *Ctx, bool IsTailCall) override {
1015 // ip0 (r16) is reserved to the linker (refer to 5.3.1.1 of "Procedure Call
1016 // Standard for the ARM 64-bit Architecture (AArch64)".
1017 // The sequence of instructions we create here is the following:
1018 // adrp ip0, imm
1019 // add ip0, ip0, imm
1020 // br ip0
1021 MCPhysReg Reg = AArch64::X16;
1022 InstructionListType Insts = materializeAddress(Target, Ctx, Reg);
1023 Insts.emplace_back();
1024 MCInst &Inst = Insts.back();
1025 Inst.clear();
1026 Inst.setOpcode(AArch64::BR);
1027 Inst.addOperand(MCOperand::createReg(Reg));
1028 if (IsTailCall)
1029 setTailCall(Inst);
1030 Seq.swap(Insts);
1031 }
1032
1033 /// Matching pattern here is
1034 ///
1035 /// ADRP x16, imm
1036 /// ADD x16, x16, imm
1037 /// BR x16
1038 ///
1039 uint64_t matchLinkerVeneer(InstructionIterator Begin, InstructionIterator End,
1040 uint64_t Address, const MCInst &CurInst,
1041 MCInst *&TargetHiBits, MCInst *&TargetLowBits,
1042 uint64_t &Target) const override {
1043 if (CurInst.getOpcode() != AArch64::BR || !CurInst.getOperand(0).isReg() ||
1044 CurInst.getOperand(0).getReg() != AArch64::X16)
1045 return 0;
1046
1047 auto I = End;
1048 if (I == Begin)
1049 return 0;
1050
1051 --I;
1052 Address -= 4;
1053 if (I == Begin || I->getOpcode() != AArch64::ADDXri ||
1054 MCPlus::getNumPrimeOperands(*I) < 3 || !I->getOperand(0).isReg() ||
1055 !I->getOperand(1).isReg() ||
1056 I->getOperand(0).getReg() != AArch64::X16 ||
1057 I->getOperand(1).getReg() != AArch64::X16 || !I->getOperand(2).isImm())
1058 return 0;
1059 TargetLowBits = &*I;
1060 uint64_t Addr = I->getOperand(2).getImm() & 0xFFF;
1061
1062 --I;
1063 Address -= 4;
1064 if (I->getOpcode() != AArch64::ADRP ||
1065 MCPlus::getNumPrimeOperands(*I) < 2 || !I->getOperand(0).isReg() ||
1066 !I->getOperand(1).isImm() || I->getOperand(0).getReg() != AArch64::X16)
1067 return 0;
1068 TargetHiBits = &*I;
1069 Addr |= (Address + ((int64_t)I->getOperand(1).getImm() << 12)) &
1070 0xFFFFFFFFFFFFF000ULL;
1071 Target = Addr;
1072 return 3;
1073 }
1074
1075 bool replaceImmWithSymbolRef(MCInst &Inst, const MCSymbol *Symbol,
1076 int64_t Addend, MCContext *Ctx, int64_t &Value,
1077 uint64_t RelType) const override {
1078 unsigned ImmOpNo = -1U;
1079 for (unsigned Index = 0; Index < MCPlus::getNumPrimeOperands(Inst);
1080 ++Index) {
1081 if (Inst.getOperand(Index).isImm()) {
1082 ImmOpNo = Index;
1083 break;
1084 }
1085 }
1086 if (ImmOpNo == -1U)
1087 return false;
1088
1089 Value = Inst.getOperand(ImmOpNo).getImm();
1090
1091 setOperandToSymbolRef(Inst, ImmOpNo, Symbol, Addend, Ctx, RelType);
1092
1093 return true;
1094 }
1095
1096 bool createUncondBranch(MCInst &Inst, const MCSymbol *TBB,
1097 MCContext *Ctx) const override {
1098 Inst.setOpcode(AArch64::B);
1099 Inst.clear();
1100 Inst.addOperand(MCOperand::createExpr(getTargetExprFor(
1101 Inst, MCSymbolRefExpr::create(TBB, MCSymbolRefExpr::VK_None, *Ctx),
1102 *Ctx, 0)));
1103 return true;
1104 }
1105
1106 bool isMoveMem2Reg(const MCInst &Inst) const override { return false; }
1107
1108 bool isLeave(const MCInst &Inst) const override { return false; }
1109
1110 bool isPop(const MCInst &Inst) const override { return false; }
1111
1112 bool isPrefix(const MCInst &Inst) const override { return false; }
1113
1114 bool createReturn(MCInst &Inst) const override {
1115 Inst.setOpcode(AArch64::RET);
1116 Inst.clear();
1117 Inst.addOperand(MCOperand::createReg(AArch64::LR));
1118 return true;
1119 }
1120
1121 InstructionListType materializeAddress(const MCSymbol *Target, MCContext *Ctx,
1122 MCPhysReg RegName,
1123 int64_t Addend = 0) const override {
1124 // Get page-aligned address and add page offset
1125 InstructionListType Insts(2);
1126 Insts[0].setOpcode(AArch64::ADRP);
1127 Insts[0].clear();
1128 Insts[0].addOperand(MCOperand::createReg(RegName));
1129 Insts[0].addOperand(MCOperand::createImm(0));
1130 setOperandToSymbolRef(Insts[0], /* OpNum */ 1, Target, Addend, Ctx,
1131 ELF::R_AARCH64_NONE);
1132 Insts[1].setOpcode(AArch64::ADDXri);
1133 Insts[1].clear();
1134 Insts[1].addOperand(MCOperand::createReg(RegName));
1135 Insts[1].addOperand(MCOperand::createReg(RegName));
1136 Insts[1].addOperand(MCOperand::createImm(0));
1137 Insts[1].addOperand(MCOperand::createImm(0));
1138 setOperandToSymbolRef(Insts[1], /* OpNum */ 2, Target, Addend, Ctx,
1139 ELF::R_AARCH64_ADD_ABS_LO12_NC);
1140 return Insts;
1141 }
1142};
1143
1144} // end anonymous namespace
1145
1146namespace llvm {
1147namespace bolt {
1148
1149MCPlusBuilder *createAArch64MCPlusBuilder(const MCInstrAnalysis *Analysis,
1150 const MCInstrInfo *Info,
1151 const MCRegisterInfo *RegInfo) {
1152 return new AArch64MCPlusBuilder(Analysis, Info, RegInfo);
1153}
1154
1155} // namespace bolt
1156} // namespace llvm