//===- ARMTargetStreamer.cpp - ARMTargetStreamer class --*- C++ -*---------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the ARMTargetStreamer class.
//
//===----------------------------------------------------------------------===//

#include "MCTargetDesc/ARMMCTargetDesc.h"
#include "llvm/MC/ConstantPools.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/MC/MCSubtargetInfo.h"
#include "llvm/Support/ARMBuildAttributes.h"
#include "llvm/Support/TargetParser.h"

using namespace llvm;

//
// ARMTargetStreamer Implementation
//

ARMTargetStreamer::ARMTargetStreamer(MCStreamer &S)
    : MCTargetStreamer(S), ConstantPools(new AssemblerConstantPools()) {}

ARMTargetStreamer::~ARMTargetStreamer() = default;

// The constant pool handling is shared by all ARMTargetStreamer
// implementations.
const MCExpr *ARMTargetStreamer::addConstantPoolEntry(const MCExpr *Expr,
                                                      SMLoc Loc) {
  return ConstantPools->addEntry(Streamer, Expr, 4, Loc);
}

void ARMTargetStreamer::emitCurrentConstantPool() {
  ConstantPools->emitForCurrentSection(Streamer);
  ConstantPools->clearCacheForCurrentSection(Streamer);
}

// finish() - write out any non-empty assembler constant pools.
void ARMTargetStreamer::finish() { ConstantPools->emitAll(Streamer); }
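
// Illustrative sketch (not part of the original file): the typical client of
// the constant-pool interface above is the assembly parser's ldr-pseudo. For
// input along the lines of
//
//     ldr r0, =0x12345678   @ parser stashes the expression via
//                           @ addConstantPoolEntry() and loads from the
//                           @ label it returns
//     .ltorg                @ directive flushes the pool for the current
//                           @ section via emitCurrentConstantPool()
//
// any pools still pending at the end of the stream are written out by
// finish() through emitAll().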

// reset() - Reset any state
void ARMTargetStreamer::reset() {}

void ARMTargetStreamer::emitInst(uint32_t Inst, char Suffix) {
  unsigned Size;
  char Buffer[4];
  const bool LittleEndian =
      getStreamer().getContext().getAsmInfo()->isLittleEndian();

  switch (Suffix) {
  case '\0':
    Size = 4;

    for (unsigned II = 0, IE = Size; II != IE; II++) {
      const unsigned I = LittleEndian ? (Size - II - 1) : II;
      Buffer[Size - II - 1] = uint8_t(Inst >> I * CHAR_BIT);
    }

    break;
  case 'n':
  case 'w':
    Size = (Suffix == 'n' ? 2 : 4);

    // Thumb wide instructions are emitted as a pair of 16-bit words of the
    // appropriate endianness.
    for (unsigned II = 0, IE = Size; II != IE; II = II + 2) {
      const unsigned I0 = LittleEndian ? II + 0 : II + 1;
      const unsigned I1 = LittleEndian ? II + 1 : II + 0;
      Buffer[Size - II - 2] = uint8_t(Inst >> I0 * CHAR_BIT);
      Buffer[Size - II - 1] = uint8_t(Inst >> I1 * CHAR_BIT);
    }

    break;
  default:
    llvm_unreachable("Invalid Suffix");
  }
  getStreamer().EmitBytes(StringRef(Buffer, Size));
}
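
// Worked example (illustrative, little-endian Thumb): emitInst(0xF3BF8F4F, 'w')
// (the encoding of "dsb sy") sets Size = 4 and packs Buffer as
// { 0xBF, 0xF3, 0x4F, 0x8F }: the high half-word 0xF3BF goes out first and the
// low half-word 0x8F4F second, each in little-endian byte order, which is how
// a 32-bit Thumb2 encoding is laid out in memory. With Suffix == '\0' (ARM
// mode) the same value would be emitted as a single little-endian word,
// { 0x4F, 0x8F, 0xBF, 0xF3 }.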

// The remaining callbacks should be handled separately by each
// streamer.
void ARMTargetStreamer::emitFnStart() {}
void ARMTargetStreamer::emitFnEnd() {}
void ARMTargetStreamer::emitCantUnwind() {}
void ARMTargetStreamer::emitPersonality(const MCSymbol *Personality) {}
void ARMTargetStreamer::emitPersonalityIndex(unsigned Index) {}
void ARMTargetStreamer::emitHandlerData() {}
void ARMTargetStreamer::emitSetFP(unsigned FpReg, unsigned SpReg,
                                  int64_t Offset) {}
void ARMTargetStreamer::emitMovSP(unsigned Reg, int64_t Offset) {}
void ARMTargetStreamer::emitPad(int64_t Offset) {}
void ARMTargetStreamer::emitRegSave(const SmallVectorImpl<unsigned> &RegList,
                                    bool isVector) {}
void ARMTargetStreamer::emitUnwindRaw(int64_t StackOffset,
                                      const SmallVectorImpl<uint8_t> &Opcodes) {
}
void ARMTargetStreamer::switchVendor(StringRef Vendor) {}
void ARMTargetStreamer::emitAttribute(unsigned Attribute, unsigned Value) {}
void ARMTargetStreamer::emitTextAttribute(unsigned Attribute,
                                          StringRef String) {}
void ARMTargetStreamer::emitIntTextAttribute(unsigned Attribute,
                                             unsigned IntValue,
                                             StringRef StringValue) {}
void ARMTargetStreamer::emitArch(ARM::ArchKind Arch) {}
void ARMTargetStreamer::emitArchExtension(unsigned ArchExt) {}
void ARMTargetStreamer::emitObjectArch(ARM::ArchKind Arch) {}
void ARMTargetStreamer::emitFPU(unsigned FPU) {}
void ARMTargetStreamer::finishAttributeSection() {}
void
ARMTargetStreamer::AnnotateTLSDescriptorSequence(const MCSymbolRefExpr *SRE) {}
void ARMTargetStreamer::emitThumbSet(MCSymbol *Symbol, const MCExpr *Value) {}

static ARMBuildAttrs::CPUArch getArchForCPU(const MCSubtargetInfo &STI) {
  if (STI.getCPU() == "xscale")
    return ARMBuildAttrs::v5TEJ;

  if (STI.hasFeature(ARM::HasV8Ops)) {
    if (STI.hasFeature(ARM::FeatureRClass))
      return ARMBuildAttrs::v8_R;
    return ARMBuildAttrs::v8_A;
  } else if (STI.hasFeature(ARM::HasV8MMainlineOps))
    return ARMBuildAttrs::v8_M_Main;
  else if (STI.hasFeature(ARM::HasV7Ops)) {
    if (STI.hasFeature(ARM::FeatureMClass) && STI.hasFeature(ARM::FeatureDSP))
      return ARMBuildAttrs::v7E_M;
    return ARMBuildAttrs::v7;
  } else if (STI.hasFeature(ARM::HasV6T2Ops))
    return ARMBuildAttrs::v6T2;
  else if (STI.hasFeature(ARM::HasV8MBaselineOps))
    return ARMBuildAttrs::v8_M_Base;
  else if (STI.hasFeature(ARM::HasV6MOps))
    return ARMBuildAttrs::v6S_M;
  else if (STI.hasFeature(ARM::HasV6Ops))
    return ARMBuildAttrs::v6;
  else if (STI.hasFeature(ARM::HasV5TEOps))
    return ARMBuildAttrs::v5TE;
  else if (STI.hasFeature(ARM::HasV5TOps))
    return ARMBuildAttrs::v5T;
  else if (STI.hasFeature(ARM::HasV4TOps))
    return ARMBuildAttrs::v4T;
  else
    return ARMBuildAttrs::v4;
}

static bool isV8M(const MCSubtargetInfo &STI) {
  // Note that v8M Baseline is a subset of v6T2!
  return (STI.hasFeature(ARM::HasV8MBaselineOps) &&
          !STI.hasFeature(ARM::HasV6T2Ops)) ||
         STI.hasFeature(ARM::HasV8MMainlineOps);
}

/// Emit the build attributes that only depend on the hardware that we expect
/// to be available, and not on the ABI, or any source-language choices.
void ARMTargetStreamer::emitTargetAttributes(const MCSubtargetInfo &STI) {
  switchVendor("aeabi");

  const StringRef CPUString = STI.getCPU();
  if (!CPUString.empty() && !CPUString.startswith("generic")) {
    // FIXME: remove krait check when GNU tools support krait cpu
    if (STI.hasFeature(ARM::ProcKrait)) {
      emitTextAttribute(ARMBuildAttrs::CPU_name, "cortex-a9");
      // We consider krait as a "cortex-a9" + hwdiv CPU
      // Enable hwdiv through ".arch_extension idiv"
      if (STI.hasFeature(ARM::FeatureHWDivThumb) ||
          STI.hasFeature(ARM::FeatureHWDivARM))
        emitArchExtension(ARM::AEK_HWDIVTHUMB | ARM::AEK_HWDIVARM);
    } else {
      emitTextAttribute(ARMBuildAttrs::CPU_name, CPUString);
    }
  }

  emitAttribute(ARMBuildAttrs::CPU_arch, getArchForCPU(STI));

  if (STI.hasFeature(ARM::FeatureAClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::ApplicationProfile);
  } else if (STI.hasFeature(ARM::FeatureRClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::RealTimeProfile);
  } else if (STI.hasFeature(ARM::FeatureMClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::MicroControllerProfile);
  }

  emitAttribute(ARMBuildAttrs::ARM_ISA_use, STI.hasFeature(ARM::FeatureNoARM)
                                                ? ARMBuildAttrs::Not_Allowed
                                                : ARMBuildAttrs::Allowed);

  if (isV8M(STI)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use,
                  ARMBuildAttrs::AllowThumbDerived);
  } else if (STI.hasFeature(ARM::FeatureThumb2)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use,
                  ARMBuildAttrs::AllowThumb32);
  } else if (STI.hasFeature(ARM::HasV4TOps)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use, ARMBuildAttrs::Allowed);
  }

  if (STI.hasFeature(ARM::FeatureNEON)) {
    /* NEON is not exactly a VFP architecture, but GAS emits one of
     * neon/neon-fp-armv8/neon-vfpv4/vfpv3/vfpv2 for .fpu parameters */
    if (STI.hasFeature(ARM::FeatureFPARMv8)) {
      if (STI.hasFeature(ARM::FeatureCrypto))
        emitFPU(ARM::FK_CRYPTO_NEON_FP_ARMV8);
      else
        emitFPU(ARM::FK_NEON_FP_ARMV8);
    } else if (STI.hasFeature(ARM::FeatureVFP4))
      emitFPU(ARM::FK_NEON_VFPV4);
    else
      emitFPU(STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_NEON_FP16
                                               : ARM::FK_NEON);
    // Emit Tag_Advanced_SIMD_arch for ARMv8 architecture
    if (STI.hasFeature(ARM::HasV8Ops))
      emitAttribute(ARMBuildAttrs::Advanced_SIMD_arch,
                    STI.hasFeature(ARM::HasV8_1aOps)
                        ? ARMBuildAttrs::AllowNeonARMv8_1a
                        : ARMBuildAttrs::AllowNeonARMv8);
  } else {
    if (STI.hasFeature(ARM::FeatureFPARMv8))
      // FPv5 and FP-ARMv8 have the same instructions, so are modeled as one
      // FPU, but there are two different names for it depending on the CPU.
      emitFPU(STI.hasFeature(ARM::FeatureD16)
                  ? (STI.hasFeature(ARM::FeatureVFPOnlySP) ? ARM::FK_FPV5_SP_D16
                                                           : ARM::FK_FPV5_D16)
                  : ARM::FK_FP_ARMV8);
    else if (STI.hasFeature(ARM::FeatureVFP4))
      emitFPU(STI.hasFeature(ARM::FeatureD16)
                  ? (STI.hasFeature(ARM::FeatureVFPOnlySP) ? ARM::FK_FPV4_SP_D16
                                                           : ARM::FK_VFPV4_D16)
                  : ARM::FK_VFPV4);
    else if (STI.hasFeature(ARM::FeatureVFP3))
      emitFPU(
          STI.hasFeature(ARM::FeatureD16)
              // +d16
              ? (STI.hasFeature(ARM::FeatureVFPOnlySP)
                     ? (STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_VFPV3XD_FP16
                                                         : ARM::FK_VFPV3XD)
                     : (STI.hasFeature(ARM::FeatureFP16)
                            ? ARM::FK_VFPV3_D16_FP16
                            : ARM::FK_VFPV3_D16))
              // -d16
              : (STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_VFPV3_FP16
                                                  : ARM::FK_VFPV3));
    else if (STI.hasFeature(ARM::FeatureVFP2))
      emitFPU(ARM::FK_VFPV2);
  }

  // ABI_HardFP_use attribute to indicate single precision FP.
  if (STI.hasFeature(ARM::FeatureVFPOnlySP))
    emitAttribute(ARMBuildAttrs::ABI_HardFP_use,
                  ARMBuildAttrs::HardFPSinglePrecision);

  if (STI.hasFeature(ARM::FeatureFP16))
    emitAttribute(ARMBuildAttrs::FP_HP_extension, ARMBuildAttrs::AllowHPFP);

  if (STI.hasFeature(ARM::FeatureMP))
    emitAttribute(ARMBuildAttrs::MPextension_use, ARMBuildAttrs::AllowMP);

  // Hardware divide in ARM mode is part of base arch, starting from ARMv8.
  // If only Thumb hwdiv is present, it must also be in base arch (ARMv7-R/M).
  // It is not possible to produce DisallowDIV: if hwdiv is present in the base
  // arch, supplying -hwdiv downgrades the effective arch, via ClearImpliedBits.
  // AllowDIVExt is only emitted if hwdiv isn't available in the base arch;
  // otherwise, the default value (AllowDIVIfExists) applies.
  if (STI.hasFeature(ARM::FeatureHWDivARM) && !STI.hasFeature(ARM::HasV8Ops))
    emitAttribute(ARMBuildAttrs::DIV_use, ARMBuildAttrs::AllowDIVExt);
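
  // Illustrative note (not from the original file): for a pre-v8 core with
  // ARM-mode hardware divide (e.g. a Cortex-A15-class target), the assembly
  // streamer would typically render the call above as
  //
  //     .eabi_attribute 44, 2          @ Tag_DIV_use = AllowDIVExt
  //
  // whereas a v8 core gets no Tag_DIV_use at all, since hwdiv is already part
  // of its base architecture.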

  if (STI.hasFeature(ARM::FeatureDSP) && isV8M(STI))
    emitAttribute(ARMBuildAttrs::DSP_extension, ARMBuildAttrs::Allowed);

  if (STI.hasFeature(ARM::FeatureStrictAlign))
    emitAttribute(ARMBuildAttrs::CPU_unaligned_access,
                  ARMBuildAttrs::Not_Allowed);
  else
    emitAttribute(ARMBuildAttrs::CPU_unaligned_access,
                  ARMBuildAttrs::Allowed);

  if (STI.hasFeature(ARM::FeatureTrustZone) &&
      STI.hasFeature(ARM::FeatureVirtualization))
    emitAttribute(ARMBuildAttrs::Virtualization_use,
                  ARMBuildAttrs::AllowTZVirtualization);
  else if (STI.hasFeature(ARM::FeatureTrustZone))
    emitAttribute(ARMBuildAttrs::Virtualization_use, ARMBuildAttrs::AllowTZ);
  else if (STI.hasFeature(ARM::FeatureVirtualization))
    emitAttribute(ARMBuildAttrs::Virtualization_use,
                  ARMBuildAttrs::AllowVirtualization);
}
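
// Illustrative sketch (not part of the original file): when the streamer is an
// assembly printer such as ARMTargetAsmStreamer, the calls made by
// emitTargetAttributes() typically surface as directives along these lines for
// an ARMv7-A CPU with NEON:
//
//     .cpu            cortex-a9
//     .eabi_attribute 6, 10          @ Tag_CPU_arch = ARM v7
//     .eabi_attribute 7, 65          @ Tag_CPU_arch_profile = 'A' (Application)
//     .eabi_attribute 8, 1           @ Tag_ARM_ISA_use = Allowed
//     .eabi_attribute 9, 2           @ Tag_THUMB_ISA_use = Thumb-2
//     .fpu            neon           @ exact name depends on the FPU features
//
// Object-file streamers encode the same tags directly into the .ARM.attributes
// section instead of printing directives.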