//===- ARMTargetStreamer.cpp - ARMTargetStreamer class --*- C++ -*---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the ARMTargetStreamer class.
//
//===----------------------------------------------------------------------===//

#include "MCTargetDesc/ARMMCTargetDesc.h"
#include "llvm/MC/ConstantPools.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/MC/MCSubtargetInfo.h"
#include "llvm/Support/ARMBuildAttributes.h"
#include "llvm/Support/TargetParser.h"
#include <climits> // for CHAR_BIT, used in emitInst()

using namespace llvm;

//
// ARMTargetStreamer Implementation
//

ARMTargetStreamer::ARMTargetStreamer(MCStreamer &S)
    : MCTargetStreamer(S), ConstantPools(new AssemblerConstantPools()) {}

ARMTargetStreamer::~ARMTargetStreamer() = default;

// The constant pool handling is shared by all ARMTargetStreamer
// implementations.
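// For example (illustrative), the ldr pseudo-instruction places its literal
// in a pool that is later flushed, either at an explicit .ltorg or when the
// section ends:
//   ldr r0, =0x12345678   @ becomes a PC-relative load from the pool
//   .ltorg                @ flushes the pool via emitCurrentConstantPool()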
const MCExpr *ARMTargetStreamer::addConstantPoolEntry(const MCExpr *Expr,
                                                      SMLoc Loc) {
  return ConstantPools->addEntry(Streamer, Expr, 4, Loc);
}

void ARMTargetStreamer::emitCurrentConstantPool() {
  ConstantPools->emitForCurrentSection(Streamer);
  ConstantPools->clearCacheForCurrentSection(Streamer);
}

// emitConstantPools() - write out any non-empty assembler constant pools.
void ARMTargetStreamer::emitConstantPools() {
  ConstantPools->emitAll(Streamer);
}

// reset() - Reset any state.
void ARMTargetStreamer::reset() {}

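// Emit a raw instruction encoding. This backs, e.g., the .inst family of
// assembler directives: .inst (ARM, Suffix '\0'), .inst.n (16-bit Thumb,
// 'n') and .inst.w (32-bit Thumb, 'w').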
void ARMTargetStreamer::emitInst(uint32_t Inst, char Suffix) {
  unsigned Size;
  char Buffer[4];
  const bool LittleEndian =
      getStreamer().getContext().getAsmInfo()->isLittleEndian();

  switch (Suffix) {
  case '\0':
    Size = 4;

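    // ARM-mode instructions are emitted as a single 32-bit word in target
    // endianness; e.g. (illustrative) 0xE1A00000 ('mov r0, r0') becomes the
    // byte sequence 00 00 A0 E1 on a little-endian target and E1 A0 00 00 on
    // a big-endian one.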
    for (unsigned II = 0, IE = Size; II != IE; II++) {
      const unsigned I = LittleEndian ? (Size - II - 1) : II;
      Buffer[Size - II - 1] = uint8_t(Inst >> I * CHAR_BIT);
    }

    break;
  case 'n':
  case 'w':
    Size = (Suffix == 'n' ? 2 : 4);

    // Thumb wide instructions are emitted as a pair of 16-bit words of the
    // appropriate endianness.
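    // For example (illustrative), the 32-bit Thumb encoding 0xF000B800
    // ('b.w') is emitted as 00 F0 00 B8 on a little-endian target: first
    // halfword first, each halfword in target byte order.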
    for (unsigned II = 0, IE = Size; II != IE; II = II + 2) {
      const unsigned I0 = LittleEndian ? II + 0 : II + 1;
      const unsigned I1 = LittleEndian ? II + 1 : II + 0;
      Buffer[Size - II - 2] = uint8_t(Inst >> I0 * CHAR_BIT);
      Buffer[Size - II - 1] = uint8_t(Inst >> I1 * CHAR_BIT);
    }

    break;
  default:
    llvm_unreachable("Invalid Suffix");
  }
  getStreamer().emitBytes(StringRef(Buffer, Size));
}

// The remaining callbacks should be handled separately by each streamer.
void ARMTargetStreamer::emitFnStart() {}
void ARMTargetStreamer::emitFnEnd() {}
void ARMTargetStreamer::emitCantUnwind() {}
void ARMTargetStreamer::emitPersonality(const MCSymbol *Personality) {}
void ARMTargetStreamer::emitPersonalityIndex(unsigned Index) {}
void ARMTargetStreamer::emitHandlerData() {}
void ARMTargetStreamer::emitSetFP(unsigned FpReg, unsigned SpReg,
                                  int64_t Offset) {}
void ARMTargetStreamer::emitMovSP(unsigned Reg, int64_t Offset) {}
void ARMTargetStreamer::emitPad(int64_t Offset) {}
void ARMTargetStreamer::emitRegSave(const SmallVectorImpl<unsigned> &RegList,
                                    bool isVector) {}
void ARMTargetStreamer::emitUnwindRaw(int64_t StackOffset,
                                      const SmallVectorImpl<uint8_t> &Opcodes) {
}
void ARMTargetStreamer::switchVendor(StringRef Vendor) {}
void ARMTargetStreamer::emitAttribute(unsigned Attribute, unsigned Value) {}
void ARMTargetStreamer::emitTextAttribute(unsigned Attribute,
                                          StringRef String) {}
void ARMTargetStreamer::emitIntTextAttribute(unsigned Attribute,
                                             unsigned IntValue,
                                             StringRef StringValue) {}
void ARMTargetStreamer::emitArch(ARM::ArchKind Arch) {}
void ARMTargetStreamer::emitArchExtension(uint64_t ArchExt) {}
void ARMTargetStreamer::emitObjectArch(ARM::ArchKind Arch) {}
void ARMTargetStreamer::emitFPU(unsigned FPU) {}
void ARMTargetStreamer::finishAttributeSection() {}
void
ARMTargetStreamer::AnnotateTLSDescriptorSequence(const MCSymbolRefExpr *SRE) {}
void ARMTargetStreamer::emitThumbSet(MCSymbol *Symbol, const MCExpr *Value) {}

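// Map the subtarget's feature bits to the closest AEABI Tag_CPU_arch value.
// Note the ordering: HasV6T2Ops is checked before HasV8MBaselineOps because
// v8-M Baseline is an instruction-set subset of v6T2 (see the note in isV8M
// below), so v6T2 targets can also carry the v8-M Baseline feature bits.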
static ARMBuildAttrs::CPUArch getArchForCPU(const MCSubtargetInfo &STI) {
  if (STI.getCPU() == "xscale")
    return ARMBuildAttrs::v5TEJ;

  if (STI.hasFeature(ARM::HasV8Ops)) {
    if (STI.hasFeature(ARM::FeatureRClass))
      return ARMBuildAttrs::v8_R;
    return ARMBuildAttrs::v8_A;
  } else if (STI.hasFeature(ARM::HasV8_1MMainlineOps))
    return ARMBuildAttrs::v8_1_M_Main;
  else if (STI.hasFeature(ARM::HasV8MMainlineOps))
    return ARMBuildAttrs::v8_M_Main;
  else if (STI.hasFeature(ARM::HasV7Ops)) {
    if (STI.hasFeature(ARM::FeatureMClass) && STI.hasFeature(ARM::FeatureDSP))
      return ARMBuildAttrs::v7E_M;
    return ARMBuildAttrs::v7;
  } else if (STI.hasFeature(ARM::HasV6T2Ops))
    return ARMBuildAttrs::v6T2;
  else if (STI.hasFeature(ARM::HasV8MBaselineOps))
    return ARMBuildAttrs::v8_M_Base;
  else if (STI.hasFeature(ARM::HasV6MOps))
    return ARMBuildAttrs::v6S_M;
  else if (STI.hasFeature(ARM::HasV6Ops))
    return ARMBuildAttrs::v6;
  else if (STI.hasFeature(ARM::HasV5TEOps))
    return ARMBuildAttrs::v5TE;
  else if (STI.hasFeature(ARM::HasV5TOps))
    return ARMBuildAttrs::v5T;
  else if (STI.hasFeature(ARM::HasV4TOps))
    return ARMBuildAttrs::v4T;
  else
    return ARMBuildAttrs::v4;
}

static bool isV8M(const MCSubtargetInfo &STI) {
  // Note that v8M Baseline is a subset of v6T2!
  return (STI.hasFeature(ARM::HasV8MBaselineOps) &&
          !STI.hasFeature(ARM::HasV6T2Ops)) ||
         STI.hasFeature(ARM::HasV8MMainlineOps);
}

/// Emit the build attributes that only depend on the hardware that we expect
/// to be available, and not on the ABI or any source-language choices.
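//
// For example (illustrative), for a little-endian Cortex-A8 subtarget the
// asm streamer would print roughly:
//   .cpu    cortex-a8
//   .eabi_attribute 6, 10   @ Tag_CPU_arch = v7
//   .eabi_attribute 7, 65   @ Tag_CPU_arch_profile = 'A'
//   .eabi_attribute 8, 1    @ Tag_ARM_ISA_use = Allowed
//   .eabi_attribute 9, 2    @ Tag_THUMB_ISA_use = AllowThumb32
//   .fpu    neon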
void ARMTargetStreamer::emitTargetAttributes(const MCSubtargetInfo &STI) {
  switchVendor("aeabi");

  const StringRef CPUString = STI.getCPU();
  if (!CPUString.empty() && !CPUString.startswith("generic")) {
    // FIXME: remove krait check when GNU tools support krait cpu
    if (STI.hasFeature(ARM::ProcKrait)) {
      emitTextAttribute(ARMBuildAttrs::CPU_name, "cortex-a9");
      // We consider krait as a "cortex-a9" + hwdiv CPU
      // Enable hwdiv through ".arch_extension idiv"
      if (STI.hasFeature(ARM::FeatureHWDivThumb) ||
          STI.hasFeature(ARM::FeatureHWDivARM))
        emitArchExtension(ARM::AEK_HWDIVTHUMB | ARM::AEK_HWDIVARM);
    } else {
      emitTextAttribute(ARMBuildAttrs::CPU_name, CPUString);
    }
  }

  emitAttribute(ARMBuildAttrs::CPU_arch, getArchForCPU(STI));

  if (STI.hasFeature(ARM::FeatureAClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::ApplicationProfile);
  } else if (STI.hasFeature(ARM::FeatureRClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::RealTimeProfile);
  } else if (STI.hasFeature(ARM::FeatureMClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::MicroControllerProfile);
  }

  emitAttribute(ARMBuildAttrs::ARM_ISA_use, STI.hasFeature(ARM::FeatureNoARM)
                                                ? ARMBuildAttrs::Not_Allowed
                                                : ARMBuildAttrs::Allowed);

  if (isV8M(STI)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use,
                  ARMBuildAttrs::AllowThumbDerived);
  } else if (STI.hasFeature(ARM::FeatureThumb2)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use, ARMBuildAttrs::AllowThumb32);
  } else if (STI.hasFeature(ARM::HasV4TOps)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use, ARMBuildAttrs::Allowed);
  }

  if (STI.hasFeature(ARM::FeatureNEON)) {
    // NEON is not exactly a VFP architecture, but GAS emits one of
    // neon/neon-fp-armv8/neon-vfpv4/vfpv3/vfpv2 for the .fpu parameter.
    if (STI.hasFeature(ARM::FeatureFPARMv8)) {
      if (STI.hasFeature(ARM::FeatureCrypto))
        emitFPU(ARM::FK_CRYPTO_NEON_FP_ARMV8);
      else
        emitFPU(ARM::FK_NEON_FP_ARMV8);
    } else if (STI.hasFeature(ARM::FeatureVFP4))
      emitFPU(ARM::FK_NEON_VFPV4);
    else
      emitFPU(STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_NEON_FP16
                                               : ARM::FK_NEON);
    // Emit Tag_Advanced_SIMD_arch for ARMv8 architecture
    if (STI.hasFeature(ARM::HasV8Ops))
      emitAttribute(ARMBuildAttrs::Advanced_SIMD_arch,
                    STI.hasFeature(ARM::HasV8_1aOps)
                        ? ARMBuildAttrs::AllowNeonARMv8_1a
                        : ARMBuildAttrs::AllowNeonARMv8);
  } else {
    if (STI.hasFeature(ARM::FeatureFPARMv8_D16_SP))
      // FPv5 and FP-ARMv8 have the same instructions, so are modeled as one
      // FPU, but there are two different names for it depending on the CPU.
      emitFPU(STI.hasFeature(ARM::FeatureD32)
                  ? ARM::FK_FP_ARMV8
                  : (STI.hasFeature(ARM::FeatureFP64) ? ARM::FK_FPV5_D16
                                                      : ARM::FK_FPV5_SP_D16));
    else if (STI.hasFeature(ARM::FeatureVFP4_D16_SP))
      emitFPU(STI.hasFeature(ARM::FeatureD32)
                  ? ARM::FK_VFPV4
                  : (STI.hasFeature(ARM::FeatureFP64) ? ARM::FK_VFPV4_D16
                                                      : ARM::FK_FPV4_SP_D16));
    else if (STI.hasFeature(ARM::FeatureVFP3_D16_SP))
      emitFPU(
          STI.hasFeature(ARM::FeatureD32)
              // +d32
              ? (STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_VFPV3_FP16
                                                  : ARM::FK_VFPV3)
              // -d32
              : (STI.hasFeature(ARM::FeatureFP64)
                     ? (STI.hasFeature(ARM::FeatureFP16)
                            ? ARM::FK_VFPV3_D16_FP16
                            : ARM::FK_VFPV3_D16)
                     : (STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_VFPV3XD_FP16
                                                         : ARM::FK_VFPV3XD)));
    else if (STI.hasFeature(ARM::FeatureVFP2_SP))
      emitFPU(ARM::FK_VFPV2);
  }
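  // To illustrate the selection above: a VFPv3-D16 subtarget (with
  // FeatureVFP3_D16_SP and FeatureFP64 set but FeatureD32 and FeatureFP16
  // clear) selects FK_VFPV3_D16, i.e. '.fpu vfpv3-d16' from the asm streamer.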

  // ABI_HardFP_use attribute to indicate single precision FP.
  if (STI.hasFeature(ARM::FeatureVFP2_SP) && !STI.hasFeature(ARM::FeatureFP64))
    emitAttribute(ARMBuildAttrs::ABI_HardFP_use,
                  ARMBuildAttrs::HardFPSinglePrecision);

  if (STI.hasFeature(ARM::FeatureFP16))
    emitAttribute(ARMBuildAttrs::FP_HP_extension, ARMBuildAttrs::AllowHPFP);

  if (STI.hasFeature(ARM::FeatureMP))
    emitAttribute(ARMBuildAttrs::MPextension_use, ARMBuildAttrs::AllowMP);

  if (STI.hasFeature(ARM::HasMVEFloatOps))
    emitAttribute(ARMBuildAttrs::MVE_arch,
                  ARMBuildAttrs::AllowMVEIntegerAndFloat);
  else if (STI.hasFeature(ARM::HasMVEIntegerOps))
    emitAttribute(ARMBuildAttrs::MVE_arch, ARMBuildAttrs::AllowMVEInteger);

  // Hardware divide in ARM mode is part of base arch, starting from ARMv8.
  // If only Thumb hwdiv is present, it must also be in base arch (ARMv7-R/M).
  // It is not possible to produce DisallowDIV: if hwdiv is present in the base
  // arch, supplying -hwdiv downgrades the effective arch, via ClearImpliedBits.
  // AllowDIVExt is only emitted if hwdiv isn't available in the base arch;
  // otherwise, the default value (AllowDIVIfExists) applies.
  if (STI.hasFeature(ARM::FeatureHWDivARM) && !STI.hasFeature(ARM::HasV8Ops))
    emitAttribute(ARMBuildAttrs::DIV_use, ARMBuildAttrs::AllowDIVExt);

  if (STI.hasFeature(ARM::FeatureDSP) && isV8M(STI))
    emitAttribute(ARMBuildAttrs::DSP_extension, ARMBuildAttrs::Allowed);

  if (STI.hasFeature(ARM::FeatureStrictAlign))
    emitAttribute(ARMBuildAttrs::CPU_unaligned_access,
                  ARMBuildAttrs::Not_Allowed);
  else
    emitAttribute(ARMBuildAttrs::CPU_unaligned_access,
                  ARMBuildAttrs::Allowed);

  if (STI.hasFeature(ARM::FeatureTrustZone) &&
      STI.hasFeature(ARM::FeatureVirtualization))
    emitAttribute(ARMBuildAttrs::Virtualization_use,
                  ARMBuildAttrs::AllowTZVirtualization);
  else if (STI.hasFeature(ARM::FeatureTrustZone))
    emitAttribute(ARMBuildAttrs::Virtualization_use, ARMBuildAttrs::AllowTZ);
  else if (STI.hasFeature(ARM::FeatureVirtualization))
    emitAttribute(ARMBuildAttrs::Virtualization_use,
                  ARMBuildAttrs::AllowVirtualization);

  if (STI.hasFeature(ARM::FeaturePACBTI)) {
    emitAttribute(ARMBuildAttrs::PAC_extension, ARMBuildAttrs::AllowPAC);
    emitAttribute(ARMBuildAttrs::BTI_extension, ARMBuildAttrs::AllowBTI);
  }
}