//===- ARMTargetStreamer.cpp - ARMTargetStreamer class --*- C++ -*---------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the ARMTargetStreamer class.
//
//===----------------------------------------------------------------------===//

#include "ARMTargetMachine.h"
#include "llvm/MC/ConstantPools.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/MC/MCSubtargetInfo.h"
#include "llvm/Support/ARMBuildAttributes.h"
#include "llvm/Support/TargetParser.h"

using namespace llvm;

//
// ARMTargetStreamer Implementation
//

ARMTargetStreamer::ARMTargetStreamer(MCStreamer &S)
    : MCTargetStreamer(S), ConstantPools(new AssemblerConstantPools()) {}

ARMTargetStreamer::~ARMTargetStreamer() = default;

// The constant pool handling is shared by all ARMTargetStreamer
// implementations.
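// addConstantPoolEntry() records a 4-byte entry in the pool for the current
// section and returns an expression referring to it, suitable as the operand
// of a PC-relative load (e.g. when expanding the ldr-pseudo).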
const MCExpr *ARMTargetStreamer::addConstantPoolEntry(const MCExpr *Expr,
                                                      SMLoc Loc) {
  return ConstantPools->addEntry(Streamer, Expr, 4, Loc);
}

void ARMTargetStreamer::emitCurrentConstantPool() {
  ConstantPools->emitForCurrentSection(Streamer);
  ConstantPools->clearCacheForCurrentSection(Streamer);
}

// finish() - write out any non-empty assembler constant pools.
void ARMTargetStreamer::finish() { ConstantPools->emitAll(Streamer); }

// reset() - Reset any state
void ARMTargetStreamer::reset() {}

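// emitInst() emits a raw instruction encoding. Suffix '\0' denotes a 32-bit
// ARM instruction, 'n' a 16-bit Thumb instruction and 'w' a 32-bit Thumb
// instruction; this is what the .inst/.inst.n/.inst.w directives lower to.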
void ARMTargetStreamer::emitInst(uint32_t Inst, char Suffix) {
  unsigned Size;
  char Buffer[4];
  const bool LittleEndian =
      getStreamer().getContext().getAsmInfo()->isLittleEndian();

  switch (Suffix) {
  case '\0':
    Size = 4;

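    // A 32-bit ARM instruction is emitted as a single word in the target's
    // byte order.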
    for (unsigned II = 0, IE = Size; II != IE; II++) {
      const unsigned I = LittleEndian ? (Size - II - 1) : II;
      Buffer[Size - II - 1] = uint8_t(Inst >> I * CHAR_BIT);
    }

    break;
  case 'n':
  case 'w':
    Size = (Suffix == 'n' ? 2 : 4);

    // Thumb wide instructions are emitted as a pair of 16-bit words of the
    // appropriate endianness.
    for (unsigned II = 0, IE = Size; II != IE; II = II + 2) {
      const unsigned I0 = LittleEndian ? II + 0 : II + 1;
      const unsigned I1 = LittleEndian ? II + 1 : II + 0;
      Buffer[Size - II - 2] = uint8_t(Inst >> I0 * CHAR_BIT);
      Buffer[Size - II - 1] = uint8_t(Inst >> I1 * CHAR_BIT);
    }

    break;
  default:
    llvm_unreachable("Invalid Suffix");
  }
  getStreamer().EmitBytes(StringRef(Buffer, Size));
}

// The remaining callbacks should be handled separately by each
// streamer.
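// The base class provides empty default implementations; the asm and ELF
// target streamers override the ones they need.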
void ARMTargetStreamer::emitFnStart() {}
void ARMTargetStreamer::emitFnEnd() {}
void ARMTargetStreamer::emitCantUnwind() {}
void ARMTargetStreamer::emitPersonality(const MCSymbol *Personality) {}
void ARMTargetStreamer::emitPersonalityIndex(unsigned Index) {}
void ARMTargetStreamer::emitHandlerData() {}
void ARMTargetStreamer::emitSetFP(unsigned FpReg, unsigned SpReg,
                                  int64_t Offset) {}
void ARMTargetStreamer::emitMovSP(unsigned Reg, int64_t Offset) {}
void ARMTargetStreamer::emitPad(int64_t Offset) {}
void ARMTargetStreamer::emitRegSave(const SmallVectorImpl<unsigned> &RegList,
                                    bool isVector) {}
void ARMTargetStreamer::emitUnwindRaw(int64_t StackOffset,
                                      const SmallVectorImpl<uint8_t> &Opcodes) {
}
void ARMTargetStreamer::switchVendor(StringRef Vendor) {}
void ARMTargetStreamer::emitAttribute(unsigned Attribute, unsigned Value) {}
void ARMTargetStreamer::emitTextAttribute(unsigned Attribute,
                                          StringRef String) {}
void ARMTargetStreamer::emitIntTextAttribute(unsigned Attribute,
                                             unsigned IntValue,
                                             StringRef StringValue) {}
void ARMTargetStreamer::emitArch(ARM::ArchKind Arch) {}
void ARMTargetStreamer::emitArchExtension(unsigned ArchExt) {}
void ARMTargetStreamer::emitObjectArch(ARM::ArchKind Arch) {}
void ARMTargetStreamer::emitFPU(unsigned FPU) {}
void ARMTargetStreamer::finishAttributeSection() {}
void
ARMTargetStreamer::AnnotateTLSDescriptorSequence(const MCSymbolRefExpr *SRE) {}
void ARMTargetStreamer::emitThumbSet(MCSymbol *Symbol, const MCExpr *Value) {}

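// Map the subtarget's feature bits onto a Tag_CPU_arch value. The checks run
// roughly from newest to oldest architecture, since newer feature bits imply
// the older ones.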
static ARMBuildAttrs::CPUArch getArchForCPU(const MCSubtargetInfo &STI) {
  if (STI.getCPU() == "xscale")
    return ARMBuildAttrs::v5TEJ;

  if (STI.hasFeature(ARM::HasV8Ops)) {
    if (STI.hasFeature(ARM::FeatureRClass))
      return ARMBuildAttrs::v8_R;
    return ARMBuildAttrs::v8_A;
  } else if (STI.hasFeature(ARM::HasV8MMainlineOps))
    return ARMBuildAttrs::v8_M_Main;
  else if (STI.hasFeature(ARM::HasV7Ops)) {
    if (STI.hasFeature(ARM::FeatureMClass) && STI.hasFeature(ARM::FeatureDSP))
      return ARMBuildAttrs::v7E_M;
    return ARMBuildAttrs::v7;
  } else if (STI.hasFeature(ARM::HasV6T2Ops))
    return ARMBuildAttrs::v6T2;
  else if (STI.hasFeature(ARM::HasV8MBaselineOps))
    return ARMBuildAttrs::v8_M_Base;
  else if (STI.hasFeature(ARM::HasV6MOps))
    return ARMBuildAttrs::v6S_M;
  else if (STI.hasFeature(ARM::HasV6Ops))
    return ARMBuildAttrs::v6;
  else if (STI.hasFeature(ARM::HasV5TEOps))
    return ARMBuildAttrs::v5TE;
  else if (STI.hasFeature(ARM::HasV5TOps))
    return ARMBuildAttrs::v5T;
  else if (STI.hasFeature(ARM::HasV4TOps))
    return ARMBuildAttrs::v4T;
  else
    return ARMBuildAttrs::v4;
}

static bool isV8M(const MCSubtargetInfo &STI) {
  // Note that v8M Baseline is a subset of v6T2!
  return (STI.hasFeature(ARM::HasV8MBaselineOps) &&
          !STI.hasFeature(ARM::HasV6T2Ops)) ||
         STI.hasFeature(ARM::HasV8MMainlineOps);
}

/// Emit the build attributes that only depend on the hardware that we expect
/// to be available, and not on the ABI, or any source-language choices.
void ARMTargetStreamer::emitTargetAttributes(const MCSubtargetInfo &STI) {
  switchVendor("aeabi");

  const StringRef CPUString = STI.getCPU();
  if (!CPUString.empty() && !CPUString.startswith("generic")) {
    // FIXME: remove krait check when GNU tools support krait cpu
    if (STI.hasFeature(ARM::ProcKrait)) {
      emitTextAttribute(ARMBuildAttrs::CPU_name, "cortex-a9");
      // We consider Krait to be a "cortex-a9" + hwdiv CPU, so enable hwdiv
      // through ".arch_extension idiv".
      if (STI.hasFeature(ARM::FeatureHWDivThumb) ||
          STI.hasFeature(ARM::FeatureHWDivARM))
        emitArchExtension(ARM::AEK_HWDIVTHUMB | ARM::AEK_HWDIVARM);
    } else {
      emitTextAttribute(ARMBuildAttrs::CPU_name, CPUString);
    }
  }

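  // Tag_CPU_arch and Tag_CPU_arch_profile describe the architecture of the
  // expected hardware and its profile (application, real-time or
  // microcontroller).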
  emitAttribute(ARMBuildAttrs::CPU_arch, getArchForCPU(STI));

  if (STI.hasFeature(ARM::FeatureAClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::ApplicationProfile);
  } else if (STI.hasFeature(ARM::FeatureRClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::RealTimeProfile);
  } else if (STI.hasFeature(ARM::FeatureMClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::MicroControllerProfile);
  }

  emitAttribute(ARMBuildAttrs::ARM_ISA_use, STI.hasFeature(ARM::FeatureNoARM)
                                                ? ARMBuildAttrs::Not_Allowed
                                                : ARMBuildAttrs::Allowed);

  if (isV8M(STI)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use,
                  ARMBuildAttrs::AllowThumbDerived);
  } else if (STI.hasFeature(ARM::FeatureThumb2)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use,
                  ARMBuildAttrs::AllowThumb32);
  } else if (STI.hasFeature(ARM::HasV4TOps)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use, ARMBuildAttrs::Allowed);
  }

  if (STI.hasFeature(ARM::FeatureNEON)) {
    /* NEON is not exactly a VFP architecture, but GAS emits one of
     * neon/neon-fp-armv8/neon-vfpv4/vfpv3/vfpv2 for .fpu parameters */
    if (STI.hasFeature(ARM::FeatureFPARMv8)) {
      if (STI.hasFeature(ARM::FeatureCrypto))
        emitFPU(ARM::FK_CRYPTO_NEON_FP_ARMV8);
      else
        emitFPU(ARM::FK_NEON_FP_ARMV8);
    } else if (STI.hasFeature(ARM::FeatureVFP4))
      emitFPU(ARM::FK_NEON_VFPV4);
    else
      emitFPU(STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_NEON_FP16
                                               : ARM::FK_NEON);
    // Emit Tag_Advanced_SIMD_arch for ARMv8 architecture
    if (STI.hasFeature(ARM::HasV8Ops))
      emitAttribute(ARMBuildAttrs::Advanced_SIMD_arch,
                    STI.hasFeature(ARM::HasV8_1aOps)
                        ? ARMBuildAttrs::AllowNeonARMv8_1a
                        : ARMBuildAttrs::AllowNeonARMv8);
  } else {
    if (STI.hasFeature(ARM::FeatureFPARMv8))
      // FPv5 and FP-ARMv8 have the same instructions, so are modeled as one
      // FPU, but there are two different names for it depending on the CPU.
      emitFPU(STI.hasFeature(ARM::FeatureD16)
                  ? (STI.hasFeature(ARM::FeatureVFPOnlySP) ? ARM::FK_FPV5_SP_D16
                                                           : ARM::FK_FPV5_D16)
                  : ARM::FK_FP_ARMV8);
    else if (STI.hasFeature(ARM::FeatureVFP4))
      emitFPU(STI.hasFeature(ARM::FeatureD16)
                  ? (STI.hasFeature(ARM::FeatureVFPOnlySP) ? ARM::FK_FPV4_SP_D16
                                                           : ARM::FK_VFPV4_D16)
                  : ARM::FK_VFPV4);
    else if (STI.hasFeature(ARM::FeatureVFP3))
      emitFPU(
          STI.hasFeature(ARM::FeatureD16)
              // +d16
              ? (STI.hasFeature(ARM::FeatureVFPOnlySP)
                     ? (STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_VFPV3XD_FP16
                                                         : ARM::FK_VFPV3XD)
                     : (STI.hasFeature(ARM::FeatureFP16)
                            ? ARM::FK_VFPV3_D16_FP16
                            : ARM::FK_VFPV3_D16))
              // -d16
              : (STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_VFPV3_FP16
                                                  : ARM::FK_VFPV3));
    else if (STI.hasFeature(ARM::FeatureVFP2))
      emitFPU(ARM::FK_VFPV2);
  }

  // Emit Tag_ABI_HardFP_use to indicate that the FPU supports only
  // single-precision floating point.
  if (STI.hasFeature(ARM::FeatureVFPOnlySP))
    emitAttribute(ARMBuildAttrs::ABI_HardFP_use,
                  ARMBuildAttrs::HardFPSinglePrecision);

  if (STI.hasFeature(ARM::FeatureFP16))
    emitAttribute(ARMBuildAttrs::FP_HP_extension, ARMBuildAttrs::AllowHPFP);

  if (STI.hasFeature(ARM::FeatureMP))
    emitAttribute(ARMBuildAttrs::MPextension_use, ARMBuildAttrs::AllowMP);

  // Hardware divide in ARM mode is part of base arch, starting from ARMv8.
  // If only Thumb hwdiv is present, it must also be in base arch (ARMv7-R/M).
  // It is not possible to produce DisallowDIV: if hwdiv is present in the base
  // arch, supplying -hwdiv downgrades the effective arch, via ClearImpliedBits.
  // AllowDIVExt is only emitted if hwdiv isn't available in the base arch;
  // otherwise, the default value (AllowDIVIfExists) applies.
  if (STI.hasFeature(ARM::FeatureHWDivARM) && !STI.hasFeature(ARM::HasV8Ops))
    emitAttribute(ARMBuildAttrs::DIV_use, ARMBuildAttrs::AllowDIVExt);

  if (STI.hasFeature(ARM::FeatureDSP) && isV8M(STI))
    emitAttribute(ARMBuildAttrs::DSP_extension, ARMBuildAttrs::Allowed);

  if (STI.hasFeature(ARM::FeatureStrictAlign))
    emitAttribute(ARMBuildAttrs::CPU_unaligned_access,
                  ARMBuildAttrs::Not_Allowed);
  else
    emitAttribute(ARMBuildAttrs::CPU_unaligned_access,
                  ARMBuildAttrs::Allowed);

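  // TrustZone and the Virtualization Extensions share Tag_Virtualization_use,
  // with a combined value emitted when both are present.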
  if (STI.hasFeature(ARM::FeatureTrustZone) &&
      STI.hasFeature(ARM::FeatureVirtualization))
    emitAttribute(ARMBuildAttrs::Virtualization_use,
                  ARMBuildAttrs::AllowTZVirtualization);
  else if (STI.hasFeature(ARM::FeatureTrustZone))
    emitAttribute(ARMBuildAttrs::Virtualization_use, ARMBuildAttrs::AllowTZ);
  else if (STI.hasFeature(ARM::FeatureVirtualization))
    emitAttribute(ARMBuildAttrs::Virtualization_use,
                  ARMBuildAttrs::AllowVirtualization);
}