xref: /freebsd/contrib/llvm-project/llvm/lib/Target/AArch64/MCTargetDesc/AArch64ELFObjectWriter.cpp (revision 1db9f3b21e39176dd5b67cf8ac378633b172463e)
//===-- AArch64ELFObjectWriter.cpp - AArch64 ELF Writer -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file handles ELF-specific object emission, converting LLVM's internal
// fixups into the appropriate relocations.
//
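// For example, the fixup emitted for an instruction such as
//     adrp x0, :got:foo
// (fixup_aarch64_pcrel_adrp_imm21 with a :got: modifier) maps to
// R_AARCH64_ADR_GOT_PAGE for LP64 objects and R_AARCH64_P32_ADR_GOT_PAGE
// for ILP32 objects; 'foo' is an arbitrary symbol, and the mapping follows
// getRelocType() below.
//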
//===----------------------------------------------------------------------===//

#include "MCTargetDesc/AArch64FixupKinds.h"
#include "MCTargetDesc/AArch64MCExpr.h"
#include "MCTargetDesc/AArch64MCTargetDesc.h"
#include "llvm/BinaryFormat/ELF.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCELFObjectWriter.h"
#include "llvm/MC/MCFixup.h"
#include "llvm/MC/MCObjectWriter.h"
#include "llvm/MC/MCValue.h"
#include "llvm/Support/ErrorHandling.h"
#include <cassert>
#include <cstdint>

using namespace llvm;

namespace {

class AArch64ELFObjectWriter : public MCELFObjectTargetWriter {
public:
  AArch64ELFObjectWriter(uint8_t OSABI, bool IsILP32);

  ~AArch64ELFObjectWriter() override = default;

  MCSectionELF *getMemtagRelocsSection(MCContext &Ctx) const override;

protected:
  unsigned getRelocType(MCContext &Ctx, const MCValue &Target,
                        const MCFixup &Fixup, bool IsPCRel) const override;
  bool needsRelocateWithSymbol(const MCValue &Val, const MCSymbol &Sym,
                               unsigned Type) const override;
  bool IsILP32;
};

} // end anonymous namespace

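// The writer emits ELFCLASS64 objects for LP64 and ELFCLASS32 objects for
// ILP32; both ABIs use RELA relocations with explicit addends against the
// EM_AARCH64 machine type.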
AArch64ELFObjectWriter::AArch64ELFObjectWriter(uint8_t OSABI, bool IsILP32)
    : MCELFObjectTargetWriter(/*Is64Bit*/ !IsILP32, OSABI, ELF::EM_AARCH64,
                              /*HasRelocationAddend*/ true),
      IsILP32(IsILP32) {}

#define R_CLS(rtype)                                                           \
  IsILP32 ? ELF::R_AARCH64_P32_##rtype : ELF::R_AARCH64_##rtype
#define BAD_ILP32_MOV(lp64rtype)                                               \
  "ILP32 absolute MOV relocation not "                                         \
  "supported (LP64 eqv: " #lp64rtype ")"
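// R_CLS(FOO) expands to R_AARCH64_P32_FOO when targeting ILP32 and to
// R_AARCH64_FOO when targeting LP64. BAD_ILP32_MOV builds the diagnostic
// text for MOVW relocations that have no ILP32 counterpart.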

// Assumes IsILP32 is true. Reports an error and returns true for MOVW
// relocation variants that exist only in the LP64 ABI.
static bool isNonILP32reloc(const MCFixup &Fixup,
                            AArch64MCExpr::VariantKind RefKind,
                            MCContext &Ctx) {
  if (Fixup.getTargetKind() != AArch64::fixup_aarch64_movw)
    return false;
  switch (RefKind) {
  case AArch64MCExpr::VK_ABS_G3:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G3));
    return true;
  case AArch64MCExpr::VK_ABS_G2:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2));
    return true;
  case AArch64MCExpr::VK_ABS_G2_S:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G2));
    return true;
  case AArch64MCExpr::VK_ABS_G2_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2_NC));
    return true;
  case AArch64MCExpr::VK_ABS_G1_S:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G1));
    return true;
  case AArch64MCExpr::VK_ABS_G1_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G1_NC));
    return true;
  case AArch64MCExpr::VK_DTPREL_G2:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G2));
    return true;
  case AArch64MCExpr::VK_DTPREL_G1_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G1_NC));
    return true;
  case AArch64MCExpr::VK_TPREL_G2:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G2));
    return true;
  case AArch64MCExpr::VK_TPREL_G1_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G1_NC));
    return true;
  case AArch64MCExpr::VK_GOTTPREL_G1:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G1));
    return true;
  case AArch64MCExpr::VK_GOTTPREL_G0_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G0_NC));
    return true;
  default:
    return false;
  }
  return false;
}

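// Map an assembler fixup and its expression modifiers (RefKind/SymLoc) onto
// an ELF relocation type. Fixup kinds at or above FirstLiteralRelocationKind
// come from .reloc directives and already encode the raw relocation number,
// so they are passed through unchanged.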
unsigned AArch64ELFObjectWriter::getRelocType(MCContext &Ctx,
                                              const MCValue &Target,
                                              const MCFixup &Fixup,
                                              bool IsPCRel) const {
  unsigned Kind = Fixup.getTargetKind();
  if (Kind >= FirstLiteralRelocationKind)
    return Kind - FirstLiteralRelocationKind;
  AArch64MCExpr::VariantKind RefKind =
      static_cast<AArch64MCExpr::VariantKind>(Target.getRefKind());
  AArch64MCExpr::VariantKind SymLoc = AArch64MCExpr::getSymbolLoc(RefKind);
  bool IsNC = AArch64MCExpr::isNotChecked(RefKind);

  assert((!Target.getSymA() ||
          Target.getSymA()->getKind() == MCSymbolRefExpr::VK_None ||
          Target.getSymA()->getKind() == MCSymbolRefExpr::VK_PLT) &&
         "Should only be expression-level modifiers here");

  assert((!Target.getSymB() ||
          Target.getSymB()->getKind() == MCSymbolRefExpr::VK_None) &&
         "Should only be expression-level modifiers here");

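  // PC-relative fixups (PC-relative data, ADR/ADRP, literal loads and
  // branches) are handled first; absolute data, :lo12: load/store, TLS and
  // MOVW fixups are handled in the else branch below.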
  if (IsPCRel) {
    switch (Kind) {
    case FK_Data_1:
      Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
      return ELF::R_AARCH64_NONE;
    case FK_Data_2:
      return R_CLS(PREL16);
    case FK_Data_4: {
      return Target.getAccessVariant() == MCSymbolRefExpr::VK_PLT
                 ? R_CLS(PLT32)
                 : R_CLS(PREL32);
    }
    case FK_Data_8:
      if (IsILP32) {
        Ctx.reportError(Fixup.getLoc(),
                        "ILP32 8 byte PC relative data "
                        "relocation not supported (LP64 eqv: PREL64)");
        return ELF::R_AARCH64_NONE;
      } else
        return ELF::R_AARCH64_PREL64;
    case AArch64::fixup_aarch64_pcrel_adr_imm21:
      if (SymLoc != AArch64MCExpr::VK_ABS)
        Ctx.reportError(Fixup.getLoc(),
                        "invalid symbol kind for ADR relocation");
      return R_CLS(ADR_PREL_LO21);
    case AArch64::fixup_aarch64_pcrel_adrp_imm21:
      if (SymLoc == AArch64MCExpr::VK_ABS && !IsNC)
        return R_CLS(ADR_PREL_PG_HI21);
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC) {
        if (IsILP32) {
          Ctx.reportError(Fixup.getLoc(),
                          "invalid fixup for 32-bit pcrel ADRP instruction "
                          "VK_ABS VK_NC");
          return ELF::R_AARCH64_NONE;
        } else {
          return ELF::R_AARCH64_ADR_PREL_PG_HI21_NC;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC)
        return R_CLS(ADR_GOT_PAGE);
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && !IsNC)
        return R_CLS(TLSIE_ADR_GOTTPREL_PAGE21);
      if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC)
        return R_CLS(TLSDESC_ADR_PAGE21);
      Ctx.reportError(Fixup.getLoc(),
                      "invalid symbol kind for ADRP relocation");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_pcrel_branch26:
      return R_CLS(JUMP26);
    case AArch64::fixup_aarch64_pcrel_call26:
      return R_CLS(CALL26);
    case AArch64::fixup_aarch64_ldr_pcrel_imm19:
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL)
        return R_CLS(TLSIE_LD_GOTTPREL_PREL19);
      if (SymLoc == AArch64MCExpr::VK_GOT)
        return R_CLS(GOT_LD_PREL19);
      return R_CLS(LD_PREL_LO19);
    case AArch64::fixup_aarch64_pcrel_branch14:
      return R_CLS(TSTBR14);
    case AArch64::fixup_aarch64_pcrel_branch16:
      Ctx.reportError(Fixup.getLoc(),
                      "relocation of PAC/AUT instructions is not supported");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_pcrel_branch19:
      return R_CLS(CONDBR19);
    default:
      Ctx.reportError(Fixup.getLoc(), "Unsupported pc-relative fixup kind");
      return ELF::R_AARCH64_NONE;
    }
  } else {
    if (IsILP32 && isNonILP32reloc(Fixup, RefKind, Ctx))
      return ELF::R_AARCH64_NONE;
    switch (Fixup.getTargetKind()) {
    case FK_Data_1:
      Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
      return ELF::R_AARCH64_NONE;
    case FK_Data_2:
      return R_CLS(ABS16);
    case FK_Data_4:
      return R_CLS(ABS32);
    case FK_Data_8:
      if (IsILP32) {
        Ctx.reportError(Fixup.getLoc(),
                        "ILP32 8 byte absolute data "
                        "relocation not supported (LP64 eqv: ABS64)");
        return ELF::R_AARCH64_NONE;
      } else {
        if (RefKind == AArch64MCExpr::VK_AUTH ||
            RefKind == AArch64MCExpr::VK_AUTHADDR)
          return ELF::R_AARCH64_AUTH_ABS64;
        return ELF::R_AARCH64_ABS64;
      }
    case AArch64::fixup_aarch64_add_imm12:
      if (RefKind == AArch64MCExpr::VK_DTPREL_HI12)
        return R_CLS(TLSLD_ADD_DTPREL_HI12);
      if (RefKind == AArch64MCExpr::VK_TPREL_HI12)
        return R_CLS(TLSLE_ADD_TPREL_HI12);
      if (RefKind == AArch64MCExpr::VK_DTPREL_LO12_NC)
        return R_CLS(TLSLD_ADD_DTPREL_LO12_NC);
      if (RefKind == AArch64MCExpr::VK_DTPREL_LO12)
        return R_CLS(TLSLD_ADD_DTPREL_LO12);
      if (RefKind == AArch64MCExpr::VK_TPREL_LO12_NC)
        return R_CLS(TLSLE_ADD_TPREL_LO12_NC);
      if (RefKind == AArch64MCExpr::VK_TPREL_LO12)
        return R_CLS(TLSLE_ADD_TPREL_LO12);
      if (RefKind == AArch64MCExpr::VK_TLSDESC_LO12)
        return R_CLS(TLSDESC_ADD_LO12);
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(ADD_ABS_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for add (uimm12) instruction");
      return ELF::R_AARCH64_NONE;
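    // The :lo12: load/store fixups below select the LDST<n>_* relocation that
    // matches the access size implied by the instruction's immediate scaling,
    // from scale1 (8-bit) through scale16 (128-bit).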
    case AArch64::fixup_aarch64_ldst_imm12_scale1:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST8_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST8_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST8_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST8_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST8_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 8-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale2:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST16_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST16_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST16_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST16_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST16_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 16-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale4:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST32_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST32_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST32_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST32_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST32_TPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_LD32_GOT_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte unchecked GOT load/store relocation "
                          "not supported (ILP32 eqv: LD32_GOT_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC) {
        if (IsILP32) {
          Ctx.reportError(Fixup.getLoc(),
                          "ILP32 4 byte checked GOT load/store relocation "
                          "not supported (unchecked eqv: LD32_GOT_LO12_NC)");
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte checked GOT load/store relocation "
                          "not supported (unchecked/ILP32 eqv: "
                          "LD32_GOT_LO12_NC)");
        }
        return ELF::R_AARCH64_NONE;
      }
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 32-bit load/store "
                          "relocation not supported (ILP32 eqv: "
                          "TLSIE_LD32_GOTTPREL_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_TLSDESC_LD32_LO12;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte TLSDESC load/store relocation "
                          "not supported (ILP32 eqv: TLSDESC_LD64_LO12)");
          return ELF::R_AARCH64_NONE;
        }
      }

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 32-bit load/store instruction "
                      "fixup_aarch64_ldst_imm12_scale4");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale8:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST64_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) {
        AArch64MCExpr::VariantKind AddressLoc =
            AArch64MCExpr::getAddressFrag(RefKind);
        if (!IsILP32) {
          if (AddressLoc == AArch64MCExpr::VK_LO15)
            return ELF::R_AARCH64_LD64_GOTPAGE_LO15;
          return ELF::R_AARCH64_LD64_GOT_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
                                          "relocation not supported (LP64 eqv: "
                                          "LD64_GOT_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST64_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST64_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST64_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST64_TPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) {
        if (!IsILP32) {
          return ELF::R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
                                          "relocation not supported (LP64 eqv: "
                                          "TLSIE_LD64_GOTTPREL_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_TLSDESC) {
        if (!IsILP32) {
          return ELF::R_AARCH64_TLSDESC_LD64_LO12;
        } else {
          Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
                                          "relocation not supported (LP64 eqv: "
                                          "TLSDESC_LD64_LO12)");
          return ELF::R_AARCH64_NONE;
        }
      }
      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 64-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale16:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST128_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST128_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST128_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST128_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST128_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 128-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    // For ILP32, MOVW variants that have no P32 relocation were already
    // rejected by isNonILP32reloc() above.
    case AArch64::fixup_aarch64_movw:
      if (RefKind == AArch64MCExpr::VK_ABS_G3)
        return ELF::R_AARCH64_MOVW_UABS_G3;
      if (RefKind == AArch64MCExpr::VK_ABS_G2)
        return ELF::R_AARCH64_MOVW_UABS_G2;
      if (RefKind == AArch64MCExpr::VK_ABS_G2_S)
        return ELF::R_AARCH64_MOVW_SABS_G2;
      if (RefKind == AArch64MCExpr::VK_ABS_G2_NC)
        return ELF::R_AARCH64_MOVW_UABS_G2_NC;
      if (RefKind == AArch64MCExpr::VK_ABS_G1)
        return R_CLS(MOVW_UABS_G1);
      if (RefKind == AArch64MCExpr::VK_ABS_G1_S)
        return ELF::R_AARCH64_MOVW_SABS_G1;
      if (RefKind == AArch64MCExpr::VK_ABS_G1_NC)
        return ELF::R_AARCH64_MOVW_UABS_G1_NC;
      if (RefKind == AArch64MCExpr::VK_ABS_G0)
        return R_CLS(MOVW_UABS_G0);
      if (RefKind == AArch64MCExpr::VK_ABS_G0_S)
        return R_CLS(MOVW_SABS_G0);
      if (RefKind == AArch64MCExpr::VK_ABS_G0_NC)
        return R_CLS(MOVW_UABS_G0_NC);
      if (RefKind == AArch64MCExpr::VK_PREL_G3)
        return ELF::R_AARCH64_MOVW_PREL_G3;
      if (RefKind == AArch64MCExpr::VK_PREL_G2)
        return ELF::R_AARCH64_MOVW_PREL_G2;
      if (RefKind == AArch64MCExpr::VK_PREL_G2_NC)
        return ELF::R_AARCH64_MOVW_PREL_G2_NC;
      if (RefKind == AArch64MCExpr::VK_PREL_G1)
        return R_CLS(MOVW_PREL_G1);
      if (RefKind == AArch64MCExpr::VK_PREL_G1_NC)
        return ELF::R_AARCH64_MOVW_PREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_PREL_G0)
        return R_CLS(MOVW_PREL_G0);
      if (RefKind == AArch64MCExpr::VK_PREL_G0_NC)
        return R_CLS(MOVW_PREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G2)
        return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G2;
      if (RefKind == AArch64MCExpr::VK_DTPREL_G1)
        return R_CLS(TLSLD_MOVW_DTPREL_G1);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G1_NC)
        return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_DTPREL_G0)
        return R_CLS(TLSLD_MOVW_DTPREL_G0);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G0_NC)
        return R_CLS(TLSLD_MOVW_DTPREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_TPREL_G2)
        return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G2;
      if (RefKind == AArch64MCExpr::VK_TPREL_G1)
        return R_CLS(TLSLE_MOVW_TPREL_G1);
      if (RefKind == AArch64MCExpr::VK_TPREL_G1_NC)
        return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_TPREL_G0)
        return R_CLS(TLSLE_MOVW_TPREL_G0);
      if (RefKind == AArch64MCExpr::VK_TPREL_G0_NC)
        return R_CLS(TLSLE_MOVW_TPREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_GOTTPREL_G1)
        return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G1;
      if (RefKind == AArch64MCExpr::VK_GOTTPREL_G0_NC)
        return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC;
      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for movz/movk instruction");
      return ELF::R_AARCH64_NONE;
    default:
      Ctx.reportError(Fixup.getLoc(), "Unknown ELF relocation type");
      return ELF::R_AARCH64_NONE;
    }
  }

  llvm_unreachable("Unimplemented fixup -> relocation");
}

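// GOT-based references must be relocated against the symbol itself rather
// than being folded into a section symbol plus offset, so that the linker
// creates a GOT entry for the intended symbol.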
bool AArch64ELFObjectWriter::needsRelocateWithSymbol(const MCValue &Val,
                                                     const MCSymbol &,
                                                     unsigned) const {
  return (Val.getRefKind() & AArch64MCExpr::VK_GOT) == AArch64MCExpr::VK_GOT;
}

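// Memory-tagging (MTE) descriptors for tagged globals are collected in a
// dedicated SHT_AARCH64_MEMTAG_GLOBALS_STATIC section.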
MCSectionELF *
AArch64ELFObjectWriter::getMemtagRelocsSection(MCContext &Ctx) const {
  return Ctx.getELFSection(".memtag.globals.static",
                           ELF::SHT_AARCH64_MEMTAG_GLOBALS_STATIC, 0);
}

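// Create the ELF object writer for either the LP64 or the ILP32 ABI.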
std::unique_ptr<MCObjectTargetWriter>
llvm::createAArch64ELFObjectWriter(uint8_t OSABI, bool IsILP32) {
  return std::make_unique<AArch64ELFObjectWriter>(OSABI, IsILP32);
}