//===-- AArch64ELFObjectWriter.cpp - AArch64 ELF Writer -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file handles ELF-specific object emission, converting LLVM's internal
// fixups into the appropriate relocations.
//
//===----------------------------------------------------------------------===//
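
// The mapping below covers both the LP64 ABI (R_AARCH64_* relocations) and the
// ILP32 ABI (R_AARCH64_P32_* relocations); fixups that have no encoding in the
// selected ABI are reported as errors and lowered to R_AARCH64_NONE.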

#include "MCTargetDesc/AArch64FixupKinds.h"
#include "MCTargetDesc/AArch64MCExpr.h"
#include "MCTargetDesc/AArch64MCTargetDesc.h"
#include "llvm/BinaryFormat/ELF.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCELFObjectWriter.h"
#include "llvm/MC/MCFixup.h"
#include "llvm/MC/MCObjectWriter.h"
#include "llvm/MC/MCValue.h"
#include "llvm/Support/ErrorHandling.h"
#include <cassert>
#include <cstdint>

using namespace llvm;

namespace {

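// Writes ELF relocations for AArch64. IsILP32 selects the 32-bit ILP32 ABI,
// which produces ELF32 objects and uses the R_AARCH64_P32_* relocation
// namespace instead of R_AARCH64_*.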
class AArch64ELFObjectWriter : public MCELFObjectTargetWriter {
public:
  AArch64ELFObjectWriter(uint8_t OSABI, bool IsILP32);

  ~AArch64ELFObjectWriter() override = default;

  MCSectionELF *getMemtagRelocsSection(MCContext &Ctx) const override;

protected:
  unsigned getRelocType(MCContext &Ctx, const MCValue &Target,
                        const MCFixup &Fixup, bool IsPCRel) const override;
  bool needsRelocateWithSymbol(const MCValue &Val, const MCSymbol &Sym,
                               unsigned Type) const override;
  bool IsILP32;
};

} // end anonymous namespace

AArch64ELFObjectWriter::AArch64ELFObjectWriter(uint8_t OSABI, bool IsILP32)
    : MCELFObjectTargetWriter(/*Is64Bit*/ !IsILP32, OSABI, ELF::EM_AARCH64,
                              /*HasRelocationAddend*/ true),
      IsILP32(IsILP32) {}

#define R_CLS(rtype)                                                           \
  IsILP32 ? ELF::R_AARCH64_P32_##rtype : ELF::R_AARCH64_##rtype
#define BAD_ILP32_MOV(lp64rtype)                                               \
  "ILP32 absolute MOV relocation not "                                         \
  "supported (LP64 eqv: " #lp64rtype ")"
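// For example, R_CLS(ABS32) selects R_AARCH64_P32_ABS32 under ILP32 and
// R_AARCH64_ABS32 under LP64.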

// Assumes IsILP32 is true. Reports an error and returns true for MOVW
// relocation kinds that have no ILP32 counterpart.
static bool isNonILP32reloc(const MCFixup &Fixup,
                            AArch64MCExpr::VariantKind RefKind,
                            MCContext &Ctx) {
  if (Fixup.getTargetKind() != AArch64::fixup_aarch64_movw)
    return false;
  switch (RefKind) {
  case AArch64MCExpr::VK_ABS_G3:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G3));
    return true;
  case AArch64MCExpr::VK_ABS_G2:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2));
    return true;
  case AArch64MCExpr::VK_ABS_G2_S:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G2));
    return true;
  case AArch64MCExpr::VK_ABS_G2_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2_NC));
    return true;
  case AArch64MCExpr::VK_ABS_G1_S:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G1));
    return true;
  case AArch64MCExpr::VK_ABS_G1_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G1_NC));
    return true;
  case AArch64MCExpr::VK_DTPREL_G2:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G2));
    return true;
  case AArch64MCExpr::VK_DTPREL_G1_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G1_NC));
    return true;
  case AArch64MCExpr::VK_TPREL_G2:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G2));
    return true;
  case AArch64MCExpr::VK_TPREL_G1_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G1_NC));
    return true;
  case AArch64MCExpr::VK_GOTTPREL_G1:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G1));
    return true;
  case AArch64MCExpr::VK_GOTTPREL_G0_NC:
    Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G0_NC));
    return true;
  default:
    return false;
  }
  return false;
}

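// Map a fixup and its target expression to an ELF relocation type. Problems
// are reported through Ctx and surface as R_AARCH64_NONE rather than aborting.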
unsigned AArch64ELFObjectWriter::getRelocType(MCContext &Ctx,
                                              const MCValue &Target,
                                              const MCFixup &Fixup,
                                              bool IsPCRel) const {
  unsigned Kind = Fixup.getTargetKind();
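  // Fixup kinds from .reloc directives encode the requested relocation type
  // directly; hand it back unchanged.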
  if (Kind >= FirstLiteralRelocationKind)
    return Kind - FirstLiteralRelocationKind;
  AArch64MCExpr::VariantKind RefKind =
      static_cast<AArch64MCExpr::VariantKind>(Target.getRefKind());
  AArch64MCExpr::VariantKind SymLoc = AArch64MCExpr::getSymbolLoc(RefKind);
  bool IsNC = AArch64MCExpr::isNotChecked(RefKind);

  assert((!Target.getSymA() ||
          Target.getSymA()->getKind() == MCSymbolRefExpr::VK_None ||
          Target.getSymA()->getKind() == MCSymbolRefExpr::VK_PLT ||
          Target.getSymA()->getKind() == MCSymbolRefExpr::VK_GOTPCREL) &&
         "Should only be expression-level modifiers here");

  assert((!Target.getSymB() ||
          Target.getSymB()->getKind() == MCSymbolRefExpr::VK_None) &&
         "Should only be expression-level modifiers here");

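  // PC-relative fixups cover branches, ADR/ADRP, literal loads and
  // PC-relative data; everything else is handled as an absolute fixup in the
  // else branch below.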
  if (IsPCRel) {
    switch (Kind) {
    case FK_Data_1:
      Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
      return ELF::R_AARCH64_NONE;
    case FK_Data_2:
      return R_CLS(PREL16);
    case FK_Data_4: {
      return Target.getAccessVariant() == MCSymbolRefExpr::VK_PLT
                 ? R_CLS(PLT32)
                 : R_CLS(PREL32);
    }
    case FK_Data_8:
      if (IsILP32) {
        Ctx.reportError(Fixup.getLoc(),
                        "ILP32 8 byte PC relative data "
                        "relocation not supported (LP64 eqv: PREL64)");
        return ELF::R_AARCH64_NONE;
      } else
        return ELF::R_AARCH64_PREL64;
    case AArch64::fixup_aarch64_pcrel_adr_imm21:
      if (SymLoc != AArch64MCExpr::VK_ABS)
        Ctx.reportError(Fixup.getLoc(),
                        "invalid symbol kind for ADR relocation");
      return R_CLS(ADR_PREL_LO21);
    case AArch64::fixup_aarch64_pcrel_adrp_imm21:
      if (SymLoc == AArch64MCExpr::VK_ABS && !IsNC)
        return R_CLS(ADR_PREL_PG_HI21);
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC) {
        if (IsILP32) {
          Ctx.reportError(Fixup.getLoc(),
                          "invalid fixup for 32-bit pcrel ADRP instruction "
                          "VK_ABS VK_NC");
          return ELF::R_AARCH64_NONE;
        } else {
          return ELF::R_AARCH64_ADR_PREL_PG_HI21_NC;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC)
        return R_CLS(ADR_GOT_PAGE);
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && !IsNC)
        return R_CLS(TLSIE_ADR_GOTTPREL_PAGE21);
      if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC)
        return R_CLS(TLSDESC_ADR_PAGE21);
      Ctx.reportError(Fixup.getLoc(),
                      "invalid symbol kind for ADRP relocation");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_pcrel_branch26:
      return R_CLS(JUMP26);
    case AArch64::fixup_aarch64_pcrel_call26:
      return R_CLS(CALL26);
    case AArch64::fixup_aarch64_ldr_pcrel_imm19:
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL)
        return R_CLS(TLSIE_LD_GOTTPREL_PREL19);
      if (SymLoc == AArch64MCExpr::VK_GOT)
        return R_CLS(GOT_LD_PREL19);
      return R_CLS(LD_PREL_LO19);
    case AArch64::fixup_aarch64_pcrel_branch14:
      return R_CLS(TSTBR14);
    case AArch64::fixup_aarch64_pcrel_branch16:
      Ctx.reportError(Fixup.getLoc(),
                      "relocation of PAC/AUT instructions is not supported");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_pcrel_branch19:
      return R_CLS(CONDBR19);
    default:
      Ctx.reportError(Fixup.getLoc(), "Unsupported pc-relative fixup kind");
      return ELF::R_AARCH64_NONE;
    }
  } else {
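    // Absolute fixups: data directives, MOVW immediates, and the :lo12:
    // immediate families used by add and load/store instructions.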
    if (IsILP32 && isNonILP32reloc(Fixup, RefKind, Ctx))
      return ELF::R_AARCH64_NONE;
    switch (Fixup.getTargetKind()) {
    case FK_Data_1:
      Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
      return ELF::R_AARCH64_NONE;
    case FK_Data_2:
      return R_CLS(ABS16);
    case FK_Data_4:
      return (!IsILP32 &&
              Target.getAccessVariant() == MCSymbolRefExpr::VK_GOTPCREL)
                 ? ELF::R_AARCH64_GOTPCREL32
                 : R_CLS(ABS32);
    case FK_Data_8:
      if (IsILP32) {
        Ctx.reportError(Fixup.getLoc(),
                        "ILP32 8 byte absolute data "
                        "relocation not supported (LP64 eqv: ABS64)");
        return ELF::R_AARCH64_NONE;
      } else {
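        // Pointers signed for pointer authentication (@AUTH expressions) are
        // emitted with the dedicated AUTH_ABS64 relocation.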
        if (RefKind == AArch64MCExpr::VK_AUTH ||
            RefKind == AArch64MCExpr::VK_AUTHADDR)
          return ELF::R_AARCH64_AUTH_ABS64;
        return ELF::R_AARCH64_ABS64;
      }
    case AArch64::fixup_aarch64_add_imm12:
      if (RefKind == AArch64MCExpr::VK_DTPREL_HI12)
        return R_CLS(TLSLD_ADD_DTPREL_HI12);
      if (RefKind == AArch64MCExpr::VK_TPREL_HI12)
        return R_CLS(TLSLE_ADD_TPREL_HI12);
      if (RefKind == AArch64MCExpr::VK_DTPREL_LO12_NC)
        return R_CLS(TLSLD_ADD_DTPREL_LO12_NC);
      if (RefKind == AArch64MCExpr::VK_DTPREL_LO12)
        return R_CLS(TLSLD_ADD_DTPREL_LO12);
      if (RefKind == AArch64MCExpr::VK_TPREL_LO12_NC)
        return R_CLS(TLSLE_ADD_TPREL_LO12_NC);
      if (RefKind == AArch64MCExpr::VK_TPREL_LO12)
        return R_CLS(TLSLE_ADD_TPREL_LO12);
      if (RefKind == AArch64MCExpr::VK_TLSDESC_LO12)
        return R_CLS(TLSDESC_ADD_LO12);
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(ADD_ABS_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for add (uimm12) instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale1:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST8_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST8_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST8_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST8_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST8_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 8-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale2:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST16_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST16_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST16_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST16_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST16_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 16-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale4:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST32_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST32_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST32_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST32_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST32_TPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_LD32_GOT_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte unchecked GOT load/store relocation "
                          "not supported (ILP32 eqv: LD32_GOT_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC) {
        if (IsILP32) {
          Ctx.reportError(Fixup.getLoc(),
                          "ILP32 4 byte checked GOT load/store relocation "
                          "not supported (unchecked eqv: LD32_GOT_LO12_NC)");
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte checked GOT load/store relocation "
                          "not supported (unchecked/ILP32 eqv: "
                          "LD32_GOT_LO12_NC)");
        }
        return ELF::R_AARCH64_NONE;
      }
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 32-bit load/store "
                          "relocation not supported (ILP32 eqv: "
                          "TLSIE_LD32_GOTTPREL_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_TLSDESC_LD32_LO12;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte TLSDESC load/store relocation "
                          "not supported (ILP32 eqv: TLSDESC_LD64_LO12)");
          return ELF::R_AARCH64_NONE;
        }
      }

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 32-bit load/store instruction "
                      "fixup_aarch64_ldst_imm12_scale4");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale8:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST64_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) {
        AArch64MCExpr::VariantKind AddressLoc =
            AArch64MCExpr::getAddressFrag(RefKind);
        if (!IsILP32) {
          if (AddressLoc == AArch64MCExpr::VK_LO15)
            return ELF::R_AARCH64_LD64_GOTPAGE_LO15;
          return ELF::R_AARCH64_LD64_GOT_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
                                          "relocation not supported (LP64 eqv: "
                                          "LD64_GOT_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST64_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST64_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST64_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST64_TPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) {
        if (!IsILP32) {
          return ELF::R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
                                          "relocation not supported (LP64 eqv: "
                                          "TLSIE_LD64_GOTTPREL_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_TLSDESC) {
        if (!IsILP32) {
          return ELF::R_AARCH64_TLSDESC_LD64_LO12;
        } else {
          Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
                                          "relocation not supported (LP64 eqv: "
                                          "TLSDESC_LD64_LO12)");
          return ELF::R_AARCH64_NONE;
        }
      }
      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 64-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale16:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST128_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST128_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST128_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST128_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST128_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 128-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    // The ILP32 MOVW cases are not reached here; they were rejected earlier
    // by isNonILP32reloc.
    case AArch64::fixup_aarch64_movw:
      if (RefKind == AArch64MCExpr::VK_ABS_G3)
        return ELF::R_AARCH64_MOVW_UABS_G3;
      if (RefKind == AArch64MCExpr::VK_ABS_G2)
        return ELF::R_AARCH64_MOVW_UABS_G2;
      if (RefKind == AArch64MCExpr::VK_ABS_G2_S)
        return ELF::R_AARCH64_MOVW_SABS_G2;
      if (RefKind == AArch64MCExpr::VK_ABS_G2_NC)
        return ELF::R_AARCH64_MOVW_UABS_G2_NC;
      if (RefKind == AArch64MCExpr::VK_ABS_G1)
        return R_CLS(MOVW_UABS_G1);
      if (RefKind == AArch64MCExpr::VK_ABS_G1_S)
        return ELF::R_AARCH64_MOVW_SABS_G1;
      if (RefKind == AArch64MCExpr::VK_ABS_G1_NC)
        return ELF::R_AARCH64_MOVW_UABS_G1_NC;
      if (RefKind == AArch64MCExpr::VK_ABS_G0)
        return R_CLS(MOVW_UABS_G0);
      if (RefKind == AArch64MCExpr::VK_ABS_G0_S)
        return R_CLS(MOVW_SABS_G0);
      if (RefKind == AArch64MCExpr::VK_ABS_G0_NC)
        return R_CLS(MOVW_UABS_G0_NC);
      if (RefKind == AArch64MCExpr::VK_PREL_G3)
        return ELF::R_AARCH64_MOVW_PREL_G3;
      if (RefKind == AArch64MCExpr::VK_PREL_G2)
        return ELF::R_AARCH64_MOVW_PREL_G2;
      if (RefKind == AArch64MCExpr::VK_PREL_G2_NC)
        return ELF::R_AARCH64_MOVW_PREL_G2_NC;
      if (RefKind == AArch64MCExpr::VK_PREL_G1)
        return R_CLS(MOVW_PREL_G1);
      if (RefKind == AArch64MCExpr::VK_PREL_G1_NC)
        return ELF::R_AARCH64_MOVW_PREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_PREL_G0)
        return R_CLS(MOVW_PREL_G0);
      if (RefKind == AArch64MCExpr::VK_PREL_G0_NC)
        return R_CLS(MOVW_PREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G2)
        return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G2;
      if (RefKind == AArch64MCExpr::VK_DTPREL_G1)
        return R_CLS(TLSLD_MOVW_DTPREL_G1);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G1_NC)
        return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_DTPREL_G0)
        return R_CLS(TLSLD_MOVW_DTPREL_G0);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G0_NC)
        return R_CLS(TLSLD_MOVW_DTPREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_TPREL_G2)
        return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G2;
      if (RefKind == AArch64MCExpr::VK_TPREL_G1)
        return R_CLS(TLSLE_MOVW_TPREL_G1);
      if (RefKind == AArch64MCExpr::VK_TPREL_G1_NC)
        return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_TPREL_G0)
        return R_CLS(TLSLE_MOVW_TPREL_G0);
      if (RefKind == AArch64MCExpr::VK_TPREL_G0_NC)
        return R_CLS(TLSLE_MOVW_TPREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_GOTTPREL_G1)
        return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G1;
      if (RefKind == AArch64MCExpr::VK_GOTTPREL_G0_NC)
        return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC;
      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for movz/movk instruction");
      return ELF::R_AARCH64_NONE;
    default:
      Ctx.reportError(Fixup.getLoc(), "Unknown ELF relocation type");
      return ELF::R_AARCH64_NONE;
    }
  }

  llvm_unreachable("Unimplemented fixup -> relocation");
}

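// GOT-based references must keep the original symbol in the relocation rather
// than being folded into a section symbol plus offset, so that the linker
// creates the GOT entry for the intended symbol.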
bool AArch64ELFObjectWriter::needsRelocateWithSymbol(const MCValue &Val,
                                                     const MCSymbol &,
                                                     unsigned) const {
  return (Val.getRefKind() & AArch64MCExpr::VK_GOT) == AArch64MCExpr::VK_GOT;
}

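// Section used to record which global symbols are memory tagged (MTE) in a
// statically linked image.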
MCSectionELF *
AArch64ELFObjectWriter::getMemtagRelocsSection(MCContext &Ctx) const {
  return Ctx.getELFSection(".memtag.globals.static",
                           ELF::SHT_AARCH64_MEMTAG_GLOBALS_STATIC, 0);
}

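// Factory hook used by the AArch64 asm backend when writing ELF objects. A
// minimal sketch of constructing a writer directly (normally the backend does
// this for you), assuming a generic ELF OSABI and the LP64 ABI:
//
//   std::unique_ptr<MCObjectTargetWriter> W =
//       createAArch64ELFObjectWriter(ELF::ELFOSABI_NONE, /*IsILP32=*/false);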
std::unique_ptr<MCObjectTargetWriter>
llvm::createAArch64ELFObjectWriter(uint8_t OSABI, bool IsILP32) {
  return std::make_unique<AArch64ELFObjectWriter>(OSABI, IsILP32);
}
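
// A quick way to observe the mapping implemented above (a sketch; llvm-readobj
// output formatting may differ between releases). Assuming t.s contains:
//
//   adrp x0, :got:var
//   ldr  x0, [x0, :got_lo12:var]
//
// then
//
//   llvm-mc -triple=aarch64-linux-gnu -filetype=obj t.s -o t.o
//   llvm-readobj -r t.o
//
// should show R_AARCH64_ADR_GOT_PAGE and R_AARCH64_LD64_GOT_LO12_NC against
// the symbol 'var'.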