1 //===-- AArch64ELFObjectWriter.cpp - AArch64 ELF Writer -------------------===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 // 9 // This file handles ELF-specific object emission, converting LLVM's internal 10 // fixups into the appropriate relocations. 11 // 12 //===----------------------------------------------------------------------===// 13 14 #include "MCTargetDesc/AArch64FixupKinds.h" 15 #include "MCTargetDesc/AArch64MCExpr.h" 16 #include "MCTargetDesc/AArch64MCTargetDesc.h" 17 #include "llvm/BinaryFormat/ELF.h" 18 #include "llvm/MC/MCContext.h" 19 #include "llvm/MC/MCELFObjectWriter.h" 20 #include "llvm/MC/MCFixup.h" 21 #include "llvm/MC/MCObjectWriter.h" 22 #include "llvm/MC/MCValue.h" 23 #include "llvm/Support/ErrorHandling.h" 24 #include <cassert> 25 #include <cstdint> 26 27 using namespace llvm; 28 29 namespace { 30 31 class AArch64ELFObjectWriter : public MCELFObjectTargetWriter { 32 public: 33 AArch64ELFObjectWriter(uint8_t OSABI, bool IsILP32); 34 35 ~AArch64ELFObjectWriter() override = default; 36 37 protected: 38 unsigned getRelocType(MCContext &Ctx, const MCValue &Target, 39 const MCFixup &Fixup, bool IsPCRel) const override; 40 bool IsILP32; 41 }; 42 43 } // end anonymous namespace 44 45 AArch64ELFObjectWriter::AArch64ELFObjectWriter(uint8_t OSABI, bool IsILP32) 46 : MCELFObjectTargetWriter(/*Is64Bit*/ !IsILP32, OSABI, ELF::EM_AARCH64, 47 /*HasRelocationAddend*/ true), 48 IsILP32(IsILP32) {} 49 50 #define R_CLS(rtype) \ 51 IsILP32 ? ELF::R_AARCH64_P32_##rtype : ELF::R_AARCH64_##rtype 52 #define BAD_ILP32_MOV(lp64rtype) \ 53 "ILP32 absolute MOV relocation not " \ 54 "supported (LP64 eqv: " #lp64rtype ")" 55 56 // assumes IsILP32 is true 57 static bool isNonILP32reloc(const MCFixup &Fixup, 58 AArch64MCExpr::VariantKind RefKind, 59 MCContext &Ctx) { 60 if (Fixup.getTargetKind() != AArch64::fixup_aarch64_movw) 61 return false; 62 switch (RefKind) { 63 case AArch64MCExpr::VK_ABS_G3: 64 Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G3)); 65 return true; 66 case AArch64MCExpr::VK_ABS_G2: 67 Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2)); 68 return true; 69 case AArch64MCExpr::VK_ABS_G2_S: 70 Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G2)); 71 return true; 72 case AArch64MCExpr::VK_ABS_G2_NC: 73 Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2_NC)); 74 return true; 75 case AArch64MCExpr::VK_ABS_G1_S: 76 Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G1)); 77 return true; 78 case AArch64MCExpr::VK_ABS_G1_NC: 79 Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G1_NC)); 80 return true; 81 case AArch64MCExpr::VK_DTPREL_G2: 82 Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G2)); 83 return true; 84 case AArch64MCExpr::VK_DTPREL_G1_NC: 85 Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G1_NC)); 86 return true; 87 case AArch64MCExpr::VK_TPREL_G2: 88 Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G2)); 89 return true; 90 case AArch64MCExpr::VK_TPREL_G1_NC: 91 Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G1_NC)); 92 return true; 93 case AArch64MCExpr::VK_GOTTPREL_G1: 94 Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G1)); 95 return true; 96 case AArch64MCExpr::VK_GOTTPREL_G0_NC: 97 Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G0_NC)); 98 return true; 99 default: 100 return false; 101 } 102 return false; 103 } 104 105 unsigned AArch64ELFObjectWriter::getRelocType(MCContext &Ctx, 106 const MCValue &Target, 107 const MCFixup &Fixup, 108 bool IsPCRel) const { 109 unsigned Kind = Fixup.getTargetKind(); 110 if (Kind >= FirstLiteralRelocationKind) 111 return Kind - FirstLiteralRelocationKind; 112 AArch64MCExpr::VariantKind RefKind = 113 static_cast<AArch64MCExpr::VariantKind>(Target.getRefKind()); 114 AArch64MCExpr::VariantKind SymLoc = AArch64MCExpr::getSymbolLoc(RefKind); 115 bool IsNC = AArch64MCExpr::isNotChecked(RefKind); 116 117 assert((!Target.getSymA() || 118 Target.getSymA()->getKind() == MCSymbolRefExpr::VK_None || 119 Target.getSymA()->getKind() == MCSymbolRefExpr::VK_PLT) && 120 "Should only be expression-level modifiers here"); 121 122 assert((!Target.getSymB() || 123 Target.getSymB()->getKind() == MCSymbolRefExpr::VK_None) && 124 "Should only be expression-level modifiers here"); 125 126 if (IsPCRel) { 127 switch (Kind) { 128 case FK_Data_1: 129 Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported"); 130 return ELF::R_AARCH64_NONE; 131 case FK_Data_2: 132 return R_CLS(PREL16); 133 case FK_Data_4: { 134 return Target.getAccessVariant() == MCSymbolRefExpr::VK_PLT 135 ? R_CLS(PLT32) 136 : R_CLS(PREL32); 137 } 138 case FK_Data_8: 139 if (IsILP32) { 140 Ctx.reportError(Fixup.getLoc(), 141 "ILP32 8 byte PC relative data " 142 "relocation not supported (LP64 eqv: PREL64)"); 143 return ELF::R_AARCH64_NONE; 144 } else 145 return ELF::R_AARCH64_PREL64; 146 case AArch64::fixup_aarch64_pcrel_adr_imm21: 147 if (SymLoc != AArch64MCExpr::VK_ABS) 148 Ctx.reportError(Fixup.getLoc(), 149 "invalid symbol kind for ADR relocation"); 150 return R_CLS(ADR_PREL_LO21); 151 case AArch64::fixup_aarch64_pcrel_adrp_imm21: 152 if (SymLoc == AArch64MCExpr::VK_ABS && !IsNC) 153 return R_CLS(ADR_PREL_PG_HI21); 154 if (SymLoc == AArch64MCExpr::VK_ABS && IsNC) { 155 if (IsILP32) { 156 Ctx.reportError(Fixup.getLoc(), 157 "invalid fixup for 32-bit pcrel ADRP instruction " 158 "VK_ABS VK_NC"); 159 return ELF::R_AARCH64_NONE; 160 } else { 161 return ELF::R_AARCH64_ADR_PREL_PG_HI21_NC; 162 } 163 } 164 if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC) 165 return R_CLS(ADR_GOT_PAGE); 166 if (SymLoc == AArch64MCExpr::VK_GOTTPREL && !IsNC) 167 return R_CLS(TLSIE_ADR_GOTTPREL_PAGE21); 168 if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC) 169 return R_CLS(TLSDESC_ADR_PAGE21); 170 Ctx.reportError(Fixup.getLoc(), 171 "invalid symbol kind for ADRP relocation"); 172 return ELF::R_AARCH64_NONE; 173 case AArch64::fixup_aarch64_pcrel_branch26: 174 return R_CLS(JUMP26); 175 case AArch64::fixup_aarch64_pcrel_call26: 176 return R_CLS(CALL26); 177 case AArch64::fixup_aarch64_ldr_pcrel_imm19: 178 if (SymLoc == AArch64MCExpr::VK_GOTTPREL) 179 return R_CLS(TLSIE_LD_GOTTPREL_PREL19); 180 if (SymLoc == AArch64MCExpr::VK_GOT) 181 return R_CLS(GOT_LD_PREL19); 182 return R_CLS(LD_PREL_LO19); 183 case AArch64::fixup_aarch64_pcrel_branch14: 184 return R_CLS(TSTBR14); 185 case AArch64::fixup_aarch64_pcrel_branch19: 186 return R_CLS(CONDBR19); 187 default: 188 Ctx.reportError(Fixup.getLoc(), "Unsupported pc-relative fixup kind"); 189 return ELF::R_AARCH64_NONE; 190 } 191 } else { 192 if (IsILP32 && isNonILP32reloc(Fixup, RefKind, Ctx)) 193 return ELF::R_AARCH64_NONE; 194 switch (Fixup.getTargetKind()) { 195 case FK_Data_1: 196 Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported"); 197 return ELF::R_AARCH64_NONE; 198 case FK_Data_2: 199 return R_CLS(ABS16); 200 case FK_Data_4: 201 return R_CLS(ABS32); 202 case FK_Data_8: 203 if (IsILP32) { 204 Ctx.reportError(Fixup.getLoc(), 205 "ILP32 8 byte absolute data " 206 "relocation not supported (LP64 eqv: ABS64)"); 207 return ELF::R_AARCH64_NONE; 208 } else 209 return ELF::R_AARCH64_ABS64; 210 case AArch64::fixup_aarch64_add_imm12: 211 if (RefKind == AArch64MCExpr::VK_DTPREL_HI12) 212 return R_CLS(TLSLD_ADD_DTPREL_HI12); 213 if (RefKind == AArch64MCExpr::VK_TPREL_HI12) 214 return R_CLS(TLSLE_ADD_TPREL_HI12); 215 if (RefKind == AArch64MCExpr::VK_DTPREL_LO12_NC) 216 return R_CLS(TLSLD_ADD_DTPREL_LO12_NC); 217 if (RefKind == AArch64MCExpr::VK_DTPREL_LO12) 218 return R_CLS(TLSLD_ADD_DTPREL_LO12); 219 if (RefKind == AArch64MCExpr::VK_TPREL_LO12_NC) 220 return R_CLS(TLSLE_ADD_TPREL_LO12_NC); 221 if (RefKind == AArch64MCExpr::VK_TPREL_LO12) 222 return R_CLS(TLSLE_ADD_TPREL_LO12); 223 if (RefKind == AArch64MCExpr::VK_TLSDESC_LO12) 224 return R_CLS(TLSDESC_ADD_LO12); 225 if (SymLoc == AArch64MCExpr::VK_ABS && IsNC) 226 return R_CLS(ADD_ABS_LO12_NC); 227 228 Ctx.reportError(Fixup.getLoc(), 229 "invalid fixup for add (uimm12) instruction"); 230 return ELF::R_AARCH64_NONE; 231 case AArch64::fixup_aarch64_ldst_imm12_scale1: 232 if (SymLoc == AArch64MCExpr::VK_ABS && IsNC) 233 return R_CLS(LDST8_ABS_LO12_NC); 234 if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC) 235 return R_CLS(TLSLD_LDST8_DTPREL_LO12); 236 if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC) 237 return R_CLS(TLSLD_LDST8_DTPREL_LO12_NC); 238 if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC) 239 return R_CLS(TLSLE_LDST8_TPREL_LO12); 240 if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC) 241 return R_CLS(TLSLE_LDST8_TPREL_LO12_NC); 242 243 Ctx.reportError(Fixup.getLoc(), 244 "invalid fixup for 8-bit load/store instruction"); 245 return ELF::R_AARCH64_NONE; 246 case AArch64::fixup_aarch64_ldst_imm12_scale2: 247 if (SymLoc == AArch64MCExpr::VK_ABS && IsNC) 248 return R_CLS(LDST16_ABS_LO12_NC); 249 if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC) 250 return R_CLS(TLSLD_LDST16_DTPREL_LO12); 251 if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC) 252 return R_CLS(TLSLD_LDST16_DTPREL_LO12_NC); 253 if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC) 254 return R_CLS(TLSLE_LDST16_TPREL_LO12); 255 if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC) 256 return R_CLS(TLSLE_LDST16_TPREL_LO12_NC); 257 258 Ctx.reportError(Fixup.getLoc(), 259 "invalid fixup for 16-bit load/store instruction"); 260 return ELF::R_AARCH64_NONE; 261 case AArch64::fixup_aarch64_ldst_imm12_scale4: 262 if (SymLoc == AArch64MCExpr::VK_ABS && IsNC) 263 return R_CLS(LDST32_ABS_LO12_NC); 264 if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC) 265 return R_CLS(TLSLD_LDST32_DTPREL_LO12); 266 if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC) 267 return R_CLS(TLSLD_LDST32_DTPREL_LO12_NC); 268 if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC) 269 return R_CLS(TLSLE_LDST32_TPREL_LO12); 270 if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC) 271 return R_CLS(TLSLE_LDST32_TPREL_LO12_NC); 272 if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) { 273 if (IsILP32) { 274 return ELF::R_AARCH64_P32_LD32_GOT_LO12_NC; 275 } else { 276 Ctx.reportError(Fixup.getLoc(), 277 "LP64 4 byte unchecked GOT load/store relocation " 278 "not supported (ILP32 eqv: LD32_GOT_LO12_NC"); 279 return ELF::R_AARCH64_NONE; 280 } 281 } 282 if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC) { 283 if (IsILP32) { 284 Ctx.reportError(Fixup.getLoc(), 285 "ILP32 4 byte checked GOT load/store relocation " 286 "not supported (unchecked eqv: LD32_GOT_LO12_NC)"); 287 } else { 288 Ctx.reportError(Fixup.getLoc(), 289 "LP64 4 byte checked GOT load/store relocation " 290 "not supported (unchecked/ILP32 eqv: " 291 "LD32_GOT_LO12_NC)"); 292 } 293 return ELF::R_AARCH64_NONE; 294 } 295 if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) { 296 if (IsILP32) { 297 return ELF::R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC; 298 } else { 299 Ctx.reportError(Fixup.getLoc(), 300 "LP64 32-bit load/store " 301 "relocation not supported (ILP32 eqv: " 302 "TLSIE_LD32_GOTTPREL_LO12_NC)"); 303 return ELF::R_AARCH64_NONE; 304 } 305 } 306 if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC) { 307 if (IsILP32) { 308 return ELF::R_AARCH64_P32_TLSDESC_LD32_LO12; 309 } else { 310 Ctx.reportError(Fixup.getLoc(), 311 "LP64 4 byte TLSDESC load/store relocation " 312 "not supported (ILP32 eqv: TLSDESC_LD64_LO12)"); 313 return ELF::R_AARCH64_NONE; 314 } 315 } 316 317 Ctx.reportError(Fixup.getLoc(), 318 "invalid fixup for 32-bit load/store instruction " 319 "fixup_aarch64_ldst_imm12_scale4"); 320 return ELF::R_AARCH64_NONE; 321 case AArch64::fixup_aarch64_ldst_imm12_scale8: 322 if (SymLoc == AArch64MCExpr::VK_ABS && IsNC) 323 return R_CLS(LDST64_ABS_LO12_NC); 324 if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) { 325 AArch64MCExpr::VariantKind AddressLoc = 326 AArch64MCExpr::getAddressFrag(RefKind); 327 if (!IsILP32) { 328 if (AddressLoc == AArch64MCExpr::VK_LO15) 329 return ELF::R_AARCH64_LD64_GOTPAGE_LO15; 330 return ELF::R_AARCH64_LD64_GOT_LO12_NC; 331 } else { 332 Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store " 333 "relocation not supported (LP64 eqv: " 334 "LD64_GOT_LO12_NC)"); 335 return ELF::R_AARCH64_NONE; 336 } 337 } 338 if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC) 339 return R_CLS(TLSLD_LDST64_DTPREL_LO12); 340 if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC) 341 return R_CLS(TLSLD_LDST64_DTPREL_LO12_NC); 342 if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC) 343 return R_CLS(TLSLE_LDST64_TPREL_LO12); 344 if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC) 345 return R_CLS(TLSLE_LDST64_TPREL_LO12_NC); 346 if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) { 347 if (!IsILP32) { 348 return ELF::R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC; 349 } else { 350 Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store " 351 "relocation not supported (LP64 eqv: " 352 "TLSIE_LD64_GOTTPREL_LO12_NC)"); 353 return ELF::R_AARCH64_NONE; 354 } 355 } 356 if (SymLoc == AArch64MCExpr::VK_TLSDESC) { 357 if (!IsILP32) { 358 return ELF::R_AARCH64_TLSDESC_LD64_LO12; 359 } else { 360 Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store " 361 "relocation not supported (LP64 eqv: " 362 "TLSDESC_LD64_LO12)"); 363 return ELF::R_AARCH64_NONE; 364 } 365 } 366 Ctx.reportError(Fixup.getLoc(), 367 "invalid fixup for 64-bit load/store instruction"); 368 return ELF::R_AARCH64_NONE; 369 case AArch64::fixup_aarch64_ldst_imm12_scale16: 370 if (SymLoc == AArch64MCExpr::VK_ABS && IsNC) 371 return R_CLS(LDST128_ABS_LO12_NC); 372 if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC) 373 return R_CLS(TLSLD_LDST128_DTPREL_LO12); 374 if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC) 375 return R_CLS(TLSLD_LDST128_DTPREL_LO12_NC); 376 if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC) 377 return R_CLS(TLSLE_LDST128_TPREL_LO12); 378 if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC) 379 return R_CLS(TLSLE_LDST128_TPREL_LO12_NC); 380 381 Ctx.reportError(Fixup.getLoc(), 382 "invalid fixup for 128-bit load/store instruction"); 383 return ELF::R_AARCH64_NONE; 384 // ILP32 case not reached here, tested with isNonILP32reloc 385 case AArch64::fixup_aarch64_movw: 386 if (RefKind == AArch64MCExpr::VK_ABS_G3) 387 return ELF::R_AARCH64_MOVW_UABS_G3; 388 if (RefKind == AArch64MCExpr::VK_ABS_G2) 389 return ELF::R_AARCH64_MOVW_UABS_G2; 390 if (RefKind == AArch64MCExpr::VK_ABS_G2_S) 391 return ELF::R_AARCH64_MOVW_SABS_G2; 392 if (RefKind == AArch64MCExpr::VK_ABS_G2_NC) 393 return ELF::R_AARCH64_MOVW_UABS_G2_NC; 394 if (RefKind == AArch64MCExpr::VK_ABS_G1) 395 return R_CLS(MOVW_UABS_G1); 396 if (RefKind == AArch64MCExpr::VK_ABS_G1_S) 397 return ELF::R_AARCH64_MOVW_SABS_G1; 398 if (RefKind == AArch64MCExpr::VK_ABS_G1_NC) 399 return ELF::R_AARCH64_MOVW_UABS_G1_NC; 400 if (RefKind == AArch64MCExpr::VK_ABS_G0) 401 return R_CLS(MOVW_UABS_G0); 402 if (RefKind == AArch64MCExpr::VK_ABS_G0_S) 403 return R_CLS(MOVW_SABS_G0); 404 if (RefKind == AArch64MCExpr::VK_ABS_G0_NC) 405 return R_CLS(MOVW_UABS_G0_NC); 406 if (RefKind == AArch64MCExpr::VK_PREL_G3) 407 return ELF::R_AARCH64_MOVW_PREL_G3; 408 if (RefKind == AArch64MCExpr::VK_PREL_G2) 409 return ELF::R_AARCH64_MOVW_PREL_G2; 410 if (RefKind == AArch64MCExpr::VK_PREL_G2_NC) 411 return ELF::R_AARCH64_MOVW_PREL_G2_NC; 412 if (RefKind == AArch64MCExpr::VK_PREL_G1) 413 return R_CLS(MOVW_PREL_G1); 414 if (RefKind == AArch64MCExpr::VK_PREL_G1_NC) 415 return ELF::R_AARCH64_MOVW_PREL_G1_NC; 416 if (RefKind == AArch64MCExpr::VK_PREL_G0) 417 return R_CLS(MOVW_PREL_G0); 418 if (RefKind == AArch64MCExpr::VK_PREL_G0_NC) 419 return R_CLS(MOVW_PREL_G0_NC); 420 if (RefKind == AArch64MCExpr::VK_DTPREL_G2) 421 return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G2; 422 if (RefKind == AArch64MCExpr::VK_DTPREL_G1) 423 return R_CLS(TLSLD_MOVW_DTPREL_G1); 424 if (RefKind == AArch64MCExpr::VK_DTPREL_G1_NC) 425 return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G1_NC; 426 if (RefKind == AArch64MCExpr::VK_DTPREL_G0) 427 return R_CLS(TLSLD_MOVW_DTPREL_G0); 428 if (RefKind == AArch64MCExpr::VK_DTPREL_G0_NC) 429 return R_CLS(TLSLD_MOVW_DTPREL_G0_NC); 430 if (RefKind == AArch64MCExpr::VK_TPREL_G2) 431 return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G2; 432 if (RefKind == AArch64MCExpr::VK_TPREL_G1) 433 return R_CLS(TLSLE_MOVW_TPREL_G1); 434 if (RefKind == AArch64MCExpr::VK_TPREL_G1_NC) 435 return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G1_NC; 436 if (RefKind == AArch64MCExpr::VK_TPREL_G0) 437 return R_CLS(TLSLE_MOVW_TPREL_G0); 438 if (RefKind == AArch64MCExpr::VK_TPREL_G0_NC) 439 return R_CLS(TLSLE_MOVW_TPREL_G0_NC); 440 if (RefKind == AArch64MCExpr::VK_GOTTPREL_G1) 441 return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G1; 442 if (RefKind == AArch64MCExpr::VK_GOTTPREL_G0_NC) 443 return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC; 444 Ctx.reportError(Fixup.getLoc(), 445 "invalid fixup for movz/movk instruction"); 446 return ELF::R_AARCH64_NONE; 447 default: 448 Ctx.reportError(Fixup.getLoc(), "Unknown ELF relocation type"); 449 return ELF::R_AARCH64_NONE; 450 } 451 } 452 453 llvm_unreachable("Unimplemented fixup -> relocation"); 454 } 455 456 std::unique_ptr<MCObjectTargetWriter> 457 llvm::createAArch64ELFObjectWriter(uint8_t OSABI, bool IsILP32) { 458 return std::make_unique<AArch64ELFObjectWriter>(OSABI, IsILP32); 459 } 460