// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
 */

#include <stdio.h>
#include <stdlib.h>

#define unlikely(cond) (cond)
#include <asm/insn.h>
#include "../../../arch/x86/lib/inat.c"
#include "../../../arch/x86/lib/insn.c"

#define CONFIG_64BIT 1
#include <asm/nops.h>

#include <asm/orc_types.h>
#include <objtool/check.h>
#include <objtool/elf.h>
#include <objtool/arch.h>
#include <objtool/warn.h>
#include <objtool/endianness.h>
#include <objtool/builtin.h>
#include <arch/elf.h>

int arch_ftrace_match(char *name)
{
	return !strcmp(name, "__fentry__");
}

static int is_x86_64(const struct elf *elf)
{
	switch (elf->ehdr.e_machine) {
	case EM_X86_64:
		return 1;
	case EM_386:
		return 0;
	default:
		ERROR("unexpected ELF machine type %d", elf->ehdr.e_machine);
		return -1;
	}
}

bool arch_callee_saved_reg(unsigned char reg)
{
	switch (reg) {
	case CFI_BP:
	case CFI_BX:
	case CFI_R12:
	case CFI_R13:
	case CFI_R14:
	case CFI_R15:
		return true;

	case CFI_AX:
	case CFI_CX:
	case CFI_DX:
	case CFI_SI:
	case CFI_DI:
	case CFI_SP:
	case CFI_R8:
	case CFI_R9:
	case CFI_R10:
	case CFI_R11:
	case CFI_RA:
	default:
		return false;
	}
}

unsigned long arch_dest_reloc_offset(int addend)
{
	return addend + 4;
}

unsigned long arch_jump_destination(struct instruction *insn)
{
	return insn->offset + insn->len + insn->immediate;
}

bool arch_pc_relative_reloc(struct reloc *reloc)
{
	/*
	 * All relocation types where P (the address of the target)
	 * is included in the computation.
	 */
	switch (reloc_type(reloc)) {
	case R_X86_64_PC8:
	case R_X86_64_PC16:
	case R_X86_64_PC32:
	case R_X86_64_PC64:

	case R_X86_64_PLT32:
	case R_X86_64_GOTPC32:
	case R_X86_64_GOTPCREL:
		return true;

	default:
		break;
	}

	return false;
}

#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (*ops_list = op, ops_list = &op->next; op; op = NULL)
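
/*
 * Usage sketch for ADD_OP() (illustrative note, not from the original
 * sources): the single-iteration for loop links the freshly allocated
 * op onto *ops_list and advances the tail pointer, so the braces that
 * follow the macro behave like a block body with 'op' already appended
 * to insn->stack_ops:
 *
 *	ADD_OP(op) {
 *		op->src.type = OP_SRC_CONST;
 *		op->dest.type = OP_DEST_PUSH;
 *	}
 */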

/*
 * Helpers to decode ModRM/SIB:
 *
 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
 * Mod+----------------+-----+-----+---------+
 *  00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
 *  01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
 *  10 |  [r/m + d32]   |[S+D]|  [r/m + d32]  |
 *  11 |      r/m       |
 */

#define mod_is_mem()	(modrm_mod != 3)
#define mod_is_reg()	(modrm_mod == 3)

#define is_RIP()   ((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
#define have_SIB() ((modrm_rm & 7) == CFI_SP && mod_is_mem())

/*
 * Check the ModRM register. If there is a SIB byte then check with
 * the SIB base register. But if the SIB base is 5 (i.e. CFI_BP) and
 * ModRM mod is 0 then there is no base register.
 */
#define rm_is(reg) (have_SIB() ? \
		    sib_base == (reg) && sib_index == CFI_SP && \
		    (sib_base != CFI_BP || modrm_mod != 0) : \
		    modrm_rm == (reg))

#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))
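
/*
 * Worked example for the helpers above (illustrative only):
 *
 *	48 89 65 08	mov %rsp, 0x8(%rbp)
 *
 * REX.W=1, ModRM=0x65 -> mod=01, reg=100b (CFI_SP), rm=101b (CFI_BP).
 * mod != 3 makes mod_is_mem() true, rm != 100b means there is no SIB
 * byte, and mod != 0 means is_RIP() is false, so rm_is_mem(CFI_BP)
 * matches and the instruction is recorded as a store of %rsp at a
 * disp8 offset from %rbp.
 */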

static bool has_notrack_prefix(struct insn *insn)
{
	int i;

	for (i = 0; i < insn->prefixes.nbytes; i++) {
		if (insn->prefixes.bytes[i] == 0x3e)
			return true;
	}

	return false;
}

int arch_decode_instruction(struct objtool_file *file, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    struct instruction *insn)
{
	struct stack_op **ops_list = &insn->stack_ops;
	const struct elf *elf = file->elf;
	struct insn ins;
	int x86_64, ret;
	unsigned char op1, op2, op3, prefix,
		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;
	u64 imm;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	ret = insn_decode(&ins, sec->data->d_buf + offset, maxlen,
			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
	if (ret < 0) {
		ERROR("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	insn->len = ins.length;
	insn->type = INSN_OTHER;

	if (ins.vex_prefix.nbytes)
		return 0;

	prefix = ins.prefixes.bytes[0];

	op1 = ins.opcode.bytes[0];
	op2 = ins.opcode.bytes[1];
	op3 = ins.opcode.bytes[2];

	if (ins.rex_prefix.nbytes) {
		rex = ins.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	if (ins.modrm.nbytes) {
		modrm = ins.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
		modrm_rm  = X86_MODRM_RM(modrm)  + 8*rex_b;
	}

	if (ins.sib.nbytes) {
		sib = ins.sib.bytes[0];
		/* sib_scale = X86_SIB_SCALE(sib); */
		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
		sib_base  = X86_SIB_BASE(sib)  + 8*rex_b;
	}

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && rm_is_reg(CFI_SP)) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = (op1 & 0x7) + 8*rex_b;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = (op1 & 0x7) + 8*rex_b;
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x80 ... 0x83:
		/*
		 * 1000 00sw : mod OP r/m : immediate
		 *
		 * s - sign extend immediate
		 * w - imm8 / imm32
		 *
		 * OP: 000 ADD    100 AND
		 *     001 OR     101 SUB
		 *     010 ADC    110 XOR
		 *     011 SBB    111 CMP
		 */

		/* 64bit only */
		if (!rex_w)
			break;

		/* %rsp target only */
		if (!rm_is_reg(CFI_SP))
			break;

		imm = ins.immediate.value;
		if (op1 & 2) { /* sign extend */
			if (op1 & 1) { /* imm32 */
				imm <<= 32;
				imm = (s64)imm >> 32;
			} else { /* imm8 */
				imm <<= 56;
				imm = (s64)imm >> 56;
			}
		}

		switch (modrm_reg & 7) {
		case 5:
			imm = -imm;
			fallthrough;
		case 0:
			/* add/sub imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = imm;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		case 4:
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = ins.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		default:
			/* ERROR ? */
			break;
		}

		break;
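
	/*
	 * Example of the opcode group above (illustrative only):
	 *
	 *	48 83 ec 10	sub $0x10, %rsp
	 *
	 * ModRM=0xec -> mod=11, reg=101b (SUB), rm=100b (CFI_SP), so
	 * rm_is_reg(CFI_SP) matches.  The byte immediate 0x10 is negated
	 * and the instruction is recorded as OP_SRC_ADD of -16 to %rsp,
	 * i.e. the same stack_op as "add $-0x10, %rsp".
	 */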

	case 0x89:
		if (!rex_w)
			break;

		if (modrm_reg == CFI_SP) {

			if (mod_is_reg()) {
				/* mov %rsp, reg */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG;
					op->dest.reg = modrm_rm;
				}
				break;

			} else {
				/* skip RIP relative displacement */
				if (is_RIP())
					break;

				/* skip nontrivial SIB */
				if (have_SIB()) {
					modrm_rm = sib_base;
					if (sib_index != CFI_SP)
						break;
				}

				/* mov %rsp, disp(%reg) */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG_INDIRECT;
					op->dest.reg = modrm_rm;
					op->dest.offset = ins.displacement.value;
				}
				break;
			}

			break;
		}

		if (rm_is_reg(CFI_SP)) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		fallthrough;
	case 0x88:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = ins.displacement.value;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = ins.displacement.value;
			}
			break;
		}

		break;

	case 0x8b:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = ins.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = ins.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		break;

	case 0x8d:
		if (mod_is_reg()) {
			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
			break;
		}

		/* skip non 64bit ops */
		if (!rex_w)
			break;

		/* skip nontrivial SIB */
		if (have_SIB()) {
			modrm_rm = sib_base;
			if (sib_index != CFI_SP)
				break;
		}

		/* lea disp(%rip), %dst */
		if (is_RIP()) {
			insn->type = INSN_LEA_RIP;
			break;
		}

		/* lea disp(%src), %dst */
		ADD_OP(op) {
			op->src.offset = ins.displacement.value;
			if (!op->src.offset) {
				/* lea (%src), %dst */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%src), %dst */
				op->src.type = OP_SRC_ADD;
			}
			op->src.reg = modrm_rm;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = modrm_reg;
		}
		break;
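
	/*
	 * Example for the LEA handling above (illustrative only):
	 *
	 *	48 8d 6c 24 10	lea 0x10(%rsp), %rbp
	 *
	 * ModRM=0x6c -> mod=01, reg=101b (CFI_BP), rm=100b, so a SIB byte
	 * follows: 0x24 -> index=100b (none), base=100b (CFI_SP).  With
	 * sib_index == CFI_SP the SIB is trivial, modrm_rm is rewritten to
	 * the SIB base and the op is recorded as OP_SRC_ADD of 0x10 to
	 * %rsp with %rbp as the destination register.
	 */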

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		insn->type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:

		if (op2 == 0x01) {

			switch (insn_last_prefix_id(&ins)) {
			case INAT_PFX_REPE:
			case INAT_PFX_REPNE:
				if (modrm == 0xca)
					/* eretu/erets */
					insn->type = INSN_SYSRET;
				break;
			default:
				if (modrm == 0xca)
					insn->type = INSN_CLAC;
				else if (modrm == 0xcb)
					insn->type = INSN_STAC;
				break;
			}
		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			insn->type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x34) {

			/* syscall, sysenter */
			insn->type = INSN_SYSCALL;

		} else if (op2 == 0x07 || op2 == 0x35) {

			/* sysret, sysexit */
			insn->type = INSN_SYSRET;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			insn->type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			insn->type = INSN_NOP;

		} else if (op2 == 0x1e) {

			if (prefix == 0xf3 && (modrm == 0xfa || modrm == 0xfb))
				insn->type = INSN_ENDBR;

		} else if (op2 == 0x38 && op3 == 0xf8) {
			if (ins.prefixes.nbytes == 1 &&
			    ins.prefixes.bytes[0] == 0xf2) {
				/* ENQCMD cannot be used in the kernel. */
				WARN("ENQCMD instruction at %s:%lx", sec->name, offset);
			}

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;
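
	/*
	 * For reference (illustrative note, not from the original sources):
	 * the op2 == 0x1e check above matches f3 0f 1e fa (endbr64) and
	 * f3 0f 1e fb (endbr32), i.e. the 0xf3 prefix with ModRM 0xfa/0xfb.
	 */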

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 *   mov bp, sp
		 *   pop bp
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_BP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_BP;
		}
		break;

	case 0xcc:
		/* int3 */
		insn->type = INSN_TRAP;
		break;

	case 0xe3:
		/* jecxz/jrcxz */
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		insn->type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		insn->type = INSN_RETURN;
		break;

	case 0xc7: /* mov imm, r/m */
		if (!opts.noinstr)
			break;

		if (ins.length == 3+4+4 && !strncmp(sec->name, ".init.text", 10)) {
			struct reloc *immr, *disp;
			struct symbol *func;
			int idx;

			immr = find_reloc_by_dest(elf, (void *)sec, offset+3);
			disp = find_reloc_by_dest(elf, (void *)sec, offset+7);

			if (!immr || strcmp(immr->sym->name, "pv_ops"))
				break;

			idx = (reloc_addend(immr) + 8) / sizeof(void *);

			func = disp->sym;
			if (disp->sym->type == STT_SECTION)
				func = find_symbol_by_offset(disp->sym->sec, reloc_addend(disp));
			if (!func) {
				ERROR("no func for pv_ops[]");
				return -1;
			}

			objtool_pv_add(file, idx, func);
		}

		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		fallthrough;

	case 0xca: /* retf */
	case 0xcb: /* retf */
		insn->type = INSN_SYSRET;
		break;

	case 0xe0: /* loopne */
	case 0xe1: /* loope */
	case 0xe2: /* loop */
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe8:
		insn->type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0xfc:
		insn->type = INSN_CLD;
		break;

	case 0xfd:
		insn->type = INSN_STD;
		break;

	case 0xff:
		if (modrm_reg == 2 || modrm_reg == 3) {

			insn->type = INSN_CALL_DYNAMIC;
			if (has_notrack_prefix(&ins))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 4) {

			insn->type = INSN_JUMP_DYNAMIC;
			if (has_notrack_prefix(&ins))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 5) {

			/* jmpf */
			insn->type = INSN_SYSRET;

		} else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

	if (ins.immediate.nbytes)
		insn->immediate = ins.immediate.value;
	else if (ins.displacement.nbytes)
		insn->immediate = ins.displacement.value;

	return 0;
}

void arch_initial_func_cfi_state(struct cfi_init_state *state)
{
	int i;

	for (i = 0; i < CFI_NUM_REGS; i++) {
		state->regs[i].base = CFI_UNDEFINED;
		state->regs[i].offset = 0;
	}

	/* initial CFA (call frame address) */
	state->cfa.base = CFI_SP;
	state->cfa.offset = 8;

	/* initial RA (return address) */
	state->regs[CFI_RA].base = CFI_CFA;
	state->regs[CFI_RA].offset = -8;
}

const char *arch_nop_insn(int len)
{
	static const char nops[5][5] = {
		{ BYTES_NOP1 },
		{ BYTES_NOP2 },
		{ BYTES_NOP3 },
		{ BYTES_NOP4 },
		{ BYTES_NOP5 },
	};

	if (len < 1 || len > 5) {
		ERROR("invalid NOP size: %d\n", len);
		return NULL;
	}

	return nops[len-1];
}

#define BYTE_RET	0xC3

const char *arch_ret_insn(int len)
{
	static const char ret[5][5] = {
		{ BYTE_RET },
		{ BYTE_RET, 0xcc },
		{ BYTE_RET, 0xcc, BYTES_NOP1 },
		{ BYTE_RET, 0xcc, BYTES_NOP2 },
		{ BYTE_RET, 0xcc, BYTES_NOP3 },
	};

	if (len < 1 || len > 5) {
		ERROR("invalid RET size: %d\n", len);
		return NULL;
	}

	return ret[len-1];
}

int arch_decode_hint_reg(u8 sp_reg, int *base)
{
	switch (sp_reg) {
	case ORC_REG_UNDEFINED:
		*base = CFI_UNDEFINED;
		break;
	case ORC_REG_SP:
		*base = CFI_SP;
		break;
	case ORC_REG_BP:
		*base = CFI_BP;
		break;
	case ORC_REG_SP_INDIRECT:
		*base = CFI_SP_INDIRECT;
		break;
	case ORC_REG_R10:
		*base = CFI_R10;
		break;
	case ORC_REG_R13:
		*base = CFI_R13;
		break;
	case ORC_REG_DI:
		*base = CFI_DI;
		break;
	case ORC_REG_DX:
		*base = CFI_DX;
		break;
	default:
		return -1;
	}

	return 0;
}

bool arch_is_retpoline(struct symbol *sym)
{
	return !strncmp(sym->name, "__x86_indirect_", 15);
}

bool arch_is_rethunk(struct symbol *sym)
{
	return !strcmp(sym->name, "__x86_return_thunk");
}

bool arch_is_embedded_insn(struct symbol *sym)
{
	return !strcmp(sym->name, "retbleed_return_thunk") ||
	       !strcmp(sym->name, "srso_alias_safe_ret") ||
	       !strcmp(sym->name, "srso_safe_ret");
}

unsigned int arch_reloc_size(struct reloc *reloc)
{
	switch (reloc_type(reloc)) {
	case R_X86_64_32:
	case R_X86_64_32S:
	case R_X86_64_PC32:
	case R_X86_64_PLT32:
		return 4;
	default:
		return 8;
	}
}