1 // SPDX-License-Identifier: GPL-2.0-or-later 2 /* 3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com> 4 */ 5 6 #include <stdio.h> 7 #include <stdlib.h> 8 9 #define unlikely(cond) (cond) 10 #include <asm/insn.h> 11 #include "../../../arch/x86/lib/inat.c" 12 #include "../../../arch/x86/lib/insn.c" 13 14 #define CONFIG_64BIT 1 15 #include <asm/nops.h> 16 17 #include <asm/orc_types.h> 18 #include <objtool/check.h> 19 #include <objtool/elf.h> 20 #include <objtool/arch.h> 21 #include <objtool/warn.h> 22 #include <objtool/builtin.h> 23 #include <arch/elf.h> 24 25 int arch_ftrace_match(const char *name) 26 { 27 return !strcmp(name, "__fentry__"); 28 } 29 30 static int is_x86_64(const struct elf *elf) 31 { 32 switch (elf->ehdr.e_machine) { 33 case EM_X86_64: 34 return 1; 35 case EM_386: 36 return 0; 37 default: 38 ERROR("unexpected ELF machine type %d", elf->ehdr.e_machine); 39 return -1; 40 } 41 } 42 43 bool arch_callee_saved_reg(unsigned char reg) 44 { 45 switch (reg) { 46 case CFI_BP: 47 case CFI_BX: 48 case CFI_R12: 49 case CFI_R13: 50 case CFI_R14: 51 case CFI_R15: 52 return true; 53 54 case CFI_AX: 55 case CFI_CX: 56 case CFI_DX: 57 case CFI_SI: 58 case CFI_DI: 59 case CFI_SP: 60 case CFI_R8: 61 case CFI_R9: 62 case CFI_R10: 63 case CFI_R11: 64 case CFI_RA: 65 default: 66 return false; 67 } 68 } 69 70 /* Undo the effects of __pa_symbol() if necessary */ 71 static unsigned long phys_to_virt(unsigned long pa) 72 { 73 s64 va = pa; 74 75 if (va > 0) 76 va &= ~(0x80000000); 77 78 return va; 79 } 80 81 s64 arch_insn_adjusted_addend(struct instruction *insn, struct reloc *reloc) 82 { 83 s64 addend = reloc_addend(reloc); 84 85 if (arch_pc_relative_reloc(reloc)) 86 addend += insn->offset + insn->len - reloc_offset(reloc); 87 88 return phys_to_virt(addend); 89 } 90 91 static void scan_for_insn(struct section *sec, unsigned long offset, 92 unsigned long *insn_off, unsigned int *insn_len) 93 { 94 unsigned long o = 0; 95 struct insn insn; 96 97 while (1) { 98 99 insn_decode(&insn, sec->data->d_buf + o, sec_size(sec) - o, 100 INSN_MODE_64); 101 102 if (o + insn.length > offset) { 103 *insn_off = o; 104 *insn_len = insn.length; 105 return; 106 } 107 108 o += insn.length; 109 } 110 } 111 112 u64 arch_adjusted_addend(struct reloc *reloc) 113 { 114 unsigned int type = reloc_type(reloc); 115 s64 addend = reloc_addend(reloc); 116 unsigned long insn_off; 117 unsigned int insn_len; 118 119 if (type == R_X86_64_PLT32) 120 return addend + 4; 121 122 if (type != R_X86_64_PC32 || !is_text_sec(reloc->sec->base)) 123 return addend; 124 125 scan_for_insn(reloc->sec->base, reloc_offset(reloc), 126 &insn_off, &insn_len); 127 128 return addend + insn_off + insn_len - reloc_offset(reloc); 129 } 130 131 unsigned long arch_jump_destination(struct instruction *insn) 132 { 133 return insn->offset + insn->len + insn->immediate; 134 } 135 136 bool arch_pc_relative_reloc(struct reloc *reloc) 137 { 138 /* 139 * All relocation types where P (the address of the target) 140 * is included in the computation. 141 */ 142 switch (reloc_type(reloc)) { 143 case R_X86_64_PC8: 144 case R_X86_64_PC16: 145 case R_X86_64_PC32: 146 case R_X86_64_PC64: 147 148 case R_X86_64_PLT32: 149 case R_X86_64_GOTPC32: 150 case R_X86_64_GOTPCREL: 151 return true; 152 153 default: 154 break; 155 } 156 157 return false; 158 } 159 160 #define ADD_OP(op) \ 161 if (!(op = calloc(1, sizeof(*op)))) \ 162 return -1; \ 163 else for (*ops_list = op, ops_list = &op->next; op; op = NULL) 164 165 /* 166 * Helpers to decode ModRM/SIB: 167 * 168 * r/m| AX CX DX BX | SP | BP | SI DI | 169 * | R8 R9 R10 R11 | R12 | R13 | R14 R15 | 170 * Mod+----------------+-----+-----+---------+ 171 * 00 | [r/m] |[SIB]|[IP+]| [r/m] | 172 * 01 | [r/m + d8] |[S+d]| [r/m + d8] | 173 * 10 | [r/m + d32] |[S+D]| [r/m + d32] | 174 * 11 | r/ m | 175 */ 176 177 #define mod_is_mem() (modrm_mod != 3) 178 #define mod_is_reg() (modrm_mod == 3) 179 180 #define is_RIP() ((modrm_rm & 7) == CFI_BP && modrm_mod == 0) 181 #define have_SIB() ((modrm_rm & 7) == CFI_SP && mod_is_mem()) 182 183 /* 184 * Check the ModRM register. If there is a SIB byte then check with 185 * the SIB base register. But if the SIB base is 5 (i.e. CFI_BP) and 186 * ModRM mod is 0 then there is no base register. 187 */ 188 #define rm_is(reg) (have_SIB() ? \ 189 sib_base == (reg) && sib_index == CFI_SP && \ 190 (sib_base != CFI_BP || modrm_mod != 0) : \ 191 modrm_rm == (reg)) 192 193 #define rm_is_mem(reg) (mod_is_mem() && !is_RIP() && rm_is(reg)) 194 #define rm_is_reg(reg) (mod_is_reg() && modrm_rm == (reg)) 195 196 static bool has_notrack_prefix(struct insn *insn) 197 { 198 int i; 199 200 for (i = 0; i < insn->prefixes.nbytes; i++) { 201 if (insn->prefixes.bytes[i] == 0x3e) 202 return true; 203 } 204 205 return false; 206 } 207 208 int arch_decode_instruction(struct objtool_file *file, const struct section *sec, 209 unsigned long offset, unsigned int maxlen, 210 struct instruction *insn) 211 { 212 struct stack_op **ops_list = &insn->stack_ops; 213 const struct elf *elf = file->elf; 214 struct insn ins; 215 int x86_64, ret; 216 unsigned char op1, op2, op3, prefix, 217 rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0, 218 modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0, 219 sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0; 220 struct stack_op *op = NULL; 221 struct symbol *sym; 222 u64 imm; 223 224 x86_64 = is_x86_64(elf); 225 if (x86_64 == -1) 226 return -1; 227 228 ret = insn_decode(&ins, sec->data->d_buf + offset, maxlen, 229 x86_64 ? INSN_MODE_64 : INSN_MODE_32); 230 if (ret < 0) { 231 ERROR("can't decode instruction at %s:0x%lx", sec->name, offset); 232 return -1; 233 } 234 235 insn->len = ins.length; 236 insn->type = INSN_OTHER; 237 238 if (ins.vex_prefix.nbytes) 239 return 0; 240 241 prefix = ins.prefixes.bytes[0]; 242 243 op1 = ins.opcode.bytes[0]; 244 op2 = ins.opcode.bytes[1]; 245 op3 = ins.opcode.bytes[2]; 246 247 if (ins.rex_prefix.nbytes) { 248 rex = ins.rex_prefix.bytes[0]; 249 rex_w = X86_REX_W(rex) >> 3; 250 rex_r = X86_REX_R(rex) >> 2; 251 rex_x = X86_REX_X(rex) >> 1; 252 rex_b = X86_REX_B(rex); 253 } 254 255 if (ins.modrm.nbytes) { 256 modrm = ins.modrm.bytes[0]; 257 modrm_mod = X86_MODRM_MOD(modrm); 258 modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r; 259 modrm_rm = X86_MODRM_RM(modrm) + 8*rex_b; 260 } 261 262 if (ins.sib.nbytes) { 263 sib = ins.sib.bytes[0]; 264 /* sib_scale = X86_SIB_SCALE(sib); */ 265 sib_index = X86_SIB_INDEX(sib) + 8*rex_x; 266 sib_base = X86_SIB_BASE(sib) + 8*rex_b; 267 } 268 269 switch (op1) { 270 271 case 0x1: 272 case 0x29: 273 if (rex_w && rm_is_reg(CFI_SP)) { 274 275 /* add/sub reg, %rsp */ 276 ADD_OP(op) { 277 op->src.type = OP_SRC_ADD; 278 op->src.reg = modrm_reg; 279 op->dest.type = OP_DEST_REG; 280 op->dest.reg = CFI_SP; 281 } 282 } 283 break; 284 285 case 0x50 ... 0x57: 286 287 /* push reg */ 288 ADD_OP(op) { 289 op->src.type = OP_SRC_REG; 290 op->src.reg = (op1 & 0x7) + 8*rex_b; 291 op->dest.type = OP_DEST_PUSH; 292 } 293 294 break; 295 296 case 0x58 ... 0x5f: 297 298 /* pop reg */ 299 ADD_OP(op) { 300 op->src.type = OP_SRC_POP; 301 op->dest.type = OP_DEST_REG; 302 op->dest.reg = (op1 & 0x7) + 8*rex_b; 303 } 304 305 break; 306 307 case 0x68: 308 case 0x6a: 309 /* push immediate */ 310 ADD_OP(op) { 311 op->src.type = OP_SRC_CONST; 312 op->dest.type = OP_DEST_PUSH; 313 } 314 break; 315 316 case 0x70 ... 0x7f: 317 insn->type = INSN_JUMP_CONDITIONAL; 318 break; 319 320 case 0x80 ... 0x83: 321 /* 322 * 1000 00sw : mod OP r/m : immediate 323 * 324 * s - sign extend immediate 325 * w - imm8 / imm32 326 * 327 * OP: 000 ADD 100 AND 328 * 001 OR 101 SUB 329 * 010 ADC 110 XOR 330 * 011 SBB 111 CMP 331 */ 332 333 /* 64bit only */ 334 if (!rex_w) 335 break; 336 337 /* %rsp target only */ 338 if (!rm_is_reg(CFI_SP)) 339 break; 340 341 imm = ins.immediate.value; 342 if (op1 & 2) { /* sign extend */ 343 if (op1 & 1) { /* imm32 */ 344 imm <<= 32; 345 imm = (s64)imm >> 32; 346 } else { /* imm8 */ 347 imm <<= 56; 348 imm = (s64)imm >> 56; 349 } 350 } 351 352 switch (modrm_reg & 7) { 353 case 5: 354 imm = -imm; 355 fallthrough; 356 case 0: 357 /* add/sub imm, %rsp */ 358 ADD_OP(op) { 359 op->src.type = OP_SRC_ADD; 360 op->src.reg = CFI_SP; 361 op->src.offset = imm; 362 op->dest.type = OP_DEST_REG; 363 op->dest.reg = CFI_SP; 364 } 365 break; 366 367 case 4: 368 /* and imm, %rsp */ 369 ADD_OP(op) { 370 op->src.type = OP_SRC_AND; 371 op->src.reg = CFI_SP; 372 op->src.offset = ins.immediate.value; 373 op->dest.type = OP_DEST_REG; 374 op->dest.reg = CFI_SP; 375 } 376 break; 377 378 default: 379 /* ERROR ? */ 380 break; 381 } 382 383 break; 384 385 case 0x89: 386 if (!rex_w) 387 break; 388 389 if (modrm_reg == CFI_SP) { 390 391 if (mod_is_reg()) { 392 /* mov %rsp, reg */ 393 ADD_OP(op) { 394 op->src.type = OP_SRC_REG; 395 op->src.reg = CFI_SP; 396 op->dest.type = OP_DEST_REG; 397 op->dest.reg = modrm_rm; 398 } 399 break; 400 401 } else { 402 /* skip RIP relative displacement */ 403 if (is_RIP()) 404 break; 405 406 /* skip nontrivial SIB */ 407 if (have_SIB()) { 408 modrm_rm = sib_base; 409 if (sib_index != CFI_SP) 410 break; 411 } 412 413 /* mov %rsp, disp(%reg) */ 414 ADD_OP(op) { 415 op->src.type = OP_SRC_REG; 416 op->src.reg = CFI_SP; 417 op->dest.type = OP_DEST_REG_INDIRECT; 418 op->dest.reg = modrm_rm; 419 op->dest.offset = ins.displacement.value; 420 } 421 break; 422 } 423 424 break; 425 } 426 427 if (rm_is_reg(CFI_SP)) { 428 429 /* mov reg, %rsp */ 430 ADD_OP(op) { 431 op->src.type = OP_SRC_REG; 432 op->src.reg = modrm_reg; 433 op->dest.type = OP_DEST_REG; 434 op->dest.reg = CFI_SP; 435 } 436 break; 437 } 438 439 fallthrough; 440 case 0x88: 441 if (!rex_w) 442 break; 443 444 if (rm_is_mem(CFI_BP)) { 445 446 /* mov reg, disp(%rbp) */ 447 ADD_OP(op) { 448 op->src.type = OP_SRC_REG; 449 op->src.reg = modrm_reg; 450 op->dest.type = OP_DEST_REG_INDIRECT; 451 op->dest.reg = CFI_BP; 452 op->dest.offset = ins.displacement.value; 453 } 454 break; 455 } 456 457 if (rm_is_mem(CFI_SP)) { 458 459 /* mov reg, disp(%rsp) */ 460 ADD_OP(op) { 461 op->src.type = OP_SRC_REG; 462 op->src.reg = modrm_reg; 463 op->dest.type = OP_DEST_REG_INDIRECT; 464 op->dest.reg = CFI_SP; 465 op->dest.offset = ins.displacement.value; 466 } 467 break; 468 } 469 470 break; 471 472 case 0x8b: 473 if (!rex_w) 474 break; 475 476 if (rm_is_mem(CFI_BP)) { 477 478 /* mov disp(%rbp), reg */ 479 ADD_OP(op) { 480 op->src.type = OP_SRC_REG_INDIRECT; 481 op->src.reg = CFI_BP; 482 op->src.offset = ins.displacement.value; 483 op->dest.type = OP_DEST_REG; 484 op->dest.reg = modrm_reg; 485 } 486 break; 487 } 488 489 if (rm_is_mem(CFI_SP)) { 490 491 /* mov disp(%rsp), reg */ 492 ADD_OP(op) { 493 op->src.type = OP_SRC_REG_INDIRECT; 494 op->src.reg = CFI_SP; 495 op->src.offset = ins.displacement.value; 496 op->dest.type = OP_DEST_REG; 497 op->dest.reg = modrm_reg; 498 } 499 break; 500 } 501 502 break; 503 504 case 0x8d: 505 if (mod_is_reg()) { 506 WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset); 507 break; 508 } 509 510 /* skip non 64bit ops */ 511 if (!rex_w) 512 break; 513 514 /* skip nontrivial SIB */ 515 if (have_SIB()) { 516 modrm_rm = sib_base; 517 if (sib_index != CFI_SP) 518 break; 519 } 520 521 /* lea disp(%rip), %dst */ 522 if (is_RIP()) { 523 insn->type = INSN_LEA_RIP; 524 break; 525 } 526 527 /* lea disp(%src), %dst */ 528 ADD_OP(op) { 529 op->src.offset = ins.displacement.value; 530 if (!op->src.offset) { 531 /* lea (%src), %dst */ 532 op->src.type = OP_SRC_REG; 533 } else { 534 /* lea disp(%src), %dst */ 535 op->src.type = OP_SRC_ADD; 536 } 537 op->src.reg = modrm_rm; 538 op->dest.type = OP_DEST_REG; 539 op->dest.reg = modrm_reg; 540 } 541 break; 542 543 case 0x8f: 544 /* pop to mem */ 545 ADD_OP(op) { 546 op->src.type = OP_SRC_POP; 547 op->dest.type = OP_DEST_MEM; 548 } 549 break; 550 551 case 0x90: 552 if (rex_b) /* XCHG %r8, %rax */ 553 break; 554 555 if (prefix == 0xf3) /* REP NOP := PAUSE */ 556 break; 557 558 insn->type = INSN_NOP; 559 break; 560 561 case 0x9c: 562 /* pushf */ 563 ADD_OP(op) { 564 op->src.type = OP_SRC_CONST; 565 op->dest.type = OP_DEST_PUSHF; 566 } 567 break; 568 569 case 0x9d: 570 /* popf */ 571 ADD_OP(op) { 572 op->src.type = OP_SRC_POPF; 573 op->dest.type = OP_DEST_MEM; 574 } 575 break; 576 577 case 0x0f: 578 579 if (op2 == 0x01) { 580 581 switch (insn_last_prefix_id(&ins)) { 582 case INAT_PFX_REPE: 583 case INAT_PFX_REPNE: 584 if (modrm == 0xca) 585 /* eretu/erets */ 586 insn->type = INSN_SYSRET; 587 break; 588 default: 589 if (modrm == 0xca) 590 insn->type = INSN_CLAC; 591 else if (modrm == 0xcb) 592 insn->type = INSN_STAC; 593 break; 594 } 595 } else if (op2 >= 0x80 && op2 <= 0x8f) { 596 597 insn->type = INSN_JUMP_CONDITIONAL; 598 599 } else if (op2 == 0x05 || op2 == 0x34) { 600 601 /* syscall, sysenter */ 602 insn->type = INSN_SYSCALL; 603 604 } else if (op2 == 0x07 || op2 == 0x35) { 605 606 /* sysret, sysexit */ 607 insn->type = INSN_SYSRET; 608 609 } else if (op2 == 0x0b || op2 == 0xb9) { 610 611 /* ud2, ud1 */ 612 insn->type = INSN_BUG; 613 614 } else if (op2 == 0x1f) { 615 616 /* 0f 1f /0 := NOPL */ 617 if (modrm_reg == 0) 618 insn->type = INSN_NOP; 619 620 } else if (op2 == 0x1e) { 621 622 if (prefix == 0xf3 && (modrm == 0xfa || modrm == 0xfb)) 623 insn->type = INSN_ENDBR; 624 625 626 } else if (op2 == 0x38 && op3 == 0xf8) { 627 if (ins.prefixes.nbytes == 1 && 628 ins.prefixes.bytes[0] == 0xf2) { 629 /* ENQCMD cannot be used in the kernel. */ 630 WARN("ENQCMD instruction at %s:%lx", sec->name, offset); 631 } 632 633 } else if (op2 == 0xa0 || op2 == 0xa8) { 634 635 /* push fs/gs */ 636 ADD_OP(op) { 637 op->src.type = OP_SRC_CONST; 638 op->dest.type = OP_DEST_PUSH; 639 } 640 641 } else if (op2 == 0xa1 || op2 == 0xa9) { 642 643 /* pop fs/gs */ 644 ADD_OP(op) { 645 op->src.type = OP_SRC_POP; 646 op->dest.type = OP_DEST_MEM; 647 } 648 } 649 650 break; 651 652 case 0xc9: 653 /* 654 * leave 655 * 656 * equivalent to: 657 * mov bp, sp 658 * pop bp 659 */ 660 ADD_OP(op) { 661 op->src.type = OP_SRC_REG; 662 op->src.reg = CFI_BP; 663 op->dest.type = OP_DEST_REG; 664 op->dest.reg = CFI_SP; 665 } 666 ADD_OP(op) { 667 op->src.type = OP_SRC_POP; 668 op->dest.type = OP_DEST_REG; 669 op->dest.reg = CFI_BP; 670 } 671 break; 672 673 case 0xcc: 674 /* int3 */ 675 insn->type = INSN_TRAP; 676 break; 677 678 case 0xe3: 679 /* jecxz/jrcxz */ 680 insn->type = INSN_JUMP_CONDITIONAL; 681 break; 682 683 case 0xe9: 684 case 0xeb: 685 insn->type = INSN_JUMP_UNCONDITIONAL; 686 break; 687 688 case 0xc2: 689 case 0xc3: 690 insn->type = INSN_RETURN; 691 break; 692 693 case 0xc7: /* mov imm, r/m */ 694 if (!opts.noinstr) 695 break; 696 697 if (ins.length == 3+4+4 && !strncmp(sec->name, ".init.text", 10)) { 698 struct reloc *immr, *disp; 699 struct symbol *func; 700 int idx; 701 702 immr = find_reloc_by_dest(elf, (void *)sec, offset+3); 703 disp = find_reloc_by_dest(elf, (void *)sec, offset+7); 704 705 if (!immr || strcmp(immr->sym->name, "pv_ops")) 706 break; 707 708 idx = (reloc_addend(immr) + 8) / sizeof(void *); 709 710 func = disp->sym; 711 if (disp->sym->type == STT_SECTION) 712 func = find_symbol_by_offset(disp->sym->sec, reloc_addend(disp)); 713 if (!func) { 714 ERROR("no func for pv_ops[]"); 715 return -1; 716 } 717 718 objtool_pv_add(file, idx, func); 719 } 720 721 break; 722 723 case 0xcf: /* iret */ 724 /* 725 * Handle sync_core(), which has an IRET to self. 726 * All other IRET are in STT_NONE entry code. 727 */ 728 sym = find_symbol_containing(sec, offset); 729 if (sym && sym->type == STT_FUNC) { 730 ADD_OP(op) { 731 /* add $40, %rsp */ 732 op->src.type = OP_SRC_ADD; 733 op->src.reg = CFI_SP; 734 op->src.offset = 5*8; 735 op->dest.type = OP_DEST_REG; 736 op->dest.reg = CFI_SP; 737 } 738 break; 739 } 740 741 fallthrough; 742 743 case 0xca: /* retf */ 744 case 0xcb: /* retf */ 745 insn->type = INSN_SYSRET; 746 break; 747 748 case 0xd6: /* udb */ 749 insn->type = INSN_BUG; 750 break; 751 752 case 0xe0: /* loopne */ 753 case 0xe1: /* loope */ 754 case 0xe2: /* loop */ 755 insn->type = INSN_JUMP_CONDITIONAL; 756 break; 757 758 case 0xe8: 759 insn->type = INSN_CALL; 760 /* 761 * For the impact on the stack, a CALL behaves like 762 * a PUSH of an immediate value (the return address). 763 */ 764 ADD_OP(op) { 765 op->src.type = OP_SRC_CONST; 766 op->dest.type = OP_DEST_PUSH; 767 } 768 break; 769 770 case 0xfc: 771 insn->type = INSN_CLD; 772 break; 773 774 case 0xfd: 775 insn->type = INSN_STD; 776 break; 777 778 case 0xff: 779 if (modrm_reg == 2 || modrm_reg == 3) { 780 781 insn->type = INSN_CALL_DYNAMIC; 782 if (has_notrack_prefix(&ins)) 783 WARN("notrack prefix found at %s:0x%lx", sec->name, offset); 784 785 } else if (modrm_reg == 4) { 786 787 insn->type = INSN_JUMP_DYNAMIC; 788 if (has_notrack_prefix(&ins)) 789 WARN("notrack prefix found at %s:0x%lx", sec->name, offset); 790 791 } else if (modrm_reg == 5) { 792 793 /* jmpf */ 794 insn->type = INSN_SYSRET; 795 796 } else if (modrm_reg == 6) { 797 798 /* push from mem */ 799 ADD_OP(op) { 800 op->src.type = OP_SRC_CONST; 801 op->dest.type = OP_DEST_PUSH; 802 } 803 } 804 805 break; 806 807 default: 808 break; 809 } 810 811 if (ins.immediate.nbytes) 812 insn->immediate = ins.immediate.value; 813 else if (ins.displacement.nbytes) 814 insn->immediate = ins.displacement.value; 815 816 return 0; 817 } 818 819 void arch_initial_func_cfi_state(struct cfi_init_state *state) 820 { 821 int i; 822 823 for (i = 0; i < CFI_NUM_REGS; i++) { 824 state->regs[i].base = CFI_UNDEFINED; 825 state->regs[i].offset = 0; 826 } 827 828 /* initial CFA (call frame address) */ 829 state->cfa.base = CFI_SP; 830 state->cfa.offset = 8; 831 832 /* initial RA (return address) */ 833 state->regs[CFI_RA].base = CFI_CFA; 834 state->regs[CFI_RA].offset = -8; 835 } 836 837 const char *arch_nop_insn(int len) 838 { 839 static const char nops[5][5] = { 840 { BYTES_NOP1 }, 841 { BYTES_NOP2 }, 842 { BYTES_NOP3 }, 843 { BYTES_NOP4 }, 844 { BYTES_NOP5 }, 845 }; 846 847 if (len < 1 || len > 5) { 848 ERROR("invalid NOP size: %d\n", len); 849 return NULL; 850 } 851 852 return nops[len-1]; 853 } 854 855 #define BYTE_RET 0xC3 856 857 const char *arch_ret_insn(int len) 858 { 859 static const char ret[5][5] = { 860 { BYTE_RET }, 861 { BYTE_RET, 0xcc }, 862 { BYTE_RET, 0xcc, BYTES_NOP1 }, 863 { BYTE_RET, 0xcc, BYTES_NOP2 }, 864 { BYTE_RET, 0xcc, BYTES_NOP3 }, 865 }; 866 867 if (len < 1 || len > 5) { 868 ERROR("invalid RET size: %d\n", len); 869 return NULL; 870 } 871 872 return ret[len-1]; 873 } 874 875 int arch_decode_hint_reg(u8 sp_reg, int *base) 876 { 877 switch (sp_reg) { 878 case ORC_REG_UNDEFINED: 879 *base = CFI_UNDEFINED; 880 break; 881 case ORC_REG_SP: 882 *base = CFI_SP; 883 break; 884 case ORC_REG_BP: 885 *base = CFI_BP; 886 break; 887 case ORC_REG_SP_INDIRECT: 888 *base = CFI_SP_INDIRECT; 889 break; 890 case ORC_REG_R10: 891 *base = CFI_R10; 892 break; 893 case ORC_REG_R13: 894 *base = CFI_R13; 895 break; 896 case ORC_REG_DI: 897 *base = CFI_DI; 898 break; 899 case ORC_REG_DX: 900 *base = CFI_DX; 901 break; 902 default: 903 return -1; 904 } 905 906 return 0; 907 } 908 909 bool arch_is_retpoline(struct symbol *sym) 910 { 911 return !strncmp(sym->name, "__x86_indirect_", 15) || 912 !strncmp(sym->name, "__pi___x86_indirect_", 20); 913 } 914 915 bool arch_is_rethunk(struct symbol *sym) 916 { 917 return !strcmp(sym->name, "__x86_return_thunk") || 918 !strcmp(sym->name, "__pi___x86_return_thunk"); 919 } 920 921 bool arch_is_embedded_insn(struct symbol *sym) 922 { 923 return !strcmp(sym->name, "retbleed_return_thunk") || 924 !strcmp(sym->name, "srso_alias_safe_ret") || 925 !strcmp(sym->name, "srso_safe_ret"); 926 } 927 928 unsigned int arch_reloc_size(struct reloc *reloc) 929 { 930 switch (reloc_type(reloc)) { 931 case R_X86_64_32: 932 case R_X86_64_32S: 933 case R_X86_64_PC32: 934 case R_X86_64_PLT32: 935 return 4; 936 default: 937 return 8; 938 } 939 } 940 941 bool arch_absolute_reloc(struct elf *elf, struct reloc *reloc) 942 { 943 switch (reloc_type(reloc)) { 944 case R_X86_64_32: 945 case R_X86_64_32S: 946 case R_X86_64_64: 947 return true; 948 default: 949 return false; 950 } 951 } 952