// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
 */

#include <stdio.h>
#include <stdlib.h>

#define unlikely(cond) (cond)
#include <asm/insn.h>
#include "../../../arch/x86/lib/inat.c"
#include "../../../arch/x86/lib/insn.c"

#define CONFIG_64BIT 1
#include <asm/nops.h>

#include <asm/orc_types.h>
#include <objtool/check.h>
#include <objtool/elf.h>
#include <objtool/arch.h>
#include <objtool/warn.h>
#include <objtool/endianness.h>
#include <objtool/builtin.h>
#include <arch/elf.h>

int arch_ftrace_match(char *name)
{
	return !strcmp(name, "__fentry__");
}

static int is_x86_64(const struct elf *elf)
{
	switch (elf->ehdr.e_machine) {
	case EM_X86_64:
		return 1;
	case EM_386:
		return 0;
	default:
		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
		return -1;
	}
}

bool arch_callee_saved_reg(unsigned char reg)
{
	switch (reg) {
	case CFI_BP:
	case CFI_BX:
	case CFI_R12:
	case CFI_R13:
	case CFI_R14:
	case CFI_R15:
		return true;

	case CFI_AX:
	case CFI_CX:
	case CFI_DX:
	case CFI_SI:
	case CFI_DI:
	case CFI_SP:
	case CFI_R8:
	case CFI_R9:
	case CFI_R10:
	case CFI_R11:
	case CFI_RA:
	default:
		return false;
	}
}

unsigned long arch_dest_reloc_offset(int addend)
{
	return addend + 4;
}

unsigned long arch_jump_destination(struct instruction *insn)
{
	return insn->offset + insn->len + insn->immediate;
}

bool arch_pc_relative_reloc(struct reloc *reloc)
{
	/*
	 * All relocation types where P (the address of the target)
	 * is included in the computation.
	 */
	switch (reloc_type(reloc)) {
	case R_X86_64_PC8:
	case R_X86_64_PC16:
	case R_X86_64_PC32:
	case R_X86_64_PC64:

	case R_X86_64_PLT32:
	case R_X86_64_GOTPC32:
	case R_X86_64_GOTPCREL:
		return true;

	default:
		break;
	}

	return false;
}

#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (*ops_list = op, ops_list = &op->next; op; op = NULL)

/*
 * Helpers to decode ModRM/SIB:
 *
 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
 * Mod+----------------+-----+-----+---------+
 *  00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
 *  01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
 *  10 |  [r/m + d32]   |[S+D]|  [r/m + d32]  |
 *  11 |        r/m     |
 */

#define mod_is_mem()	(modrm_mod != 3)
#define mod_is_reg()	(modrm_mod == 3)

#define is_RIP()	((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
#define have_SIB()	((modrm_rm & 7) == CFI_SP && mod_is_mem())

/*
 * Check the ModRM register. If there is a SIB byte then check with
 * the SIB base register. But if the SIB base is 5 (i.e. CFI_BP) and
 * ModRM mod is 0 then there is no base register.
 */
#define rm_is(reg) (have_SIB() ? \
		    sib_base == (reg) && sib_index == CFI_SP && \
		    (sib_base != CFI_BP || modrm_mod != 0) : \
		    modrm_rm == (reg))

#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))

static bool has_notrack_prefix(struct insn *insn)
{
	int i;

	for (i = 0; i < insn->prefixes.nbytes; i++) {
		if (insn->prefixes.bytes[i] == 0x3e)
			return true;
	}

	return false;
}

int arch_decode_instruction(struct objtool_file *file, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    struct instruction *insn)
{
	struct stack_op **ops_list = &insn->stack_ops;
	const struct elf *elf = file->elf;
	struct insn ins;
	int x86_64, ret;
	unsigned char op1, op2, op3, prefix,
		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;
	u64 imm;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	ret = insn_decode(&ins, sec->data->d_buf + offset, maxlen,
			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
	if (ret < 0) {
		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	insn->len = ins.length;
	insn->type = INSN_OTHER;

	if (ins.vex_prefix.nbytes)
		return 0;

	prefix = ins.prefixes.bytes[0];

	op1 = ins.opcode.bytes[0];
	op2 = ins.opcode.bytes[1];
	op3 = ins.opcode.bytes[2];

	if (ins.rex_prefix.nbytes) {
		rex = ins.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	if (ins.modrm.nbytes) {
		modrm = ins.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
		modrm_rm  = X86_MODRM_RM(modrm)  + 8*rex_b;
	}

	if (ins.sib.nbytes) {
		sib = ins.sib.bytes[0];
		/* sib_scale = X86_SIB_SCALE(sib); */
		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
		sib_base  = X86_SIB_BASE(sib)  + 8*rex_b;
	}

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && rm_is_reg(CFI_SP)) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = (op1 & 0x7) + 8*rex_b;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = (op1 & 0x7) + 8*rex_b;
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x80 ... 0x83:
		/*
		 * 1000 00sw : mod OP r/m : immediate
		 *
		 * s - sign extend immediate
		 * w - imm8 / imm32
		 *
		 * OP: 000 ADD    100 AND
		 *     001 OR     101 SUB
		 *     010 ADC    110 XOR
		 *     011 SBB    111 CMP
		 */

		/* 64bit only */
		if (!rex_w)
			break;

		/* %rsp target only */
		if (!rm_is_reg(CFI_SP))
			break;

		imm = ins.immediate.value;
		if (op1 & 2) { /* sign extend */
			if (op1 & 1) { /* imm32 */
				imm <<= 32;
				imm = (s64)imm >> 32;
			} else { /* imm8 */
				imm <<= 56;
				imm = (s64)imm >> 56;
			}
		}

		switch (modrm_reg & 7) {
		case 5:
			imm = -imm;
			fallthrough;
		case 0:
			/* add/sub imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = imm;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		case 4:
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = ins.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		default:
			/* WARN ? */
			break;
		}

		break;

	case 0x89:
		if (!rex_w)
			break;

		if (modrm_reg == CFI_SP) {

			if (mod_is_reg()) {
				/* mov %rsp, reg */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG;
					op->dest.reg = modrm_rm;
				}
				break;

			} else {
				/* skip RIP relative displacement */
				if (is_RIP())
					break;

				/* skip nontrivial SIB */
				if (have_SIB()) {
					modrm_rm = sib_base;
					if (sib_index != CFI_SP)
						break;
				}

				/* mov %rsp, disp(%reg) */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG_INDIRECT;
					op->dest.reg = modrm_rm;
					op->dest.offset = ins.displacement.value;
				}
				break;
			}

			break;
		}

		if (rm_is_reg(CFI_SP)) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		fallthrough;
	case 0x88:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = ins.displacement.value;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = ins.displacement.value;
			}
			break;
		}

		break;

	case 0x8b:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = ins.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = ins.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		break;

	case 0x8d:
		if (mod_is_reg()) {
			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
			break;
		}

		/* skip non 64bit ops */
		if (!rex_w)
			break;

		/* skip nontrivial SIB */
		if (have_SIB()) {
			modrm_rm = sib_base;
			if (sib_index != CFI_SP)
				break;
		}

		/* lea disp(%rip), %dst */
		if (is_RIP()) {
			insn->type = INSN_LEA_RIP;
			break;
		}

		/* lea disp(%src), %dst */
		ADD_OP(op) {
			op->src.offset = ins.displacement.value;
			if (!op->src.offset) {
				/* lea (%src), %dst */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%src), %dst */
				op->src.type = OP_SRC_ADD;
			}
			op->src.reg = modrm_rm;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = modrm_reg;
		}
		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		insn->type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:

		if (op2 == 0x01) {

			switch (insn_last_prefix_id(&ins)) {
			case INAT_PFX_REPE:
			case INAT_PFX_REPNE:
				if (modrm == 0xca)
					/* eretu/erets */
					insn->type = INSN_CONTEXT_SWITCH;
				break;
			default:
				if (modrm == 0xca)
					insn->type = INSN_CLAC;
				else if (modrm == 0xcb)
					insn->type = INSN_STAC;
				break;
			}
		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			insn->type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			insn->type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			insn->type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			insn->type = INSN_NOP;

		} else if (op2 == 0x1e) {

			if (prefix == 0xf3 && (modrm == 0xfa || modrm == 0xfb))
				insn->type = INSN_ENDBR;

		} else if (op2 == 0x38 && op3 == 0xf8) {
			if (ins.prefixes.nbytes == 1 &&
			    ins.prefixes.bytes[0] == 0xf2) {
				/* ENQCMD cannot be used in the kernel. */
				WARN("ENQCMD instruction at %s:%lx", sec->name,
				     offset);
			}

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 *   mov bp, sp
		 *   pop bp
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_BP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_BP;
		}
		break;

	case 0xcc:
		/* int3 */
		insn->type = INSN_TRAP;
		break;

	case 0xe3:
		/* jecxz/jrcxz */
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		insn->type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		insn->type = INSN_RETURN;
		break;

	case 0xc7: /* mov imm, r/m */
		if (!opts.noinstr)
			break;

		if (ins.length == 3+4+4 && !strncmp(sec->name, ".init.text", 10)) {
			struct reloc *immr, *disp;
			struct symbol *func;
			int idx;

			immr = find_reloc_by_dest(elf, (void *)sec, offset+3);
			disp = find_reloc_by_dest(elf, (void *)sec, offset+7);

			if (!immr || strcmp(immr->sym->name, "pv_ops"))
				break;

			idx = (reloc_addend(immr) + 8) / sizeof(void *);

			func = disp->sym;
			if (disp->sym->type == STT_SECTION)
				func = find_symbol_by_offset(disp->sym->sec, reloc_addend(disp));
			if (!func) {
				WARN("no func for pv_ops[]");
				return -1;
			}

			objtool_pv_add(file, idx, func);
		}

		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		fallthrough;

	case 0xca: /* retf */
	case 0xcb: /* retf */
		insn->type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe0: /* loopne */
	case 0xe1: /* loope */
	case 0xe2: /* loop */
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe8:
		insn->type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0xfc:
		insn->type = INSN_CLD;
		break;

	case 0xfd:
		insn->type = INSN_STD;
		break;

	case 0xff:
		if (modrm_reg == 2 || modrm_reg == 3) {

			insn->type = INSN_CALL_DYNAMIC;
			if (has_notrack_prefix(&ins))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 4) {

			insn->type = INSN_JUMP_DYNAMIC;
			if (has_notrack_prefix(&ins))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 5) {

			/* jmpf */
			insn->type = INSN_CONTEXT_SWITCH;

		} else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

	if (ins.immediate.nbytes)
		insn->immediate = ins.immediate.value;
	else if (ins.displacement.nbytes)
		insn->immediate = ins.displacement.value;

	return 0;
}

void arch_initial_func_cfi_state(struct cfi_init_state *state)
{
	int i;

	for (i = 0; i < CFI_NUM_REGS; i++) {
		state->regs[i].base = CFI_UNDEFINED;
		state->regs[i].offset = 0;
	}

	/* initial CFA (call frame address) */
	state->cfa.base = CFI_SP;
	state->cfa.offset = 8;

	/* initial RA (return address) */
	state->regs[CFI_RA].base = CFI_CFA;
	state->regs[CFI_RA].offset = -8;
}

const char *arch_nop_insn(int len)
{
	static const char nops[5][5] = {
		{ BYTES_NOP1 },
		{ BYTES_NOP2 },
		{ BYTES_NOP3 },
		{ BYTES_NOP4 },
		{ BYTES_NOP5 },
	};

	if (len < 1 || len > 5) {
		WARN("invalid NOP size: %d\n", len);
		return NULL;
	}

	return nops[len-1];
}

#define BYTE_RET	0xC3

const char *arch_ret_insn(int len)
{
	static const char ret[5][5] = {
		{ BYTE_RET },
		{ BYTE_RET, 0xcc },
		{ BYTE_RET, 0xcc, BYTES_NOP1 },
		{ BYTE_RET, 0xcc, BYTES_NOP2 },
		{ BYTE_RET, 0xcc, BYTES_NOP3 },
	};

	if (len < 1 || len > 5) {
		WARN("invalid RET size: %d\n", len);
		return NULL;
	}

	return ret[len-1];
}

int arch_decode_hint_reg(u8 sp_reg, int *base)
{
	switch (sp_reg) {
	case ORC_REG_UNDEFINED:
		*base = CFI_UNDEFINED;
		break;
	case ORC_REG_SP:
		*base = CFI_SP;
		break;
	case ORC_REG_BP:
		*base = CFI_BP;
		break;
	case ORC_REG_SP_INDIRECT:
		*base = CFI_SP_INDIRECT;
		break;
	case ORC_REG_R10:
		*base = CFI_R10;
		break;
	case ORC_REG_R13:
		*base = CFI_R13;
		break;
	case ORC_REG_DI:
		*base = CFI_DI;
		break;
	case ORC_REG_DX:
		*base = CFI_DX;
		break;
	default:
		return -1;
	}

	return 0;
}

bool arch_is_retpoline(struct symbol *sym)
{
	return !strncmp(sym->name, "__x86_indirect_", 15);
}

bool arch_is_rethunk(struct symbol *sym)
{
	return !strcmp(sym->name, "__x86_return_thunk");
}

bool arch_is_embedded_insn(struct symbol *sym)
{
	return !strcmp(sym->name, "retbleed_return_thunk") ||
	       !strcmp(sym->name, "srso_safe_ret");
}