1 // SPDX-License-Identifier: GPL-2.0-or-later 2 /* 3 * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com> 4 */ 5 6 #include <string.h> 7 #include <stdlib.h> 8 9 #include "builtin.h" 10 #include "check.h" 11 #include "elf.h" 12 #include "special.h" 13 #include "arch.h" 14 #include "warn.h" 15 16 #include <linux/hashtable.h> 17 #include <linux/kernel.h> 18 19 #define FAKE_JUMP_OFFSET -1 20 21 #define C_JUMP_TABLE_SECTION ".rodata..c_jump_table" 22 23 struct alternative { 24 struct list_head list; 25 struct instruction *insn; 26 bool skip_orig; 27 }; 28 29 const char *objname; 30 struct cfi_init_state initial_func_cfi; 31 32 struct instruction *find_insn(struct objtool_file *file, 33 struct section *sec, unsigned long offset) 34 { 35 struct instruction *insn; 36 37 hash_for_each_possible(file->insn_hash, insn, hash, sec_offset_hash(sec, offset)) { 38 if (insn->sec == sec && insn->offset == offset) 39 return insn; 40 } 41 42 return NULL; 43 } 44 45 static struct instruction *next_insn_same_sec(struct objtool_file *file, 46 struct instruction *insn) 47 { 48 struct instruction *next = list_next_entry(insn, list); 49 50 if (!next || &next->list == &file->insn_list || next->sec != insn->sec) 51 return NULL; 52 53 return next; 54 } 55 56 static struct instruction *next_insn_same_func(struct objtool_file *file, 57 struct instruction *insn) 58 { 59 struct instruction *next = list_next_entry(insn, list); 60 struct symbol *func = insn->func; 61 62 if (!func) 63 return NULL; 64 65 if (&next->list != &file->insn_list && next->func == func) 66 return next; 67 68 /* Check if we're already in the subfunction: */ 69 if (func == func->cfunc) 70 return NULL; 71 72 /* Move to the subfunction: */ 73 return find_insn(file, func->cfunc->sec, func->cfunc->offset); 74 } 75 76 #define func_for_each_insn(file, func, insn) \ 77 for (insn = find_insn(file, func->sec, func->offset); \ 78 insn; \ 79 insn = next_insn_same_func(file, insn)) 80 81 #define sym_for_each_insn(file, sym, insn) 
\ 82 for (insn = find_insn(file, sym->sec, sym->offset); \ 83 insn && &insn->list != &file->insn_list && \ 84 insn->sec == sym->sec && \ 85 insn->offset < sym->offset + sym->len; \ 86 insn = list_next_entry(insn, list)) 87 88 #define sym_for_each_insn_continue_reverse(file, sym, insn) \ 89 for (insn = list_prev_entry(insn, list); \ 90 &insn->list != &file->insn_list && \ 91 insn->sec == sym->sec && insn->offset >= sym->offset; \ 92 insn = list_prev_entry(insn, list)) 93 94 #define sec_for_each_insn_from(file, insn) \ 95 for (; insn; insn = next_insn_same_sec(file, insn)) 96 97 #define sec_for_each_insn_continue(file, insn) \ 98 for (insn = next_insn_same_sec(file, insn); insn; \ 99 insn = next_insn_same_sec(file, insn)) 100 101 static bool is_static_jump(struct instruction *insn) 102 { 103 return insn->type == INSN_JUMP_CONDITIONAL || 104 insn->type == INSN_JUMP_UNCONDITIONAL; 105 } 106 107 static bool is_sibling_call(struct instruction *insn) 108 { 109 /* An indirect jump is either a sibling call or a jump to a table. */ 110 if (insn->type == INSN_JUMP_DYNAMIC) 111 return list_empty(&insn->alts); 112 113 if (!is_static_jump(insn)) 114 return false; 115 116 /* add_jump_destinations() sets insn->call_dest for sibling calls. */ 117 return !!insn->call_dest; 118 } 119 120 /* 121 * This checks to see if the given function is a "noreturn" function. 122 * 123 * For global functions which are outside the scope of this object file, we 124 * have to keep a manual list of them. 125 * 126 * For local functions, we have to detect them manually by simply looking for 127 * the lack of a return instruction. 128 */ 129 static bool __dead_end_function(struct objtool_file *file, struct symbol *func, 130 int recursion) 131 { 132 int i; 133 struct instruction *insn; 134 bool empty = true; 135 136 /* 137 * Unfortunately these have to be hard coded because the noreturn 138 * attribute isn't provided in ELF data. 
139 */ 140 static const char * const global_noreturns[] = { 141 "__stack_chk_fail", 142 "panic", 143 "do_exit", 144 "do_task_dead", 145 "__module_put_and_exit", 146 "complete_and_exit", 147 "__reiserfs_panic", 148 "lbug_with_loc", 149 "fortify_panic", 150 "usercopy_abort", 151 "machine_real_restart", 152 "rewind_stack_do_exit", 153 "kunit_try_catch_throw", 154 }; 155 156 if (!func) 157 return false; 158 159 if (func->bind == STB_WEAK) 160 return false; 161 162 if (func->bind == STB_GLOBAL) 163 for (i = 0; i < ARRAY_SIZE(global_noreturns); i++) 164 if (!strcmp(func->name, global_noreturns[i])) 165 return true; 166 167 if (!func->len) 168 return false; 169 170 insn = find_insn(file, func->sec, func->offset); 171 if (!insn->func) 172 return false; 173 174 func_for_each_insn(file, func, insn) { 175 empty = false; 176 177 if (insn->type == INSN_RETURN) 178 return false; 179 } 180 181 if (empty) 182 return false; 183 184 /* 185 * A function can have a sibling call instead of a return. In that 186 * case, the function's dead-end status depends on whether the target 187 * of the sibling call returns. 188 */ 189 func_for_each_insn(file, func, insn) { 190 if (is_sibling_call(insn)) { 191 struct instruction *dest = insn->jump_dest; 192 193 if (!dest) 194 /* sibling call to another file */ 195 return false; 196 197 /* local sibling call */ 198 if (recursion == 5) { 199 /* 200 * Infinite recursion: two functions have 201 * sibling calls to each other. This is a very 202 * rare case. It means they aren't dead ends. 
203 */ 204 return false; 205 } 206 207 return __dead_end_function(file, dest->func, recursion+1); 208 } 209 } 210 211 return true; 212 } 213 214 static bool dead_end_function(struct objtool_file *file, struct symbol *func) 215 { 216 return __dead_end_function(file, func, 0); 217 } 218 219 static void init_cfi_state(struct cfi_state *cfi) 220 { 221 int i; 222 223 for (i = 0; i < CFI_NUM_REGS; i++) { 224 cfi->regs[i].base = CFI_UNDEFINED; 225 cfi->vals[i].base = CFI_UNDEFINED; 226 } 227 cfi->cfa.base = CFI_UNDEFINED; 228 cfi->drap_reg = CFI_UNDEFINED; 229 cfi->drap_offset = -1; 230 } 231 232 static void init_insn_state(struct insn_state *state, struct section *sec) 233 { 234 memset(state, 0, sizeof(*state)); 235 init_cfi_state(&state->cfi); 236 237 /* 238 * We need the full vmlinux for noinstr validation, otherwise we can 239 * not correctly determine insn->call_dest->sec (external symbols do 240 * not have a section). 241 */ 242 if (vmlinux && sec) 243 state->noinstr = sec->noinstr; 244 } 245 246 /* 247 * Call the arch-specific instruction decoder for all the instructions and add 248 * them to the global instruction list. 
 */
static int decode_instructions(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *func;
	unsigned long offset;
	struct instruction *insn;
	unsigned long nr_insns = 0;
	int ret;

	for_each_sec(file, sec) {

		/* Only executable sections contain instructions. */
		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
			continue;

		/*
		 * Alternative-replacement and .discard.* sections hold code
		 * that never executes in place, so don't mark them as text.
		 */
		if (strcmp(sec->name, ".altinstr_replacement") &&
		    strcmp(sec->name, ".altinstr_aux") &&
		    strncmp(sec->name, ".discard.", 9))
			sec->text = true;

		if (!strcmp(sec->name, ".noinstr.text") ||
		    !strcmp(sec->name, ".entry.text"))
			sec->noinstr = true;

		/*
		 * Walk the section byte range; the stride (insn->len) is
		 * filled in by arch_decode_instruction() each iteration.
		 */
		for (offset = 0; offset < sec->len; offset += insn->len) {
			insn = malloc(sizeof(*insn));
			if (!insn) {
				WARN("malloc failed");
				return -1;
			}
			memset(insn, 0, sizeof(*insn));
			INIT_LIST_HEAD(&insn->alts);
			INIT_LIST_HEAD(&insn->stack_ops);
			init_cfi_state(&insn->cfi);

			insn->sec = sec;
			insn->offset = offset;

			ret = arch_decode_instruction(file->elf, sec, offset,
						      sec->len - offset,
						      &insn->len, &insn->type,
						      &insn->immediate,
						      &insn->stack_ops);
			if (ret)
				goto err;

			/* Index by (sec, offset) for find_insn() lookups. */
			hash_add(file->insn_hash, &insn->hash, sec_offset_hash(sec, insn->offset));
			list_add_tail(&insn->list, &file->insn_list);
			nr_insns++;
		}

		/* Tag each decoded instruction with its owning function. */
		list_for_each_entry(func, &sec->symbol_list, list) {
			if (func->type != STT_FUNC || func->alias != func)
				continue;

			if (!find_insn(file, sec, func->offset)) {
				WARN("%s(): can't find starting instruction",
				     func->name);
				return -1;
			}

			sym_for_each_insn(file, func, insn)
				insn->func = func;
		}
	}

	if (stats)
		printf("nr_insns: %lu\n", nr_insns);

	return 0;

err:
	/* insn was not yet hashed/listed, so it's safe to free here. */
	free(insn);
	return ret;
}

/*
 * Mark "ud2" instructions and manually annotated dead ends.
327 */ 328 static int add_dead_ends(struct objtool_file *file) 329 { 330 struct section *sec; 331 struct rela *rela; 332 struct instruction *insn; 333 bool found; 334 335 /* 336 * By default, "ud2" is a dead end unless otherwise annotated, because 337 * GCC 7 inserts it for certain divide-by-zero cases. 338 */ 339 for_each_insn(file, insn) 340 if (insn->type == INSN_BUG) 341 insn->dead_end = true; 342 343 /* 344 * Check for manually annotated dead ends. 345 */ 346 sec = find_section_by_name(file->elf, ".rela.discard.unreachable"); 347 if (!sec) 348 goto reachable; 349 350 list_for_each_entry(rela, &sec->rela_list, list) { 351 if (rela->sym->type != STT_SECTION) { 352 WARN("unexpected relocation symbol type in %s", sec->name); 353 return -1; 354 } 355 insn = find_insn(file, rela->sym->sec, rela->addend); 356 if (insn) 357 insn = list_prev_entry(insn, list); 358 else if (rela->addend == rela->sym->sec->len) { 359 found = false; 360 list_for_each_entry_reverse(insn, &file->insn_list, list) { 361 if (insn->sec == rela->sym->sec) { 362 found = true; 363 break; 364 } 365 } 366 367 if (!found) { 368 WARN("can't find unreachable insn at %s+0x%x", 369 rela->sym->sec->name, rela->addend); 370 return -1; 371 } 372 } else { 373 WARN("can't find unreachable insn at %s+0x%x", 374 rela->sym->sec->name, rela->addend); 375 return -1; 376 } 377 378 insn->dead_end = true; 379 } 380 381 reachable: 382 /* 383 * These manually annotated reachable checks are needed for GCC 4.4, 384 * where the Linux unreachable() macro isn't supported. In that case 385 * GCC doesn't know the "ud2" is fatal, so it generates code as if it's 386 * not a dead end. 
387 */ 388 sec = find_section_by_name(file->elf, ".rela.discard.reachable"); 389 if (!sec) 390 return 0; 391 392 list_for_each_entry(rela, &sec->rela_list, list) { 393 if (rela->sym->type != STT_SECTION) { 394 WARN("unexpected relocation symbol type in %s", sec->name); 395 return -1; 396 } 397 insn = find_insn(file, rela->sym->sec, rela->addend); 398 if (insn) 399 insn = list_prev_entry(insn, list); 400 else if (rela->addend == rela->sym->sec->len) { 401 found = false; 402 list_for_each_entry_reverse(insn, &file->insn_list, list) { 403 if (insn->sec == rela->sym->sec) { 404 found = true; 405 break; 406 } 407 } 408 409 if (!found) { 410 WARN("can't find reachable insn at %s+0x%x", 411 rela->sym->sec->name, rela->addend); 412 return -1; 413 } 414 } else { 415 WARN("can't find reachable insn at %s+0x%x", 416 rela->sym->sec->name, rela->addend); 417 return -1; 418 } 419 420 insn->dead_end = false; 421 } 422 423 return 0; 424 } 425 426 /* 427 * Warnings shouldn't be reported for ignored functions. 428 */ 429 static void add_ignores(struct objtool_file *file) 430 { 431 struct instruction *insn; 432 struct section *sec; 433 struct symbol *func; 434 struct rela *rela; 435 436 sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard"); 437 if (!sec) 438 return; 439 440 list_for_each_entry(rela, &sec->rela_list, list) { 441 switch (rela->sym->type) { 442 case STT_FUNC: 443 func = rela->sym; 444 break; 445 446 case STT_SECTION: 447 func = find_func_by_offset(rela->sym->sec, rela->addend); 448 if (!func) 449 continue; 450 break; 451 452 default: 453 WARN("unexpected relocation symbol type in %s: %d", sec->name, rela->sym->type); 454 continue; 455 } 456 457 func_for_each_insn(file, func, insn) 458 insn->ignore = true; 459 } 460 } 461 462 /* 463 * This is a whitelist of functions that is allowed to be called with AC set. 
464 * The list is meant to be minimal and only contains compiler instrumentation 465 * ABI and a few functions used to implement *_{to,from}_user() functions. 466 * 467 * These functions must not directly change AC, but may PUSHF/POPF. 468 */ 469 static const char *uaccess_safe_builtin[] = { 470 /* KASAN */ 471 "kasan_report", 472 "check_memory_region", 473 /* KASAN out-of-line */ 474 "__asan_loadN_noabort", 475 "__asan_load1_noabort", 476 "__asan_load2_noabort", 477 "__asan_load4_noabort", 478 "__asan_load8_noabort", 479 "__asan_load16_noabort", 480 "__asan_storeN_noabort", 481 "__asan_store1_noabort", 482 "__asan_store2_noabort", 483 "__asan_store4_noabort", 484 "__asan_store8_noabort", 485 "__asan_store16_noabort", 486 /* KASAN in-line */ 487 "__asan_report_load_n_noabort", 488 "__asan_report_load1_noabort", 489 "__asan_report_load2_noabort", 490 "__asan_report_load4_noabort", 491 "__asan_report_load8_noabort", 492 "__asan_report_load16_noabort", 493 "__asan_report_store_n_noabort", 494 "__asan_report_store1_noabort", 495 "__asan_report_store2_noabort", 496 "__asan_report_store4_noabort", 497 "__asan_report_store8_noabort", 498 "__asan_report_store16_noabort", 499 /* KCOV */ 500 "write_comp_data", 501 "__sanitizer_cov_trace_pc", 502 "__sanitizer_cov_trace_const_cmp1", 503 "__sanitizer_cov_trace_const_cmp2", 504 "__sanitizer_cov_trace_const_cmp4", 505 "__sanitizer_cov_trace_const_cmp8", 506 "__sanitizer_cov_trace_cmp1", 507 "__sanitizer_cov_trace_cmp2", 508 "__sanitizer_cov_trace_cmp4", 509 "__sanitizer_cov_trace_cmp8", 510 "__sanitizer_cov_trace_switch", 511 /* UBSAN */ 512 "ubsan_type_mismatch_common", 513 "__ubsan_handle_type_mismatch", 514 "__ubsan_handle_type_mismatch_v1", 515 "__ubsan_handle_shift_out_of_bounds", 516 /* misc */ 517 "csum_partial_copy_generic", 518 "__memcpy_mcsafe", 519 "mcsafe_handle_tail", 520 "ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */ 521 NULL 522 }; 523 524 static void add_uaccess_safe(struct objtool_file *file) 525 { 
526 struct symbol *func; 527 const char **name; 528 529 if (!uaccess) 530 return; 531 532 for (name = uaccess_safe_builtin; *name; name++) { 533 func = find_symbol_by_name(file->elf, *name); 534 if (!func) 535 continue; 536 537 func->uaccess_safe = true; 538 } 539 } 540 541 /* 542 * FIXME: For now, just ignore any alternatives which add retpolines. This is 543 * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline. 544 * But it at least allows objtool to understand the control flow *around* the 545 * retpoline. 546 */ 547 static int add_ignore_alternatives(struct objtool_file *file) 548 { 549 struct section *sec; 550 struct rela *rela; 551 struct instruction *insn; 552 553 sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts"); 554 if (!sec) 555 return 0; 556 557 list_for_each_entry(rela, &sec->rela_list, list) { 558 if (rela->sym->type != STT_SECTION) { 559 WARN("unexpected relocation symbol type in %s", sec->name); 560 return -1; 561 } 562 563 insn = find_insn(file, rela->sym->sec, rela->addend); 564 if (!insn) { 565 WARN("bad .discard.ignore_alts entry"); 566 return -1; 567 } 568 569 insn->ignore_alts = true; 570 } 571 572 return 0; 573 } 574 575 /* 576 * Find the destination instructions for all jumps. 
 */
static int add_jump_destinations(struct objtool_file *file)
{
	struct instruction *insn;
	struct rela *rela;
	struct section *dest_sec;
	unsigned long dest_off;

	for_each_insn(file, insn) {
		if (!is_static_jump(insn))
			continue;

		if (insn->ignore || insn->offset == FAKE_JUMP_OFFSET)
			continue;

		/*
		 * Classify the jump by its relocation (if any):
		 * no rela -> target encoded in the instruction itself.
		 */
		rela = find_rela_by_dest_range(file->elf, insn->sec,
					       insn->offset, insn->len);
		if (!rela) {
			dest_sec = insn->sec;
			dest_off = arch_jump_destination(insn);
		} else if (rela->sym->type == STT_SECTION) {
			dest_sec = rela->sym->sec;
			dest_off = arch_dest_rela_offset(rela->addend);
		} else if (rela->sym->sec->idx) {
			/* Symbol in a real section: target is sym + addend. */
			dest_sec = rela->sym->sec;
			dest_off = rela->sym->sym.st_value +
				   arch_dest_rela_offset(rela->addend);
		} else if (strstr(rela->sym->name, "_indirect_thunk_")) {
			/*
			 * Retpoline jumps are really dynamic jumps in
			 * disguise, so convert them accordingly.
			 */
			if (insn->type == INSN_JUMP_UNCONDITIONAL)
				insn->type = INSN_JUMP_DYNAMIC;
			else
				insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;

			insn->retpoline_safe = true;
			continue;
		} else {
			/* external sibling call */
			insn->call_dest = rela->sym;
			continue;
		}

		insn->jump_dest = find_insn(file, dest_sec, dest_off);
		if (!insn->jump_dest) {

			/*
			 * This is a special case where an alt instruction
			 * jumps past the end of the section.  These are
			 * handled later in handle_group_alt().
			 */
			if (!strcmp(insn->sec->name, ".altinstr_replacement"))
				continue;

			WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
				  insn->sec, insn->offset, dest_sec->name,
				  dest_off);
			return -1;
		}

		/*
		 * Cross-function jump.
		 */
		if (insn->func && insn->jump_dest->func &&
		    insn->func != insn->jump_dest->func) {

			/*
			 * For GCC 8+, create parent/child links for any cold
			 * subfunctions.  This is _mostly_ redundant with a
			 * similar initialization in read_symbols().
			 *
			 * If a function has aliases, we want the *first* such
			 * function in the symbol table to be the subfunction's
			 * parent.  In that case we overwrite the
			 * initialization done in read_symbols().
			 *
			 * However this code can't completely replace the
			 * read_symbols() code because this doesn't detect the
			 * case where the parent function's only reference to a
			 * subfunction is through a jump table.
			 */
			if (!strstr(insn->func->name, ".cold.") &&
			    strstr(insn->jump_dest->func->name, ".cold.")) {
				insn->func->cfunc = insn->jump_dest->func;
				insn->jump_dest->func->pfunc = insn->func;

			} else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
				   insn->jump_dest->offset == insn->jump_dest->func->offset) {

				/* internal sibling call */
				insn->call_dest = insn->jump_dest->func;
			}
		}
	}

	return 0;
}

/* Free all stack operations recorded for an instruction. */
static void remove_insn_ops(struct instruction *insn)
{
	struct stack_op *op, *tmp;

	list_for_each_entry_safe(op, tmp, &insn->stack_ops, list) {
		list_del(&op->list);
		free(op);
	}
}

/*
 * Find the destination instructions for all calls.
 */
static int add_call_destinations(struct objtool_file *file)
{
	struct instruction *insn;
	unsigned long dest_off;
	struct rela *rela;

	for_each_insn(file, insn) {
		if (insn->type != INSN_CALL)
			continue;

		rela = find_rela_by_dest_range(file->elf, insn->sec,
					       insn->offset, insn->len);
		if (!rela) {
			/* Target encoded in the instruction (same section). */
			dest_off = arch_jump_destination(insn);
			insn->call_dest = find_func_by_offset(insn->sec, dest_off);
			if (!insn->call_dest)
				insn->call_dest = find_symbol_by_offset(insn->sec, dest_off);

			if (insn->ignore)
				continue;

			if (!insn->call_dest) {
				WARN_FUNC("unannotated intra-function call", insn->sec, insn->offset);
				return -1;
			}

			if (insn->func && insn->call_dest->type != STT_FUNC) {
				WARN_FUNC("unsupported call to non-function",
					  insn->sec, insn->offset);
				return -1;
			}

		} else if (rela->sym->type == STT_SECTION) {
			dest_off = arch_dest_rela_offset(rela->addend);
			insn->call_dest = find_func_by_offset(rela->sym->sec,
							      dest_off);
			if (!insn->call_dest) {
				WARN_FUNC("can't find call dest symbol at %s+0x%lx",
					  insn->sec, insn->offset,
					  rela->sym->sec->name,
					  dest_off);
				return -1;
			}
		} else
			insn->call_dest = rela->sym;

		/*
		 * Whatever stack impact regular CALLs have, should be undone
		 * by the RETURN of the called function.
		 *
		 * Annotated intra-function calls retain the stack_ops but
		 * are converted to JUMP, see read_intra_function_calls().
		 */
		remove_insn_ops(insn);
	}

	return 0;
}

/*
 * The .alternatives section requires some extra special care, over and above
 * what other special sections require:
 *
 * 1. Because alternatives are patched in-place, we need to insert a fake jump
 *    instruction at the end so that validate_branch() skips all the original
 *    replaced instructions when validating the new instruction path.
 *
 * 2.
 *    An added wrinkle is that the new instruction length might be zero.  In
 *    that case the old instructions are replaced with noops.  We simulate that
 *    by creating a fake jump as the only new instruction.
 *
 * 3. In some cases, the alternative section includes an instruction which
 *    conditionally jumps to the _end_ of the entry.  We have to modify these
 *    jumps' destinations to point back to .text rather than the end of the
 *    entry in .altinstr_replacement.
 */
static int handle_group_alt(struct objtool_file *file,
			    struct special_alt *special_alt,
			    struct instruction *orig_insn,
			    struct instruction **new_insn)
{
	/* Monotonic id so each orig/new group gets a unique alt_group tag. */
	static unsigned int alt_group_next_index = 1;
	struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
	unsigned int alt_group = alt_group_next_index++;
	unsigned long dest_off;

	/* Tag all original (to-be-patched) instructions with this group. */
	last_orig_insn = NULL;
	insn = orig_insn;
	sec_for_each_insn_from(file, insn) {
		if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
			break;

		insn->alt_group = alt_group;
		last_orig_insn = insn;
	}

	/*
	 * NOTE(review): if the loop above matched nothing, last_orig_insn is
	 * NULL here and next_insn_same_sec() would dereference it.  The
	 * caller rejects entries with !orig_len before calling us — confirm
	 * that guarantees at least one matched instruction.
	 */
	if (next_insn_same_sec(file, last_orig_insn)) {
		/*
		 * There is code after the patched range, so synthesize a jump
		 * from the end of the replacement back to it.
		 */
		fake_jump = malloc(sizeof(*fake_jump));
		if (!fake_jump) {
			WARN("malloc failed");
			return -1;
		}
		memset(fake_jump, 0, sizeof(*fake_jump));
		INIT_LIST_HEAD(&fake_jump->alts);
		INIT_LIST_HEAD(&fake_jump->stack_ops);
		init_cfi_state(&fake_jump->cfi);

		fake_jump->sec = special_alt->new_sec;
		fake_jump->offset = FAKE_JUMP_OFFSET;
		fake_jump->type = INSN_JUMP_UNCONDITIONAL;
		fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
		fake_jump->func = orig_insn->func;
	}

	if (!special_alt->new_len) {
		/* Empty replacement: the fake jump is the entire alt path. */
		if (!fake_jump) {
			WARN("%s: empty alternative at end of section",
			     special_alt->orig_sec->name);
			return -1;
		}

		*new_insn = fake_jump;
		return 0;
	}

	/* Tag the replacement instructions with their own group id. */
	last_new_insn = NULL;
	alt_group = alt_group_next_index++;
	insn = *new_insn;
	sec_for_each_insn_from(file, insn) {
		if (insn->offset >= special_alt->new_off + special_alt->new_len)
			break;

		last_new_insn = insn;

		insn->ignore = orig_insn->ignore_alts;
		insn->func = orig_insn->func;
		insn->alt_group = alt_group;

		/*
		 * Since alternative replacement code is copy/pasted by the
		 * kernel after applying relocations, generally such code can't
		 * have relative-address relocation references to outside the
		 * .altinstr_replacement section, unless the arch's
		 * alternatives code can adjust the relative offsets
		 * accordingly.
		 *
		 * The x86 alternatives code adjusts the offsets only when it
		 * encounters a branch instruction at the very beginning of the
		 * replacement group.
		 */
		if ((insn->offset != special_alt->new_off ||
		     (insn->type != INSN_CALL && !is_static_jump(insn))) &&
		    find_rela_by_dest_range(file->elf, insn->sec, insn->offset, insn->len)) {

			WARN_FUNC("unsupported relocation in alternatives section",
				  insn->sec, insn->offset);
			return -1;
		}

		if (!is_static_jump(insn))
			continue;

		if (!insn->immediate)
			continue;

		/* Redirect jumps to the end of the entry at the fake jump. */
		dest_off = arch_jump_destination(insn);
		if (dest_off == special_alt->new_off + special_alt->new_len) {
			if (!fake_jump) {
				WARN("%s: alternative jump to end of section",
				     special_alt->orig_sec->name);
				return -1;
			}
			insn->jump_dest = fake_jump;
		}

		if (!insn->jump_dest) {
			WARN_FUNC("can't find alternative jump destination",
				  insn->sec, insn->offset);
			return -1;
		}
	}

	if (!last_new_insn) {
		WARN_FUNC("can't find last new alternative instruction",
			  special_alt->new_sec, special_alt->new_off);
		return -1;
	}

	/* Append the fake jump after the last replacement instruction. */
	if (fake_jump)
		list_add(&fake_jump->list, &last_new_insn->list);

	return 0;
}

/*
 * A jump table entry can either convert a nop to a jump or a jump to a nop.
 * If the original instruction is a jump, make the alt entry an effective nop
 * by just skipping the original instruction.
 */
static int handle_jump_alt(struct objtool_file *file,
			   struct special_alt *special_alt,
			   struct instruction *orig_insn,
			   struct instruction **new_insn)
{
	if (orig_insn->type == INSN_NOP)
		return 0;

	if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
		WARN_FUNC("unsupported instruction at jump label",
			  orig_insn->sec, orig_insn->offset);
		return -1;
	}

	/* Skip the original jump: the alt path resumes right after it. */
	*new_insn = list_next_entry(orig_insn, list);
	return 0;
}

/*
 * Read all the special sections which have alternate instructions which can be
 * patched in or redirected to at runtime.  Each instruction having alternate
 * instruction(s) has them added to its insn->alts list, which will be
 * traversed in validate_branch().
 */
static int add_special_section_alts(struct objtool_file *file)
{
	struct list_head special_alts;
	struct instruction *orig_insn, *new_insn;
	struct special_alt *special_alt, *tmp;
	struct alternative *alt;
	int ret;

	/* special_get_alts() hands us ownership of the entries on the list. */
	ret = special_get_alts(file->elf, &special_alts);
	if (ret)
		return ret;

	list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {

		orig_insn = find_insn(file, special_alt->orig_sec,
				      special_alt->orig_off);
		if (!orig_insn) {
			WARN_FUNC("special: can't find orig instruction",
				  special_alt->orig_sec, special_alt->orig_off);
			ret = -1;
			goto out;
		}

		new_insn = NULL;
		if (!special_alt->group || special_alt->new_len) {
			new_insn = find_insn(file, special_alt->new_sec,
					     special_alt->new_off);
			if (!new_insn) {
				WARN_FUNC("special: can't find new instruction",
					  special_alt->new_sec,
					  special_alt->new_off);
				ret = -1;
				goto out;
			}
		}

		if (special_alt->group) {
			if (!special_alt->orig_len) {
				/*
				 * NOTE(review): this continue skips the
				 * list_del/free below, leaking special_alt.
				 * Harmless for a short-lived tool — confirm
				 * intended.
				 */
				WARN_FUNC("empty alternative entry",
					  orig_insn->sec, orig_insn->offset);
				continue;
			}

			ret = handle_group_alt(file, special_alt, orig_insn,
					       &new_insn);
			if (ret)
				goto out;
		} else if (special_alt->jump_or_nop) {
			ret = handle_jump_alt(file, special_alt, orig_insn,
					      &new_insn);
			if (ret)
				goto out;
		}

		alt = malloc(sizeof(*alt));
		if (!alt) {
			WARN("malloc failed");
			ret = -1;
			goto out;
		}

		alt->insn = new_insn;
		alt->skip_orig = special_alt->skip_orig;
		orig_insn->ignore_alts |= special_alt->skip_alt;
		list_add_tail(&alt->list, &orig_insn->alts);

		/* Entry fully consumed; release it. */
		list_del(&special_alt->list);
		free(special_alt);
	}

out:
	return ret;
}

/*
 * Add each entry of the jump table starting at @table as an alternative
 * branch destination of @insn.  Stops at the next table's start, at a gap
 * in the 8-byte entry stride, or when an entry leaves the function.
 */
static int add_jump_table(struct objtool_file *file, struct instruction *insn,
			  struct rela *table)
{
	struct rela *rela = table;
	struct instruction *dest_insn;
	struct alternative *alt;
	struct symbol *pfunc = insn->func->pfunc;
	unsigned int prev_offset = 0;

	/*
	 * Each @rela is a switch table relocation which points to the target
	 * instruction.
	 */
	list_for_each_entry_from(rela, &table->sec->rela_list, list) {

		/* Check for the end of the table: */
		if (rela != table && rela->jump_table_start)
			break;

		/* Make sure the table entries are consecutive: */
		if (prev_offset && rela->offset != prev_offset + 8)
			break;

		/* Detect function pointers from contiguous objects: */
		if (rela->sym->sec == pfunc->sec &&
		    rela->addend == pfunc->offset)
			break;

		dest_insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!dest_insn)
			break;

		/* Make sure the destination is in the same function: */
		if (!dest_insn->func || dest_insn->func->pfunc != pfunc)
			break;

		alt = malloc(sizeof(*alt));
		if (!alt) {
			WARN("malloc failed");
			return -1;
		}

		alt->insn = dest_insn;
		list_add_tail(&alt->list, &insn->alts);
		prev_offset = rela->offset;
	}

	/* prev_offset == 0 means not even one valid entry was found. */
	if (!prev_offset) {
		WARN_FUNC("can't find switch jump table",
			  insn->sec, insn->offset);
		return -1;
	}

	return 0;
}

/*
 * find_jump_table() - Given a dynamic jump, find the switch jump table in
 * .rodata associated with it.
 *
 * There are 3 basic patterns:
 *
 * 1. jmpq *[rodata addr](,%reg,8)
 *
 *    This is the most common case by far.  It jumps to an address in a simple
 *    jump table which is stored in .rodata.
 *
 * 2. jmpq *[rodata addr](%rip)
 *
 *    This is caused by a rare GCC quirk, currently only seen in three driver
 *    functions in the kernel, only with certain obscure non-distro configs.
 *
 *    As part of an optimization, GCC makes a copy of an existing switch jump
 *    table, modifies it, and then hard-codes the jump (albeit with an indirect
 *    jump) to use a single entry in the table.  The rest of the jump table and
 *    some of its jump targets remain as dead code.
 *
 *    In such a case we can just crudely ignore all unreachable instruction
 *    warnings for the entire object file.  Ideally we would just ignore them
 *    for the function, but that would require redesigning the code quite a
 *    bit.  And honestly that's just not worth doing: unreachable instruction
 *    warnings are of questionable value anyway, and this is such a rare issue.
 *
 * 3. mov [rodata addr],%reg1
 *    ... some instructions ...
 *    jmpq *(%reg1,%reg2,8)
 *
 *    This is a fairly uncommon pattern which is new for GCC 6.  As of this
 *    writing, there are 11 occurrences of it in the allmodconfig kernel.
 *
 *    As of GCC 7 there are quite a few more of these and the 'in between' code
 *    is significant.  Esp. with KASAN enabled some of the code between the mov
 *    and jmpq uses .rodata itself, which can confuse things.
 *
 *    TODO: Once we have DWARF CFI and smarter instruction decoding logic,
 *    ensure the same register is used in the mov and jump instructions.
 *
 * NOTE: RETPOLINE made it harder still to decode dynamic jumps.
 */
static struct rela *find_jump_table(struct objtool_file *file,
				    struct symbol *func,
				    struct instruction *insn)
{
	struct rela *text_rela, *table_rela;
	struct instruction *dest_insn, *orig_insn = insn;
	struct section *table_sec;
	unsigned long table_offset;

	/*
	 * Backward search using the @first_jump_src links, these help avoid
	 * much of the 'in between' code.  Which avoids us getting confused by
	 * it.
	 */
	for (;
	     &insn->list != &file->insn_list && insn->func && insn->func->pfunc == func;
	     insn = insn->first_jump_src ?: list_prev_entry(insn, list)) {

		/* A second dynamic jump means we've left this table's setup. */
		if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
			break;

		/* allow small jumps within the range */
		if (insn->type == INSN_JUMP_UNCONDITIONAL &&
		    insn->jump_dest &&
		    (insn->jump_dest->offset <= insn->offset ||
		     insn->jump_dest->offset > orig_insn->offset))
			break;

		/* look for a relocation which references .rodata */
		text_rela = find_rela_by_dest_range(file->elf, insn->sec,
						    insn->offset, insn->len);
		if (!text_rela || text_rela->sym->type != STT_SECTION ||
		    !text_rela->sym->sec->rodata)
			continue;

		table_offset = text_rela->addend;
		table_sec = text_rela->sym->sec;

		/* PC-relative addends point 4 bytes before the target. */
		if (text_rela->type == R_X86_64_PC32)
			table_offset += 4;

		/*
		 * Make sure the .rodata address isn't associated with a
		 * symbol.  GCC jump tables are anonymous data.
		 *
		 * Also support C jump tables which are in the same format as
		 * switch jump tables.  For objtool to recognize them, they
		 * need to be placed in the C_JUMP_TABLE_SECTION section.  They
		 * have symbols associated with them.
		 */
		if (find_symbol_containing(table_sec, table_offset) &&
		    strcmp(table_sec->name, C_JUMP_TABLE_SECTION))
			continue;

		/*
		 * Each table entry has a rela associated with it.  The rela
		 * should reference text in the same function as the original
		 * instruction.
		 */
		table_rela = find_rela_by_dest(file->elf, table_sec, table_offset);
		if (!table_rela)
			continue;
		dest_insn = find_insn(file, table_rela->sym->sec, table_rela->addend);
		if (!dest_insn || !dest_insn->func || dest_insn->func->pfunc != func)
			continue;

		/*
		 * Use of RIP-relative switch jumps is quite rare, and
		 * indicates a rare GCC quirk/bug which can leave dead code
		 * behind.
		 */
		if (text_rela->type == R_X86_64_PC32)
			file->ignore_unreachables = true;

		return table_rela;
	}

	return NULL;
}

/*
 * First pass: Mark the head of each jump table so that in the next pass,
 * we know when a given jump table ends and the next one starts.
 */
static void mark_func_jump_tables(struct objtool_file *file,
				  struct symbol *func)
{
	struct instruction *insn, *last = NULL;
	struct rela *rela;

	func_for_each_insn(file, func, insn) {
		if (!last)
			last = insn;

		/*
		 * Store back-pointers for unconditional forward jumps such
		 * that find_jump_table() can back-track using those and
		 * avoid some potentially confusing code.
		 */
		if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
		    insn->offset > last->offset &&
		    insn->jump_dest->offset > insn->offset &&
		    !insn->jump_dest->first_jump_src) {

			insn->jump_dest->first_jump_src = insn;
			last = insn->jump_dest;
		}

		if (insn->type != INSN_JUMP_DYNAMIC)
			continue;

		rela = find_jump_table(file, func, insn);
		if (rela) {
			/* Mark the table head; entries are added in pass 2. */
			rela->jump_table_start = true;
			insn->jump_table = rela;
		}
	}
}

/* Second pass: expand each table found above into insn->alts entries. */
static int add_func_jump_tables(struct objtool_file *file,
				struct symbol *func)
{
	struct instruction *insn;
	int ret;

	func_for_each_insn(file, func, insn) {
		if (!insn->jump_table)
			continue;

		ret = add_jump_table(file, insn, insn->jump_table);
		if (ret)
			return ret;
	}

	return 0;
}

/*
 * For some switch statements, gcc generates a jump table in the .rodata
 * section which contains a list of addresses within the function to jump to.
 * This finds these jump tables and adds them to the insn->alts lists.
 */
static int add_jump_table_alts(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *func;
	int ret;

	/* no .rodata-like sections were marked; nothing to search */
	if (!file->rodata)
		return 0;

	for_each_sec(file, sec) {
		list_for_each_entry(func, &sec->symbol_list, list) {
			if (func->type != STT_FUNC)
				continue;

			mark_func_jump_tables(file, func);
			ret = add_func_jump_tables(file, func);
			if (ret)
				return ret;
		}
	}

	return 0;
}

/*
 * Parse the .discard.unwind_hints section: each struct unwind_hint entry
 * seeds the CFI state (cfa base/offset, type, end) of the instruction its
 * rela points at, or records a return-offset hint.  Sets file->hints so the
 * hint-driven validation pass runs.  Returns 0 on success, -1 on malformed
 * hint data.
 */
static int read_unwind_hints(struct objtool_file *file)
{
	struct section *sec, *relasec;
	struct rela *rela;
	struct unwind_hint *hint;
	struct instruction *insn;
	struct cfi_reg *cfa;
	int i;

	sec = find_section_by_name(file->elf, ".discard.unwind_hints");
	if (!sec)
		return 0;

	relasec = sec->rela;
	if (!relasec) {
		WARN("missing .rela.discard.unwind_hints section");
		return -1;
	}

	/* section size must be a whole number of hint structs */
	if (sec->len % sizeof(struct unwind_hint)) {
		WARN("struct unwind_hint size mismatch");
		return -1;
	}

	file->hints = true;

	for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
		hint = (struct unwind_hint *)sec->data->d_buf + i;

		rela = find_rela_by_dest(file->elf, sec, i * sizeof(*hint));
		if (!rela) {
			WARN("can't find rela for unwind_hints[%d]", i);
			return -1;
		}

		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!insn) {
			WARN("can't find insn for unwind_hints[%d]", i);
			return -1;
		}

		cfa = &insn->cfi.cfa;

		/* RET_OFFSET hints only carry a stack adjustment, no CFI */
		if (hint->type == UNWIND_HINT_TYPE_RET_OFFSET) {
			insn->ret_offset = hint->sp_offset;
			continue;
		}

		insn->hint = true;

		/* translate the ORC register encoding to objtool's CFI one */
		switch (hint->sp_reg) {
		case ORC_REG_UNDEFINED:
			cfa->base = CFI_UNDEFINED;
			break;
		case ORC_REG_SP:
			cfa->base = CFI_SP;
			break;
		case ORC_REG_BP:
			cfa->base = CFI_BP;
			break;
		case ORC_REG_SP_INDIRECT:
			cfa->base = CFI_SP_INDIRECT;
			break;
		case ORC_REG_R10:
			cfa->base = CFI_R10;
			break;
		case ORC_REG_R13:
			cfa->base = CFI_R13;
			break;
		case ORC_REG_DI:
			cfa->base = CFI_DI;
			break;
		case ORC_REG_DX:
			cfa->base = CFI_DX;
			break;
		default:
			WARN_FUNC("unsupported unwind_hint sp base reg %d",
				  insn->sec, insn->offset, hint->sp_reg);
			return -1;
		}

		cfa->offset = hint->sp_offset;
		insn->cfi.type = hint->type;
		insn->cfi.end = hint->end;
	}

	return 0;
}

/*
 * Mark every instruction referenced from .discard.retpoline_safe as a
 * whitelisted indirect jump/call, so validate_retpoline() won't warn on it.
 */
static int read_retpoline_hints(struct objtool_file *file)
{
	struct section *sec;
	struct instruction *insn;
	struct rela *rela;

	sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
	if (!sec)
		return 0;

	list_for_each_entry(rela, &sec->rela_list, list) {
		if (rela->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s", sec->name);
			return -1;
		}

		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!insn) {
			WARN("bad .discard.retpoline_safe entry");
			return -1;
		}

		/* the annotation only makes sense on indirect transfers */
		if (insn->type != INSN_JUMP_DYNAMIC &&
		    insn->type != INSN_CALL_DYNAMIC) {
			WARN_FUNC("retpoline_safe hint not an indirect jump/call",
				  insn->sec, insn->offset);
			return -1;
		}

		insn->retpoline_safe = true;
	}

	return 0;
}

/*
 * Apply instrumentation begin/end annotations: each .discard.instr_end entry
 * decrements insn->instr at its target and each .discard.instr_begin entry
 * increments it.  validate_branch() later accumulates these into the
 * noinstr instrumentation counter.
 */
static int read_instr_hints(struct objtool_file *file)
{
	struct section *sec;
	struct instruction *insn;
	struct rela *rela;

	sec = find_section_by_name(file->elf, ".rela.discard.instr_end");
	if (!sec)
		return 0;

	list_for_each_entry(rela, &sec->rela_list, list) {
		if (rela->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s", sec->name);
			return -1;
		}

		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!insn) {
			WARN("bad .discard.instr_end entry");
			return -1;
		}

		insn->instr--;
	}

	sec = find_section_by_name(file->elf, ".rela.discard.instr_begin");
	if (!sec)
		return 0;

	list_for_each_entry(rela, &sec->rela_list, list) {
		if (rela->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s", sec->name);
			return -1;
		}

		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!insn) {
			WARN("bad .discard.instr_begin entry");
			return -1;
		}

		insn->instr++;
	}

	return 0;
}

/*
 * Convert calls annotated in .discard.intra_function_calls into
 * unconditional jumps (keeping their push stack_op), since they never
 * return through the normal call/ret discipline.
 */
static int read_intra_function_calls(struct objtool_file *file)
{
	struct instruction *insn;
	struct section *sec;
	struct rela *rela;

	sec = find_section_by_name(file->elf, ".rela.discard.intra_function_calls");
	if (!sec)
		return 0;

	list_for_each_entry(rela, &sec->rela_list, list) {
		unsigned long dest_off;

		if (rela->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s",
			     sec->name);
			return -1;
		}

		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!insn) {
			WARN("bad .discard.intra_function_call entry");
			return -1;
		}

		if (insn->type != INSN_CALL) {
			WARN_FUNC("intra_function_call not a direct call",
				  insn->sec, insn->offset);
			return -1;
		}

		/*
		 * Treat intra-function CALLs as JMPs, but with a stack_op.
		 * See add_call_destinations(), which strips stack_ops from
		 * normal CALLs.
		 */
		insn->type = INSN_JUMP_UNCONDITIONAL;

		/* direct call target = next insn address + relative immediate */
		dest_off = insn->offset + insn->len + insn->immediate;
		insn->jump_dest = find_insn(file, insn->sec, dest_off);
		if (!insn->jump_dest) {
			WARN_FUNC("can't find call dest at %s+0x%lx",
				  insn->sec, insn->offset,
				  insn->sec->name, dest_off);
			return -1;
		}
	}

	return 0;
}

static void mark_rodata(struct objtool_file *file)
{
	struct section *sec;
	bool found = false;

	/*
	 * Search for the following rodata sections, each of which can
	 * potentially contain jump tables:
	 *
	 * - .rodata: can contain GCC switch tables
	 * - .rodata.<func>: same, if -fdata-sections is being used
	 * - .rodata..c_jump_table: contains C annotated jump tables
	 *
	 * .rodata.str1.* sections are ignored; they don't contain jump tables.
	 */
	for_each_sec(file, sec) {
		if (!strncmp(sec->name, ".rodata", 7) &&
		    !strstr(sec->name, ".str1.")) {
			sec->rodata = true;
			found = true;
		}
	}

	file->rodata = found;
}

/*
 * Run all the decode/annotation passes over the object file, in dependency
 * order: instructions must be decoded before jump/call destinations are
 * resolved, special-section alternatives before jump tables, and so on.
 * Returns the first pass's error, or 0 if all succeed.
 */
static int decode_sections(struct objtool_file *file)
{
	int ret;

	mark_rodata(file);

	ret = decode_instructions(file);
	if (ret)
		return ret;

	ret = add_dead_ends(file);
	if (ret)
		return ret;

	add_ignores(file);
	add_uaccess_safe(file);

	ret = add_ignore_alternatives(file);
	if (ret)
		return ret;

	ret = add_jump_destinations(file);
	if (ret)
		return ret;

	ret = add_special_section_alts(file);
	if (ret)
		return ret;

	/* must run before add_call_destinations(); it retypes some CALLs */
	ret = read_intra_function_calls(file);
	if (ret)
		return ret;

	ret = add_call_destinations(file);
	if (ret)
		return ret;

	ret = add_jump_table_alts(file);
	if (ret)
		return ret;

	ret = read_unwind_hints(file);
	if (ret)
		return ret;

	ret = read_retpoline_hints(file);
	if (ret)
		return ret;

	ret = read_instr_hints(file);
	if (ret)
		return ret;

	return 0;
}

/* Is this a direct call to the __fentry__ tracing stub? */
static bool is_fentry_call(struct instruction *insn)
{
	if (insn->type == INSN_CALL && insn->call_dest &&
	    insn->call_dest->type == STT_NOTYPE &&
	    !strcmp(insn->call_dest->name, "__fentry__"))
		return true;

	return false;
}

/*
 * Return true if the current CFI state differs from the function-entry
 * state (initial_func_cfi), i.e. the stack frame has been modified.  A
 * non-zero insn->ret_offset shifts the expected CFA/stack size accordingly.
 */
static bool has_modified_stack_frame(struct instruction *insn, struct insn_state *state)
{
	u8 ret_offset = insn->ret_offset;
	struct cfi_state *cfi = &state->cfi;
	int i;

	if (cfi->cfa.base != initial_func_cfi.cfa.base || cfi->drap)
		return true;

	if (cfi->cfa.offset != initial_func_cfi.cfa.offset + ret_offset)
		return true;

	if (cfi->stack_size != initial_func_cfi.cfa.offset + ret_offset)
		return true;

	/*
	 * If there is a ret offset hint then don't check registers
	 * because a callee-saved register might have been pushed on
	 * the stack.
	 */
	if (ret_offset)
		return false;

	for (i = 0; i < CFI_NUM_REGS; i++) {
		if (cfi->regs[i].base != initial_func_cfi.regs[i].base ||
		    cfi->regs[i].offset != initial_func_cfi.regs[i].offset)
			return true;
	}

	return false;
}

/*
 * A valid frame-pointer frame has either BP as the CFA base with the saved
 * BP at CFA-16 (standard push %rbp; mov %rsp,%rbp prologue), or, with DRAP,
 * BP saved relative to itself.
 */
static bool has_valid_stack_frame(struct insn_state *state)
{
	struct cfi_state *cfi = &state->cfi;

	if (cfi->cfa.base == CFI_BP && cfi->regs[CFI_BP].base == CFI_CFA &&
	    cfi->regs[CFI_BP].offset == -16)
		return true;

	if (cfi->drap && cfi->regs[CFI_BP].base == CFI_BP)
		return true;

	return false;
}

/*
 * Simplified CFA tracking used inside exception-register frames
 * (ORC_TYPE_REGS / ORC_TYPE_REGS_IRET): only adjust the SP-based CFA offset
 * for push/pop/add-imm; everything else is left alone.
 */
static int update_cfi_state_regs(struct instruction *insn,
				 struct cfi_state *cfi,
				 struct stack_op *op)
{
	struct cfi_reg *cfa = &cfi->cfa;

	if (cfa->base != CFI_SP)
		return 0;

	/* push */
	if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
		cfa->offset += 8;

	/* pop */
	if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
		cfa->offset -= 8;

	/* add immediate to sp */
	if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
	    op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
		cfa->offset -= op->src.offset;

	return 0;
}

/*
 * Record where callee-saved register @reg was stored.  Only the first save
 * of each register is tracked (base must still be CFI_UNDEFINED).
 */
static void save_reg(struct cfi_state *cfi, unsigned char reg, int base, int offset)
{
	if (arch_callee_saved_reg(reg) &&
	    cfi->regs[reg].base == CFI_UNDEFINED) {
		cfi->regs[reg].base = base;
		cfi->regs[reg].offset = offset;
	}
}

/* Reset @reg's save location to the function-entry state. */
static void restore_reg(struct cfi_state *cfi, unsigned char reg)
{
	cfi->regs[reg].base = initial_func_cfi.regs[reg].base;
	cfi->regs[reg].offset = initial_func_cfi.regs[reg].offset;
}

/*
 * A note about DRAP stack alignment:
 *
 * GCC has the concept of a DRAP register, which is used to help keep track of
 * the stack pointer when aligning the stack.  r10 or r13 is used as the DRAP
 * register.  The typical DRAP pattern is:
 *
 *   4c 8d 54 24 08		lea    0x8(%rsp),%r10
 *   48 83 e4 c0		and    $0xffffffffffffffc0,%rsp
 *   41 ff 72 f8		pushq  -0x8(%r10)
 *   55				push   %rbp
 *   48 89 e5			mov    %rsp,%rbp
 *				(more pushes)
 *   41 52			push   %r10
 *				...
 *   41 5a			pop    %r10
 *				(more pops)
 *   5d				pop    %rbp
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * There are some variations in the epilogues, like:
 *
 *   5b				pop    %rbx
 *   41 5a			pop    %r10
 *   41 5c			pop    %r12
 *   41 5d			pop    %r13
 *   41 5e			pop    %r14
 *   c9				leaveq
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * and:
 *
 *   4c 8b 55 e8		mov    -0x18(%rbp),%r10
 *   48 8b 5d e0		mov    -0x20(%rbp),%rbx
 *   4c 8b 65 f0		mov    -0x10(%rbp),%r12
 *   4c 8b 6d f8		mov    -0x8(%rbp),%r13
 *   c9				leaveq
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * Sometimes r13 is used as the DRAP register, in which case it's saved and
 * restored beforehand:
 *
 *   41 55			push   %r13
 *   4c 8d 6c 24 10		lea    0x10(%rsp),%r13
 *   48 83 e4 f0		and    $0xfffffffffffffff0,%rsp
 *				...
 *   49 8d 65 f0		lea    -0x10(%r13),%rsp
 *   41 5d			pop    %r13
 *   c3				retq
 */
/*
 * Apply one decoded stack operation @op to the CFI state @cfi, tracking the
 * CFA (base register + offset), total stack size, callee-saved register save
 * slots, and the DRAP realignment state machine described above.  Returns -1
 * (with a warning) on an operation the tracker can't model.
 */
static int update_cfi_state(struct instruction *insn, struct cfi_state *cfi,
			    struct stack_op *op)
{
	struct cfi_reg *cfa = &cfi->cfa;
	struct cfi_reg *regs = cfi->regs;

	/* stack operations don't make sense with an undefined CFA */
	if (cfa->base == CFI_UNDEFINED) {
		if (insn->func) {
			WARN_FUNC("undefined stack state", insn->sec, insn->offset);
			return -1;
		}
		return 0;
	}

	if (cfi->type == ORC_TYPE_REGS || cfi->type == ORC_TYPE_REGS_IRET)
		return update_cfi_state_regs(insn, cfi, op);

	switch (op->dest.type) {

	case OP_DEST_REG:
		switch (op->src.type) {

		case OP_SRC_REG:
			if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
			    cfa->base == CFI_SP &&
			    regs[CFI_BP].base == CFI_CFA &&
			    regs[CFI_BP].offset == -cfa->offset) {

				/* mov %rsp, %rbp */
				cfa->base = op->dest.reg;
				cfi->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP &&
				 op->dest.reg == CFI_BP && cfi->drap) {

				/* drap: mov %rsp, %rbp */
				regs[CFI_BP].base = CFI_BP;
				regs[CFI_BP].offset = -cfi->stack_size;
				cfi->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/*
				 * mov %rsp, %reg
				 *
				 * This is needed for the rare case where GCC
				 * does:
				 *
				 *   mov    %rsp, %rax
				 *   ...
				 *   mov    %rax, %rsp
				 */
				cfi->vals[op->dest.reg].base = CFI_CFA;
				cfi->vals[op->dest.reg].offset = -cfi->stack_size;
			}

			else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
				 cfa->base == CFI_BP) {

				/*
				 * mov %rbp, %rsp
				 *
				 * Restore the original stack pointer (Clang).
				 */
				cfi->stack_size = -cfi->regs[CFI_BP].offset;
			}

			else if (op->dest.reg == cfa->base) {

				/* mov %reg, %rsp */
				if (cfa->base == CFI_SP &&
				    cfi->vals[op->src.reg].base == CFI_CFA) {

					/*
					 * This is needed for the rare case
					 * where GCC does something dumb like:
					 *
					 *   lea    0x8(%rsp), %rcx
					 *   ...
					 *   mov    %rcx, %rsp
					 */
					cfa->offset = -cfi->vals[op->src.reg].offset;
					cfi->stack_size = cfa->offset;

				} else {
					/* writing an untracked value into the
					 * CFA base register loses the frame */
					cfa->base = CFI_UNDEFINED;
					cfa->offset = 0;
				}
			}

			break;

		case OP_SRC_ADD:
			if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {

				/* add imm, %rsp */
				cfi->stack_size -= op->src.offset;
				if (cfa->base == CFI_SP)
					cfa->offset -= op->src.offset;
				break;
			}

			if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {

				/* lea disp(%rbp), %rsp */
				cfi->stack_size = -(op->src.offset + regs[CFI_BP].offset);
				break;
			}

			if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/* drap: lea disp(%rsp), %drap */
				cfi->drap_reg = op->dest.reg;

				/*
				 * lea disp(%rsp), %reg
				 *
				 * This is needed for the rare case where GCC
				 * does something dumb like:
				 *
				 *   lea    0x8(%rsp), %rcx
				 *   ...
				 *   mov    %rcx, %rsp
				 */
				cfi->vals[op->dest.reg].base = CFI_CFA;
				cfi->vals[op->dest.reg].offset = \
					-cfi->stack_size + op->src.offset;

				break;
			}

			if (cfi->drap && op->dest.reg == CFI_SP &&
			    op->src.reg == cfi->drap_reg) {

				/* drap: lea disp(%drap), %rsp */
				cfa->base = CFI_SP;
				cfa->offset = cfi->stack_size = -op->src.offset;
				cfi->drap_reg = CFI_UNDEFINED;
				cfi->drap = false;
				break;
			}

			if (op->dest.reg == cfi->cfa.base) {
				WARN_FUNC("unsupported stack register modification",
					  insn->sec, insn->offset);
				return -1;
			}

			break;

		case OP_SRC_AND:
			if (op->dest.reg != CFI_SP ||
			    (cfi->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
			    (cfi->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
				WARN_FUNC("unsupported stack pointer realignment",
					  insn->sec, insn->offset);
				return -1;
			}

			if (cfi->drap_reg != CFI_UNDEFINED) {
				/* drap: and imm, %rsp */
				cfa->base = cfi->drap_reg;
				cfa->offset = cfi->stack_size = 0;
				cfi->drap = true;
			}

			/*
			 * Older versions of GCC (4.8ish) realign the stack
			 * without DRAP, with a frame pointer.
			 */

			break;

		case OP_SRC_POP:
		case OP_SRC_POPF:
			if (!cfi->drap && op->dest.reg == cfa->base) {

				/* pop %rbp */
				cfa->base = CFI_SP;
			}

			if (cfi->drap && cfa->base == CFI_BP_INDIRECT &&
			    op->dest.reg == cfi->drap_reg &&
			    cfi->drap_offset == -cfi->stack_size) {

				/* drap: pop %drap */
				cfa->base = cfi->drap_reg;
				cfa->offset = 0;
				cfi->drap_offset = -1;

			} else if (regs[op->dest.reg].offset == -cfi->stack_size) {

				/* pop %reg */
				restore_reg(cfi, op->dest.reg);
			}

			cfi->stack_size -= 8;
			if (cfa->base == CFI_SP)
				cfa->offset -= 8;

			break;

		case OP_SRC_REG_INDIRECT:
			if (cfi->drap && op->src.reg == CFI_BP &&
			    op->src.offset == cfi->drap_offset) {

				/* drap: mov disp(%rbp), %drap */
				cfa->base = cfi->drap_reg;
				cfa->offset = 0;
				cfi->drap_offset = -1;
			}

			if (cfi->drap && op->src.reg == CFI_BP &&
			    op->src.offset == regs[op->dest.reg].offset) {

				/* drap: mov disp(%rbp), %reg */
				restore_reg(cfi, op->dest.reg);

			} else if (op->src.reg == cfa->base &&
			    op->src.offset == regs[op->dest.reg].offset + cfa->offset) {

				/* mov disp(%rbp), %reg */
				/* mov disp(%rsp), %reg */
				restore_reg(cfi, op->dest.reg);
			}

			break;

		default:
			WARN_FUNC("unknown stack-related instruction",
				  insn->sec, insn->offset);
			return -1;
		}

		break;

	case OP_DEST_PUSH:
	case OP_DEST_PUSHF:
		cfi->stack_size += 8;
		if (cfa->base == CFI_SP)
			cfa->offset += 8;

		if (op->src.type != OP_SRC_REG)
			break;

		if (cfi->drap) {
			if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {

				/* drap: push %drap */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = -cfi->stack_size;

				/* save drap so we know when to restore it */
				cfi->drap_offset = -cfi->stack_size;

			} else if (op->src.reg == CFI_BP && cfa->base == cfi->drap_reg) {

				/* drap: push %rbp */
				cfi->stack_size = 0;

			} else if (regs[op->src.reg].base == CFI_UNDEFINED) {

				/* drap: push %reg */
				save_reg(cfi, op->src.reg, CFI_BP, -cfi->stack_size);
			}

		} else {

			/* push %reg */
			save_reg(cfi, op->src.reg, CFI_CFA, -cfi->stack_size);
		}

		/* detect when asm code uses rbp as a scratch register */
		if (!no_fp && insn->func && op->src.reg == CFI_BP &&
		    cfa->base != CFI_BP)
			cfi->bp_scratch = true;
		break;

	case OP_DEST_REG_INDIRECT:

		if (cfi->drap) {
			if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {

				/* drap: mov %drap, disp(%rbp) */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = op->dest.offset;

				/* save drap offset so we know when to restore it */
				cfi->drap_offset = op->dest.offset;
			}

			else if (regs[op->src.reg].base == CFI_UNDEFINED) {

				/* drap: mov reg, disp(%rbp) */
				save_reg(cfi, op->src.reg, CFI_BP, op->dest.offset);
			}

		} else if (op->dest.reg == cfa->base) {

			/* mov reg, disp(%rbp) */
			/* mov reg, disp(%rsp) */
			save_reg(cfi, op->src.reg, CFI_CFA,
				 op->dest.offset - cfi->cfa.offset);
		}

		break;

	case OP_DEST_LEAVE:
		if ((!cfi->drap && cfa->base != CFI_BP) ||
		    (cfi->drap && cfa->base != cfi->drap_reg)) {
			WARN_FUNC("leave instruction with modified stack frame",
				  insn->sec, insn->offset);
			return -1;
		}

		/* leave (mov %rbp, %rsp; pop %rbp) */

		cfi->stack_size = -cfi->regs[CFI_BP].offset - 8;
		restore_reg(cfi, CFI_BP);

		if (!cfi->drap) {
			cfa->base = CFI_SP;
			cfa->offset -= 8;
		}

		break;

	case OP_DEST_MEM:
		if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
			WARN_FUNC("unknown stack-related memory operation",
				  insn->sec, insn->offset);
			return -1;
		}

		/* pop mem */
		cfi->stack_size -= 8;
		if (cfa->base == CFI_SP)
			cfa->offset -= 8;

		break;

	default:
		WARN_FUNC("unknown stack-related instruction",
			  insn->sec, insn->offset);
		return -1;
	}

	return 0;
}

/*
 * Apply all of @insn's stack operations to @state's CFI, and maintain the
 * PUSHF/POPF uaccess shadow stack (state->uaccess_stack is a bit-stack of
 * saved AC-flag states).  Returns non-zero on an unmodelable operation.
 */
static int handle_insn_ops(struct instruction *insn, struct insn_state *state)
{
	struct stack_op *op;

	list_for_each_entry(op, &insn->stack_ops, list) {
		int res;

		/* alternatives must share one CFI; see fill_alternative_cfi() */
		if (insn->alt_group) {
			WARN_FUNC("alternative modifies stack", insn->sec, insn->offset);
			return -1;
		}

		res = update_cfi_state(insn, &state->cfi, op);
		if (res)
			return res;

		if (op->dest.type == OP_DEST_PUSHF) {
			if (!state->uaccess_stack) {
				state->uaccess_stack = 1;
			} else if (state->uaccess_stack >> 31) {
				WARN_FUNC("PUSHF stack exhausted",
					  insn->sec, insn->offset);
				return 1;
			}
			state->uaccess_stack <<= 1;
			state->uaccess_stack |= state->uaccess;
		}

		if (op->src.type == OP_SRC_POPF) {
			if (state->uaccess_stack) {
				state->uaccess = state->uaccess_stack & 1;
				state->uaccess_stack >>= 1;
				if (state->uaccess_stack == 1)
					state->uaccess_stack = 0;
			}
		}
	}

	return 0;
}

/*
 * Compare the CFI recorded at @insn with @cfi2.  Returns true when they
 * match; otherwise warns about the first difference found and returns false.
 */
static bool insn_cfi_match(struct instruction *insn, struct cfi_state *cfi2)
{
	struct cfi_state *cfi1 = &insn->cfi;
	int i;

	if (memcmp(&cfi1->cfa, &cfi2->cfa, sizeof(cfi1->cfa))) {

		WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
			  insn->sec, insn->offset,
			  cfi1->cfa.base, cfi1->cfa.offset,
			  cfi2->cfa.base, cfi2->cfa.offset);

	} else if (memcmp(&cfi1->regs, &cfi2->regs, sizeof(cfi1->regs))) {
		for (i = 0; i < CFI_NUM_REGS; i++) {
			if (!memcmp(&cfi1->regs[i], &cfi2->regs[i],
				    sizeof(struct cfi_reg)))
				continue;

			WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
				  insn->sec, insn->offset,
				  i, cfi1->regs[i].base, cfi1->regs[i].offset,
				  i, cfi2->regs[i].base, cfi2->regs[i].offset);
			break;
		}

	} else if (cfi1->type != cfi2->type) {

		WARN_FUNC("stack state mismatch: type1=%d type2=%d",
			  insn->sec, insn->offset, cfi1->type, cfi2->type);

	} else if (cfi1->drap != cfi2->drap ||
		   (cfi1->drap && cfi1->drap_reg != cfi2->drap_reg) ||
		   (cfi1->drap && cfi1->drap_offset != cfi2->drap_offset)) {

		WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
			  insn->sec, insn->offset,
			  cfi1->drap, cfi1->drap_reg, cfi1->drap_offset,
			  cfi2->drap, cfi2->drap_reg, cfi2->drap_offset);

	} else
		return true;

	return false;
}

/* May this function legally run with uaccess (AC flag) enabled? */
static inline bool func_uaccess_safe(struct symbol *func)
{
	if (func)
		return func->uaccess_safe;

	return false;
}

/* Human-readable call target name for warnings. */
static inline const char *call_dest_name(struct instruction *insn)
{
	if (insn->call_dest)
		return insn->call_dest->name;

	return "{dynamic}";
}

/*
 * Check context rules for a call instruction: noinstr code may only call
 * noinstr code, calls are forbidden with uaccess enabled (unless the callee
 * is whitelisted) and with the direction flag set.  Returns non-zero after
 * warning.
 */
static int validate_call(struct instruction *insn, struct insn_state *state)
{
	if (state->noinstr && state->instr <= 0 &&
	    (!insn->call_dest || !insn->call_dest->sec->noinstr)) {
		WARN_FUNC("call to %s() leaves .noinstr.text section",
			  insn->sec, insn->offset, call_dest_name(insn));
		return 1;
	}

	if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
		WARN_FUNC("call to %s() with UACCESS enabled",
			  insn->sec, insn->offset, call_dest_name(insn));
		return 1;
	}

	if (state->df) {
		WARN_FUNC("call to %s() with DF set",
			  insn->sec, insn->offset, call_dest_name(insn));
		return 1;
	}

	return 0;
}

/*
 * A sibling call (tail call) must additionally leave the stack frame in its
 * function-entry state, since the callee returns on the caller's behalf.
 */
static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
{
	if (has_modified_stack_frame(insn, state)) {
		WARN_FUNC("sibling call from callable instruction with modified stack frame",
			  insn->sec, insn->offset);
		return 1;
	}

	return validate_call(insn, state);
}

/*
 * Check all the invariants that must hold at a return: instrumentation
 * balanced in noinstr code, uaccess state consistent with the function's
 * whitelisting, DF clear, stack frame restored, and BP not clobbered.
 */
static int validate_return(struct symbol *func, struct instruction *insn, struct insn_state *state)
{
	if (state->noinstr && state->instr > 0) {
		WARN_FUNC("return with instrumentation enabled",
			  insn->sec, insn->offset);
		return 1;
	}

	if (state->uaccess && !func_uaccess_safe(func)) {
		WARN_FUNC("return with UACCESS enabled",
			  insn->sec, insn->offset);
		return 1;
	}

	if (!state->uaccess && func_uaccess_safe(func)) {
		WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function",
			  insn->sec, insn->offset);
		return 1;
	}

	if (state->df) {
		WARN_FUNC("return with DF set",
			  insn->sec, insn->offset);
		return 1;
	}

	if (func && has_modified_stack_frame(insn, state)) {
		WARN_FUNC("return with modified stack frame",
			  insn->sec, insn->offset);
		return 1;
	}

	if (state->cfi.bp_scratch) {
		WARN_FUNC("BP used as a scratch register",
			  insn->sec, insn->offset);
		return 1;
	}

	return 0;
}

/*
 * Alternatives should not contain any ORC entries, this in turn means they
 * should not contain any CFI ops, which implies all instructions should have
 * the same same CFI state.
 *
 * It is possible to constuct alternatives that have unreachable holes that go
 * unreported (because they're NOPs), such holes would result in CFI_UNDEFINED
 * states which then results in ORC entries, which we just said we didn't want.
 *
 * Avoid them by copying the CFI entry of the first instruction into the whole
 * alternative.
 */
static void fill_alternative_cfi(struct objtool_file *file, struct instruction *insn)
{
	struct instruction *first_insn = insn;
	int alt_group = insn->alt_group;

	/* propagate the first insn's CFI to the rest of the alt group */
	sec_for_each_insn_continue(file, insn) {
		if (insn->alt_group != alt_group)
			break;
		insn->cfi = first_insn->cfi;
	}
}

/*
 * Follow the branch starting at the given instruction, and recursively follow
 * any other branches (jumps).  Meanwhile, track the frame pointer state at
 * each instruction and validate all the rules described in
 * tools/objtool/Documentation/stack-validation.txt.
 */
static int validate_branch(struct objtool_file *file, struct symbol *func,
			   struct instruction *insn, struct insn_state state)
{
	struct alternative *alt;
	struct instruction *next_insn;
	struct section *sec;
	u8 visited;
	int ret;

	sec = insn->sec;

	while (1) {
		next_insn = next_insn_same_sec(file, insn);

		if (file->c_file && func && insn->func && func != insn->func->pfunc) {
			WARN("%s() falls through to next function %s()",
			     func->name, insn->func->name);
			return 1;
		}

		if (func && insn->ignore) {
			WARN_FUNC("BUG: why am I validating an ignored function?",
				  sec, insn->offset);
			return 1;
		}

		/* one visited bit per uaccess state, so both paths get walked */
		visited = 1 << state.uaccess;
		if (insn->visited) {
			if (!insn->hint && !insn_cfi_match(insn, &state.cfi))
				return 1;

			if (insn->visited & visited)
				return 0;
		}

		if (state.noinstr)
			state.instr += insn->instr;

		/* a hint overrides the computed state; otherwise record it */
		if (insn->hint)
			state.cfi = insn->cfi;
		else
			insn->cfi = state.cfi;

		insn->visited |= visited;

		if (!insn->ignore_alts && !list_empty(&insn->alts)) {
			bool skip_orig = false;

			list_for_each_entry(alt, &insn->alts, list) {
				if (alt->skip_orig)
					skip_orig = true;

				ret = validate_branch(file, func, alt->insn, state);
				if (ret) {
					if (backtrace)
						BT_FUNC("(alt)", insn);
					return ret;
				}
			}

			if (insn->alt_group)
				fill_alternative_cfi(file, insn);

			if (skip_orig)
				return 0;
		}

		if (handle_insn_ops(insn, &state))
			return 1;

		switch (insn->type) {

		case INSN_RETURN:
			return validate_return(func, insn, &state);

		case INSN_CALL:
		case INSN_CALL_DYNAMIC:
			ret = validate_call(insn, &state);
			if (ret)
				return ret;

			if (!no_fp && func && !is_fentry_call(insn) &&
			    !has_valid_stack_frame(&state)) {
				WARN_FUNC("call without frame pointer save/setup",
					  sec, insn->offset);
				return 1;
			}

			/* a call to a noreturn function ends this path */
			if (dead_end_function(file, insn->call_dest))
				return 0;

			break;

		case INSN_JUMP_CONDITIONAL:
		case INSN_JUMP_UNCONDITIONAL:
			if (func && is_sibling_call(insn)) {
				ret = validate_sibling_call(insn, &state);
				if (ret)
					return ret;

			} else if (insn->jump_dest) {
				ret = validate_branch(file, func,
						      insn->jump_dest, state);
				if (ret) {
					if (backtrace)
						BT_FUNC("(branch)", insn);
					return ret;
				}
			}

			if (insn->type == INSN_JUMP_UNCONDITIONAL)
				return 0;

			break;

		case INSN_JUMP_DYNAMIC:
		case INSN_JUMP_DYNAMIC_CONDITIONAL:
			if (func && is_sibling_call(insn)) {
				ret = validate_sibling_call(insn, &state);
				if (ret)
					return ret;
			}

			if (insn->type == INSN_JUMP_DYNAMIC)
				return 0;

			break;

		case INSN_CONTEXT_SWITCH:
			if (func && (!next_insn || !next_insn->hint)) {
				WARN_FUNC("unsupported instruction in callable function",
					  sec, insn->offset);
				return 1;
			}
			return 0;

		case INSN_STAC:
			if (state.uaccess) {
				WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
				return 1;
			}

			state.uaccess = true;
			break;

		case INSN_CLAC:
			if (!state.uaccess && func) {
				WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
				return 1;
			}

			if (func_uaccess_safe(func) && !state.uaccess_stack) {
				WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
				return 1;
			}

			state.uaccess = false;
			break;

		case INSN_STD:
			if (state.df)
				WARN_FUNC("recursive STD", sec, insn->offset);

			state.df = true;
			break;

		case INSN_CLD:
			if (!state.df && func)
				WARN_FUNC("redundant CLD", sec, insn->offset);

			state.df = false;
			break;

		default:
			break;
		}

		if (insn->dead_end)
			return 0;

		if (!next_insn) {
			if (state.cfi.cfa.base == CFI_UNDEFINED)
				return 0;
			WARN("%s: unexpected end of section", sec->name);
			return 1;
		}

		insn = next_insn;
	}

	return 0;
}

/*
 * Validate every unvisited instruction carrying an unwind hint, using the
 * hint as the starting CFI state.  With @sec NULL, all sections are checked.
 * Returns the number of warnings emitted.
 */
static int validate_unwind_hints(struct objtool_file *file, struct section *sec)
{
	struct instruction *insn;
	struct insn_state state;
	int ret, warnings = 0;

	if (!file->hints)
		return 0;

	init_insn_state(&state, sec);

	if (sec) {
		insn = find_insn(file, sec, 0);
		if (!insn)
			return 0;
	} else {
		insn = list_first_entry(&file->insn_list, typeof(*insn), list);
	}

	while (&insn->list != &file->insn_list && (!sec || insn->sec == sec)) {
		if (insn->hint && !insn->visited) {
			ret = validate_branch(file, insn->func, insn, state);
			if (ret && backtrace)
				BT_FUNC("<=== (hint)", insn);
			warnings += ret;
		}

		insn = list_next_entry(insn, list);
	}

	return warnings;
}

/*
 * Warn about every indirect jump/call that is neither whitelisted via
 * .discard.retpoline_safe nor in non-module .init.text.  Returns the number
 * of warnings emitted.
 */
static int validate_retpoline(struct objtool_file *file)
{
	struct instruction *insn;
	int warnings = 0;

	for_each_insn(file, insn) {
		if (insn->type != INSN_JUMP_DYNAMIC &&
		    insn->type != INSN_CALL_DYNAMIC)
			continue;

		if (insn->retpoline_safe)
			continue;

		/*
		 * .init.text code is ran before userspace and thus doesn't
		 * strictly need retpolines, except for modules which are
		 * loaded late, they very much do need retpoline in their
		 * .init.text
		 */
		if (!strcmp(insn->sec->name, ".init.text") && !module)
			continue;

		WARN_FUNC("indirect %s found in RETPOLINE build",
			  insn->sec, insn->offset,
			  insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");

		warnings++;
	}

	return warnings;
}

/* Is this the KASAN noreturn-notification call? */
static bool is_kasan_insn(struct instruction *insn)
{
	return (insn->type == INSN_CALL &&
		!strcmp(insn->call_dest->name, "__asan_handle_no_return"));
}

/* Is this the UBSAN unreachable-handler call? */
static bool is_ubsan_insn(struct instruction *insn)
{
	return (insn->type == INSN_CALL &&
		!strcmp(insn->call_dest->name,
			"__ubsan_handle_builtin_unreachable"));
}

static bool ignore_unreachable_insn(struct instruction *insn)
{
	int i;

	if (insn->ignore || insn->type == INSN_NOP)
		return true;

	/*
	 * Ignore any unused exceptions. This can happen when a whitelisted
	 * function has an exception table entry.
	 *
	 * Also ignore alternative replacement instructions. This can happen
	 * when a whitelisted function uses one of the ALTERNATIVE macros.
	 */
	if (!strcmp(insn->sec->name, ".fixup") ||
	    !strcmp(insn->sec->name, ".altinstr_replacement") ||
	    !strcmp(insn->sec->name, ".altinstr_aux"))
		return true;

	if (!insn->func)
		return false;

	/*
	 * CONFIG_UBSAN_TRAP inserts a UD2 when it sees
	 * __builtin_unreachable().  The BUG() macro has an unreachable() after
	 * the UD2, which causes GCC's undefined trap logic to emit another UD2
	 * (or occasionally a JMP to UD2).
2564 */ 2565 if (list_prev_entry(insn, list)->dead_end && 2566 (insn->type == INSN_BUG || 2567 (insn->type == INSN_JUMP_UNCONDITIONAL && 2568 insn->jump_dest && insn->jump_dest->type == INSN_BUG))) 2569 return true; 2570 2571 /* 2572 * Check if this (or a subsequent) instruction is related to 2573 * CONFIG_UBSAN or CONFIG_KASAN. 2574 * 2575 * End the search at 5 instructions to avoid going into the weeds. 2576 */ 2577 for (i = 0; i < 5; i++) { 2578 2579 if (is_kasan_insn(insn) || is_ubsan_insn(insn)) 2580 return true; 2581 2582 if (insn->type == INSN_JUMP_UNCONDITIONAL) { 2583 if (insn->jump_dest && 2584 insn->jump_dest->func == insn->func) { 2585 insn = insn->jump_dest; 2586 continue; 2587 } 2588 2589 break; 2590 } 2591 2592 if (insn->offset + insn->len >= insn->func->offset + insn->func->len) 2593 break; 2594 2595 insn = list_next_entry(insn, list); 2596 } 2597 2598 return false; 2599 } 2600 2601 static int validate_symbol(struct objtool_file *file, struct section *sec, 2602 struct symbol *sym, struct insn_state *state) 2603 { 2604 struct instruction *insn; 2605 int ret; 2606 2607 if (!sym->len) { 2608 WARN("%s() is missing an ELF size annotation", sym->name); 2609 return 1; 2610 } 2611 2612 if (sym->pfunc != sym || sym->alias != sym) 2613 return 0; 2614 2615 insn = find_insn(file, sec, sym->offset); 2616 if (!insn || insn->ignore || insn->visited) 2617 return 0; 2618 2619 state->uaccess = sym->uaccess_safe; 2620 2621 ret = validate_branch(file, insn->func, insn, *state); 2622 if (ret && backtrace) 2623 BT_FUNC("<=== (sym)", insn); 2624 return ret; 2625 } 2626 2627 static int validate_section(struct objtool_file *file, struct section *sec) 2628 { 2629 struct insn_state state; 2630 struct symbol *func; 2631 int warnings = 0; 2632 2633 list_for_each_entry(func, &sec->symbol_list, list) { 2634 if (func->type != STT_FUNC) 2635 continue; 2636 2637 init_insn_state(&state, sec); 2638 state.cfi.cfa = initial_func_cfi.cfa; 2639 memcpy(&state.cfi.regs, 
&initial_func_cfi.regs, 2640 CFI_NUM_REGS * sizeof(struct cfi_reg)); 2641 state.cfi.stack_size = initial_func_cfi.cfa.offset; 2642 2643 warnings += validate_symbol(file, sec, func, &state); 2644 } 2645 2646 return warnings; 2647 } 2648 2649 static int validate_vmlinux_functions(struct objtool_file *file) 2650 { 2651 struct section *sec; 2652 int warnings = 0; 2653 2654 sec = find_section_by_name(file->elf, ".noinstr.text"); 2655 if (sec) { 2656 warnings += validate_section(file, sec); 2657 warnings += validate_unwind_hints(file, sec); 2658 } 2659 2660 sec = find_section_by_name(file->elf, ".entry.text"); 2661 if (sec) { 2662 warnings += validate_section(file, sec); 2663 warnings += validate_unwind_hints(file, sec); 2664 } 2665 2666 return warnings; 2667 } 2668 2669 static int validate_functions(struct objtool_file *file) 2670 { 2671 struct section *sec; 2672 int warnings = 0; 2673 2674 for_each_sec(file, sec) { 2675 if (!(sec->sh.sh_flags & SHF_EXECINSTR)) 2676 continue; 2677 2678 warnings += validate_section(file, sec); 2679 } 2680 2681 return warnings; 2682 } 2683 2684 static int validate_reachable_instructions(struct objtool_file *file) 2685 { 2686 struct instruction *insn; 2687 2688 if (file->ignore_unreachables) 2689 return 0; 2690 2691 for_each_insn(file, insn) { 2692 if (insn->visited || ignore_unreachable_insn(insn)) 2693 continue; 2694 2695 WARN_FUNC("unreachable instruction", insn->sec, insn->offset); 2696 return 1; 2697 } 2698 2699 return 0; 2700 } 2701 2702 static struct objtool_file file; 2703 2704 int check(const char *_objname, bool orc) 2705 { 2706 int ret, warnings = 0; 2707 2708 objname = _objname; 2709 2710 file.elf = elf_open_read(objname, orc ? 
O_RDWR : O_RDONLY); 2711 if (!file.elf) 2712 return 1; 2713 2714 INIT_LIST_HEAD(&file.insn_list); 2715 hash_init(file.insn_hash); 2716 file.c_file = find_section_by_name(file.elf, ".comment"); 2717 file.ignore_unreachables = no_unreachable; 2718 file.hints = false; 2719 2720 arch_initial_func_cfi_state(&initial_func_cfi); 2721 2722 ret = decode_sections(&file); 2723 if (ret < 0) 2724 goto out; 2725 warnings += ret; 2726 2727 if (list_empty(&file.insn_list)) 2728 goto out; 2729 2730 if (vmlinux && !validate_dup) { 2731 ret = validate_vmlinux_functions(&file); 2732 if (ret < 0) 2733 goto out; 2734 2735 warnings += ret; 2736 goto out; 2737 } 2738 2739 if (retpoline) { 2740 ret = validate_retpoline(&file); 2741 if (ret < 0) 2742 return ret; 2743 warnings += ret; 2744 } 2745 2746 ret = validate_functions(&file); 2747 if (ret < 0) 2748 goto out; 2749 warnings += ret; 2750 2751 ret = validate_unwind_hints(&file, NULL); 2752 if (ret < 0) 2753 goto out; 2754 warnings += ret; 2755 2756 if (!warnings) { 2757 ret = validate_reachable_instructions(&file); 2758 if (ret < 0) 2759 goto out; 2760 warnings += ret; 2761 } 2762 2763 if (orc) { 2764 ret = create_orc(&file); 2765 if (ret < 0) 2766 goto out; 2767 2768 ret = create_orc_sections(&file); 2769 if (ret < 0) 2770 goto out; 2771 2772 ret = elf_write(file.elf); 2773 if (ret < 0) 2774 goto out; 2775 } 2776 2777 out: 2778 if (ret < 0) { 2779 /* 2780 * Fatal error. The binary is corrupt or otherwise broken in 2781 * some way, or objtool itself is broken. Fail the kernel 2782 * build. 2783 */ 2784 return ret; 2785 } 2786 2787 return 0; 2788 } 2789