1 // SPDX-License-Identifier: GPL-2.0-or-later 2 /* 3 * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com> 4 */ 5 6 #include <string.h> 7 #include <stdlib.h> 8 #include <inttypes.h> 9 #include <sys/mman.h> 10 11 #include <objtool/builtin.h> 12 #include <objtool/cfi.h> 13 #include <objtool/arch.h> 14 #include <objtool/check.h> 15 #include <objtool/special.h> 16 #include <objtool/warn.h> 17 #include <objtool/endianness.h> 18 19 #include <linux/objtool_types.h> 20 #include <linux/hashtable.h> 21 #include <linux/kernel.h> 22 #include <linux/static_call_types.h> 23 #include <linux/string.h> 24 25 struct alternative { 26 struct alternative *next; 27 struct instruction *insn; 28 }; 29 30 static unsigned long nr_cfi, nr_cfi_reused, nr_cfi_cache; 31 32 static struct cfi_init_state initial_func_cfi; 33 static struct cfi_state init_cfi; 34 static struct cfi_state func_cfi; 35 static struct cfi_state force_undefined_cfi; 36 37 struct instruction *find_insn(struct objtool_file *file, 38 struct section *sec, unsigned long offset) 39 { 40 struct instruction *insn; 41 42 hash_for_each_possible(file->insn_hash, insn, hash, sec_offset_hash(sec, offset)) { 43 if (insn->sec == sec && insn->offset == offset) 44 return insn; 45 } 46 47 return NULL; 48 } 49 50 struct instruction *next_insn_same_sec(struct objtool_file *file, 51 struct instruction *insn) 52 { 53 if (insn->idx == INSN_CHUNK_MAX) 54 return find_insn(file, insn->sec, insn->offset + insn->len); 55 56 insn++; 57 if (!insn->len) 58 return NULL; 59 60 return insn; 61 } 62 63 static struct instruction *next_insn_same_func(struct objtool_file *file, 64 struct instruction *insn) 65 { 66 struct instruction *next = next_insn_same_sec(file, insn); 67 struct symbol *func = insn_func(insn); 68 69 if (!func) 70 return NULL; 71 72 if (next && insn_func(next) == func) 73 return next; 74 75 /* Check if we're already in the subfunction: */ 76 if (func == func->cfunc) 77 return NULL; 78 79 /* Move to the subfunction: */ 80 return 
find_insn(file, func->cfunc->sec, func->cfunc->offset); 81 } 82 83 static struct instruction *prev_insn_same_sec(struct objtool_file *file, 84 struct instruction *insn) 85 { 86 if (insn->idx == 0) { 87 if (insn->prev_len) 88 return find_insn(file, insn->sec, insn->offset - insn->prev_len); 89 return NULL; 90 } 91 92 return insn - 1; 93 } 94 95 static struct instruction *prev_insn_same_sym(struct objtool_file *file, 96 struct instruction *insn) 97 { 98 struct instruction *prev = prev_insn_same_sec(file, insn); 99 100 if (prev && insn_func(prev) == insn_func(insn)) 101 return prev; 102 103 return NULL; 104 } 105 106 #define for_each_insn(file, insn) \ 107 for (struct section *__sec, *__fake = (struct section *)1; \ 108 __fake; __fake = NULL) \ 109 for_each_sec(file, __sec) \ 110 sec_for_each_insn(file, __sec, insn) 111 112 #define func_for_each_insn(file, func, insn) \ 113 for (insn = find_insn(file, func->sec, func->offset); \ 114 insn; \ 115 insn = next_insn_same_func(file, insn)) 116 117 #define sym_for_each_insn(file, sym, insn) \ 118 for (insn = find_insn(file, sym->sec, sym->offset); \ 119 insn && insn->offset < sym->offset + sym->len; \ 120 insn = next_insn_same_sec(file, insn)) 121 122 #define sym_for_each_insn_continue_reverse(file, sym, insn) \ 123 for (insn = prev_insn_same_sec(file, insn); \ 124 insn && insn->offset >= sym->offset; \ 125 insn = prev_insn_same_sec(file, insn)) 126 127 #define sec_for_each_insn_from(file, insn) \ 128 for (; insn; insn = next_insn_same_sec(file, insn)) 129 130 #define sec_for_each_insn_continue(file, insn) \ 131 for (insn = next_insn_same_sec(file, insn); insn; \ 132 insn = next_insn_same_sec(file, insn)) 133 134 static inline struct symbol *insn_call_dest(struct instruction *insn) 135 { 136 if (insn->type == INSN_JUMP_DYNAMIC || 137 insn->type == INSN_CALL_DYNAMIC) 138 return NULL; 139 140 return insn->_call_dest; 141 } 142 143 static inline struct reloc *insn_jump_table(struct instruction *insn) 144 { 145 if (insn->type == 
INSN_JUMP_DYNAMIC || 146 insn->type == INSN_CALL_DYNAMIC) 147 return insn->_jump_table; 148 149 return NULL; 150 } 151 152 static inline unsigned long insn_jump_table_size(struct instruction *insn) 153 { 154 if (insn->type == INSN_JUMP_DYNAMIC || 155 insn->type == INSN_CALL_DYNAMIC) 156 return insn->_jump_table_size; 157 158 return 0; 159 } 160 161 static bool is_jump_table_jump(struct instruction *insn) 162 { 163 struct alt_group *alt_group = insn->alt_group; 164 165 if (insn_jump_table(insn)) 166 return true; 167 168 /* Retpoline alternative for a jump table? */ 169 return alt_group && alt_group->orig_group && 170 insn_jump_table(alt_group->orig_group->first_insn); 171 } 172 173 static bool is_sibling_call(struct instruction *insn) 174 { 175 /* 176 * Assume only STT_FUNC calls have jump-tables. 177 */ 178 if (insn_func(insn)) { 179 /* An indirect jump is either a sibling call or a jump to a table. */ 180 if (insn->type == INSN_JUMP_DYNAMIC) 181 return !is_jump_table_jump(insn); 182 } 183 184 /* add_jump_destinations() sets insn_call_dest(insn) for sibling calls. */ 185 return (is_static_jump(insn) && insn_call_dest(insn)); 186 } 187 188 /* 189 * Checks if a string ends with another. 190 */ 191 static bool str_ends_with(const char *s, const char *sub) 192 { 193 const int slen = strlen(s); 194 const int sublen = strlen(sub); 195 196 if (sublen > slen) 197 return 0; 198 199 return !memcmp(s + slen - sublen, sub, sublen); 200 } 201 202 /* 203 * Checks if a function is a Rust "noreturn" one. 204 */ 205 static bool is_rust_noreturn(const struct symbol *func) 206 { 207 /* 208 * If it does not start with "_R", then it is not a Rust symbol. 
209 */ 210 if (strncmp(func->name, "_R", 2)) 211 return false; 212 213 /* 214 * These are just heuristics -- we do not control the precise symbol 215 * name, due to the crate disambiguators (which depend on the compiler) 216 * as well as changes to the source code itself between versions (since 217 * these come from the Rust standard library). 218 */ 219 return str_ends_with(func->name, "_4core5sliceSp15copy_from_slice17len_mismatch_fail") || 220 str_ends_with(func->name, "_4core6option13expect_failed") || 221 str_ends_with(func->name, "_4core6option13unwrap_failed") || 222 str_ends_with(func->name, "_4core6result13unwrap_failed") || 223 str_ends_with(func->name, "_4core9panicking5panic") || 224 str_ends_with(func->name, "_4core9panicking9panic_fmt") || 225 str_ends_with(func->name, "_4core9panicking14panic_explicit") || 226 str_ends_with(func->name, "_4core9panicking14panic_nounwind") || 227 str_ends_with(func->name, "_4core9panicking18panic_bounds_check") || 228 str_ends_with(func->name, "_4core9panicking18panic_nounwind_fmt") || 229 str_ends_with(func->name, "_4core9panicking19assert_failed_inner") || 230 str_ends_with(func->name, "_4core9panicking30panic_null_pointer_dereference") || 231 str_ends_with(func->name, "_4core9panicking36panic_misaligned_pointer_dereference") || 232 str_ends_with(func->name, "_7___rustc17rust_begin_unwind") || 233 strstr(func->name, "_4core9panicking13assert_failed") || 234 strstr(func->name, "_4core9panicking11panic_const24panic_const_") || 235 (strstr(func->name, "_4core5slice5index") && 236 strstr(func->name, "slice_") && 237 str_ends_with(func->name, "_fail")); 238 } 239 240 /* 241 * This checks to see if the given function is a "noreturn" function. 242 * 243 * For global functions which are outside the scope of this object file, we 244 * have to keep a manual list of them. 245 * 246 * For local functions, we have to detect them manually by simply looking for 247 * the lack of a return instruction. 
248 */ 249 static bool __dead_end_function(struct objtool_file *file, struct symbol *func, 250 int recursion) 251 { 252 int i; 253 struct instruction *insn; 254 bool empty = true; 255 256 #define NORETURN(func) __stringify(func), 257 static const char * const global_noreturns[] = { 258 #include "noreturns.h" 259 }; 260 #undef NORETURN 261 262 if (!func) 263 return false; 264 265 if (func->bind == STB_GLOBAL || func->bind == STB_WEAK) { 266 if (is_rust_noreturn(func)) 267 return true; 268 269 for (i = 0; i < ARRAY_SIZE(global_noreturns); i++) 270 if (!strcmp(func->name, global_noreturns[i])) 271 return true; 272 } 273 274 if (func->bind == STB_WEAK) 275 return false; 276 277 if (!func->len) 278 return false; 279 280 insn = find_insn(file, func->sec, func->offset); 281 if (!insn || !insn_func(insn)) 282 return false; 283 284 func_for_each_insn(file, func, insn) { 285 empty = false; 286 287 if (insn->type == INSN_RETURN) 288 return false; 289 } 290 291 if (empty) 292 return false; 293 294 /* 295 * A function can have a sibling call instead of a return. In that 296 * case, the function's dead-end status depends on whether the target 297 * of the sibling call returns. 298 */ 299 func_for_each_insn(file, func, insn) { 300 if (is_sibling_call(insn)) { 301 struct instruction *dest = insn->jump_dest; 302 303 if (!dest) 304 /* sibling call to another file */ 305 return false; 306 307 /* local sibling call */ 308 if (recursion == 5) { 309 /* 310 * Infinite recursion: two functions have 311 * sibling calls to each other. This is a very 312 * rare case. It means they aren't dead ends. 
313 */ 314 return false; 315 } 316 317 return __dead_end_function(file, insn_func(dest), recursion+1); 318 } 319 } 320 321 return true; 322 } 323 324 static bool dead_end_function(struct objtool_file *file, struct symbol *func) 325 { 326 return __dead_end_function(file, func, 0); 327 } 328 329 static void init_cfi_state(struct cfi_state *cfi) 330 { 331 int i; 332 333 for (i = 0; i < CFI_NUM_REGS; i++) { 334 cfi->regs[i].base = CFI_UNDEFINED; 335 cfi->vals[i].base = CFI_UNDEFINED; 336 } 337 cfi->cfa.base = CFI_UNDEFINED; 338 cfi->drap_reg = CFI_UNDEFINED; 339 cfi->drap_offset = -1; 340 } 341 342 static void init_insn_state(struct objtool_file *file, struct insn_state *state, 343 struct section *sec) 344 { 345 memset(state, 0, sizeof(*state)); 346 init_cfi_state(&state->cfi); 347 348 if (opts.noinstr && sec) 349 state->noinstr = sec->noinstr; 350 } 351 352 static struct cfi_state *cfi_alloc(void) 353 { 354 struct cfi_state *cfi = calloc(1, sizeof(struct cfi_state)); 355 if (!cfi) { 356 ERROR_GLIBC("calloc"); 357 exit(1); 358 } 359 nr_cfi++; 360 return cfi; 361 } 362 363 static int cfi_bits; 364 static struct hlist_head *cfi_hash; 365 366 static inline bool cficmp(struct cfi_state *cfi1, struct cfi_state *cfi2) 367 { 368 return memcmp((void *)cfi1 + sizeof(cfi1->hash), 369 (void *)cfi2 + sizeof(cfi2->hash), 370 sizeof(struct cfi_state) - sizeof(struct hlist_node)); 371 } 372 373 static inline u32 cfi_key(struct cfi_state *cfi) 374 { 375 return jhash((void *)cfi + sizeof(cfi->hash), 376 sizeof(*cfi) - sizeof(cfi->hash), 0); 377 } 378 379 static struct cfi_state *cfi_hash_find_or_add(struct cfi_state *cfi) 380 { 381 struct hlist_head *head = &cfi_hash[hash_min(cfi_key(cfi), cfi_bits)]; 382 struct cfi_state *obj; 383 384 hlist_for_each_entry(obj, head, hash) { 385 if (!cficmp(cfi, obj)) { 386 nr_cfi_cache++; 387 return obj; 388 } 389 } 390 391 obj = cfi_alloc(); 392 *obj = *cfi; 393 hlist_add_head(&obj->hash, head); 394 395 return obj; 396 } 397 398 static void 
cfi_hash_add(struct cfi_state *cfi) 399 { 400 struct hlist_head *head = &cfi_hash[hash_min(cfi_key(cfi), cfi_bits)]; 401 402 hlist_add_head(&cfi->hash, head); 403 } 404 405 static void *cfi_hash_alloc(unsigned long size) 406 { 407 cfi_bits = max(10, ilog2(size)); 408 cfi_hash = mmap(NULL, sizeof(struct hlist_head) << cfi_bits, 409 PROT_READ|PROT_WRITE, 410 MAP_PRIVATE|MAP_ANON, -1, 0); 411 if (cfi_hash == (void *)-1L) { 412 ERROR_GLIBC("mmap fail cfi_hash"); 413 cfi_hash = NULL; 414 } else if (opts.stats) { 415 printf("cfi_bits: %d\n", cfi_bits); 416 } 417 418 return cfi_hash; 419 } 420 421 static unsigned long nr_insns; 422 static unsigned long nr_insns_visited; 423 424 /* 425 * Call the arch-specific instruction decoder for all the instructions and add 426 * them to the global instruction list. 427 */ 428 static int decode_instructions(struct objtool_file *file) 429 { 430 struct section *sec; 431 struct symbol *func; 432 unsigned long offset; 433 struct instruction *insn; 434 int ret; 435 436 for_each_sec(file, sec) { 437 struct instruction *insns = NULL; 438 u8 prev_len = 0; 439 u8 idx = 0; 440 441 if (!(sec->sh.sh_flags & SHF_EXECINSTR)) 442 continue; 443 444 if (strcmp(sec->name, ".altinstr_replacement") && 445 strcmp(sec->name, ".altinstr_aux") && 446 strncmp(sec->name, ".discard.", 9)) 447 sec->text = true; 448 449 if (!strcmp(sec->name, ".noinstr.text") || 450 !strcmp(sec->name, ".entry.text") || 451 !strcmp(sec->name, ".cpuidle.text") || 452 !strncmp(sec->name, ".text..__x86.", 13)) 453 sec->noinstr = true; 454 455 /* 456 * .init.text code is ran before userspace and thus doesn't 457 * strictly need retpolines, except for modules which are 458 * loaded late, they very much do need retpoline in their 459 * .init.text 460 */ 461 if (!strcmp(sec->name, ".init.text") && !opts.module) 462 sec->init = true; 463 464 for (offset = 0; offset < sec->sh.sh_size; offset += insn->len) { 465 if (!insns || idx == INSN_CHUNK_MAX) { 466 insns = calloc(sizeof(*insn), 
INSN_CHUNK_SIZE); 467 if (!insns) { 468 ERROR_GLIBC("calloc"); 469 return -1; 470 } 471 idx = 0; 472 } else { 473 idx++; 474 } 475 insn = &insns[idx]; 476 insn->idx = idx; 477 478 INIT_LIST_HEAD(&insn->call_node); 479 insn->sec = sec; 480 insn->offset = offset; 481 insn->prev_len = prev_len; 482 483 ret = arch_decode_instruction(file, sec, offset, 484 sec->sh.sh_size - offset, 485 insn); 486 if (ret) 487 return ret; 488 489 prev_len = insn->len; 490 491 /* 492 * By default, "ud2" is a dead end unless otherwise 493 * annotated, because GCC 7 inserts it for certain 494 * divide-by-zero cases. 495 */ 496 if (insn->type == INSN_BUG) 497 insn->dead_end = true; 498 499 hash_add(file->insn_hash, &insn->hash, sec_offset_hash(sec, insn->offset)); 500 nr_insns++; 501 } 502 503 sec_for_each_sym(sec, func) { 504 if (func->type != STT_NOTYPE && func->type != STT_FUNC) 505 continue; 506 507 if (func->offset == sec->sh.sh_size) { 508 /* Heuristic: likely an "end" symbol */ 509 if (func->type == STT_NOTYPE) 510 continue; 511 ERROR("%s(): STT_FUNC at end of section", func->name); 512 return -1; 513 } 514 515 if (func->embedded_insn || func->alias != func) 516 continue; 517 518 if (!find_insn(file, sec, func->offset)) { 519 ERROR("%s(): can't find starting instruction", func->name); 520 return -1; 521 } 522 523 sym_for_each_insn(file, func, insn) { 524 insn->sym = func; 525 if (func->type == STT_FUNC && 526 insn->type == INSN_ENDBR && 527 list_empty(&insn->call_node)) { 528 if (insn->offset == func->offset) { 529 list_add_tail(&insn->call_node, &file->endbr_list); 530 file->nr_endbr++; 531 } else { 532 file->nr_endbr_int++; 533 } 534 } 535 } 536 } 537 } 538 539 if (opts.stats) 540 printf("nr_insns: %lu\n", nr_insns); 541 542 return 0; 543 } 544 545 /* 546 * Read the pv_ops[] .data table to find the static initialized values. 
547 */ 548 static int add_pv_ops(struct objtool_file *file, const char *symname) 549 { 550 struct symbol *sym, *func; 551 unsigned long off, end; 552 struct reloc *reloc; 553 int idx; 554 555 sym = find_symbol_by_name(file->elf, symname); 556 if (!sym) 557 return 0; 558 559 off = sym->offset; 560 end = off + sym->len; 561 for (;;) { 562 reloc = find_reloc_by_dest_range(file->elf, sym->sec, off, end - off); 563 if (!reloc) 564 break; 565 566 idx = (reloc_offset(reloc) - sym->offset) / sizeof(unsigned long); 567 568 func = reloc->sym; 569 if (func->type == STT_SECTION) 570 func = find_symbol_by_offset(reloc->sym->sec, 571 reloc_addend(reloc)); 572 if (!func) { 573 ERROR_FUNC(reloc->sym->sec, reloc_addend(reloc), 574 "can't find func at %s[%d]", symname, idx); 575 return -1; 576 } 577 578 if (objtool_pv_add(file, idx, func)) 579 return -1; 580 581 off = reloc_offset(reloc) + 1; 582 if (off > end) 583 break; 584 } 585 586 return 0; 587 } 588 589 /* 590 * Allocate and initialize file->pv_ops[]. 
591 */ 592 static int init_pv_ops(struct objtool_file *file) 593 { 594 static const char *pv_ops_tables[] = { 595 "pv_ops", 596 "xen_cpu_ops", 597 "xen_irq_ops", 598 "xen_mmu_ops", 599 NULL, 600 }; 601 const char *pv_ops; 602 struct symbol *sym; 603 int idx, nr, ret; 604 605 if (!opts.noinstr) 606 return 0; 607 608 file->pv_ops = NULL; 609 610 sym = find_symbol_by_name(file->elf, "pv_ops"); 611 if (!sym) 612 return 0; 613 614 nr = sym->len / sizeof(unsigned long); 615 file->pv_ops = calloc(sizeof(struct pv_state), nr); 616 if (!file->pv_ops) { 617 ERROR_GLIBC("calloc"); 618 return -1; 619 } 620 621 for (idx = 0; idx < nr; idx++) 622 INIT_LIST_HEAD(&file->pv_ops[idx].targets); 623 624 for (idx = 0; (pv_ops = pv_ops_tables[idx]); idx++) { 625 ret = add_pv_ops(file, pv_ops); 626 if (ret) 627 return ret; 628 } 629 630 return 0; 631 } 632 633 static int create_static_call_sections(struct objtool_file *file) 634 { 635 struct static_call_site *site; 636 struct section *sec; 637 struct instruction *insn; 638 struct symbol *key_sym; 639 char *key_name, *tmp; 640 int idx; 641 642 sec = find_section_by_name(file->elf, ".static_call_sites"); 643 if (sec) { 644 INIT_LIST_HEAD(&file->static_call_list); 645 WARN("file already has .static_call_sites section, skipping"); 646 return 0; 647 } 648 649 if (list_empty(&file->static_call_list)) 650 return 0; 651 652 idx = 0; 653 list_for_each_entry(insn, &file->static_call_list, call_node) 654 idx++; 655 656 sec = elf_create_section_pair(file->elf, ".static_call_sites", 657 sizeof(*site), idx, idx * 2); 658 if (!sec) 659 return -1; 660 661 /* Allow modules to modify the low bits of static_call_site::key */ 662 sec->sh.sh_flags |= SHF_WRITE; 663 664 idx = 0; 665 list_for_each_entry(insn, &file->static_call_list, call_node) { 666 667 /* populate reloc for 'addr' */ 668 if (!elf_init_reloc_text_sym(file->elf, sec, 669 idx * sizeof(*site), idx * 2, 670 insn->sec, insn->offset)) 671 return -1; 672 673 /* find key symbol */ 674 key_name = 
strdup(insn_call_dest(insn)->name); 675 if (!key_name) { 676 ERROR_GLIBC("strdup"); 677 return -1; 678 } 679 if (strncmp(key_name, STATIC_CALL_TRAMP_PREFIX_STR, 680 STATIC_CALL_TRAMP_PREFIX_LEN)) { 681 ERROR("static_call: trampoline name malformed: %s", key_name); 682 return -1; 683 } 684 tmp = key_name + STATIC_CALL_TRAMP_PREFIX_LEN - STATIC_CALL_KEY_PREFIX_LEN; 685 memcpy(tmp, STATIC_CALL_KEY_PREFIX_STR, STATIC_CALL_KEY_PREFIX_LEN); 686 687 key_sym = find_symbol_by_name(file->elf, tmp); 688 if (!key_sym) { 689 if (!opts.module) { 690 ERROR("static_call: can't find static_call_key symbol: %s", tmp); 691 return -1; 692 } 693 694 /* 695 * For modules(), the key might not be exported, which 696 * means the module can make static calls but isn't 697 * allowed to change them. 698 * 699 * In that case we temporarily set the key to be the 700 * trampoline address. This is fixed up in 701 * static_call_add_module(). 702 */ 703 key_sym = insn_call_dest(insn); 704 } 705 706 /* populate reloc for 'key' */ 707 if (!elf_init_reloc_data_sym(file->elf, sec, 708 idx * sizeof(*site) + 4, 709 (idx * 2) + 1, key_sym, 710 is_sibling_call(insn) * STATIC_CALL_SITE_TAIL)) 711 return -1; 712 713 idx++; 714 } 715 716 return 0; 717 } 718 719 static int create_retpoline_sites_sections(struct objtool_file *file) 720 { 721 struct instruction *insn; 722 struct section *sec; 723 int idx; 724 725 sec = find_section_by_name(file->elf, ".retpoline_sites"); 726 if (sec) { 727 WARN("file already has .retpoline_sites, skipping"); 728 return 0; 729 } 730 731 idx = 0; 732 list_for_each_entry(insn, &file->retpoline_call_list, call_node) 733 idx++; 734 735 if (!idx) 736 return 0; 737 738 sec = elf_create_section_pair(file->elf, ".retpoline_sites", 739 sizeof(int), idx, idx); 740 if (!sec) 741 return -1; 742 743 idx = 0; 744 list_for_each_entry(insn, &file->retpoline_call_list, call_node) { 745 746 if (!elf_init_reloc_text_sym(file->elf, sec, 747 idx * sizeof(int), idx, 748 insn->sec, insn->offset)) 749 
return -1; 750 751 idx++; 752 } 753 754 return 0; 755 } 756 757 static int create_return_sites_sections(struct objtool_file *file) 758 { 759 struct instruction *insn; 760 struct section *sec; 761 int idx; 762 763 sec = find_section_by_name(file->elf, ".return_sites"); 764 if (sec) { 765 WARN("file already has .return_sites, skipping"); 766 return 0; 767 } 768 769 idx = 0; 770 list_for_each_entry(insn, &file->return_thunk_list, call_node) 771 idx++; 772 773 if (!idx) 774 return 0; 775 776 sec = elf_create_section_pair(file->elf, ".return_sites", 777 sizeof(int), idx, idx); 778 if (!sec) 779 return -1; 780 781 idx = 0; 782 list_for_each_entry(insn, &file->return_thunk_list, call_node) { 783 784 if (!elf_init_reloc_text_sym(file->elf, sec, 785 idx * sizeof(int), idx, 786 insn->sec, insn->offset)) 787 return -1; 788 789 idx++; 790 } 791 792 return 0; 793 } 794 795 static int create_ibt_endbr_seal_sections(struct objtool_file *file) 796 { 797 struct instruction *insn; 798 struct section *sec; 799 int idx; 800 801 sec = find_section_by_name(file->elf, ".ibt_endbr_seal"); 802 if (sec) { 803 WARN("file already has .ibt_endbr_seal, skipping"); 804 return 0; 805 } 806 807 idx = 0; 808 list_for_each_entry(insn, &file->endbr_list, call_node) 809 idx++; 810 811 if (opts.stats) { 812 printf("ibt: ENDBR at function start: %d\n", file->nr_endbr); 813 printf("ibt: ENDBR inside functions: %d\n", file->nr_endbr_int); 814 printf("ibt: superfluous ENDBR: %d\n", idx); 815 } 816 817 if (!idx) 818 return 0; 819 820 sec = elf_create_section_pair(file->elf, ".ibt_endbr_seal", 821 sizeof(int), idx, idx); 822 if (!sec) 823 return -1; 824 825 idx = 0; 826 list_for_each_entry(insn, &file->endbr_list, call_node) { 827 828 int *site = (int *)sec->data->d_buf + idx; 829 struct symbol *sym = insn->sym; 830 *site = 0; 831 832 if (opts.module && sym && sym->type == STT_FUNC && 833 insn->offset == sym->offset && 834 (!strcmp(sym->name, "init_module") || 835 !strcmp(sym->name, "cleanup_module"))) { 836 
ERROR("%s(): Magic init_module() function name is deprecated, use module_init(fn) instead", 837 sym->name); 838 return -1; 839 } 840 841 if (!elf_init_reloc_text_sym(file->elf, sec, 842 idx * sizeof(int), idx, 843 insn->sec, insn->offset)) 844 return -1; 845 846 idx++; 847 } 848 849 return 0; 850 } 851 852 static int create_cfi_sections(struct objtool_file *file) 853 { 854 struct section *sec; 855 struct symbol *sym; 856 int idx; 857 858 sec = find_section_by_name(file->elf, ".cfi_sites"); 859 if (sec) { 860 INIT_LIST_HEAD(&file->call_list); 861 WARN("file already has .cfi_sites section, skipping"); 862 return 0; 863 } 864 865 idx = 0; 866 for_each_sym(file, sym) { 867 if (sym->type != STT_FUNC) 868 continue; 869 870 if (strncmp(sym->name, "__cfi_", 6)) 871 continue; 872 873 idx++; 874 } 875 876 sec = elf_create_section_pair(file->elf, ".cfi_sites", 877 sizeof(unsigned int), idx, idx); 878 if (!sec) 879 return -1; 880 881 idx = 0; 882 for_each_sym(file, sym) { 883 if (sym->type != STT_FUNC) 884 continue; 885 886 if (strncmp(sym->name, "__cfi_", 6)) 887 continue; 888 889 if (!elf_init_reloc_text_sym(file->elf, sec, 890 idx * sizeof(unsigned int), idx, 891 sym->sec, sym->offset)) 892 return -1; 893 894 idx++; 895 } 896 897 return 0; 898 } 899 900 static int create_mcount_loc_sections(struct objtool_file *file) 901 { 902 size_t addr_size = elf_addr_size(file->elf); 903 struct instruction *insn; 904 struct section *sec; 905 int idx; 906 907 sec = find_section_by_name(file->elf, "__mcount_loc"); 908 if (sec) { 909 INIT_LIST_HEAD(&file->mcount_loc_list); 910 WARN("file already has __mcount_loc section, skipping"); 911 return 0; 912 } 913 914 if (list_empty(&file->mcount_loc_list)) 915 return 0; 916 917 idx = 0; 918 list_for_each_entry(insn, &file->mcount_loc_list, call_node) 919 idx++; 920 921 sec = elf_create_section_pair(file->elf, "__mcount_loc", addr_size, 922 idx, idx); 923 if (!sec) 924 return -1; 925 926 sec->sh.sh_addralign = addr_size; 927 928 idx = 0; 929 
list_for_each_entry(insn, &file->mcount_loc_list, call_node) { 930 931 struct reloc *reloc; 932 933 reloc = elf_init_reloc_text_sym(file->elf, sec, idx * addr_size, idx, 934 insn->sec, insn->offset); 935 if (!reloc) 936 return -1; 937 938 set_reloc_type(file->elf, reloc, addr_size == 8 ? R_ABS64 : R_ABS32); 939 940 idx++; 941 } 942 943 return 0; 944 } 945 946 static int create_direct_call_sections(struct objtool_file *file) 947 { 948 struct instruction *insn; 949 struct section *sec; 950 int idx; 951 952 sec = find_section_by_name(file->elf, ".call_sites"); 953 if (sec) { 954 INIT_LIST_HEAD(&file->call_list); 955 WARN("file already has .call_sites section, skipping"); 956 return 0; 957 } 958 959 if (list_empty(&file->call_list)) 960 return 0; 961 962 idx = 0; 963 list_for_each_entry(insn, &file->call_list, call_node) 964 idx++; 965 966 sec = elf_create_section_pair(file->elf, ".call_sites", 967 sizeof(unsigned int), idx, idx); 968 if (!sec) 969 return -1; 970 971 idx = 0; 972 list_for_each_entry(insn, &file->call_list, call_node) { 973 974 if (!elf_init_reloc_text_sym(file->elf, sec, 975 idx * sizeof(unsigned int), idx, 976 insn->sec, insn->offset)) 977 return -1; 978 979 idx++; 980 } 981 982 return 0; 983 } 984 985 /* 986 * Warnings shouldn't be reported for ignored functions. 
987 */ 988 static int add_ignores(struct objtool_file *file) 989 { 990 struct section *rsec; 991 struct symbol *func; 992 struct reloc *reloc; 993 994 rsec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard"); 995 if (!rsec) 996 return 0; 997 998 for_each_reloc(rsec, reloc) { 999 switch (reloc->sym->type) { 1000 case STT_FUNC: 1001 func = reloc->sym; 1002 break; 1003 1004 case STT_SECTION: 1005 func = find_func_by_offset(reloc->sym->sec, reloc_addend(reloc)); 1006 if (!func) 1007 continue; 1008 break; 1009 1010 default: 1011 ERROR("unexpected relocation symbol type in %s: %d", 1012 rsec->name, reloc->sym->type); 1013 return -1; 1014 } 1015 1016 func->ignore = true; 1017 if (func->cfunc) 1018 func->cfunc->ignore = true; 1019 } 1020 1021 return 0; 1022 } 1023 1024 /* 1025 * This is a whitelist of functions that is allowed to be called with AC set. 1026 * The list is meant to be minimal and only contains compiler instrumentation 1027 * ABI and a few functions used to implement *_{to,from}_user() functions. 1028 * 1029 * These functions must not directly change AC, but may PUSHF/POPF. 
1030 */ 1031 static const char *uaccess_safe_builtin[] = { 1032 /* KASAN */ 1033 "kasan_report", 1034 "kasan_check_range", 1035 /* KASAN out-of-line */ 1036 "__asan_loadN_noabort", 1037 "__asan_load1_noabort", 1038 "__asan_load2_noabort", 1039 "__asan_load4_noabort", 1040 "__asan_load8_noabort", 1041 "__asan_load16_noabort", 1042 "__asan_storeN_noabort", 1043 "__asan_store1_noabort", 1044 "__asan_store2_noabort", 1045 "__asan_store4_noabort", 1046 "__asan_store8_noabort", 1047 "__asan_store16_noabort", 1048 "__kasan_check_read", 1049 "__kasan_check_write", 1050 /* KASAN in-line */ 1051 "__asan_report_load_n_noabort", 1052 "__asan_report_load1_noabort", 1053 "__asan_report_load2_noabort", 1054 "__asan_report_load4_noabort", 1055 "__asan_report_load8_noabort", 1056 "__asan_report_load16_noabort", 1057 "__asan_report_store_n_noabort", 1058 "__asan_report_store1_noabort", 1059 "__asan_report_store2_noabort", 1060 "__asan_report_store4_noabort", 1061 "__asan_report_store8_noabort", 1062 "__asan_report_store16_noabort", 1063 /* KCSAN */ 1064 "__kcsan_check_access", 1065 "__kcsan_mb", 1066 "__kcsan_wmb", 1067 "__kcsan_rmb", 1068 "__kcsan_release", 1069 "kcsan_found_watchpoint", 1070 "kcsan_setup_watchpoint", 1071 "kcsan_check_scoped_accesses", 1072 "kcsan_disable_current", 1073 "kcsan_enable_current_nowarn", 1074 /* KCSAN/TSAN */ 1075 "__tsan_func_entry", 1076 "__tsan_func_exit", 1077 "__tsan_read_range", 1078 "__tsan_write_range", 1079 "__tsan_read1", 1080 "__tsan_read2", 1081 "__tsan_read4", 1082 "__tsan_read8", 1083 "__tsan_read16", 1084 "__tsan_write1", 1085 "__tsan_write2", 1086 "__tsan_write4", 1087 "__tsan_write8", 1088 "__tsan_write16", 1089 "__tsan_read_write1", 1090 "__tsan_read_write2", 1091 "__tsan_read_write4", 1092 "__tsan_read_write8", 1093 "__tsan_read_write16", 1094 "__tsan_volatile_read1", 1095 "__tsan_volatile_read2", 1096 "__tsan_volatile_read4", 1097 "__tsan_volatile_read8", 1098 "__tsan_volatile_read16", 1099 "__tsan_volatile_write1", 1100 
"__tsan_volatile_write2", 1101 "__tsan_volatile_write4", 1102 "__tsan_volatile_write8", 1103 "__tsan_volatile_write16", 1104 "__tsan_atomic8_load", 1105 "__tsan_atomic16_load", 1106 "__tsan_atomic32_load", 1107 "__tsan_atomic64_load", 1108 "__tsan_atomic8_store", 1109 "__tsan_atomic16_store", 1110 "__tsan_atomic32_store", 1111 "__tsan_atomic64_store", 1112 "__tsan_atomic8_exchange", 1113 "__tsan_atomic16_exchange", 1114 "__tsan_atomic32_exchange", 1115 "__tsan_atomic64_exchange", 1116 "__tsan_atomic8_fetch_add", 1117 "__tsan_atomic16_fetch_add", 1118 "__tsan_atomic32_fetch_add", 1119 "__tsan_atomic64_fetch_add", 1120 "__tsan_atomic8_fetch_sub", 1121 "__tsan_atomic16_fetch_sub", 1122 "__tsan_atomic32_fetch_sub", 1123 "__tsan_atomic64_fetch_sub", 1124 "__tsan_atomic8_fetch_and", 1125 "__tsan_atomic16_fetch_and", 1126 "__tsan_atomic32_fetch_and", 1127 "__tsan_atomic64_fetch_and", 1128 "__tsan_atomic8_fetch_or", 1129 "__tsan_atomic16_fetch_or", 1130 "__tsan_atomic32_fetch_or", 1131 "__tsan_atomic64_fetch_or", 1132 "__tsan_atomic8_fetch_xor", 1133 "__tsan_atomic16_fetch_xor", 1134 "__tsan_atomic32_fetch_xor", 1135 "__tsan_atomic64_fetch_xor", 1136 "__tsan_atomic8_fetch_nand", 1137 "__tsan_atomic16_fetch_nand", 1138 "__tsan_atomic32_fetch_nand", 1139 "__tsan_atomic64_fetch_nand", 1140 "__tsan_atomic8_compare_exchange_strong", 1141 "__tsan_atomic16_compare_exchange_strong", 1142 "__tsan_atomic32_compare_exchange_strong", 1143 "__tsan_atomic64_compare_exchange_strong", 1144 "__tsan_atomic8_compare_exchange_weak", 1145 "__tsan_atomic16_compare_exchange_weak", 1146 "__tsan_atomic32_compare_exchange_weak", 1147 "__tsan_atomic64_compare_exchange_weak", 1148 "__tsan_atomic8_compare_exchange_val", 1149 "__tsan_atomic16_compare_exchange_val", 1150 "__tsan_atomic32_compare_exchange_val", 1151 "__tsan_atomic64_compare_exchange_val", 1152 "__tsan_atomic_thread_fence", 1153 "__tsan_atomic_signal_fence", 1154 "__tsan_unaligned_read16", 1155 "__tsan_unaligned_write16", 1156 /* KCOV */ 
	"write_comp_data",
	"check_kcov_mode",
	"__sanitizer_cov_trace_pc",
	"__sanitizer_cov_trace_const_cmp1",
	"__sanitizer_cov_trace_const_cmp2",
	"__sanitizer_cov_trace_const_cmp4",
	"__sanitizer_cov_trace_const_cmp8",
	"__sanitizer_cov_trace_cmp1",
	"__sanitizer_cov_trace_cmp2",
	"__sanitizer_cov_trace_cmp4",
	"__sanitizer_cov_trace_cmp8",
	"__sanitizer_cov_trace_switch",
	/* KMSAN */
	"kmsan_copy_to_user",
	"kmsan_disable_current",
	"kmsan_enable_current",
	"kmsan_report",
	"kmsan_unpoison_entry_regs",
	"kmsan_unpoison_memory",
	"__msan_chain_origin",
	"__msan_get_context_state",
	"__msan_instrument_asm_store",
	"__msan_metadata_ptr_for_load_1",
	"__msan_metadata_ptr_for_load_2",
	"__msan_metadata_ptr_for_load_4",
	"__msan_metadata_ptr_for_load_8",
	"__msan_metadata_ptr_for_load_n",
	"__msan_metadata_ptr_for_store_1",
	"__msan_metadata_ptr_for_store_2",
	"__msan_metadata_ptr_for_store_4",
	"__msan_metadata_ptr_for_store_8",
	"__msan_metadata_ptr_for_store_n",
	"__msan_poison_alloca",
	"__msan_warning",
	/* UBSAN */
	"ubsan_type_mismatch_common",
	"__ubsan_handle_type_mismatch",
	"__ubsan_handle_type_mismatch_v1",
	"__ubsan_handle_shift_out_of_bounds",
	"__ubsan_handle_load_invalid_value",
	/* KSTACK_ERASE */
	"__sanitizer_cov_stack_depth",
	/* TRACE_BRANCH_PROFILING */
	"ftrace_likely_update",
	/* STACKPROTECTOR */
	"__stack_chk_fail",
	/* misc */
	"csum_partial_copy_generic",
	"copy_mc_fragile",
	"copy_mc_fragile_handle_tail",
	"copy_mc_enhanced_fast_string",
	"rep_stos_alternative",
	"rep_movs_alternative",
	"__copy_user_nocache",
	NULL	/* sentinel terminating the table */
};

/*
 * Mark every symbol named in uaccess_safe_builtin[] as safe to call while
 * uaccess (e.g. STAC/CLAC on x86) is enabled.  Names not present in this
 * object file are silently skipped.  No-op unless --uaccess was given.
 */
static void add_uaccess_safe(struct objtool_file *file)
{
	struct symbol *func;
	const char **name;

	if (!opts.uaccess)
		return;

	for (name = uaccess_safe_builtin; *name; name++) {
		func = find_symbol_by_name(file->elf, *name);
		if (!func)
			continue;

		func->uaccess_safe = true;
	}
}

/*
 * Symbols that replace INSN_CALL_DYNAMIC, every (tail) call to such a symbol
 * will be added to the .retpoline_sites section.
 */
__weak bool arch_is_retpoline(struct symbol *sym)
{
	return false;
}

/*
 * Symbols that replace INSN_RETURN, every (tail) call to such a symbol
 * will be added to the .return_sites section.
 */
__weak bool arch_is_rethunk(struct symbol *sym)
{
	return false;
}

/*
 * Symbols that are embedded inside other instructions, because sometimes crazy
 * code exists. These are mostly ignored for validation purposes.
 */
__weak bool arch_is_embedded_insn(struct symbol *sym)
{
	return false;
}

/*
 * Find the relocation (if any) that applies to @insn's bytes.  Caches a
 * negative result in insn->no_reloc so repeated lookups stay cheap.
 */
static struct reloc *insn_reloc(struct objtool_file *file, struct instruction *insn)
{
	struct reloc *reloc;

	if (insn->no_reloc)
		return NULL;

	if (!file)
		return NULL;

	reloc = find_reloc_by_dest_range(file->elf, insn->sec,
					 insn->offset, insn->len);
	if (!reloc) {
		insn->no_reloc = 1;
		return NULL;
	}

	return reloc;
}

/* Free and detach the singly-linked stack_op list hanging off @insn. */
static void remove_insn_ops(struct instruction *insn)
{
	struct stack_op *op, *next;

	for (op = insn->stack_ops; op; op = next) {
		next = op->next;
		free(op);
	}
	insn->stack_ops = NULL;
}

/*
 * Classify a (sibling) call site: route it onto the static-call, retpoline,
 * mcount or plain call lists, and apply the noinstr/mcount NOP rewrites.
 */
static int annotate_call_site(struct objtool_file *file,
			      struct instruction *insn, bool sibling)
{
	struct reloc *reloc = insn_reloc(file, insn);
	struct symbol *sym = insn_call_dest(insn);

	/*
	 * NOTE(review): when there is no resolved call dest, reloc is
	 * dereferenced unconditionally here — callers presumably guarantee
	 * one of the two exists; confirm before relying on this path.
	 */
	if (!sym)
		sym = reloc->sym;

	if (sym->static_call_tramp) {
		list_add_tail(&insn->call_node, &file->static_call_list);
		return 0;
	}

	if (sym->retpoline_thunk) {
		list_add_tail(&insn->call_node, &file->retpoline_call_list);
		return 0;
	}

	/*
	 * Many compilers cannot disable KCOV or sanitizer calls with a function
	 * attribute so they need a little help, NOP out any such calls from
	 * noinstr text.
	 */
	if (opts.hack_noinstr && insn->sec->noinstr && sym->profiling_func) {
		if (reloc)
			set_reloc_type(file->elf, reloc, R_NONE);

		/* A tail call becomes a RET; a normal call becomes a NOP. */
		if (elf_write_insn(file->elf, insn->sec,
				   insn->offset, insn->len,
				   sibling ? arch_ret_insn(insn->len)
				   : arch_nop_insn(insn->len))) {
			return -1;
		}

		insn->type = sibling ? INSN_RETURN : INSN_NOP;

		if (sibling) {
			/*
			 * We've replaced the tail-call JMP insn by two new
			 * insn: RET; INT3, except we only have a single struct
			 * insn here. Mark it retpoline_safe to avoid the SLS
			 * warning, instead of adding another insn.
			 */
			insn->retpoline_safe = true;
		}

		return 0;
	}

	/* __fentry__ call sites: optionally NOP them and record for mcount_loc. */
	if (opts.mcount && sym->fentry) {
		if (sibling)
			WARN_INSN(insn, "tail call to __fentry__ !?!?");
		if (opts.mnop) {
			if (reloc)
				set_reloc_type(file->elf, reloc, R_NONE);

			if (elf_write_insn(file->elf, insn->sec,
					   insn->offset, insn->len,
					   arch_nop_insn(insn->len))) {
				return -1;
			}

			insn->type = INSN_NOP;
		}

		list_add_tail(&insn->call_node, &file->mcount_loc_list);
		return 0;
	}

	if (insn->type == INSN_CALL && !insn->sec->init &&
	    !insn->_call_dest->embedded_insn)
		list_add_tail(&insn->call_node, &file->call_list);

	if (!sibling && dead_end_function(file, sym))
		insn->dead_end = true;

	return 0;
}

/*
 * Record @dest as @insn's call destination and annotate the call site.
 * @sibling: true for tail calls.
 */
static int add_call_dest(struct objtool_file *file, struct instruction *insn,
			 struct symbol *dest, bool sibling)
{
	insn->_call_dest = dest;
	if (!dest)
		return 0;

	/*
	 * Whatever stack impact regular CALLs have, should be undone
	 * by the RETURN of the called function.
	 *
	 * Annotated intra-function calls retain the stack_ops but
	 * are converted to JUMP, see read_intra_function_calls().
	 */
	remove_insn_ops(insn);

	return annotate_call_site(file, insn, sibling);
}

static int add_retpoline_call(struct objtool_file *file, struct instruction *insn)
{
	/*
	 * Retpoline calls/jumps are really dynamic calls/jumps in disguise,
	 * so convert them accordingly.
	 */
	switch (insn->type) {
	case INSN_CALL:
		insn->type = INSN_CALL_DYNAMIC;
		break;
	case INSN_JUMP_UNCONDITIONAL:
		insn->type = INSN_JUMP_DYNAMIC;
		break;
	case INSN_JUMP_CONDITIONAL:
		insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
		break;
	default:
		return 0;
	}

	insn->retpoline_safe = true;

	/*
	 * Whatever stack impact regular CALLs have, should be undone
	 * by the RETURN of the called function.
	 *
	 * Annotated intra-function calls retain the stack_ops but
	 * are converted to JUMP, see read_intra_function_calls().
	 */
	remove_insn_ops(insn);

	return annotate_call_site(file, insn, false);
}

/* @add: also record the site on the return_thunk_list. */
static void add_return_call(struct objtool_file *file, struct instruction *insn, bool add)
{
	/*
	 * Return thunk tail calls are really just returns in disguise,
	 * so convert them accordingly.
	 */
	insn->type = INSN_RETURN;
	insn->retpoline_safe = true;

	if (add)
		list_add_tail(&insn->call_node, &file->return_thunk_list);
}

/*
 * Is @insn the first "real" instruction of @sym?  With IBT an ENDBR may
 * precede it, so the instruction right after a leading ENDBR also counts.
 */
static bool is_first_func_insn(struct objtool_file *file,
			       struct instruction *insn, struct symbol *sym)
{
	if (insn->offset == sym->offset)
		return true;

	/* Allow direct CALL/JMP past ENDBR */
	if (opts.ibt) {
		struct instruction *prev = prev_insn_same_sym(file, insn);

		if (prev && prev->type == INSN_ENDBR &&
		    insn->offset == sym->offset + prev->len)
			return true;
	}

	return false;
}

/*
 * A sibling call is a tail-call to another symbol -- to differentiate from a
 * recursive tail-call which is to the same symbol.
 */
static bool jump_is_sibling_call(struct objtool_file *file,
				 struct instruction *from, struct instruction *to)
{
	struct symbol *fs = from->sym;
	struct symbol *ts = to->sym;

	/* Not a sibling call if from/to a symbol hole */
	if (!fs || !ts)
		return false;

	/* Not a sibling call if not targeting the start of a symbol. */
	if (!is_first_func_insn(file, to, ts))
		return false;

	/* Disallow sibling calls into STT_NOTYPE */
	if (ts->type == STT_NOTYPE)
		return false;

	/* Must not be self to be a sibling */
	return fs->pfunc != ts->pfunc;
}

/*
 * Find the destination instructions for all jumps.
 */
static int add_jump_destinations(struct objtool_file *file)
{
	struct instruction *insn, *jump_dest;
	struct reloc *reloc;
	struct section *dest_sec;
	unsigned long dest_off;
	int ret;

	for_each_insn(file, insn) {
		struct symbol *func = insn_func(insn);

		if (insn->jump_dest) {
			/*
			 * handle_group_alt() may have previously set
			 * 'jump_dest' for some alternatives.
			 */
			continue;
		}
		if (!is_static_jump(insn))
			continue;

		/* Resolve the jump target: no reloc means a plain intra-section jump. */
		reloc = insn_reloc(file, insn);
		if (!reloc) {
			dest_sec = insn->sec;
			dest_off = arch_jump_destination(insn);
		} else if (reloc->sym->type == STT_SECTION) {
			dest_sec = reloc->sym->sec;
			dest_off = arch_dest_reloc_offset(reloc_addend(reloc));
		} else if (reloc->sym->retpoline_thunk) {
			ret = add_retpoline_call(file, insn);
			if (ret)
				return ret;
			continue;
		} else if (reloc->sym->return_thunk) {
			add_return_call(file, insn, true);
			continue;
		} else if (func) {
			/*
			 * External sibling call or internal sibling call with
			 * STT_FUNC reloc.
			 */
			ret = add_call_dest(file, insn, reloc->sym, true);
			if (ret)
				return ret;
			continue;
		} else if (reloc->sym->sec->idx) {
			dest_sec = reloc->sym->sec;
			dest_off = reloc->sym->sym.st_value +
				   arch_dest_reloc_offset(reloc_addend(reloc));
		} else {
			/* non-func asm code jumping to another file */
			continue;
		}

		jump_dest = find_insn(file, dest_sec, dest_off);
		if (!jump_dest) {
			struct symbol *sym = find_symbol_by_offset(dest_sec, dest_off);

			/*
			 * This is a special case for retbleed_untrain_ret().
			 * It jumps to __x86_return_thunk(), but objtool
			 * can't find the thunk's starting RET
			 * instruction, because the RET is also in the
			 * middle of another instruction.  Objtool only
			 * knows about the outer instruction.
			 */
			if (sym && sym->embedded_insn) {
				add_return_call(file, insn, false);
				continue;
			}

			/*
			 * GCOV/KCOV dead code can jump to the end of the
			 * function/section.
			 */
			if (file->ignore_unreachables && func &&
			    dest_sec == insn->sec &&
			    dest_off == func->offset + func->len)
				continue;

			ERROR_INSN(insn, "can't find jump dest instruction at %s+0x%lx",
				   dest_sec->name, dest_off);
			return -1;
		}

		/*
		 * An intra-TU jump in retpoline.o might not have a relocation
		 * for its jump dest, in which case the above
		 * add_{retpoline,return}_call() didn't happen.
		 */
		if (jump_dest->sym && jump_dest->offset == jump_dest->sym->offset) {
			if (jump_dest->sym->retpoline_thunk) {
				ret = add_retpoline_call(file, insn);
				if (ret)
					return ret;
				continue;
			}
			if (jump_dest->sym->return_thunk) {
				add_return_call(file, insn, true);
				continue;
			}
		}

		/*
		 * Cross-function jump.
		 */
		if (func && insn_func(jump_dest) && func != insn_func(jump_dest)) {

			/*
			 * For GCC 8+, create parent/child links for any cold
			 * subfunctions.  This is _mostly_ redundant with a
			 * similar initialization in read_symbols().
			 *
			 * If a function has aliases, we want the *first* such
			 * function in the symbol table to be the subfunction's
			 * parent.  In that case we overwrite the
			 * initialization done in read_symbols().
			 *
			 * However this code can't completely replace the
			 * read_symbols() code because this doesn't detect the
			 * case where the parent function's only reference to a
			 * subfunction is through a jump table.
			 */
			if (!strstr(func->name, ".cold") &&
			    strstr(insn_func(jump_dest)->name, ".cold")) {
				func->cfunc = insn_func(jump_dest);
				insn_func(jump_dest)->pfunc = func;
			}
		}

		if (jump_is_sibling_call(file, insn, jump_dest)) {
			/*
			 * Internal sibling call without reloc or with
			 * STT_SECTION reloc.
			 */
			ret = add_call_dest(file, insn, insn_func(jump_dest), true);
			if (ret)
				return ret;
			continue;
		}

		insn->jump_dest = jump_dest;
	}

	return 0;
}

/* Prefer an STT_FUNC symbol at @offset; fall back to any symbol there. */
static struct symbol *find_call_destination(struct section *sec, unsigned long offset)
{
	struct symbol *call_dest;

	call_dest = find_func_by_offset(sec, offset);
	if (!call_dest)
		call_dest = find_symbol_by_offset(sec, offset);

	return call_dest;
}

/*
 * Find the destination instructions for all calls.
 */
static int add_call_destinations(struct objtool_file *file)
{
	struct instruction *insn;
	unsigned long dest_off;
	struct symbol *dest;
	struct reloc *reloc;
	int ret;

	for_each_insn(file, insn) {
		struct symbol *func = insn_func(insn);
		if (insn->type != INSN_CALL)
			continue;

		reloc = insn_reloc(file, insn);
		if (!reloc) {
			/* Direct call without reloc: target is in this section. */
			dest_off = arch_jump_destination(insn);
			dest = find_call_destination(insn->sec, dest_off);

			ret = add_call_dest(file, insn, dest, false);
			if (ret)
				return ret;

			if (func && func->ignore)
				continue;

			if (!insn_call_dest(insn)) {
				ERROR_INSN(insn, "unannotated intra-function call");
				return -1;
			}

			if (func && insn_call_dest(insn)->type != STT_FUNC) {
				ERROR_INSN(insn, "unsupported call to non-function");
				return -1;
			}

		} else if (reloc->sym->type == STT_SECTION) {
			dest_off = arch_dest_reloc_offset(reloc_addend(reloc));
			dest = find_call_destination(reloc->sym->sec, dest_off);
			if (!dest) {
				ERROR_INSN(insn, "can't find call dest symbol at %s+0x%lx",
					   reloc->sym->sec->name, dest_off);
				return -1;
			}

			ret = add_call_dest(file, insn, dest, false);
			if (ret)
				return ret;

		} else if (reloc->sym->retpoline_thunk) {
			ret = add_retpoline_call(file, insn);
			if (ret)
return ret; 1695 1696 } else { 1697 ret = add_call_dest(file, insn, reloc->sym, false); 1698 if (ret) 1699 return ret; 1700 } 1701 } 1702 1703 return 0; 1704 } 1705 1706 /* 1707 * The .alternatives section requires some extra special care over and above 1708 * other special sections because alternatives are patched in place. 1709 */ 1710 static int handle_group_alt(struct objtool_file *file, 1711 struct special_alt *special_alt, 1712 struct instruction *orig_insn, 1713 struct instruction **new_insn) 1714 { 1715 struct instruction *last_new_insn = NULL, *insn, *nop = NULL; 1716 struct alt_group *orig_alt_group, *new_alt_group; 1717 unsigned long dest_off; 1718 1719 orig_alt_group = orig_insn->alt_group; 1720 if (!orig_alt_group) { 1721 struct instruction *last_orig_insn = NULL; 1722 1723 orig_alt_group = calloc(1, sizeof(*orig_alt_group)); 1724 if (!orig_alt_group) { 1725 ERROR_GLIBC("calloc"); 1726 return -1; 1727 } 1728 orig_alt_group->cfi = calloc(special_alt->orig_len, 1729 sizeof(struct cfi_state *)); 1730 if (!orig_alt_group->cfi) { 1731 ERROR_GLIBC("calloc"); 1732 return -1; 1733 } 1734 1735 insn = orig_insn; 1736 sec_for_each_insn_from(file, insn) { 1737 if (insn->offset >= special_alt->orig_off + special_alt->orig_len) 1738 break; 1739 1740 insn->alt_group = orig_alt_group; 1741 last_orig_insn = insn; 1742 } 1743 orig_alt_group->orig_group = NULL; 1744 orig_alt_group->first_insn = orig_insn; 1745 orig_alt_group->last_insn = last_orig_insn; 1746 orig_alt_group->nop = NULL; 1747 orig_alt_group->ignore = orig_insn->ignore_alts; 1748 } else { 1749 if (orig_alt_group->last_insn->offset + orig_alt_group->last_insn->len - 1750 orig_alt_group->first_insn->offset != special_alt->orig_len) { 1751 ERROR_INSN(orig_insn, "weirdly overlapping alternative! 
%ld != %d", 1752 orig_alt_group->last_insn->offset + 1753 orig_alt_group->last_insn->len - 1754 orig_alt_group->first_insn->offset, 1755 special_alt->orig_len); 1756 return -1; 1757 } 1758 } 1759 1760 new_alt_group = calloc(1, sizeof(*new_alt_group)); 1761 if (!new_alt_group) { 1762 ERROR_GLIBC("calloc"); 1763 return -1; 1764 } 1765 1766 if (special_alt->new_len < special_alt->orig_len) { 1767 /* 1768 * Insert a fake nop at the end to make the replacement 1769 * alt_group the same size as the original. This is needed to 1770 * allow propagate_alt_cfi() to do its magic. When the last 1771 * instruction affects the stack, the instruction after it (the 1772 * nop) will propagate the new state to the shared CFI array. 1773 */ 1774 nop = calloc(1, sizeof(*nop)); 1775 if (!nop) { 1776 ERROR_GLIBC("calloc"); 1777 return -1; 1778 } 1779 memset(nop, 0, sizeof(*nop)); 1780 1781 nop->sec = special_alt->new_sec; 1782 nop->offset = special_alt->new_off + special_alt->new_len; 1783 nop->len = special_alt->orig_len - special_alt->new_len; 1784 nop->type = INSN_NOP; 1785 nop->sym = orig_insn->sym; 1786 nop->alt_group = new_alt_group; 1787 } 1788 1789 if (!special_alt->new_len) { 1790 *new_insn = nop; 1791 goto end; 1792 } 1793 1794 insn = *new_insn; 1795 sec_for_each_insn_from(file, insn) { 1796 struct reloc *alt_reloc; 1797 1798 if (insn->offset >= special_alt->new_off + special_alt->new_len) 1799 break; 1800 1801 last_new_insn = insn; 1802 1803 insn->sym = orig_insn->sym; 1804 insn->alt_group = new_alt_group; 1805 1806 /* 1807 * Since alternative replacement code is copy/pasted by the 1808 * kernel after applying relocations, generally such code can't 1809 * have relative-address relocation references to outside the 1810 * .altinstr_replacement section, unless the arch's 1811 * alternatives code can adjust the relative offsets 1812 * accordingly. 
1813 */ 1814 alt_reloc = insn_reloc(file, insn); 1815 if (alt_reloc && arch_pc_relative_reloc(alt_reloc) && 1816 !arch_support_alt_relocation(special_alt, insn, alt_reloc)) { 1817 1818 ERROR_INSN(insn, "unsupported relocation in alternatives section"); 1819 return -1; 1820 } 1821 1822 if (!is_static_jump(insn)) 1823 continue; 1824 1825 if (!insn->immediate) 1826 continue; 1827 1828 dest_off = arch_jump_destination(insn); 1829 if (dest_off == special_alt->new_off + special_alt->new_len) { 1830 insn->jump_dest = next_insn_same_sec(file, orig_alt_group->last_insn); 1831 if (!insn->jump_dest) { 1832 ERROR_INSN(insn, "can't find alternative jump destination"); 1833 return -1; 1834 } 1835 } 1836 } 1837 1838 if (!last_new_insn) { 1839 ERROR_FUNC(special_alt->new_sec, special_alt->new_off, 1840 "can't find last new alternative instruction"); 1841 return -1; 1842 } 1843 1844 end: 1845 new_alt_group->orig_group = orig_alt_group; 1846 new_alt_group->first_insn = *new_insn; 1847 new_alt_group->last_insn = last_new_insn; 1848 new_alt_group->nop = nop; 1849 new_alt_group->ignore = (*new_insn)->ignore_alts; 1850 new_alt_group->cfi = orig_alt_group->cfi; 1851 return 0; 1852 } 1853 1854 /* 1855 * A jump table entry can either convert a nop to a jump or a jump to a nop. 1856 * If the original instruction is a jump, make the alt entry an effective nop 1857 * by just skipping the original instruction. 
1858 */ 1859 static int handle_jump_alt(struct objtool_file *file, 1860 struct special_alt *special_alt, 1861 struct instruction *orig_insn, 1862 struct instruction **new_insn) 1863 { 1864 if (orig_insn->type != INSN_JUMP_UNCONDITIONAL && 1865 orig_insn->type != INSN_NOP) { 1866 1867 ERROR_INSN(orig_insn, "unsupported instruction at jump label"); 1868 return -1; 1869 } 1870 1871 if (opts.hack_jump_label && special_alt->key_addend & 2) { 1872 struct reloc *reloc = insn_reloc(file, orig_insn); 1873 1874 if (reloc) 1875 set_reloc_type(file->elf, reloc, R_NONE); 1876 1877 if (elf_write_insn(file->elf, orig_insn->sec, 1878 orig_insn->offset, orig_insn->len, 1879 arch_nop_insn(orig_insn->len))) { 1880 return -1; 1881 } 1882 1883 orig_insn->type = INSN_NOP; 1884 } 1885 1886 if (orig_insn->type == INSN_NOP) { 1887 if (orig_insn->len == 2) 1888 file->jl_nop_short++; 1889 else 1890 file->jl_nop_long++; 1891 1892 return 0; 1893 } 1894 1895 if (orig_insn->len == 2) 1896 file->jl_short++; 1897 else 1898 file->jl_long++; 1899 1900 *new_insn = next_insn_same_sec(file, orig_insn); 1901 return 0; 1902 } 1903 1904 /* 1905 * Read all the special sections which have alternate instructions which can be 1906 * patched in or redirected to at runtime. Each instruction having alternate 1907 * instruction(s) has them added to its insn->alts list, which will be 1908 * traversed in validate_branch(). 
1909 */ 1910 static int add_special_section_alts(struct objtool_file *file) 1911 { 1912 struct list_head special_alts; 1913 struct instruction *orig_insn, *new_insn; 1914 struct special_alt *special_alt, *tmp; 1915 struct alternative *alt; 1916 int ret; 1917 1918 if (special_get_alts(file->elf, &special_alts)) 1919 return -1; 1920 1921 list_for_each_entry_safe(special_alt, tmp, &special_alts, list) { 1922 1923 orig_insn = find_insn(file, special_alt->orig_sec, 1924 special_alt->orig_off); 1925 if (!orig_insn) { 1926 ERROR_FUNC(special_alt->orig_sec, special_alt->orig_off, 1927 "special: can't find orig instruction"); 1928 return -1; 1929 } 1930 1931 new_insn = NULL; 1932 if (!special_alt->group || special_alt->new_len) { 1933 new_insn = find_insn(file, special_alt->new_sec, 1934 special_alt->new_off); 1935 if (!new_insn) { 1936 ERROR_FUNC(special_alt->new_sec, special_alt->new_off, 1937 "special: can't find new instruction"); 1938 return -1; 1939 } 1940 } 1941 1942 if (special_alt->group) { 1943 if (!special_alt->orig_len) { 1944 ERROR_INSN(orig_insn, "empty alternative entry"); 1945 continue; 1946 } 1947 1948 ret = handle_group_alt(file, special_alt, orig_insn, 1949 &new_insn); 1950 if (ret) 1951 return ret; 1952 1953 } else if (special_alt->jump_or_nop) { 1954 ret = handle_jump_alt(file, special_alt, orig_insn, 1955 &new_insn); 1956 if (ret) 1957 return ret; 1958 } 1959 1960 alt = calloc(1, sizeof(*alt)); 1961 if (!alt) { 1962 ERROR_GLIBC("calloc"); 1963 return -1; 1964 } 1965 1966 alt->insn = new_insn; 1967 alt->next = orig_insn->alts; 1968 orig_insn->alts = alt; 1969 1970 list_del(&special_alt->list); 1971 free(special_alt); 1972 } 1973 1974 if (opts.stats) { 1975 printf("jl\\\tNOP\tJMP\n"); 1976 printf("short:\t%ld\t%ld\n", file->jl_nop_short, file->jl_short); 1977 printf("long:\t%ld\t%ld\n", file->jl_nop_long, file->jl_long); 1978 } 1979 1980 return 0; 1981 } 1982 1983 __weak unsigned long arch_jump_table_sym_offset(struct reloc *reloc, struct reloc *table) 
{
	return reloc->sym->offset + reloc_addend(reloc);
}

/*
 * Add each entry of @insn's switch jump table as an alternative branch
 * destination, stopping at the first entry that can't belong to this table.
 */
static int add_jump_table(struct objtool_file *file, struct instruction *insn)
{
	unsigned long table_size = insn_jump_table_size(insn);
	struct symbol *pfunc = insn_func(insn)->pfunc;
	struct reloc *table = insn_jump_table(insn);
	struct instruction *dest_insn;
	unsigned int prev_offset = 0;
	struct reloc *reloc = table;
	struct alternative *alt;
	unsigned long sym_offset;

	/*
	 * Each @reloc is a switch table relocation which points to the target
	 * instruction.
	 */
	for_each_reloc_from(table->sec, reloc) {

		/* Check for the end of the table: */
		if (table_size && reloc_offset(reloc) - reloc_offset(table) >= table_size)
			break;
		/* Hitting the head of another table also ends this one: */
		if (reloc != table && is_jump_table(reloc))
			break;

		/* Make sure the table entries are consecutive: */
		if (prev_offset && reloc_offset(reloc) != prev_offset + arch_reloc_size(reloc))
			break;

		sym_offset = arch_jump_table_sym_offset(reloc, table);

		/* Detect function pointers from contiguous objects: */
		if (reloc->sym->sec == pfunc->sec && sym_offset == pfunc->offset)
			break;

		/*
		 * Clang sometimes leaves dangling unused jump table entries
		 * which point to the end of the function.  Ignore them.
		 */
		if (reloc->sym->sec == pfunc->sec &&
		    sym_offset == pfunc->offset + pfunc->len)
			goto next;

		dest_insn = find_insn(file, reloc->sym->sec, sym_offset);
		if (!dest_insn)
			break;

		/* Make sure the destination is in the same function: */
		if (!insn_func(dest_insn) || insn_func(dest_insn)->pfunc != pfunc)
			break;

		alt = calloc(1, sizeof(*alt));
		if (!alt) {
			ERROR_GLIBC("calloc");
			return -1;
		}

		/* Prepend the destination to insn's alternatives list. */
		alt->insn = dest_insn;
		alt->next = insn->alts;
		insn->alts = alt;
next:
		prev_offset = reloc_offset(reloc);
	}

	/* prev_offset == 0 means not a single entry was accepted. */
	if (!prev_offset) {
		ERROR_INSN(insn, "can't find switch jump table");
		return -1;
	}

	return 0;
}

/*
 * find_jump_table() - Given a dynamic jump, find the switch jump table
 * associated with it.
 */
static void find_jump_table(struct objtool_file *file, struct symbol *func,
			    struct instruction *insn)
{
	struct reloc *table_reloc;
	struct instruction *dest_insn, *orig_insn = insn;
	unsigned long table_size;
	unsigned long sym_offset;

	/*
	 * Backward search using the @first_jump_src links, these help avoid
	 * much of the 'in between' code.  Which avoids us getting confused by
	 * it.
	 */
	for (;
	     insn && insn_func(insn) && insn_func(insn)->pfunc == func;
	     insn = insn->first_jump_src ?: prev_insn_same_sym(file, insn)) {

		/* Another dynamic jump means we've walked past our own table load. */
		if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
			break;

		/* allow small jumps within the range */
		if (insn->type == INSN_JUMP_UNCONDITIONAL &&
		    insn->jump_dest &&
		    (insn->jump_dest->offset <= insn->offset ||
		     insn->jump_dest->offset > orig_insn->offset))
			break;

		table_reloc = arch_find_switch_table(file, insn, &table_size);
		if (!table_reloc)
			continue;

		sym_offset = table_reloc->sym->offset + reloc_addend(table_reloc);

		/* The first table entry must land back inside @func. */
		dest_insn = find_insn(file, table_reloc->sym->sec, sym_offset);
		if (!dest_insn || !insn_func(dest_insn) || insn_func(dest_insn)->pfunc != func)
			continue;

		set_jump_table(table_reloc);
		orig_insn->_jump_table = table_reloc;
		orig_insn->_jump_table_size = table_size;

		break;
	}
}

/*
 * First pass: Mark the head of each jump table so that in the next pass,
 * we know when a given jump table ends and the next one starts.
 */
static void mark_func_jump_tables(struct objtool_file *file,
				  struct symbol *func)
{
	struct instruction *insn, *last = NULL;

	func_for_each_insn(file, func, insn) {
		if (!last)
			last = insn;

		/*
		 * Store back-pointers for unconditional forward jumps such
		 * that find_jump_table() can back-track using those and
		 * avoid some potentially confusing code.
		 */
		if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
		    insn->offset > last->offset &&
		    insn->jump_dest->offset > insn->offset &&
		    !insn->jump_dest->first_jump_src) {

			insn->jump_dest->first_jump_src = insn;
			last = insn->jump_dest;
		}

		if (insn->type != INSN_JUMP_DYNAMIC)
			continue;

		find_jump_table(file, func, insn);
	}
}

/* Second pass: expand every marked jump table into insn->alts entries. */
static int add_func_jump_tables(struct objtool_file *file,
				struct symbol *func)
{
	struct instruction *insn;
	int ret;

	func_for_each_insn(file, func, insn) {
		if (!insn_jump_table(insn))
			continue;

		ret = add_jump_table(file, insn);
		if (ret)
			return ret;
	}

	return 0;
}

/*
 * For some switch statements, gcc generates a jump table in the .rodata
 * section which contains a list of addresses within the function to jump to.
 * This finds these jump tables and adds them to the insn->alts lists.
 */
static int add_jump_table_alts(struct objtool_file *file)
{
	struct symbol *func;
	int ret;

	if (!file->rodata)
		return 0;

	for_each_sym(file, func) {
		if (func->type != STT_FUNC)
			continue;

		mark_func_jump_tables(file, func);
		ret = add_func_jump_tables(file, func);
		if (ret)
			return ret;
	}

	return 0;
}

/* Reset @state to the architecture's initial call-frame CFI. */
static void set_func_state(struct cfi_state *state)
{
	state->cfa = initial_func_cfi.cfa;
	memcpy(&state->regs, &initial_func_cfi.regs,
	       CFI_NUM_REGS * sizeof(struct cfi_reg));
	state->stack_size = initial_func_cfi.cfa.offset;
	state->type = UNWIND_HINT_TYPE_CALL;
}

/*
 * Parse .discard.unwind_hints and attach the encoded CFI state to the
 * annotated instructions.
 */
static int read_unwind_hints(struct objtool_file *file)
{
	struct cfi_state cfi = init_cfi;
	struct section *sec;
	struct unwind_hint *hint;
	struct instruction *insn;
	struct reloc *reloc;
	unsigned long offset;
	int i;

	sec = find_section_by_name(file->elf, ".discard.unwind_hints");
	if (!sec)
		return 0;

	if (!sec->rsec) {
		ERROR("missing .rela.discard.unwind_hints section");
		return -1;
	}

	if (sec->sh.sh_size % sizeof(struct unwind_hint)) {
		ERROR("struct unwind_hint size mismatch");
		return -1;
	}

	file->hints = true;

	for (i = 0; i < sec->sh.sh_size / sizeof(struct unwind_hint); i++) {
		hint = (struct unwind_hint *)sec->data->d_buf + i;

		reloc = find_reloc_by_dest(file->elf, sec, i * sizeof(*hint));
		if (!reloc) {
			ERROR("can't find reloc for unwind_hints[%d]", i);
			return -1;
		}

		if (reloc->sym->type == STT_SECTION) {
			offset = reloc_addend(reloc);
		} else if (reloc->sym->local_label) {
			offset = reloc->sym->offset;
		} else {
			ERROR("unexpected relocation symbol type in %s", sec->rsec->name);
			return -1;
		}

		insn = find_insn(file, reloc->sym->sec, offset);
		if (!insn)
{
			ERROR("can't find insn for unwind_hints[%d]", i);
			return -1;
		}

		insn->hint = true;

		/* Force-undefined: unwinding through here is never attempted. */
		if (hint->type == UNWIND_HINT_TYPE_UNDEFINED) {
			insn->cfi = &force_undefined_cfi;
			continue;
		}

		/* SAVE/RESTORE pair: checkpoint and reapply CFI state. */
		if (hint->type == UNWIND_HINT_TYPE_SAVE) {
			insn->hint = false;
			insn->save = true;
			continue;
		}

		if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
			insn->restore = true;
			continue;
		}

		if (hint->type == UNWIND_HINT_TYPE_REGS_PARTIAL) {
			struct symbol *sym = find_symbol_by_offset(insn->sec, insn->offset);

			if (sym && sym->bind == STB_GLOBAL) {
				/* Global entry points must start with ENDBR under IBT. */
				if (opts.ibt && insn->type != INSN_ENDBR && !insn->noendbr) {
					ERROR_INSN(insn, "UNWIND_HINT_IRET_REGS without ENDBR");
					return -1;
				}
			}
		}

		if (hint->type == UNWIND_HINT_TYPE_FUNC) {
			insn->cfi = &func_cfi;
			continue;
		}

		/* Start from the instruction's current CFI, if any. */
		if (insn->cfi)
			cfi = *(insn->cfi);

		if (arch_decode_hint_reg(hint->sp_reg, &cfi.cfa.base)) {
			ERROR_INSN(insn, "unsupported unwind_hint sp base reg %d", hint->sp_reg);
			return -1;
		}

		cfi.cfa.offset = bswap_if_needed(file->elf, hint->sp_offset);
		cfi.type = hint->type;
		cfi.signal = hint->signal;

		/* Deduplicate via the global CFI hash. */
		insn->cfi = cfi_hash_find_or_add(&cfi);
	}

	return 0;
}

/*
 * Walk .discard.annotate_insn and invoke @func for each (type, insn)
 * annotation entry.
 */
static int read_annotate(struct objtool_file *file,
			 int (*func)(struct objtool_file *file, int type, struct instruction *insn))
{
	struct section *sec;
	struct instruction *insn;
	struct reloc *reloc;
	uint64_t offset;
	int type, ret;

	sec = find_section_by_name(file->elf, ".discard.annotate_insn");
	if (!sec)
		return 0;

	if (!sec->rsec)
		return 0;

	/* Work around linkers that emit a bogus sh_entsize for this section. */
	if (sec->sh.sh_entsize != 8) {
		static bool warned = false;
		if (!warned && opts.verbose) {
			WARN("%s: dodgy linker, sh_entsize != 8", sec->name);
			warned = true;
		}
		sec->sh.sh_entsize = 8;
	}
2320 2321 for_each_reloc(sec->rsec, reloc) { 2322 type = *(u32 *)(sec->data->d_buf + (reloc_idx(reloc) * sec->sh.sh_entsize) + 4); 2323 type = bswap_if_needed(file->elf, type); 2324 2325 offset = reloc->sym->offset + reloc_addend(reloc); 2326 insn = find_insn(file, reloc->sym->sec, offset); 2327 2328 if (!insn) { 2329 ERROR("bad .discard.annotate_insn entry: %d of type %d", reloc_idx(reloc), type); 2330 return -1; 2331 } 2332 2333 ret = func(file, type, insn); 2334 if (ret < 0) 2335 return ret; 2336 } 2337 2338 return 0; 2339 } 2340 2341 static int __annotate_early(struct objtool_file *file, int type, struct instruction *insn) 2342 { 2343 switch (type) { 2344 2345 /* Must be before add_special_section_alts() */ 2346 case ANNOTYPE_IGNORE_ALTS: 2347 insn->ignore_alts = true; 2348 break; 2349 2350 /* 2351 * Must be before read_unwind_hints() since that needs insn->noendbr. 2352 */ 2353 case ANNOTYPE_NOENDBR: 2354 insn->noendbr = 1; 2355 break; 2356 2357 default: 2358 break; 2359 } 2360 2361 return 0; 2362 } 2363 2364 static int __annotate_ifc(struct objtool_file *file, int type, struct instruction *insn) 2365 { 2366 unsigned long dest_off; 2367 2368 if (type != ANNOTYPE_INTRA_FUNCTION_CALL) 2369 return 0; 2370 2371 if (insn->type != INSN_CALL) { 2372 ERROR_INSN(insn, "intra_function_call not a direct call"); 2373 return -1; 2374 } 2375 2376 /* 2377 * Treat intra-function CALLs as JMPs, but with a stack_op. 2378 * See add_call_destinations(), which strips stack_ops from 2379 * normal CALLs. 
	 */
	insn->type = INSN_JUMP_UNCONDITIONAL;

	dest_off = arch_jump_destination(insn);
	insn->jump_dest = find_insn(file, insn->sec, dest_off);
	if (!insn->jump_dest) {
		ERROR_INSN(insn, "can't find call dest at %s+0x%lx",
			   insn->sec->name, dest_off);
		return -1;
	}

	return 0;
}

/*
 * Annotations applied late, after jump/call destinations have been
 * resolved.  Unlike __annotate_early(), an unknown type here is a hard
 * error.
 */
static int __annotate_late(struct objtool_file *file, int type, struct instruction *insn)
{
	struct symbol *sym;

	switch (type) {
	case ANNOTYPE_NOENDBR:
		/* early */
		break;

	case ANNOTYPE_RETPOLINE_SAFE:
		/* Only valid on indirect jumps/calls, returns and NOPs. */
		if (insn->type != INSN_JUMP_DYNAMIC &&
		    insn->type != INSN_CALL_DYNAMIC &&
		    insn->type != INSN_RETURN &&
		    insn->type != INSN_NOP) {
			ERROR_INSN(insn, "retpoline_safe hint not an indirect jump/call/ret/nop");
			return -1;
		}

		insn->retpoline_safe = true;
		break;

	/* BEGIN/END markers adjust a nesting counter on the instruction. */
	case ANNOTYPE_INSTR_BEGIN:
		insn->instr++;
		break;

	case ANNOTYPE_INSTR_END:
		insn->instr--;
		break;

	case ANNOTYPE_UNRET_BEGIN:
		insn->unret = 1;
		break;

	case ANNOTYPE_IGNORE_ALTS:
		/* early */
		break;

	case ANNOTYPE_INTRA_FUNCTION_CALL:
		/* ifc */
		break;

	case ANNOTYPE_REACHABLE:
		insn->dead_end = false;
		break;

	case ANNOTYPE_NOCFI:
		sym = insn->sym;
		if (!sym) {
			ERROR_INSN(insn, "dodgy NOCFI annotation");
			return -1;
		}
		insn->sym->nocfi = 1;
		break;

	default:
		ERROR_INSN(insn, "Unknown annotation type: %d", type);
		return -1;
	}

	return 0;
}

/*
 * Return true if name matches an instrumentation function, where calls to that
 * function from noinstr code can safely be removed, but compilers won't do so.
 */
static bool is_profiling_func(const char *name)
{
	/*
	 * Many compilers cannot disable KCOV with a function attribute.
2464 */ 2465 if (!strncmp(name, "__sanitizer_cov_", 16)) 2466 return true; 2467 2468 return false; 2469 } 2470 2471 static int classify_symbols(struct objtool_file *file) 2472 { 2473 struct symbol *func; 2474 2475 for_each_sym(file, func) { 2476 if (func->type == STT_NOTYPE && strstarts(func->name, ".L")) 2477 func->local_label = true; 2478 2479 if (func->bind != STB_GLOBAL) 2480 continue; 2481 2482 if (!strncmp(func->name, STATIC_CALL_TRAMP_PREFIX_STR, 2483 strlen(STATIC_CALL_TRAMP_PREFIX_STR))) 2484 func->static_call_tramp = true; 2485 2486 if (arch_is_retpoline(func)) 2487 func->retpoline_thunk = true; 2488 2489 if (arch_is_rethunk(func)) 2490 func->return_thunk = true; 2491 2492 if (arch_is_embedded_insn(func)) 2493 func->embedded_insn = true; 2494 2495 if (arch_ftrace_match(func->name)) 2496 func->fentry = true; 2497 2498 if (is_profiling_func(func->name)) 2499 func->profiling_func = true; 2500 } 2501 2502 return 0; 2503 } 2504 2505 static void mark_rodata(struct objtool_file *file) 2506 { 2507 struct section *sec; 2508 bool found = false; 2509 2510 /* 2511 * Search for the following rodata sections, each of which can 2512 * potentially contain jump tables: 2513 * 2514 * - .rodata: can contain GCC switch tables 2515 * - .rodata.<func>: same, if -fdata-sections is being used 2516 * - .data.rel.ro.c_jump_table: contains C annotated jump tables 2517 * 2518 * .rodata.str1.* sections are ignored; they don't contain jump tables. 2519 */ 2520 for_each_sec(file, sec) { 2521 if ((!strncmp(sec->name, ".rodata", 7) && 2522 !strstr(sec->name, ".str1.")) || 2523 !strncmp(sec->name, ".data.rel.ro", 12)) { 2524 sec->rodata = true; 2525 found = true; 2526 } 2527 } 2528 2529 file->rodata = found; 2530 } 2531 2532 static int decode_sections(struct objtool_file *file) 2533 { 2534 int ret; 2535 2536 mark_rodata(file); 2537 2538 ret = init_pv_ops(file); 2539 if (ret) 2540 return ret; 2541 2542 /* 2543 * Must be before add_{jump_call}_destination. 
	 */
	ret = classify_symbols(file);
	if (ret)
		return ret;

	/* Decode the instructions. */
	ret = decode_instructions(file);
	if (ret)
		return ret;

	ret = add_ignores(file);
	if (ret)
		return ret;

	add_uaccess_safe(file);

	/* Early annotations: IGNORE_ALTS, NOENDBR. */
	ret = read_annotate(file, __annotate_early);
	if (ret)
		return ret;

	/*
	 * Must be before add_jump_destinations(), which depends on 'func'
	 * being set for alternatives, to enable proper sibling call detection.
	 */
	if (opts.stackval || opts.orc || opts.uaccess || opts.noinstr) {
		ret = add_special_section_alts(file);
		if (ret)
			return ret;
	}

	ret = add_jump_destinations(file);
	if (ret)
		return ret;

	/*
	 * Must be before add_call_destination(); it changes INSN_CALL to
	 * INSN_JUMP.
	 */
	ret = read_annotate(file, __annotate_ifc);
	if (ret)
		return ret;

	ret = add_call_destinations(file);
	if (ret)
		return ret;

	ret = add_jump_table_alts(file);
	if (ret)
		return ret;

	ret = read_unwind_hints(file);
	if (ret)
		return ret;

	/*
	 * Must be after add_call_destinations() such that it can override
	 * dead_end_function() marks.
2600 */ 2601 ret = read_annotate(file, __annotate_late); 2602 if (ret) 2603 return ret; 2604 2605 return 0; 2606 } 2607 2608 static bool is_special_call(struct instruction *insn) 2609 { 2610 if (insn->type == INSN_CALL) { 2611 struct symbol *dest = insn_call_dest(insn); 2612 2613 if (!dest) 2614 return false; 2615 2616 if (dest->fentry || dest->embedded_insn) 2617 return true; 2618 } 2619 2620 return false; 2621 } 2622 2623 static bool has_modified_stack_frame(struct instruction *insn, struct insn_state *state) 2624 { 2625 struct cfi_state *cfi = &state->cfi; 2626 int i; 2627 2628 if (cfi->cfa.base != initial_func_cfi.cfa.base || cfi->drap) 2629 return true; 2630 2631 if (cfi->cfa.offset != initial_func_cfi.cfa.offset) 2632 return true; 2633 2634 if (cfi->stack_size != initial_func_cfi.cfa.offset) 2635 return true; 2636 2637 for (i = 0; i < CFI_NUM_REGS; i++) { 2638 if (cfi->regs[i].base != initial_func_cfi.regs[i].base || 2639 cfi->regs[i].offset != initial_func_cfi.regs[i].offset) 2640 return true; 2641 } 2642 2643 return false; 2644 } 2645 2646 static bool check_reg_frame_pos(const struct cfi_reg *reg, 2647 int expected_offset) 2648 { 2649 return reg->base == CFI_CFA && 2650 reg->offset == expected_offset; 2651 } 2652 2653 static bool has_valid_stack_frame(struct insn_state *state) 2654 { 2655 struct cfi_state *cfi = &state->cfi; 2656 2657 if (cfi->cfa.base == CFI_BP && 2658 check_reg_frame_pos(&cfi->regs[CFI_BP], -cfi->cfa.offset) && 2659 check_reg_frame_pos(&cfi->regs[CFI_RA], -cfi->cfa.offset + 8)) 2660 return true; 2661 2662 if (cfi->drap && cfi->regs[CFI_BP].base == CFI_BP) 2663 return true; 2664 2665 return false; 2666 } 2667 2668 static int update_cfi_state_regs(struct instruction *insn, 2669 struct cfi_state *cfi, 2670 struct stack_op *op) 2671 { 2672 struct cfi_reg *cfa = &cfi->cfa; 2673 2674 if (cfa->base != CFI_SP && cfa->base != CFI_SP_INDIRECT) 2675 return 0; 2676 2677 /* push */ 2678 if (op->dest.type == OP_DEST_PUSH || op->dest.type == 
OP_DEST_PUSHF) 2679 cfa->offset += 8; 2680 2681 /* pop */ 2682 if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF) 2683 cfa->offset -= 8; 2684 2685 /* add immediate to sp */ 2686 if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD && 2687 op->dest.reg == CFI_SP && op->src.reg == CFI_SP) 2688 cfa->offset -= op->src.offset; 2689 2690 return 0; 2691 } 2692 2693 static void save_reg(struct cfi_state *cfi, unsigned char reg, int base, int offset) 2694 { 2695 if (arch_callee_saved_reg(reg) && 2696 cfi->regs[reg].base == CFI_UNDEFINED) { 2697 cfi->regs[reg].base = base; 2698 cfi->regs[reg].offset = offset; 2699 } 2700 } 2701 2702 static void restore_reg(struct cfi_state *cfi, unsigned char reg) 2703 { 2704 cfi->regs[reg].base = initial_func_cfi.regs[reg].base; 2705 cfi->regs[reg].offset = initial_func_cfi.regs[reg].offset; 2706 } 2707 2708 /* 2709 * A note about DRAP stack alignment: 2710 * 2711 * GCC has the concept of a DRAP register, which is used to help keep track of 2712 * the stack pointer when aligning the stack. r10 or r13 is used as the DRAP 2713 * register. The typical DRAP pattern is: 2714 * 2715 * 4c 8d 54 24 08 lea 0x8(%rsp),%r10 2716 * 48 83 e4 c0 and $0xffffffffffffffc0,%rsp 2717 * 41 ff 72 f8 pushq -0x8(%r10) 2718 * 55 push %rbp 2719 * 48 89 e5 mov %rsp,%rbp 2720 * (more pushes) 2721 * 41 52 push %r10 2722 * ... 
 *   41 5a			pop    %r10
 *				(more pops)
 *   5d				pop    %rbp
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * There are some variations in the epilogues, like:
 *
 *   5b				pop    %rbx
 *   41 5a			pop    %r10
 *   41 5c			pop    %r12
 *   41 5d			pop    %r13
 *   41 5e			pop    %r14
 *   c9				leaveq
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * and:
 *
 *   4c 8b 55 e8		mov    -0x18(%rbp),%r10
 *   48 8b 5d e0		mov    -0x20(%rbp),%rbx
 *   4c 8b 65 f0		mov    -0x10(%rbp),%r12
 *   4c 8b 6d f8		mov    -0x8(%rbp),%r13
 *   c9				leaveq
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * Sometimes r13 is used as the DRAP register, in which case it's saved and
 * restored beforehand:
 *
 *   41 55			push   %r13
 *   4c 8d 6c 24 10		lea    0x10(%rsp),%r13
 *   48 83 e4 f0		and    $0xfffffffffffffff0,%rsp
 *				...
 *   49 8d 65 f0		lea    -0x10(%r13),%rsp
 *   41 5d			pop    %r13
 *   c3				retq
 */
/*
 * Apply a single stack_op to the CFI state: track the CFA base/offset,
 * callee-saved register save slots and the DRAP dance described above.
 * Returns non-zero on a stack state the checker cannot handle.
 */
static int update_cfi_state(struct instruction *insn,
			    struct instruction *next_insn,
			    struct cfi_state *cfi, struct stack_op *op)
{
	struct cfi_reg *cfa = &cfi->cfa;
	struct cfi_reg *regs = cfi->regs;

	/* ignore UNWIND_HINT_UNDEFINED regions */
	if (cfi->force_undefined)
		return 0;

	/* stack operations don't make sense with an undefined CFA */
	if (cfa->base == CFI_UNDEFINED) {
		if (insn_func(insn)) {
			WARN_INSN(insn, "undefined stack state");
			return 1;
		}
		return 0;
	}

	if (cfi->type == UNWIND_HINT_TYPE_REGS ||
	    cfi->type == UNWIND_HINT_TYPE_REGS_PARTIAL)
		return update_cfi_state_regs(insn, cfi, op);

	switch (op->dest.type) {

	case OP_DEST_REG:
		switch (op->src.type) {

		case OP_SRC_REG:
			if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
			    cfa->base == CFI_SP &&
			    check_reg_frame_pos(&regs[CFI_BP], -cfa->offset)) {

				/* mov %rsp, %rbp */
				cfa->base = op->dest.reg;
				cfi->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP &&
				 op->dest.reg == CFI_BP && cfi->drap) {

				/* drap: mov %rsp, %rbp */
				regs[CFI_BP].base = CFI_BP;
				regs[CFI_BP].offset = -cfi->stack_size;
				cfi->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/*
				 * mov %rsp, %reg
				 *
				 * This is needed for the rare case where GCC
				 * does:
				 *
				 *   mov    %rsp, %rax
				 *   ...
				 *   mov    %rax, %rsp
				 */
				/* remember that %reg now holds a CFA-relative value */
				cfi->vals[op->dest.reg].base = CFI_CFA;
				cfi->vals[op->dest.reg].offset = -cfi->stack_size;
			}

			else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
				 (cfa->base == CFI_BP || cfa->base == cfi->drap_reg)) {

				/*
				 * mov %rbp, %rsp
				 *
				 * Restore the original stack pointer (Clang).
				 */
				cfi->stack_size = -cfi->regs[CFI_BP].offset;
			}

			else if (op->dest.reg == cfa->base) {

				/* mov %reg, %rsp */
				if (cfa->base == CFI_SP &&
				    cfi->vals[op->src.reg].base == CFI_CFA) {

					/*
					 * This is needed for the rare case
					 * where GCC does something dumb like:
					 *
					 *   lea    0x8(%rsp), %rcx
					 *   ...
					 *   mov    %rcx, %rsp
					 */
					cfa->offset = -cfi->vals[op->src.reg].offset;
					cfi->stack_size = cfa->offset;

				} else if (cfa->base == CFI_SP &&
					   cfi->vals[op->src.reg].base == CFI_SP_INDIRECT &&
					   cfi->vals[op->src.reg].offset == cfa->offset) {

					/*
					 * Stack swizzle:
					 *
					 * 1: mov %rsp, (%[tos])
					 * 2: mov %[tos], %rsp
					 *    ...
					 * 3: pop %rsp
					 *
					 * Where:
					 *
					 * 1 - places a pointer to the previous
					 *     stack at the Top-of-Stack of the
					 *     new stack.
					 *
					 * 2 - switches to the new stack.
					 *
					 * 3 - pops the Top-of-Stack to restore
					 *     the original stack.
					 *
					 * Note: we set base to SP_INDIRECT
					 * here and preserve offset.
Therefore
					 * when the unwinder reaches ToS it
					 * will dereference SP and then add the
					 * offset to find the next frame, IOW:
					 * (%rsp) + offset.
					 */
					cfa->base = CFI_SP_INDIRECT;

				} else {
					cfa->base = CFI_UNDEFINED;
					cfa->offset = 0;
				}
			}

			else if (op->dest.reg == CFI_SP &&
				 cfi->vals[op->src.reg].base == CFI_SP_INDIRECT &&
				 cfi->vals[op->src.reg].offset == cfa->offset) {

				/*
				 * The same stack swizzle case 2) as above. But
				 * because we can't change cfa->base, case 3)
				 * will become a regular POP. Pretend we're a
				 * PUSH so things don't go unbalanced.
				 */
				cfi->stack_size += 8;
			}


			break;

		/* add/lea: SP adjustments and DRAP address computations */
		case OP_SRC_ADD:
			if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {

				/* add imm, %rsp */
				cfi->stack_size -= op->src.offset;
				if (cfa->base == CFI_SP)
					cfa->offset -= op->src.offset;
				break;
			}

			if (op->dest.reg == CFI_BP && op->src.reg == CFI_SP &&
			    insn->sym->frame_pointer) {
				/* addi.d fp,sp,imm on LoongArch */
				if (cfa->base == CFI_SP && cfa->offset == op->src.offset) {
					cfa->base = CFI_BP;
					cfa->offset = 0;
				}
				break;
			}

			if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
				/* addi.d sp,fp,imm on LoongArch */
				if (cfa->base == CFI_BP && cfa->offset == 0) {
					if (insn->sym->frame_pointer) {
						cfa->base = CFI_SP;
						cfa->offset = -op->src.offset;
					}
				} else {
					/* lea disp(%rbp), %rsp */
					cfi->stack_size = -(op->src.offset + regs[CFI_BP].offset);
				}
				break;
			}

			if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/* drap: lea disp(%rsp), %drap */
				cfi->drap_reg = op->dest.reg;

				/*
				 * lea disp(%rsp), %reg
				 *
				 * This is needed for the rare case where GCC
				 * does something dumb like:
				 *
				 *   lea    0x8(%rsp), %rcx
				 *   ...
				 *   mov    %rcx, %rsp
				 */
				cfi->vals[op->dest.reg].base = CFI_CFA;
				cfi->vals[op->dest.reg].offset = \
					-cfi->stack_size + op->src.offset;

				break;
			}

			if (cfi->drap && op->dest.reg == CFI_SP &&
			    op->src.reg == cfi->drap_reg) {

				/* drap: lea disp(%drap), %rsp */
				cfa->base = CFI_SP;
				cfa->offset = cfi->stack_size = -op->src.offset;
				cfi->drap_reg = CFI_UNDEFINED;
				cfi->drap = false;
				break;
			}

			/* A hint on the next insn takes over; otherwise give up. */
			if (op->dest.reg == cfi->cfa.base && !(next_insn && next_insn->hint)) {
				WARN_INSN(insn, "unsupported stack register modification");
				return -1;
			}

			break;

		case OP_SRC_AND:
			if (op->dest.reg != CFI_SP ||
			    (cfi->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
			    (cfi->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
				WARN_INSN(insn, "unsupported stack pointer realignment");
				return -1;
			}

			if (cfi->drap_reg != CFI_UNDEFINED) {
				/* drap: and imm, %rsp */
				cfa->base = cfi->drap_reg;
				cfa->offset = cfi->stack_size = 0;
				cfi->drap = true;
			}

			/*
			 * Older versions of GCC (4.8ish) realign the stack
			 * without DRAP, with a frame pointer.
			 */

			break;

		case OP_SRC_POP:
		case OP_SRC_POPF:
			if (op->dest.reg == CFI_SP && cfa->base == CFI_SP_INDIRECT) {

				/* pop %rsp; # restore from a stack swizzle */
				cfa->base = CFI_SP;
				break;
			}

			if (!cfi->drap && op->dest.reg == cfa->base) {

				/* pop %rbp */
				cfa->base = CFI_SP;
			}

			if (cfi->drap && cfa->base == CFI_BP_INDIRECT &&
			    op->dest.reg == cfi->drap_reg &&
			    cfi->drap_offset == -cfi->stack_size) {

				/* drap: pop %drap */
				cfa->base = cfi->drap_reg;
				cfa->offset = 0;
				cfi->drap_offset = -1;

			} else if (cfi->stack_size == -regs[op->dest.reg].offset) {

				/* pop %reg */
				restore_reg(cfi, op->dest.reg);
			}

			/* a pop always shrinks the stack by one slot */
			cfi->stack_size -= 8;
			if (cfa->base == CFI_SP)
				cfa->offset -= 8;

			break;

		case OP_SRC_REG_INDIRECT:
			if (!cfi->drap && op->dest.reg == cfa->base &&
			    op->dest.reg == CFI_BP) {

				/* mov disp(%rsp), %rbp */
				cfa->base = CFI_SP;
				cfa->offset = cfi->stack_size;
			}

			if (cfi->drap && op->src.reg == CFI_BP &&
			    op->src.offset == cfi->drap_offset) {

				/* drap: mov disp(%rbp), %drap */
				cfa->base = cfi->drap_reg;
				cfa->offset = 0;
				cfi->drap_offset = -1;
			}

			if (cfi->drap && op->src.reg == CFI_BP &&
			    op->src.offset == regs[op->dest.reg].offset) {

				/* drap: mov disp(%rbp), %reg */
				restore_reg(cfi, op->dest.reg);

			} else if (op->src.reg == cfa->base &&
				   op->src.offset == regs[op->dest.reg].offset + cfa->offset) {

				/* mov disp(%rbp), %reg */
				/* mov disp(%rsp), %reg */
				restore_reg(cfi, op->dest.reg);

			} else if (op->src.reg == CFI_SP &&
				   op->src.offset == regs[op->dest.reg].offset + cfi->stack_size) {

				/* mov disp(%rsp), %reg */
				restore_reg(cfi, op->dest.reg);
			}

			break;

		default:
			WARN_INSN(insn, "unknown stack-related instruction");
			return -1;
		}

		break;

	case OP_DEST_PUSH:
	case OP_DEST_PUSHF:
		/* a push always grows the stack by one slot */
		cfi->stack_size += 8;
		if (cfa->base == CFI_SP)
			cfa->offset += 8;

		if (op->src.type != OP_SRC_REG)
			break;

		if (cfi->drap) {
			if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {

				/* drap: push %drap */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = -cfi->stack_size;

				/* save drap so we know when to restore it */
				cfi->drap_offset = -cfi->stack_size;

			} else if (op->src.reg == CFI_BP && cfa->base == cfi->drap_reg) {

				/* drap: push %rbp */
				cfi->stack_size = 0;

			} else {

				/* drap: push %reg */
				save_reg(cfi, op->src.reg, CFI_BP, -cfi->stack_size);
			}

		} else {

			/* push %reg */
			save_reg(cfi, op->src.reg, CFI_CFA, -cfi->stack_size);
		}

		/* detect when asm code uses rbp as a scratch register */
		if (opts.stackval && insn_func(insn) && op->src.reg == CFI_BP &&
		    cfa->base != CFI_BP)
			cfi->bp_scratch = true;
		break;

	case OP_DEST_REG_INDIRECT:

		if (cfi->drap) {
			if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {

				/* drap: mov %drap, disp(%rbp) */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = op->dest.offset;

				/* save drap offset so we know when to restore it */
				cfi->drap_offset = op->dest.offset;
			} else {

				/* drap: mov reg, disp(%rbp) */
				save_reg(cfi, op->src.reg, CFI_BP, op->dest.offset);
			}

		} else if (op->dest.reg == cfa->base) {

			/* mov reg, disp(%rbp) */
			/* mov reg, disp(%rsp) */
			save_reg(cfi, op->src.reg, CFI_CFA,
				 op->dest.offset - cfi->cfa.offset);

		} else if (op->dest.reg == CFI_SP) {

			/* mov reg, disp(%rsp) */
			save_reg(cfi, op->src.reg, CFI_CFA,
				 op->dest.offset - cfi->stack_size);

		} else if (op->src.reg == CFI_SP &&
			   op->dest.offset == 0) {

			/* mov %rsp, (%reg); # setup a stack swizzle. */
			cfi->vals[op->dest.reg].base = CFI_SP_INDIRECT;
			cfi->vals[op->dest.reg].offset = cfa->offset;
		}

		break;

	case OP_DEST_MEM:
		/* only pop-to-memory is understood here */
		if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
			WARN_INSN(insn, "unknown stack-related memory operation");
			return -1;
		}

		/* pop mem */
		cfi->stack_size -= 8;
		if (cfa->base == CFI_SP)
			cfa->offset -= 8;

		break;

	default:
		WARN_INSN(insn, "unknown stack-related instruction");
		return -1;
	}

	return 0;
}

/*
 * The stack layouts of alternatives instructions can sometimes diverge when
 * they have stack modifications. That's fine as long as the potential stack
 * layouts don't conflict at any given potential instruction boundary.
 *
 * Flatten the CFIs of the different alternative code streams (both original
 * and replacement) into a single shared CFI array which can be used to detect
 * conflicts and nicely feed a linear array of ORC entries to the unwinder.
 */
static int propagate_alt_cfi(struct objtool_file *file, struct instruction *insn)
{
	struct cfi_state **alt_cfi;
	int group_off;

	if (!insn->alt_group)
		return 0;

	if (!insn->cfi) {
		WARN("CFI missing");
		return -1;
	}

	alt_cfi = insn->alt_group->cfi;
	/* index into the shared per-group CFI array */
	group_off = insn->offset - insn->alt_group->first_insn->offset;

	if (!alt_cfi[group_off]) {
		alt_cfi[group_off] = insn->cfi;
	} else {
		if (cficmp(alt_cfi[group_off], insn->cfi)) {
			struct alt_group *orig_group = insn->alt_group->orig_group ?: insn->alt_group;
			struct instruction *orig = orig_group->first_insn;
			WARN_INSN(orig, "stack layout conflict in alternatives: %s",
				  offstr(insn->sec, insn->offset));
			return -1;
		}
	}

	return 0;
}

/*
 * Apply all of an instruction's stack_ops to the CFI state and, for
 * alternatives, maintain the PUSHF/POPF uaccess shadow stack.
 */
static int handle_insn_ops(struct instruction *insn,
			   struct instruction *next_insn,
			   struct insn_state *state)
{
	struct stack_op *op;
	int ret;

	for (op = insn->stack_ops; op; op = op->next) {

		ret = update_cfi_state(insn, next_insn, &state->cfi, op);
		if (ret)
			return ret;

		if (!opts.uaccess || !insn->alt_group)
			continue;

		/* PUSHF: shift the current uaccess bit onto the shadow stack. */
		if (op->dest.type == OP_DEST_PUSHF) {
			if (!state->uaccess_stack) {
				state->uaccess_stack = 1;
			} else if (state->uaccess_stack >> 31) {
				WARN_INSN(insn, "PUSHF stack exhausted");
				return 1;
			}
			state->uaccess_stack <<= 1;
			state->uaccess_stack |= state->uaccess;
		}

		/* POPF: restore the uaccess bit saved by the matching PUSHF. */
		if (op->src.type == OP_SRC_POPF) {
			if (state->uaccess_stack) {
				state->uaccess = state->uaccess_stack & 1;
				state->uaccess_stack >>= 1;
				if (state->uaccess_stack == 1)
					state->uaccess_stack = 0;
			}
		}
	}

	return 0;
}

/*
 * Compare an instruction's recorded CFI against the state reached via the
 * current path; warn on the first difference found.
 */
static bool insn_cfi_match(struct instruction *insn, struct cfi_state *cfi2)
{
	struct cfi_state *cfi1 = insn->cfi;
	int i;

	if (!cfi1) {
		WARN("CFI missing");
		return false;
	}

	if (memcmp(&cfi1->cfa, &cfi2->cfa, sizeof(cfi1->cfa))) {

		WARN_INSN(insn, "stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
			  cfi1->cfa.base, cfi1->cfa.offset,
			  cfi2->cfa.base, cfi2->cfa.offset);
		return false;

	}

	if (memcmp(&cfi1->regs, &cfi2->regs, sizeof(cfi1->regs))) {
		for (i = 0; i < CFI_NUM_REGS; i++) {

			if (!memcmp(&cfi1->regs[i], &cfi2->regs[i], sizeof(struct cfi_reg)))
				continue;

			WARN_INSN(insn, "stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
				  i, cfi1->regs[i].base, cfi1->regs[i].offset,
				  i, cfi2->regs[i].base, cfi2->regs[i].offset);
		}
		return false;
	}

	if (cfi1->type != cfi2->type) {

		WARN_INSN(insn, "stack state mismatch: type1=%d type2=%d",
			  cfi1->type, cfi2->type);
		return false;
	}

	if (cfi1->drap != cfi2->drap ||
	    (cfi1->drap && cfi1->drap_reg != cfi2->drap_reg) ||
	    (cfi1->drap && cfi1->drap_offset != cfi2->drap_offset)) {

		WARN_INSN(insn, "stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
			  cfi1->drap, cfi1->drap_reg, cfi1->drap_offset,
			  cfi2->drap, cfi2->drap_reg, cfi2->drap_offset);
		return false;
	}

	return true;
}

static inline bool func_uaccess_safe(struct symbol *func)
{
	if (func)
		return func->uaccess_safe;

	return false;
}

/*
 * Human-readable name of a call destination for warnings: direct calls,
 * pv_ops[] slots, or "{dynamic}" for unknown indirect targets.
 */
static inline const char *call_dest_name(struct instruction *insn)
{
	/* "pv_ops[" + up to 10 digits + "]" + NUL */
	static char pvname[19];
	struct reloc *reloc;
	int idx;

	if (insn_call_dest(insn))
		return insn_call_dest(insn)->name;

	/* NOTE(review): insn_reloc() is called with a NULL file here — confirm intended */
	reloc = insn_reloc(NULL, insn);
	if (reloc && !strcmp(reloc->sym->name, "pv_ops")) {
		idx = (reloc_addend(reloc) / sizeof(void *));
		snprintf(pvname, sizeof(pvname), "pv_ops[%d]", idx);
		return pvname;
	}

	return "{dynamic}";
}

static bool pv_call_dest(struct objtool_file *file,
			 struct instruction *insn)
{
	struct symbol *target;
	struct reloc *reloc;
	int idx;

	reloc = insn_reloc(file, insn);
	if (!reloc || strcmp(reloc->sym->name, "pv_ops"))
		return false;

	idx = (arch_dest_reloc_offset(reloc_addend(reloc)) / sizeof(void *));

	/* The result is cached per pv_ops[] slot. */
	if (file->pv_ops[idx].clean)
		return true;

	file->pv_ops[idx].clean = true;

	/* A slot is clean only if every registered target is noinstr. */
	list_for_each_entry(target, &file->pv_ops[idx].targets, pv_target) {
		if (!target->sec->noinstr) {
			WARN("pv_ops[%d]: %s", idx, target->name);
			file->pv_ops[idx].clean = false;
		}
	}

	return file->pv_ops[idx].clean;
}

/*
 * May noinstr code call this destination without leaving the noinstr
 * rules intact?
 */
static inline bool noinstr_call_dest(struct objtool_file *file,
				     struct instruction *insn,
				     struct symbol *func)
{
	/*
	 * We can't deal with indirect function calls at present;
	 * assume they're instrumented.
	 */
	if (!func) {
		if (file->pv_ops)
			return pv_call_dest(file, insn);

		return false;
	}

	/*
	 * If the symbol is from a noinstr section; we good.
	 */
	if (func->sec->noinstr)
		return true;

	/*
	 * If the symbol is a static_call trampoline, we can't tell.
	 */
	if (func->static_call_tramp)
		return true;

	/*
	 * The __ubsan_handle_*() calls are like WARN(), they only happen when
	 * something 'BAD' happened. At the risk of taking the machine down,
	 * let them proceed to get the message out.
	 */
	if (!strncmp(func->name, "__ubsan_handle_", 15))
		return true;

	return false;
}

/*
 * Validate the noinstr, uaccess and DF rules at a call site.  Returns
 * non-zero after issuing a warning.
 */
static int validate_call(struct objtool_file *file,
			 struct instruction *insn,
			 struct insn_state *state)
{
	if (state->noinstr && state->instr <= 0 &&
	    !noinstr_call_dest(file, insn, insn_call_dest(insn))) {
		WARN_INSN(insn, "call to %s() leaves .noinstr.text section", call_dest_name(insn));
		return 1;
	}

	if (state->uaccess && !func_uaccess_safe(insn_call_dest(insn))) {
		WARN_INSN(insn, "call to %s() with UACCESS enabled", call_dest_name(insn));
		return 1;
	}

	if (state->df) {
		WARN_INSN(insn, "call to %s() with DF set", call_dest_name(insn));
		return 1;
	}

	return 0;
}

/*
 * A sibling call is additionally required to leave the stack frame in its
 * original state.
 */
static int validate_sibling_call(struct objtool_file *file,
				 struct instruction *insn,
				 struct insn_state *state)
{
	if (insn_func(insn) && has_modified_stack_frame(insn, state)) {
		WARN_INSN(insn, "sibling call from callable instruction with modified stack frame");
		return 1;
	}

	return validate_call(file, insn, state);
}

/*
 * Validate the state at a return: instrumentation, uaccess and DF must be
 * off, and the stack frame must be back to its entry state.
 */
static int validate_return(struct symbol *func, struct instruction *insn, struct insn_state *state)
{
	if (state->noinstr && state->instr > 0) {
		WARN_INSN(insn, "return with instrumentation enabled");
		return 1;
	}

	if (state->uaccess && !func_uaccess_safe(func)) {
		WARN_INSN(insn, "return with UACCESS enabled");
		return 1;
	}

	if (!state->uaccess && func_uaccess_safe(func)) {
		WARN_INSN(insn, "return with UACCESS disabled from a UACCESS-safe function");
		return 1;
	}

	if (state->df) {
		WARN_INSN(insn, "return with DF set");
		return 1;
	}

	if (func && has_modified_stack_frame(insn, state)) {
		WARN_INSN(insn, "return with modified stack frame");
		return 1;
	}

	if (state->cfi.bp_scratch) {
		WARN_INSN(insn, "BP used as a scratch register");
		return 1;
	}

	return 0;
}

/*
 * Pick the next instruction for validation, honoring the fact that
 * alternatives are patched in place.
 */
static struct instruction *next_insn_to_validate(struct objtool_file *file,
						 struct instruction *insn)
{
	struct alt_group *alt_group = insn->alt_group;

	/*
	 * Simulate the fact that alternatives are patched in-place.  When the
	 * end of a replacement alt_group is reached, redirect objtool flow to
	 * the end of the original alt_group.
	 *
	 * insn->alts->insn -> alt_group->first_insn
	 *		       ...
	 *		       alt_group->last_insn
	 *		       [alt_group->nop] -> next(orig_group->last_insn)
	 */
	if (alt_group) {
		if (alt_group->nop) {
			/* ->nop implies ->orig_group */
			if (insn == alt_group->last_insn)
				return alt_group->nop;
			if (insn == alt_group->nop)
				goto next_orig;
		}
		if (insn == alt_group->last_insn && alt_group->orig_group)
			goto next_orig;
	}

	return next_insn_same_sec(file, insn);

next_orig:
	return next_insn_same_sec(file, alt_group->orig_group->last_insn);
}

/*
 * Should validation skip this alternative group and only follow the
 * replacement stream?
 */
static bool skip_alt_group(struct instruction *insn)
{
	struct instruction *alt_insn = insn->alts ? insn->alts->insn : NULL;

	if (!insn->alt_group)
		return false;

	/* ANNOTATE_IGNORE_ALTERNATIVE */
	if (insn->alt_group->ignore)
		return true;

	/*
	 * For NOP patched with CLAC/STAC, only follow the latter to avoid
	 * impossible code paths combining patched CLAC with unpatched STAC
	 * or vice versa.
	 *
	 * ANNOTATE_IGNORE_ALTERNATIVE could have been used here, but Linus
	 * requested not to do that to avoid hurting .s file readability
	 * around CLAC/STAC alternative sites.
	 */

	if (!alt_insn)
		return false;

	/* Don't override ASM_{CLAC,STAC}_UNSAFE */
	if (alt_insn->alt_group && alt_insn->alt_group->ignore)
		return false;

	return alt_insn->type == INSN_CLAC || alt_insn->type == INSN_STAC;
}

/*
 * Follow the branch starting at the given instruction, and recursively follow
 * any other branches (jumps).  Meanwhile, track the frame pointer state at
 * each instruction and validate all the rules described in
 * tools/objtool/Documentation/objtool.txt.
 */
static int validate_branch(struct objtool_file *file, struct symbol *func,
			   struct instruction *insn, struct insn_state state)
{
	struct alternative *alt;
	struct instruction *next_insn, *prev_insn = NULL;
	struct section *sec;
	u8 visited;
	int ret;

	if (func && func->ignore)
		return 0;

	sec = insn->sec;

	while (1) {
		next_insn = next_insn_to_validate(file, insn);

		/* Detect falling off the end of @func into the next symbol. */
		if (func && insn_func(insn) && func != insn_func(insn)->pfunc) {
			/* Ignore KCFI type preambles, which always fall through */
			if (!strncmp(func->name, "__cfi_", 6) ||
			    !strncmp(func->name, "__pfx_", 6) ||
			    !strncmp(func->name, "__pi___cfi_", 11) ||
			    !strncmp(func->name, "__pi___pfx_", 11))
				return 0;

			if (file->ignore_unreachables)
				return 0;

			WARN("%s() falls through to next function %s()",
			     func->name, insn_func(insn)->name);
			func->warned = 1;

			return 1;
		}

		/* Visited bits are tracked separately per uaccess state. */
		visited = VISITED_BRANCH << state.uaccess;
		if (insn->visited & VISITED_BRANCH_MASK) {
			if (!insn->hint && !insn_cfi_match(insn, &state.cfi))
				return 1;

			if (insn->visited & visited)
				return 0;
		} else {
			nr_insns_visited++;
		}

		if (state.noinstr)
			state.instr += insn->instr;

		if (insn->hint) {
			if (insn->restore) {
				struct instruction *save_insn, *i;

				/* Scan backwards for the matching CFI save hint. */
				i = insn;
				save_insn = NULL;

				sym_for_each_insn_continue_reverse(file, func, i) {
					if (i->save) {
						save_insn = i;
						break;
					}
				}

				if (!save_insn) {
					WARN_INSN(insn, "no corresponding CFI save for CFI restore");
					return 1;
				}

				if (!save_insn->visited) {
					/*
					 * If the restore hint insn is at the
					 * beginning of a basic block and was
					 * branched to from elsewhere, and the
					 * save insn hasn't been visited yet,
					 * defer following this branch for now.
					 * It will be seen later via the
					 * straight-line path.
					 */
					if (!prev_insn)
						return 0;

					WARN_INSN(insn, "objtool isn't smart enough to handle this CFI save/restore combo");
					return 1;
				}

				insn->cfi = save_insn->cfi;
				nr_cfi_reused++;
			}

			state.cfi = *insn->cfi;
		} else {
			/* XXX track if we actually changed state.cfi */

			if (prev_insn && !cficmp(prev_insn->cfi, &state.cfi)) {
				insn->cfi = prev_insn->cfi;
				nr_cfi_reused++;
			} else {
				insn->cfi = cfi_hash_find_or_add(&state.cfi);
			}
		}

		insn->visited |= visited;

		if (propagate_alt_cfi(file, insn))
			return 1;

		/* Recurse into each alternative code stream. */
		if (insn->alts) {
			for (alt = insn->alts; alt; alt = alt->next) {
				ret = validate_branch(file, func, alt->insn, state);
				if (ret) {
					BT_INSN(insn, "(alt)");
					return ret;
				}
			}
		}

		if (skip_alt_group(insn))
			return 0;

		if (handle_insn_ops(insn, next_insn, &state))
			return 1;

		switch (insn->type) {

		case INSN_RETURN:
			return validate_return(func, insn, &state);

		case INSN_CALL:
		case INSN_CALL_DYNAMIC:
			ret = validate_call(file, insn, &state);
			if (ret)
				return ret;

			if (opts.stackval && func && !is_special_call(insn) &&
			    !has_valid_stack_frame(&state)) {
				WARN_INSN(insn, "call without frame pointer save/setup");
				return 1;
			}

break; 3692 3693 case INSN_JUMP_CONDITIONAL: 3694 case INSN_JUMP_UNCONDITIONAL: 3695 if (is_sibling_call(insn)) { 3696 ret = validate_sibling_call(file, insn, &state); 3697 if (ret) 3698 return ret; 3699 3700 } else if (insn->jump_dest) { 3701 ret = validate_branch(file, func, 3702 insn->jump_dest, state); 3703 if (ret) { 3704 BT_INSN(insn, "(branch)"); 3705 return ret; 3706 } 3707 } 3708 3709 if (insn->type == INSN_JUMP_UNCONDITIONAL) 3710 return 0; 3711 3712 break; 3713 3714 case INSN_JUMP_DYNAMIC: 3715 case INSN_JUMP_DYNAMIC_CONDITIONAL: 3716 if (is_sibling_call(insn)) { 3717 ret = validate_sibling_call(file, insn, &state); 3718 if (ret) 3719 return ret; 3720 } 3721 3722 if (insn->type == INSN_JUMP_DYNAMIC) 3723 return 0; 3724 3725 break; 3726 3727 case INSN_SYSCALL: 3728 if (func && (!next_insn || !next_insn->hint)) { 3729 WARN_INSN(insn, "unsupported instruction in callable function"); 3730 return 1; 3731 } 3732 3733 break; 3734 3735 case INSN_SYSRET: 3736 if (func && (!next_insn || !next_insn->hint)) { 3737 WARN_INSN(insn, "unsupported instruction in callable function"); 3738 return 1; 3739 } 3740 3741 return 0; 3742 3743 case INSN_STAC: 3744 if (!opts.uaccess) 3745 break; 3746 3747 if (state.uaccess) { 3748 WARN_INSN(insn, "recursive UACCESS enable"); 3749 return 1; 3750 } 3751 3752 state.uaccess = true; 3753 break; 3754 3755 case INSN_CLAC: 3756 if (!opts.uaccess) 3757 break; 3758 3759 if (!state.uaccess && func) { 3760 WARN_INSN(insn, "redundant UACCESS disable"); 3761 return 1; 3762 } 3763 3764 if (func_uaccess_safe(func) && !state.uaccess_stack) { 3765 WARN_INSN(insn, "UACCESS-safe disables UACCESS"); 3766 return 1; 3767 } 3768 3769 state.uaccess = false; 3770 break; 3771 3772 case INSN_STD: 3773 if (state.df) { 3774 WARN_INSN(insn, "recursive STD"); 3775 return 1; 3776 } 3777 3778 state.df = true; 3779 break; 3780 3781 case INSN_CLD: 3782 if (!state.df && func) { 3783 WARN_INSN(insn, "redundant CLD"); 3784 return 1; 3785 } 3786 3787 state.df = false; 
3788 break; 3789 3790 default: 3791 break; 3792 } 3793 3794 if (insn->dead_end) 3795 return 0; 3796 3797 if (!next_insn) { 3798 if (state.cfi.cfa.base == CFI_UNDEFINED) 3799 return 0; 3800 if (file->ignore_unreachables) 3801 return 0; 3802 3803 WARN("%s%sunexpected end of section %s", 3804 func ? func->name : "", func ? "(): " : "", 3805 sec->name); 3806 return 1; 3807 } 3808 3809 prev_insn = insn; 3810 insn = next_insn; 3811 } 3812 3813 return 0; 3814 } 3815 3816 static int validate_unwind_hint(struct objtool_file *file, 3817 struct instruction *insn, 3818 struct insn_state *state) 3819 { 3820 if (insn->hint && !insn->visited) { 3821 int ret = validate_branch(file, insn_func(insn), insn, *state); 3822 if (ret) 3823 BT_INSN(insn, "<=== (hint)"); 3824 return ret; 3825 } 3826 3827 return 0; 3828 } 3829 3830 static int validate_unwind_hints(struct objtool_file *file, struct section *sec) 3831 { 3832 struct instruction *insn; 3833 struct insn_state state; 3834 int warnings = 0; 3835 3836 if (!file->hints) 3837 return 0; 3838 3839 init_insn_state(file, &state, sec); 3840 3841 if (sec) { 3842 sec_for_each_insn(file, sec, insn) 3843 warnings += validate_unwind_hint(file, insn, &state); 3844 } else { 3845 for_each_insn(file, insn) 3846 warnings += validate_unwind_hint(file, insn, &state); 3847 } 3848 3849 return warnings; 3850 } 3851 3852 /* 3853 * Validate rethunk entry constraint: must untrain RET before the first RET. 3854 * 3855 * Follow every branch (intra-function) and ensure VALIDATE_UNRET_END comes 3856 * before an actual RET instruction. 
3857 */ 3858 static int validate_unret(struct objtool_file *file, struct instruction *insn) 3859 { 3860 struct instruction *next, *dest; 3861 int ret; 3862 3863 for (;;) { 3864 next = next_insn_to_validate(file, insn); 3865 3866 if (insn->visited & VISITED_UNRET) 3867 return 0; 3868 3869 insn->visited |= VISITED_UNRET; 3870 3871 if (insn->alts) { 3872 struct alternative *alt; 3873 for (alt = insn->alts; alt; alt = alt->next) { 3874 ret = validate_unret(file, alt->insn); 3875 if (ret) { 3876 BT_INSN(insn, "(alt)"); 3877 return ret; 3878 } 3879 } 3880 } 3881 3882 switch (insn->type) { 3883 3884 case INSN_CALL_DYNAMIC: 3885 case INSN_JUMP_DYNAMIC: 3886 case INSN_JUMP_DYNAMIC_CONDITIONAL: 3887 WARN_INSN(insn, "early indirect call"); 3888 return 1; 3889 3890 case INSN_JUMP_UNCONDITIONAL: 3891 case INSN_JUMP_CONDITIONAL: 3892 if (!is_sibling_call(insn)) { 3893 if (!insn->jump_dest) { 3894 WARN_INSN(insn, "unresolved jump target after linking?!?"); 3895 return 1; 3896 } 3897 ret = validate_unret(file, insn->jump_dest); 3898 if (ret) { 3899 BT_INSN(insn, "(branch%s)", 3900 insn->type == INSN_JUMP_CONDITIONAL ? "-cond" : ""); 3901 return ret; 3902 } 3903 3904 if (insn->type == INSN_JUMP_UNCONDITIONAL) 3905 return 0; 3906 3907 break; 3908 } 3909 3910 /* fallthrough */ 3911 case INSN_CALL: 3912 dest = find_insn(file, insn_call_dest(insn)->sec, 3913 insn_call_dest(insn)->offset); 3914 if (!dest) { 3915 WARN("Unresolved function after linking!?: %s", 3916 insn_call_dest(insn)->name); 3917 return 1; 3918 } 3919 3920 ret = validate_unret(file, dest); 3921 if (ret) { 3922 BT_INSN(insn, "(call)"); 3923 return ret; 3924 } 3925 /* 3926 * If a call returns without error, it must have seen UNTRAIN_RET. 3927 * Therefore any non-error return is a success. 
3928 */ 3929 return 0; 3930 3931 case INSN_RETURN: 3932 WARN_INSN(insn, "RET before UNTRAIN"); 3933 return 1; 3934 3935 case INSN_SYSCALL: 3936 break; 3937 3938 case INSN_SYSRET: 3939 return 0; 3940 3941 case INSN_NOP: 3942 if (insn->retpoline_safe) 3943 return 0; 3944 break; 3945 3946 default: 3947 break; 3948 } 3949 3950 if (insn->dead_end) 3951 return 0; 3952 3953 if (!next) { 3954 WARN_INSN(insn, "teh end!"); 3955 return 1; 3956 } 3957 insn = next; 3958 } 3959 3960 return 0; 3961 } 3962 3963 /* 3964 * Validate that all branches starting at VALIDATE_UNRET_BEGIN encounter 3965 * VALIDATE_UNRET_END before RET. 3966 */ 3967 static int validate_unrets(struct objtool_file *file) 3968 { 3969 struct instruction *insn; 3970 int warnings = 0; 3971 3972 for_each_insn(file, insn) { 3973 if (!insn->unret) 3974 continue; 3975 3976 warnings += validate_unret(file, insn); 3977 } 3978 3979 return warnings; 3980 } 3981 3982 static int validate_retpoline(struct objtool_file *file) 3983 { 3984 struct instruction *insn; 3985 int warnings = 0; 3986 3987 for_each_insn(file, insn) { 3988 if (insn->type != INSN_JUMP_DYNAMIC && 3989 insn->type != INSN_CALL_DYNAMIC && 3990 insn->type != INSN_RETURN) 3991 continue; 3992 3993 if (insn->retpoline_safe) 3994 continue; 3995 3996 if (insn->sec->init) 3997 continue; 3998 3999 if (insn->type == INSN_RETURN) { 4000 if (opts.rethunk) { 4001 WARN_INSN(insn, "'naked' return found in MITIGATION_RETHUNK build"); 4002 warnings++; 4003 } 4004 continue; 4005 } 4006 4007 WARN_INSN(insn, "indirect %s found in MITIGATION_RETPOLINE build", 4008 insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call"); 4009 warnings++; 4010 } 4011 4012 if (!opts.cfi) 4013 return warnings; 4014 4015 /* 4016 * kCFI call sites look like: 4017 * 4018 * movl $(-0x12345678), %r10d 4019 * addl -4(%r11), %r10d 4020 * jz 1f 4021 * ud2 4022 * 1: cs call __x86_indirect_thunk_r11 4023 * 4024 * Verify all indirect calls are kCFI adorned by checking for the 4025 * UD2. 
Notably, doing __nocfi calls to regular (cfi) functions is 4026 * broken. 4027 */ 4028 list_for_each_entry(insn, &file->retpoline_call_list, call_node) { 4029 struct symbol *sym = insn->sym; 4030 4031 if (sym && (sym->type == STT_NOTYPE || 4032 sym->type == STT_FUNC) && !sym->nocfi) { 4033 struct instruction *prev = 4034 prev_insn_same_sym(file, insn); 4035 4036 if (!prev || prev->type != INSN_BUG) { 4037 WARN_INSN(insn, "no-cfi indirect call!"); 4038 warnings++; 4039 } 4040 } 4041 } 4042 4043 return warnings; 4044 } 4045 4046 static bool is_kasan_insn(struct instruction *insn) 4047 { 4048 return (insn->type == INSN_CALL && 4049 !strcmp(insn_call_dest(insn)->name, "__asan_handle_no_return")); 4050 } 4051 4052 static bool is_ubsan_insn(struct instruction *insn) 4053 { 4054 return (insn->type == INSN_CALL && 4055 !strcmp(insn_call_dest(insn)->name, 4056 "__ubsan_handle_builtin_unreachable")); 4057 } 4058 4059 static bool ignore_unreachable_insn(struct objtool_file *file, struct instruction *insn) 4060 { 4061 struct symbol *func = insn_func(insn); 4062 struct instruction *prev_insn; 4063 int i; 4064 4065 if (insn->type == INSN_NOP || insn->type == INSN_TRAP || (func && func->ignore)) 4066 return true; 4067 4068 /* 4069 * Ignore alternative replacement instructions. This can happen 4070 * when a whitelisted function uses one of the ALTERNATIVE macros. 4071 */ 4072 if (!strcmp(insn->sec->name, ".altinstr_replacement") || 4073 !strcmp(insn->sec->name, ".altinstr_aux")) 4074 return true; 4075 4076 /* 4077 * Whole archive runs might encounter dead code from weak symbols. 4078 * This is where the linker will have dropped the weak symbol in 4079 * favour of a regular symbol, but leaves the code in place. 4080 * 4081 * In this case we'll find a piece of code (whole function) that is not 4082 * covered by a !section symbol. Ignore them. 
4083 */ 4084 if (opts.link && !func) { 4085 int size = find_symbol_hole_containing(insn->sec, insn->offset); 4086 unsigned long end = insn->offset + size; 4087 4088 if (!size) /* not a hole */ 4089 return false; 4090 4091 if (size < 0) /* hole until the end */ 4092 return true; 4093 4094 sec_for_each_insn_continue(file, insn) { 4095 /* 4096 * If we reach a visited instruction at or before the 4097 * end of the hole, ignore the unreachable. 4098 */ 4099 if (insn->visited) 4100 return true; 4101 4102 if (insn->offset >= end) 4103 break; 4104 4105 /* 4106 * If this hole jumps to a .cold function, mark it ignore too. 4107 */ 4108 if (insn->jump_dest && insn_func(insn->jump_dest) && 4109 strstr(insn_func(insn->jump_dest)->name, ".cold")) { 4110 insn_func(insn->jump_dest)->ignore = true; 4111 } 4112 } 4113 4114 return false; 4115 } 4116 4117 if (!func) 4118 return false; 4119 4120 if (func->static_call_tramp) 4121 return true; 4122 4123 /* 4124 * CONFIG_UBSAN_TRAP inserts a UD2 when it sees 4125 * __builtin_unreachable(). The BUG() macro has an unreachable() after 4126 * the UD2, which causes GCC's undefined trap logic to emit another UD2 4127 * (or occasionally a JMP to UD2). 4128 * 4129 * It may also insert a UD2 after calling a __noreturn function. 4130 */ 4131 prev_insn = prev_insn_same_sec(file, insn); 4132 if (prev_insn && prev_insn->dead_end && 4133 (insn->type == INSN_BUG || 4134 (insn->type == INSN_JUMP_UNCONDITIONAL && 4135 insn->jump_dest && insn->jump_dest->type == INSN_BUG))) 4136 return true; 4137 4138 /* 4139 * Check if this (or a subsequent) instruction is related to 4140 * CONFIG_UBSAN or CONFIG_KASAN. 4141 * 4142 * End the search at 5 instructions to avoid going into the weeds. 
4143 */ 4144 for (i = 0; i < 5; i++) { 4145 4146 if (is_kasan_insn(insn) || is_ubsan_insn(insn)) 4147 return true; 4148 4149 if (insn->type == INSN_JUMP_UNCONDITIONAL) { 4150 if (insn->jump_dest && 4151 insn_func(insn->jump_dest) == func) { 4152 insn = insn->jump_dest; 4153 continue; 4154 } 4155 4156 break; 4157 } 4158 4159 if (insn->offset + insn->len >= func->offset + func->len) 4160 break; 4161 4162 insn = next_insn_same_sec(file, insn); 4163 } 4164 4165 return false; 4166 } 4167 4168 static int add_prefix_symbol(struct objtool_file *file, struct symbol *func) 4169 { 4170 struct instruction *insn, *prev; 4171 struct cfi_state *cfi; 4172 4173 insn = find_insn(file, func->sec, func->offset); 4174 if (!insn) 4175 return -1; 4176 4177 for (prev = prev_insn_same_sec(file, insn); 4178 prev; 4179 prev = prev_insn_same_sec(file, prev)) { 4180 u64 offset; 4181 4182 if (prev->type != INSN_NOP) 4183 return -1; 4184 4185 offset = func->offset - prev->offset; 4186 4187 if (offset > opts.prefix) 4188 return -1; 4189 4190 if (offset < opts.prefix) 4191 continue; 4192 4193 elf_create_prefix_symbol(file->elf, func, opts.prefix); 4194 break; 4195 } 4196 4197 if (!prev) 4198 return -1; 4199 4200 if (!insn->cfi) { 4201 /* 4202 * This can happen if stack validation isn't enabled or the 4203 * function is annotated with STACK_FRAME_NON_STANDARD. 
4204 */ 4205 return 0; 4206 } 4207 4208 /* Propagate insn->cfi to the prefix code */ 4209 cfi = cfi_hash_find_or_add(insn->cfi); 4210 for (; prev != insn; prev = next_insn_same_sec(file, prev)) 4211 prev->cfi = cfi; 4212 4213 return 0; 4214 } 4215 4216 static int add_prefix_symbols(struct objtool_file *file) 4217 { 4218 struct section *sec; 4219 struct symbol *func; 4220 4221 for_each_sec(file, sec) { 4222 if (!(sec->sh.sh_flags & SHF_EXECINSTR)) 4223 continue; 4224 4225 sec_for_each_sym(sec, func) { 4226 if (func->type != STT_FUNC) 4227 continue; 4228 4229 add_prefix_symbol(file, func); 4230 } 4231 } 4232 4233 return 0; 4234 } 4235 4236 static int validate_symbol(struct objtool_file *file, struct section *sec, 4237 struct symbol *sym, struct insn_state *state) 4238 { 4239 struct instruction *insn; 4240 int ret; 4241 4242 if (!sym->len) { 4243 WARN("%s() is missing an ELF size annotation", sym->name); 4244 return 1; 4245 } 4246 4247 if (sym->pfunc != sym || sym->alias != sym) 4248 return 0; 4249 4250 insn = find_insn(file, sec, sym->offset); 4251 if (!insn || insn->visited) 4252 return 0; 4253 4254 if (opts.uaccess) 4255 state->uaccess = sym->uaccess_safe; 4256 4257 ret = validate_branch(file, insn_func(insn), insn, *state); 4258 if (ret) 4259 BT_INSN(insn, "<=== (sym)"); 4260 return ret; 4261 } 4262 4263 static int validate_section(struct objtool_file *file, struct section *sec) 4264 { 4265 struct insn_state state; 4266 struct symbol *func; 4267 int warnings = 0; 4268 4269 sec_for_each_sym(sec, func) { 4270 if (func->type != STT_FUNC) 4271 continue; 4272 4273 init_insn_state(file, &state, sec); 4274 set_func_state(&state.cfi); 4275 4276 warnings += validate_symbol(file, sec, func, &state); 4277 } 4278 4279 return warnings; 4280 } 4281 4282 static int validate_noinstr_sections(struct objtool_file *file) 4283 { 4284 struct section *sec; 4285 int warnings = 0; 4286 4287 sec = find_section_by_name(file->elf, ".noinstr.text"); 4288 if (sec) { 4289 warnings += 
validate_section(file, sec); 4290 warnings += validate_unwind_hints(file, sec); 4291 } 4292 4293 sec = find_section_by_name(file->elf, ".entry.text"); 4294 if (sec) { 4295 warnings += validate_section(file, sec); 4296 warnings += validate_unwind_hints(file, sec); 4297 } 4298 4299 sec = find_section_by_name(file->elf, ".cpuidle.text"); 4300 if (sec) { 4301 warnings += validate_section(file, sec); 4302 warnings += validate_unwind_hints(file, sec); 4303 } 4304 4305 return warnings; 4306 } 4307 4308 static int validate_functions(struct objtool_file *file) 4309 { 4310 struct section *sec; 4311 int warnings = 0; 4312 4313 for_each_sec(file, sec) { 4314 if (!(sec->sh.sh_flags & SHF_EXECINSTR)) 4315 continue; 4316 4317 warnings += validate_section(file, sec); 4318 } 4319 4320 return warnings; 4321 } 4322 4323 static void mark_endbr_used(struct instruction *insn) 4324 { 4325 if (!list_empty(&insn->call_node)) 4326 list_del_init(&insn->call_node); 4327 } 4328 4329 static bool noendbr_range(struct objtool_file *file, struct instruction *insn) 4330 { 4331 struct symbol *sym = find_symbol_containing(insn->sec, insn->offset-1); 4332 struct instruction *first; 4333 4334 if (!sym) 4335 return false; 4336 4337 first = find_insn(file, sym->sec, sym->offset); 4338 if (!first) 4339 return false; 4340 4341 if (first->type != INSN_ENDBR && !first->noendbr) 4342 return false; 4343 4344 return insn->offset == sym->offset + sym->len; 4345 } 4346 4347 static int __validate_ibt_insn(struct objtool_file *file, struct instruction *insn, 4348 struct instruction *dest) 4349 { 4350 if (dest->type == INSN_ENDBR) { 4351 mark_endbr_used(dest); 4352 return 0; 4353 } 4354 4355 if (insn_func(dest) && insn_func(insn) && 4356 insn_func(dest)->pfunc == insn_func(insn)->pfunc) { 4357 /* 4358 * Anything from->to self is either _THIS_IP_ or 4359 * IRET-to-self. 
4360 * 4361 * There is no sane way to annotate _THIS_IP_ since the 4362 * compiler treats the relocation as a constant and is 4363 * happy to fold in offsets, skewing any annotation we 4364 * do, leading to vast amounts of false-positives. 4365 * 4366 * There's also compiler generated _THIS_IP_ through 4367 * KCOV and such which we have no hope of annotating. 4368 * 4369 * As such, blanket accept self-references without 4370 * issue. 4371 */ 4372 return 0; 4373 } 4374 4375 /* 4376 * Accept anything ANNOTATE_NOENDBR. 4377 */ 4378 if (dest->noendbr) 4379 return 0; 4380 4381 /* 4382 * Accept if this is the instruction after a symbol 4383 * that is (no)endbr -- typical code-range usage. 4384 */ 4385 if (noendbr_range(file, dest)) 4386 return 0; 4387 4388 WARN_INSN(insn, "relocation to !ENDBR: %s", offstr(dest->sec, dest->offset)); 4389 return 1; 4390 } 4391 4392 static int validate_ibt_insn(struct objtool_file *file, struct instruction *insn) 4393 { 4394 struct instruction *dest; 4395 struct reloc *reloc; 4396 unsigned long off; 4397 int warnings = 0; 4398 4399 /* 4400 * Looking for function pointer load relocations. 
Ignore 4401 * direct/indirect branches: 4402 */ 4403 switch (insn->type) { 4404 4405 case INSN_CALL: 4406 case INSN_CALL_DYNAMIC: 4407 case INSN_JUMP_CONDITIONAL: 4408 case INSN_JUMP_UNCONDITIONAL: 4409 case INSN_JUMP_DYNAMIC: 4410 case INSN_JUMP_DYNAMIC_CONDITIONAL: 4411 case INSN_RETURN: 4412 case INSN_NOP: 4413 return 0; 4414 4415 case INSN_LEA_RIP: 4416 if (!insn_reloc(file, insn)) { 4417 /* local function pointer reference without reloc */ 4418 4419 off = arch_jump_destination(insn); 4420 4421 dest = find_insn(file, insn->sec, off); 4422 if (!dest) { 4423 WARN_INSN(insn, "corrupt function pointer reference"); 4424 return 1; 4425 } 4426 4427 return __validate_ibt_insn(file, insn, dest); 4428 } 4429 break; 4430 4431 default: 4432 break; 4433 } 4434 4435 for (reloc = insn_reloc(file, insn); 4436 reloc; 4437 reloc = find_reloc_by_dest_range(file->elf, insn->sec, 4438 reloc_offset(reloc) + 1, 4439 (insn->offset + insn->len) - (reloc_offset(reloc) + 1))) { 4440 4441 off = reloc->sym->offset; 4442 if (reloc_type(reloc) == R_X86_64_PC32 || 4443 reloc_type(reloc) == R_X86_64_PLT32) 4444 off += arch_dest_reloc_offset(reloc_addend(reloc)); 4445 else 4446 off += reloc_addend(reloc); 4447 4448 dest = find_insn(file, reloc->sym->sec, off); 4449 if (!dest) 4450 continue; 4451 4452 warnings += __validate_ibt_insn(file, insn, dest); 4453 } 4454 4455 return warnings; 4456 } 4457 4458 static int validate_ibt_data_reloc(struct objtool_file *file, 4459 struct reloc *reloc) 4460 { 4461 struct instruction *dest; 4462 4463 dest = find_insn(file, reloc->sym->sec, 4464 reloc->sym->offset + reloc_addend(reloc)); 4465 if (!dest) 4466 return 0; 4467 4468 if (dest->type == INSN_ENDBR) { 4469 mark_endbr_used(dest); 4470 return 0; 4471 } 4472 4473 if (dest->noendbr) 4474 return 0; 4475 4476 WARN_FUNC(reloc->sec->base, reloc_offset(reloc), 4477 "data relocation to !ENDBR: %s", offstr(dest->sec, dest->offset)); 4478 4479 return 1; 4480 } 4481 4482 /* 4483 * Validate IBT rules and remove used 
ENDBR instructions from the seal list. 4484 * Unused ENDBR instructions will be annotated for sealing (i.e., replaced with 4485 * NOPs) later, in create_ibt_endbr_seal_sections(). 4486 */ 4487 static int validate_ibt(struct objtool_file *file) 4488 { 4489 struct section *sec; 4490 struct reloc *reloc; 4491 struct instruction *insn; 4492 int warnings = 0; 4493 4494 for_each_insn(file, insn) 4495 warnings += validate_ibt_insn(file, insn); 4496 4497 for_each_sec(file, sec) { 4498 4499 /* Already done by validate_ibt_insn() */ 4500 if (sec->sh.sh_flags & SHF_EXECINSTR) 4501 continue; 4502 4503 if (!sec->rsec) 4504 continue; 4505 4506 /* 4507 * These sections can reference text addresses, but not with 4508 * the intent to indirect branch to them. 4509 */ 4510 if ((!strncmp(sec->name, ".discard", 8) && 4511 strcmp(sec->name, ".discard.ibt_endbr_noseal")) || 4512 !strncmp(sec->name, ".debug", 6) || 4513 !strcmp(sec->name, ".altinstructions") || 4514 !strcmp(sec->name, ".ibt_endbr_seal") || 4515 !strcmp(sec->name, ".orc_unwind_ip") || 4516 !strcmp(sec->name, ".parainstructions") || 4517 !strcmp(sec->name, ".retpoline_sites") || 4518 !strcmp(sec->name, ".smp_locks") || 4519 !strcmp(sec->name, ".static_call_sites") || 4520 !strcmp(sec->name, "_error_injection_whitelist") || 4521 !strcmp(sec->name, "_kprobe_blacklist") || 4522 !strcmp(sec->name, "__bug_table") || 4523 !strcmp(sec->name, "__ex_table") || 4524 !strcmp(sec->name, "__jump_table") || 4525 !strcmp(sec->name, "__mcount_loc") || 4526 !strcmp(sec->name, ".kcfi_traps") || 4527 !strcmp(sec->name, ".llvm.call-graph-profile") || 4528 !strcmp(sec->name, ".llvm_bb_addr_map") || 4529 !strcmp(sec->name, "__tracepoints") || 4530 strstr(sec->name, "__patchable_function_entries")) 4531 continue; 4532 4533 for_each_reloc(sec->rsec, reloc) 4534 warnings += validate_ibt_data_reloc(file, reloc); 4535 } 4536 4537 return warnings; 4538 } 4539 4540 static int validate_sls(struct objtool_file *file) 4541 { 4542 struct instruction *insn, 
*next_insn; 4543 int warnings = 0; 4544 4545 for_each_insn(file, insn) { 4546 next_insn = next_insn_same_sec(file, insn); 4547 4548 if (insn->retpoline_safe) 4549 continue; 4550 4551 switch (insn->type) { 4552 case INSN_RETURN: 4553 if (!next_insn || next_insn->type != INSN_TRAP) { 4554 WARN_INSN(insn, "missing int3 after ret"); 4555 warnings++; 4556 } 4557 4558 break; 4559 case INSN_JUMP_DYNAMIC: 4560 if (!next_insn || next_insn->type != INSN_TRAP) { 4561 WARN_INSN(insn, "missing int3 after indirect jump"); 4562 warnings++; 4563 } 4564 break; 4565 default: 4566 break; 4567 } 4568 } 4569 4570 return warnings; 4571 } 4572 4573 static int validate_reachable_instructions(struct objtool_file *file) 4574 { 4575 struct instruction *insn, *prev_insn; 4576 struct symbol *call_dest; 4577 int warnings = 0; 4578 4579 if (file->ignore_unreachables) 4580 return 0; 4581 4582 for_each_insn(file, insn) { 4583 if (insn->visited || ignore_unreachable_insn(file, insn)) 4584 continue; 4585 4586 prev_insn = prev_insn_same_sec(file, insn); 4587 if (prev_insn && prev_insn->dead_end) { 4588 call_dest = insn_call_dest(prev_insn); 4589 if (call_dest) { 4590 WARN_INSN(insn, "%s() missing __noreturn in .c/.h or NORETURN() in noreturns.h", 4591 call_dest->name); 4592 warnings++; 4593 continue; 4594 } 4595 } 4596 4597 WARN_INSN(insn, "unreachable instruction"); 4598 warnings++; 4599 } 4600 4601 return warnings; 4602 } 4603 4604 /* 'funcs' is a space-separated list of function names */ 4605 static void disas_funcs(const char *funcs) 4606 { 4607 const char *objdump_str, *cross_compile; 4608 int size, ret; 4609 char *cmd; 4610 4611 cross_compile = getenv("CROSS_COMPILE"); 4612 if (!cross_compile) 4613 cross_compile = ""; 4614 4615 objdump_str = "%sobjdump -wdr %s | gawk -M -v _funcs='%s' '" 4616 "BEGIN { split(_funcs, funcs); }" 4617 "/^$/ { func_match = 0; }" 4618 "/<.*>:/ { " 4619 "f = gensub(/.*<(.*)>:/, \"\\\\1\", 1);" 4620 "for (i in funcs) {" 4621 "if (funcs[i] == f) {" 4622 "func_match = 
1;" 4623 "base = strtonum(\"0x\" $1);" 4624 "break;" 4625 "}" 4626 "}" 4627 "}" 4628 "{" 4629 "if (func_match) {" 4630 "addr = strtonum(\"0x\" $1);" 4631 "printf(\"%%04x \", addr - base);" 4632 "print;" 4633 "}" 4634 "}' 1>&2"; 4635 4636 /* fake snprintf() to calculate the size */ 4637 size = snprintf(NULL, 0, objdump_str, cross_compile, objname, funcs) + 1; 4638 if (size <= 0) { 4639 WARN("objdump string size calculation failed"); 4640 return; 4641 } 4642 4643 cmd = malloc(size); 4644 4645 /* real snprintf() */ 4646 snprintf(cmd, size, objdump_str, cross_compile, objname, funcs); 4647 ret = system(cmd); 4648 if (ret) { 4649 WARN("disassembly failed: %d", ret); 4650 return; 4651 } 4652 } 4653 4654 static void disas_warned_funcs(struct objtool_file *file) 4655 { 4656 struct symbol *sym; 4657 char *funcs = NULL, *tmp; 4658 4659 for_each_sym(file, sym) { 4660 if (sym->warned) { 4661 if (!funcs) { 4662 funcs = malloc(strlen(sym->name) + 1); 4663 if (!funcs) { 4664 ERROR_GLIBC("malloc"); 4665 return; 4666 } 4667 strcpy(funcs, sym->name); 4668 } else { 4669 tmp = malloc(strlen(funcs) + strlen(sym->name) + 2); 4670 if (!tmp) { 4671 ERROR_GLIBC("malloc"); 4672 return; 4673 } 4674 sprintf(tmp, "%s %s", funcs, sym->name); 4675 free(funcs); 4676 funcs = tmp; 4677 } 4678 } 4679 } 4680 4681 if (funcs) 4682 disas_funcs(funcs); 4683 } 4684 4685 __weak bool arch_absolute_reloc(struct elf *elf, struct reloc *reloc) 4686 { 4687 unsigned int type = reloc_type(reloc); 4688 size_t sz = elf_addr_size(elf); 4689 4690 return (sz == 8) ? 
(type == R_ABS64) : (type == R_ABS32); 4691 } 4692 4693 static int check_abs_references(struct objtool_file *file) 4694 { 4695 struct section *sec; 4696 struct reloc *reloc; 4697 int ret = 0; 4698 4699 for_each_sec(file, sec) { 4700 /* absolute references in non-loadable sections are fine */ 4701 if (!(sec->sh.sh_flags & SHF_ALLOC)) 4702 continue; 4703 4704 /* section must have an associated .rela section */ 4705 if (!sec->rsec) 4706 continue; 4707 4708 /* 4709 * Special case for compiler generated metadata that is not 4710 * consumed until after boot. 4711 */ 4712 if (!strcmp(sec->name, "__patchable_function_entries")) 4713 continue; 4714 4715 for_each_reloc(sec->rsec, reloc) { 4716 if (arch_absolute_reloc(file->elf, reloc)) { 4717 WARN("section %s has absolute relocation at offset 0x%llx", 4718 sec->name, (unsigned long long)reloc_offset(reloc)); 4719 ret++; 4720 } 4721 } 4722 } 4723 return ret; 4724 } 4725 4726 struct insn_chunk { 4727 void *addr; 4728 struct insn_chunk *next; 4729 }; 4730 4731 /* 4732 * Reduce peak RSS usage by freeing insns memory before writing the ELF file, 4733 * which can trigger more allocations for .debug_* sections whose data hasn't 4734 * been read yet. 
4735 */ 4736 static void free_insns(struct objtool_file *file) 4737 { 4738 struct instruction *insn; 4739 struct insn_chunk *chunks = NULL, *chunk; 4740 4741 for_each_insn(file, insn) { 4742 if (!insn->idx) { 4743 chunk = malloc(sizeof(*chunk)); 4744 chunk->addr = insn; 4745 chunk->next = chunks; 4746 chunks = chunk; 4747 } 4748 } 4749 4750 for (chunk = chunks; chunk; chunk = chunk->next) 4751 free(chunk->addr); 4752 } 4753 4754 int check(struct objtool_file *file) 4755 { 4756 int ret = 0, warnings = 0; 4757 4758 arch_initial_func_cfi_state(&initial_func_cfi); 4759 init_cfi_state(&init_cfi); 4760 init_cfi_state(&func_cfi); 4761 set_func_state(&func_cfi); 4762 init_cfi_state(&force_undefined_cfi); 4763 force_undefined_cfi.force_undefined = true; 4764 4765 if (!cfi_hash_alloc(1UL << (file->elf->symbol_bits - 3))) { 4766 ret = -1; 4767 goto out; 4768 } 4769 4770 cfi_hash_add(&init_cfi); 4771 cfi_hash_add(&func_cfi); 4772 4773 ret = decode_sections(file); 4774 if (ret) 4775 goto out; 4776 4777 if (!nr_insns) 4778 goto out; 4779 4780 if (opts.retpoline) 4781 warnings += validate_retpoline(file); 4782 4783 if (opts.stackval || opts.orc || opts.uaccess) { 4784 int w = 0; 4785 4786 w += validate_functions(file); 4787 w += validate_unwind_hints(file, NULL); 4788 if (!w) 4789 w += validate_reachable_instructions(file); 4790 4791 warnings += w; 4792 4793 } else if (opts.noinstr) { 4794 warnings += validate_noinstr_sections(file); 4795 } 4796 4797 if (opts.unret) { 4798 /* 4799 * Must be after validate_branch() and friends, it plays 4800 * further games with insn->visited. 
4801 */ 4802 warnings += validate_unrets(file); 4803 } 4804 4805 if (opts.ibt) 4806 warnings += validate_ibt(file); 4807 4808 if (opts.sls) 4809 warnings += validate_sls(file); 4810 4811 if (opts.static_call) { 4812 ret = create_static_call_sections(file); 4813 if (ret) 4814 goto out; 4815 } 4816 4817 if (opts.retpoline) { 4818 ret = create_retpoline_sites_sections(file); 4819 if (ret) 4820 goto out; 4821 } 4822 4823 if (opts.cfi) { 4824 ret = create_cfi_sections(file); 4825 if (ret) 4826 goto out; 4827 } 4828 4829 if (opts.rethunk) { 4830 ret = create_return_sites_sections(file); 4831 if (ret) 4832 goto out; 4833 4834 if (opts.hack_skylake) { 4835 ret = create_direct_call_sections(file); 4836 if (ret) 4837 goto out; 4838 } 4839 } 4840 4841 if (opts.mcount) { 4842 ret = create_mcount_loc_sections(file); 4843 if (ret) 4844 goto out; 4845 } 4846 4847 if (opts.prefix) { 4848 ret = add_prefix_symbols(file); 4849 if (ret) 4850 goto out; 4851 } 4852 4853 if (opts.ibt) { 4854 ret = create_ibt_endbr_seal_sections(file); 4855 if (ret) 4856 goto out; 4857 } 4858 4859 if (opts.noabs) 4860 warnings += check_abs_references(file); 4861 4862 if (opts.orc && nr_insns) { 4863 ret = orc_create(file); 4864 if (ret) 4865 goto out; 4866 } 4867 4868 free_insns(file); 4869 4870 if (opts.stats) { 4871 printf("nr_insns_visited: %ld\n", nr_insns_visited); 4872 printf("nr_cfi: %ld\n", nr_cfi); 4873 printf("nr_cfi_reused: %ld\n", nr_cfi_reused); 4874 printf("nr_cfi_cache: %ld\n", nr_cfi_cache); 4875 } 4876 4877 out: 4878 if (!ret && !warnings) 4879 return 0; 4880 4881 if (opts.werror && warnings) 4882 ret = 1; 4883 4884 if (opts.verbose) { 4885 if (opts.werror && warnings) 4886 WARN("%d warning(s) upgraded to errors", warnings); 4887 print_args(); 4888 disas_warned_funcs(file); 4889 } 4890 4891 return ret; 4892 } 4893