xref: /linux/tools/objtool/check.c (revision a776c270a0b2fad6715cb714187e4290cadb9237)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
4  */
5 
6 #include <string.h>
7 #include <stdlib.h>
8 
9 #include "builtin.h"
10 #include "check.h"
11 #include "elf.h"
12 #include "special.h"
13 #include "arch.h"
14 #include "warn.h"
15 
16 #include <linux/hashtable.h>
17 #include <linux/kernel.h>
18 
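/*
 * Placeholder offset given to the fake jump instructions created in
 * handle_group_alt(); add_jump_destinations() skips such instructions.
 */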
19 #define FAKE_JUMP_OFFSET -1
20 
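/*
 * Section containing C-annotated jump tables.  This is assumed to match the
 * section used by the kernel's __annotate_jump_table attribute.
 */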
21 #define C_JUMP_TABLE_SECTION ".rodata..c_jump_table"
22 
23 struct alternative {
24 	struct list_head list;
25 	struct instruction *insn;
26 	bool skip_orig;
27 };
28 
29 const char *objname;
30 struct cfi_state initial_func_cfi;
31 
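/*
 * Find the instruction which was decoded at the given section and offset, by
 * looking it up in the file-wide instruction hash.  Returns NULL if no
 * instruction starts at exactly that offset.
 */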
32 struct instruction *find_insn(struct objtool_file *file,
33 			      struct section *sec, unsigned long offset)
34 {
35 	struct instruction *insn;
36 
37 	hash_for_each_possible(file->insn_hash, insn, hash, offset)
38 		if (insn->sec == sec && insn->offset == offset)
39 			return insn;
40 
41 	return NULL;
42 }
43 
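/* Return the next decoded instruction, or NULL at the end of the section. */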
44 static struct instruction *next_insn_same_sec(struct objtool_file *file,
45 					      struct instruction *insn)
46 {
47 	struct instruction *next = list_next_entry(insn, list);
48 
49 	if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
50 		return NULL;
51 
52 	return next;
53 }
54 
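/*
 * Return the next instruction belonging to the same function, following the
 * parent function into its .cold subfunction (func->cfunc) when the
 * straight-line instruction list ends.
 */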
55 static struct instruction *next_insn_same_func(struct objtool_file *file,
56 					       struct instruction *insn)
57 {
58 	struct instruction *next = list_next_entry(insn, list);
59 	struct symbol *func = insn->func;
60 
61 	if (!func)
62 		return NULL;
63 
64 	if (&next->list != &file->insn_list && next->func == func)
65 		return next;
66 
67 	/* Check if we're already in the subfunction: */
68 	if (func == func->cfunc)
69 		return NULL;
70 
71 	/* Move to the subfunction: */
72 	return find_insn(file, func->cfunc->sec, func->cfunc->offset);
73 }
74 
75 #define func_for_each_insn(file, func, insn)				\
76 	for (insn = find_insn(file, func->sec, func->offset);		\
77 	     insn;							\
78 	     insn = next_insn_same_func(file, insn))
79 
80 #define sym_for_each_insn(file, sym, insn)				\
81 	for (insn = find_insn(file, sym->sec, sym->offset);		\
82 	     insn && &insn->list != &file->insn_list &&			\
83 		insn->sec == sym->sec &&				\
84 		insn->offset < sym->offset + sym->len;			\
85 	     insn = list_next_entry(insn, list))
86 
87 #define sym_for_each_insn_continue_reverse(file, sym, insn)		\
88 	for (insn = list_prev_entry(insn, list);			\
89 	     &insn->list != &file->insn_list &&				\
90 		insn->sec == sym->sec && insn->offset >= sym->offset;	\
91 	     insn = list_prev_entry(insn, list))
92 
93 #define sec_for_each_insn_from(file, insn)				\
94 	for (; insn; insn = next_insn_same_sec(file, insn))
95 
96 #define sec_for_each_insn_continue(file, insn)				\
97 	for (insn = next_insn_same_sec(file, insn); insn;		\
98 	     insn = next_insn_same_sec(file, insn))
99 
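/* Direct jump (conditional or unconditional), as opposed to an indirect one. */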
100 static bool is_static_jump(struct instruction *insn)
101 {
102 	return insn->type == INSN_JUMP_CONDITIONAL ||
103 	       insn->type == INSN_JUMP_UNCONDITIONAL;
104 }
105 
106 static bool is_sibling_call(struct instruction *insn)
107 {
108 	/* An indirect jump is either a sibling call or a jump to a table. */
109 	if (insn->type == INSN_JUMP_DYNAMIC)
110 		return list_empty(&insn->alts);
111 
112 	if (!is_static_jump(insn))
113 		return false;
114 
115 	/* add_jump_destinations() sets insn->call_dest for sibling calls. */
116 	return !!insn->call_dest;
117 }
118 
119 /*
120  * This checks to see if the given function is a "noreturn" function.
121  *
122  * For global functions which are outside the scope of this object file, we
123  * have to keep a manual list of them.
124  *
125  * For local functions, we have to detect them manually by simply looking for
126  * the lack of a return instruction.
127  */
128 static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
129 				int recursion)
130 {
131 	int i;
132 	struct instruction *insn;
133 	bool empty = true;
134 
135 	/*
136 	 * Unfortunately these have to be hard coded because the noreturn
137 	 * attribute isn't provided in ELF data.
138 	 */
139 	static const char * const global_noreturns[] = {
140 		"__stack_chk_fail",
141 		"panic",
142 		"do_exit",
143 		"do_task_dead",
144 		"__module_put_and_exit",
145 		"complete_and_exit",
146 		"__reiserfs_panic",
147 		"lbug_with_loc",
148 		"fortify_panic",
149 		"usercopy_abort",
150 		"machine_real_restart",
151 		"rewind_stack_do_exit",
152 		"kunit_try_catch_throw",
153 	};
154 
155 	if (!func)
156 		return false;
157 
158 	if (func->bind == STB_WEAK)
159 		return false;
160 
161 	if (func->bind == STB_GLOBAL)
162 		for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
163 			if (!strcmp(func->name, global_noreturns[i]))
164 				return true;
165 
166 	if (!func->len)
167 		return false;
168 
169 	insn = find_insn(file, func->sec, func->offset);
170 	if (!insn || !insn->func)
171 		return false;
172 
173 	func_for_each_insn(file, func, insn) {
174 		empty = false;
175 
176 		if (insn->type == INSN_RETURN)
177 			return false;
178 	}
179 
180 	if (empty)
181 		return false;
182 
183 	/*
184 	 * A function can have a sibling call instead of a return.  In that
185 	 * case, the function's dead-end status depends on whether the target
186 	 * of the sibling call returns.
187 	 */
188 	func_for_each_insn(file, func, insn) {
189 		if (is_sibling_call(insn)) {
190 			struct instruction *dest = insn->jump_dest;
191 
192 			if (!dest)
193 				/* sibling call to another file */
194 				return false;
195 
196 			/* local sibling call */
197 			if (recursion == 5) {
198 				/*
199 				 * Infinite recursion: two functions have
200 				 * sibling calls to each other.  This is a very
201 				 * rare case.  It means they aren't dead ends.
202 				 */
203 				return false;
204 			}
205 
206 			return __dead_end_function(file, dest->func, recursion+1);
207 		}
208 	}
209 
210 	return true;
211 }
212 
213 static bool dead_end_function(struct objtool_file *file, struct symbol *func)
214 {
215 	return __dead_end_function(file, func, 0);
216 }
217 
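/*
 * Reset an insn_state so that the CFA, the tracked registers and the DRAP
 * state are all "undefined".
 */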
218 static void clear_insn_state(struct insn_state *state)
219 {
220 	int i;
221 
222 	memset(state, 0, sizeof(*state));
223 	state->cfa.base = CFI_UNDEFINED;
224 	for (i = 0; i < CFI_NUM_REGS; i++) {
225 		state->regs[i].base = CFI_UNDEFINED;
226 		state->vals[i].base = CFI_UNDEFINED;
227 	}
228 	state->drap_reg = CFI_UNDEFINED;
229 	state->drap_offset = -1;
230 }
231 
232 /*
233  * Call the arch-specific instruction decoder for all the instructions and add
234  * them to the global instruction list.
235  */
236 static int decode_instructions(struct objtool_file *file)
237 {
238 	struct section *sec;
239 	struct symbol *func;
240 	unsigned long offset;
241 	struct instruction *insn;
242 	unsigned long nr_insns = 0;
243 	int ret;
244 
245 	for_each_sec(file, sec) {
246 
247 		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
248 			continue;
249 
250 		if (strcmp(sec->name, ".altinstr_replacement") &&
251 		    strcmp(sec->name, ".altinstr_aux") &&
252 		    strncmp(sec->name, ".discard.", 9))
253 			sec->text = true;
254 
255 		for (offset = 0; offset < sec->len; offset += insn->len) {
256 			insn = malloc(sizeof(*insn));
257 			if (!insn) {
258 				WARN("malloc failed");
259 				return -1;
260 			}
261 			memset(insn, 0, sizeof(*insn));
262 			INIT_LIST_HEAD(&insn->alts);
263 			clear_insn_state(&insn->state);
264 
265 			insn->sec = sec;
266 			insn->offset = offset;
267 
268 			ret = arch_decode_instruction(file->elf, sec, offset,
269 						      sec->len - offset,
270 						      &insn->len, &insn->type,
271 						      &insn->immediate,
272 						      &insn->stack_op);
273 			if (ret)
274 				goto err;
275 
276 			hash_add(file->insn_hash, &insn->hash, insn->offset);
277 			list_add_tail(&insn->list, &file->insn_list);
278 			nr_insns++;
279 		}
280 
281 		list_for_each_entry(func, &sec->symbol_list, list) {
282 			if (func->type != STT_FUNC || func->alias != func)
283 				continue;
284 
285 			if (!find_insn(file, sec, func->offset)) {
286 				WARN("%s(): can't find starting instruction",
287 				     func->name);
288 				return -1;
289 			}
290 
291 			sym_for_each_insn(file, func, insn)
292 				insn->func = func;
293 		}
294 	}
295 
296 	if (stats)
297 		printf("nr_insns: %lu\n", nr_insns);
298 
299 	return 0;
300 
301 err:
302 	free(insn);
303 	return ret;
304 }
305 
306 /*
307  * Mark "ud2" instructions and manually annotated dead ends.
308  */
309 static int add_dead_ends(struct objtool_file *file)
310 {
311 	struct section *sec;
312 	struct rela *rela;
313 	struct instruction *insn;
314 	bool found;
315 
316 	/*
317 	 * By default, "ud2" is a dead end unless otherwise annotated, because
318 	 * GCC 7 inserts it for certain divide-by-zero cases.
319 	 */
320 	for_each_insn(file, insn)
321 		if (insn->type == INSN_BUG)
322 			insn->dead_end = true;
323 
324 	/*
325 	 * Check for manually annotated dead ends.
326 	 */
327 	sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
328 	if (!sec)
329 		goto reachable;
330 
331 	list_for_each_entry(rela, &sec->rela_list, list) {
332 		if (rela->sym->type != STT_SECTION) {
333 			WARN("unexpected relocation symbol type in %s", sec->name);
334 			return -1;
335 		}
336 		insn = find_insn(file, rela->sym->sec, rela->addend);
337 		if (insn)
338 			insn = list_prev_entry(insn, list);
339 		else if (rela->addend == rela->sym->sec->len) {
340 			found = false;
341 			list_for_each_entry_reverse(insn, &file->insn_list, list) {
342 				if (insn->sec == rela->sym->sec) {
343 					found = true;
344 					break;
345 				}
346 			}
347 
348 			if (!found) {
349 				WARN("can't find unreachable insn at %s+0x%x",
350 				     rela->sym->sec->name, rela->addend);
351 				return -1;
352 			}
353 		} else {
354 			WARN("can't find unreachable insn at %s+0x%x",
355 			     rela->sym->sec->name, rela->addend);
356 			return -1;
357 		}
358 
359 		insn->dead_end = true;
360 	}
361 
362 reachable:
363 	/*
364 	 * These manually annotated reachable checks are needed for GCC 4.4,
365 	 * where the Linux unreachable() macro isn't supported.  In that case
366 	 * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
367 	 * not a dead end.
368 	 */
369 	sec = find_section_by_name(file->elf, ".rela.discard.reachable");
370 	if (!sec)
371 		return 0;
372 
373 	list_for_each_entry(rela, &sec->rela_list, list) {
374 		if (rela->sym->type != STT_SECTION) {
375 			WARN("unexpected relocation symbol type in %s", sec->name);
376 			return -1;
377 		}
378 		insn = find_insn(file, rela->sym->sec, rela->addend);
379 		if (insn)
380 			insn = list_prev_entry(insn, list);
381 		else if (rela->addend == rela->sym->sec->len) {
382 			found = false;
383 			list_for_each_entry_reverse(insn, &file->insn_list, list) {
384 				if (insn->sec == rela->sym->sec) {
385 					found = true;
386 					break;
387 				}
388 			}
389 
390 			if (!found) {
391 				WARN("can't find reachable insn at %s+0x%x",
392 				     rela->sym->sec->name, rela->addend);
393 				return -1;
394 			}
395 		} else {
396 			WARN("can't find reachable insn at %s+0x%x",
397 			     rela->sym->sec->name, rela->addend);
398 			return -1;
399 		}
400 
401 		insn->dead_end = false;
402 	}
403 
404 	return 0;
405 }
406 
407 /*
408  * Warnings shouldn't be reported for ignored functions.
409  */
410 static void add_ignores(struct objtool_file *file)
411 {
412 	struct instruction *insn;
413 	struct section *sec;
414 	struct symbol *func;
415 	struct rela *rela;
416 
417 	sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
418 	if (!sec)
419 		return;
420 
421 	list_for_each_entry(rela, &sec->rela_list, list) {
422 		switch (rela->sym->type) {
423 		case STT_FUNC:
424 			func = rela->sym;
425 			break;
426 
427 		case STT_SECTION:
428 			func = find_func_by_offset(rela->sym->sec, rela->addend);
429 			if (!func)
430 				continue;
431 			break;
432 
433 		default:
434 			WARN("unexpected relocation symbol type in %s: %d", sec->name, rela->sym->type);
435 			continue;
436 		}
437 
438 		func_for_each_insn(file, func, insn)
439 			insn->ignore = true;
440 	}
441 }
442 
443 /*
444  * This is a whitelist of functions that are allowed to be called with AC set.
445  * The list is meant to be minimal and only contains compiler instrumentation
446  * ABI and a few functions used to implement *_{to,from}_user() functions.
447  *
448  * These functions must not directly change AC, but may PUSHF/POPF.
449  */
450 static const char *uaccess_safe_builtin[] = {
451 	/* KASAN */
452 	"kasan_report",
453 	"check_memory_region",
454 	/* KASAN out-of-line */
455 	"__asan_loadN_noabort",
456 	"__asan_load1_noabort",
457 	"__asan_load2_noabort",
458 	"__asan_load4_noabort",
459 	"__asan_load8_noabort",
460 	"__asan_load16_noabort",
461 	"__asan_storeN_noabort",
462 	"__asan_store1_noabort",
463 	"__asan_store2_noabort",
464 	"__asan_store4_noabort",
465 	"__asan_store8_noabort",
466 	"__asan_store16_noabort",
467 	/* KASAN in-line */
468 	"__asan_report_load_n_noabort",
469 	"__asan_report_load1_noabort",
470 	"__asan_report_load2_noabort",
471 	"__asan_report_load4_noabort",
472 	"__asan_report_load8_noabort",
473 	"__asan_report_load16_noabort",
474 	"__asan_report_store_n_noabort",
475 	"__asan_report_store1_noabort",
476 	"__asan_report_store2_noabort",
477 	"__asan_report_store4_noabort",
478 	"__asan_report_store8_noabort",
479 	"__asan_report_store16_noabort",
480 	/* KCOV */
481 	"write_comp_data",
482 	"__sanitizer_cov_trace_pc",
483 	"__sanitizer_cov_trace_const_cmp1",
484 	"__sanitizer_cov_trace_const_cmp2",
485 	"__sanitizer_cov_trace_const_cmp4",
486 	"__sanitizer_cov_trace_const_cmp8",
487 	"__sanitizer_cov_trace_cmp1",
488 	"__sanitizer_cov_trace_cmp2",
489 	"__sanitizer_cov_trace_cmp4",
490 	"__sanitizer_cov_trace_cmp8",
491 	/* UBSAN */
492 	"ubsan_type_mismatch_common",
493 	"__ubsan_handle_type_mismatch",
494 	"__ubsan_handle_type_mismatch_v1",
495 	"__ubsan_handle_shift_out_of_bounds",
496 	/* misc */
497 	"csum_partial_copy_generic",
498 	"__memcpy_mcsafe",
499 	"mcsafe_handle_tail",
500 	"ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
501 	NULL
502 };
503 
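/*
 * When uaccess checking is enabled, mark the whitelisted functions above as
 * safe to call with AC set.
 */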
504 static void add_uaccess_safe(struct objtool_file *file)
505 {
506 	struct symbol *func;
507 	const char **name;
508 
509 	if (!uaccess)
510 		return;
511 
512 	for (name = uaccess_safe_builtin; *name; name++) {
513 		func = find_symbol_by_name(file->elf, *name);
514 		if (!func)
515 			continue;
516 
517 		func->uaccess_safe = true;
518 	}
519 }
520 
521 /*
522  * FIXME: For now, just ignore any alternatives which add retpolines.  This is
523  * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
524  * But it at least allows objtool to understand the control flow *around* the
525  * retpoline.
526  */
527 static int add_ignore_alternatives(struct objtool_file *file)
528 {
529 	struct section *sec;
530 	struct rela *rela;
531 	struct instruction *insn;
532 
533 	sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
534 	if (!sec)
535 		return 0;
536 
537 	list_for_each_entry(rela, &sec->rela_list, list) {
538 		if (rela->sym->type != STT_SECTION) {
539 			WARN("unexpected relocation symbol type in %s", sec->name);
540 			return -1;
541 		}
542 
543 		insn = find_insn(file, rela->sym->sec, rela->addend);
544 		if (!insn) {
545 			WARN("bad .discard.ignore_alts entry");
546 			return -1;
547 		}
548 
549 		insn->ignore_alts = true;
550 	}
551 
552 	return 0;
553 }
554 
555 /*
556  * Find the destination instructions for all jumps.
557  */
558 static int add_jump_destinations(struct objtool_file *file)
559 {
560 	struct instruction *insn;
561 	struct rela *rela;
562 	struct section *dest_sec;
563 	unsigned long dest_off;
564 
565 	for_each_insn(file, insn) {
566 		if (!is_static_jump(insn))
567 			continue;
568 
569 		if (insn->ignore || insn->offset == FAKE_JUMP_OFFSET)
570 			continue;
571 
572 		rela = find_rela_by_dest_range(file->elf, insn->sec,
573 					       insn->offset, insn->len);
574 		if (!rela) {
575 			dest_sec = insn->sec;
576 			dest_off = insn->offset + insn->len + insn->immediate;
577 		} else if (rela->sym->type == STT_SECTION) {
578 			dest_sec = rela->sym->sec;
579 			dest_off = rela->addend + 4;
580 		} else if (rela->sym->sec->idx) {
581 			dest_sec = rela->sym->sec;
582 			dest_off = rela->sym->sym.st_value + rela->addend + 4;
583 		} else if (strstr(rela->sym->name, "_indirect_thunk_")) {
584 			/*
585 			 * Retpoline jumps are really dynamic jumps in
586 			 * disguise, so convert them accordingly.
587 			 */
588 			if (insn->type == INSN_JUMP_UNCONDITIONAL)
589 				insn->type = INSN_JUMP_DYNAMIC;
590 			else
591 				insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
592 
593 			insn->retpoline_safe = true;
594 			continue;
595 		} else {
596 			/* external sibling call */
597 			insn->call_dest = rela->sym;
598 			continue;
599 		}
600 
601 		insn->jump_dest = find_insn(file, dest_sec, dest_off);
602 		if (!insn->jump_dest) {
603 
604 			/*
605 			 * This is a special case where an alt instruction
606 			 * jumps past the end of the section.  These are
607 			 * handled later in handle_group_alt().
608 			 */
609 			if (!strcmp(insn->sec->name, ".altinstr_replacement"))
610 				continue;
611 
612 			WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
613 				  insn->sec, insn->offset, dest_sec->name,
614 				  dest_off);
615 			return -1;
616 		}
617 
618 		/*
619 		 * Cross-function jump.
620 		 */
621 		if (insn->func && insn->jump_dest->func &&
622 		    insn->func != insn->jump_dest->func) {
623 
624 			/*
625 			 * For GCC 8+, create parent/child links for any cold
626 			 * subfunctions.  This is _mostly_ redundant with a
627 			 * similar initialization in read_symbols().
628 			 *
629 			 * If a function has aliases, we want the *first* such
630 			 * function in the symbol table to be the subfunction's
631 			 * parent.  In that case we overwrite the
632 			 * initialization done in read_symbols().
633 			 *
634 			 * However this code can't completely replace the
635 			 * read_symbols() code because this doesn't detect the
636 			 * case where the parent function's only reference to a
637 			 * subfunction is through a jump table.
638 			 */
639 			if (!strstr(insn->func->name, ".cold.") &&
640 			    strstr(insn->jump_dest->func->name, ".cold.")) {
641 				insn->func->cfunc = insn->jump_dest->func;
642 				insn->jump_dest->func->pfunc = insn->func;
643 
644 			} else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
645 				   insn->jump_dest->offset == insn->jump_dest->func->offset) {
646 
647 				/* internal sibling call */
648 				insn->call_dest = insn->jump_dest->func;
649 			}
650 		}
651 	}
652 
653 	return 0;
654 }
655 
656 /*
657  * Find the destination instructions for all calls.
658  */
659 static int add_call_destinations(struct objtool_file *file)
660 {
661 	struct instruction *insn;
662 	unsigned long dest_off;
663 	struct rela *rela;
664 
665 	for_each_insn(file, insn) {
666 		if (insn->type != INSN_CALL)
667 			continue;
668 
669 		rela = find_rela_by_dest_range(file->elf, insn->sec,
670 					       insn->offset, insn->len);
671 		if (!rela) {
672 			dest_off = insn->offset + insn->len + insn->immediate;
673 			insn->call_dest = find_func_by_offset(insn->sec, dest_off);
674 			if (!insn->call_dest)
675 				insn->call_dest = find_symbol_by_offset(insn->sec, dest_off);
676 
677 			if (insn->ignore)
678 				continue;
679 
680 			if (!insn->call_dest) {
681 				WARN_FUNC("unsupported intra-function call",
682 					  insn->sec, insn->offset);
683 				if (retpoline)
684 					WARN("If this is a retpoline, please patch it in with alternatives and annotate it with ANNOTATE_NOSPEC_ALTERNATIVE.");
685 				return -1;
686 			}
687 
688 			if (insn->func && insn->call_dest->type != STT_FUNC) {
689 				WARN_FUNC("unsupported call to non-function",
690 					  insn->sec, insn->offset);
691 				return -1;
692 			}
693 
694 		} else if (rela->sym->type == STT_SECTION) {
695 			insn->call_dest = find_func_by_offset(rela->sym->sec,
696 							      rela->addend+4);
697 			if (!insn->call_dest) {
698 				WARN_FUNC("can't find call dest symbol at %s+0x%x",
699 					  insn->sec, insn->offset,
700 					  rela->sym->sec->name,
701 					  rela->addend + 4);
702 				return -1;
703 			}
704 		} else
705 			insn->call_dest = rela->sym;
706 	}
707 
708 	return 0;
709 }
710 
711 /*
712  * The .altinstructions section requires some extra special care, over and above
713  * what other special sections require:
714  *
715  * 1. Because alternatives are patched in-place, we need to insert a fake jump
716  *    instruction at the end so that validate_branch() skips all the original
717  *    replaced instructions when validating the new instruction path.
718  *
719  * 2. An added wrinkle is that the new instruction length might be zero.  In
720  *    that case the old instructions are replaced with noops.  We simulate that
721  *    by creating a fake jump as the only new instruction.
722  *
723  * 3. In some cases, the alternative section includes an instruction which
724  *    conditionally jumps to the _end_ of the entry.  We have to modify these
725  *    jumps' destinations to point back to .text rather than the end of the
726  *    entry in .altinstr_replacement.
727  */
728 static int handle_group_alt(struct objtool_file *file,
729 			    struct special_alt *special_alt,
730 			    struct instruction *orig_insn,
731 			    struct instruction **new_insn)
732 {
733 	struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
734 	unsigned long dest_off;
735 
736 	last_orig_insn = NULL;
737 	insn = orig_insn;
738 	sec_for_each_insn_from(file, insn) {
739 		if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
740 			break;
741 
742 		insn->alt_group = true;
743 		last_orig_insn = insn;
744 	}
745 
746 	if (next_insn_same_sec(file, last_orig_insn)) {
747 		fake_jump = malloc(sizeof(*fake_jump));
748 		if (!fake_jump) {
749 			WARN("malloc failed");
750 			return -1;
751 		}
752 		memset(fake_jump, 0, sizeof(*fake_jump));
753 		INIT_LIST_HEAD(&fake_jump->alts);
754 		clear_insn_state(&fake_jump->state);
755 
756 		fake_jump->sec = special_alt->new_sec;
757 		fake_jump->offset = FAKE_JUMP_OFFSET;
758 		fake_jump->type = INSN_JUMP_UNCONDITIONAL;
759 		fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
760 		fake_jump->func = orig_insn->func;
761 	}
762 
763 	if (!special_alt->new_len) {
764 		if (!fake_jump) {
765 			WARN("%s: empty alternative at end of section",
766 			     special_alt->orig_sec->name);
767 			return -1;
768 		}
769 
770 		*new_insn = fake_jump;
771 		return 0;
772 	}
773 
774 	last_new_insn = NULL;
775 	insn = *new_insn;
776 	sec_for_each_insn_from(file, insn) {
777 		if (insn->offset >= special_alt->new_off + special_alt->new_len)
778 			break;
779 
780 		last_new_insn = insn;
781 
782 		insn->ignore = orig_insn->ignore_alts;
783 		insn->func = orig_insn->func;
784 
785 		/*
786 		 * Since alternative replacement code is copy/pasted by the
787 		 * kernel after applying relocations, generally such code can't
788 		 * have relative-address relocation references to outside the
789 		 * .altinstr_replacement section, unless the arch's
790 		 * alternatives code can adjust the relative offsets
791 		 * accordingly.
792 		 *
793 		 * The x86 alternatives code adjusts the offsets only when it
794 		 * encounters a branch instruction at the very beginning of the
795 		 * replacement group.
796 		 */
797 		if ((insn->offset != special_alt->new_off ||
798 		    (insn->type != INSN_CALL && !is_static_jump(insn))) &&
799 		    find_rela_by_dest_range(file->elf, insn->sec, insn->offset, insn->len)) {
800 
801 			WARN_FUNC("unsupported relocation in alternatives section",
802 				  insn->sec, insn->offset);
803 			return -1;
804 		}
805 
806 		if (!is_static_jump(insn))
807 			continue;
808 
809 		if (!insn->immediate)
810 			continue;
811 
812 		dest_off = insn->offset + insn->len + insn->immediate;
813 		if (dest_off == special_alt->new_off + special_alt->new_len) {
814 			if (!fake_jump) {
815 				WARN("%s: alternative jump to end of section",
816 				     special_alt->orig_sec->name);
817 				return -1;
818 			}
819 			insn->jump_dest = fake_jump;
820 		}
821 
822 		if (!insn->jump_dest) {
823 			WARN_FUNC("can't find alternative jump destination",
824 				  insn->sec, insn->offset);
825 			return -1;
826 		}
827 	}
828 
829 	if (!last_new_insn) {
830 		WARN_FUNC("can't find last new alternative instruction",
831 			  special_alt->new_sec, special_alt->new_off);
832 		return -1;
833 	}
834 
835 	if (fake_jump)
836 		list_add(&fake_jump->list, &last_new_insn->list);
837 
838 	return 0;
839 }
840 
841 /*
842  * A jump label entry can either convert a nop to a jump or a jump to a nop.
843  * If the original instruction is a jump, make the alt entry an effective nop
844  * by just skipping the original instruction.
845  */
846 static int handle_jump_alt(struct objtool_file *file,
847 			   struct special_alt *special_alt,
848 			   struct instruction *orig_insn,
849 			   struct instruction **new_insn)
850 {
851 	if (orig_insn->type == INSN_NOP)
852 		return 0;
853 
854 	if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
855 		WARN_FUNC("unsupported instruction at jump label",
856 			  orig_insn->sec, orig_insn->offset);
857 		return -1;
858 	}
859 
860 	*new_insn = list_next_entry(orig_insn, list);
861 	return 0;
862 }
863 
864 /*
865  * Read all the special sections which contain alternate instructions that can
866  * be patched in or redirected to at runtime.  Each instruction with alternate
867  * instruction(s) has them added to its insn->alts list, which will be
868  * traversed in validate_branch().
869  */
870 static int add_special_section_alts(struct objtool_file *file)
871 {
872 	struct list_head special_alts;
873 	struct instruction *orig_insn, *new_insn;
874 	struct special_alt *special_alt, *tmp;
875 	struct alternative *alt;
876 	int ret;
877 
878 	ret = special_get_alts(file->elf, &special_alts);
879 	if (ret)
880 		return ret;
881 
882 	list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {
883 
884 		orig_insn = find_insn(file, special_alt->orig_sec,
885 				      special_alt->orig_off);
886 		if (!orig_insn) {
887 			WARN_FUNC("special: can't find orig instruction",
888 				  special_alt->orig_sec, special_alt->orig_off);
889 			ret = -1;
890 			goto out;
891 		}
892 
893 		new_insn = NULL;
894 		if (!special_alt->group || special_alt->new_len) {
895 			new_insn = find_insn(file, special_alt->new_sec,
896 					     special_alt->new_off);
897 			if (!new_insn) {
898 				WARN_FUNC("special: can't find new instruction",
899 					  special_alt->new_sec,
900 					  special_alt->new_off);
901 				ret = -1;
902 				goto out;
903 			}
904 		}
905 
906 		if (special_alt->group) {
907 			ret = handle_group_alt(file, special_alt, orig_insn,
908 					       &new_insn);
909 			if (ret)
910 				goto out;
911 		} else if (special_alt->jump_or_nop) {
912 			ret = handle_jump_alt(file, special_alt, orig_insn,
913 					      &new_insn);
914 			if (ret)
915 				goto out;
916 		}
917 
918 		alt = malloc(sizeof(*alt));
919 		if (!alt) {
920 			WARN("malloc failed");
921 			ret = -1;
922 			goto out;
923 		}
924 
925 		alt->insn = new_insn;
926 		alt->skip_orig = special_alt->skip_orig;
927 		orig_insn->ignore_alts |= special_alt->skip_alt;
928 		list_add_tail(&alt->list, &orig_insn->alts);
929 
930 		list_del(&special_alt->list);
931 		free(special_alt);
932 	}
933 
934 out:
935 	return ret;
936 }
937 
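/*
 * Walk the relocations of a single jump table, starting at @table, and add
 * each target instruction as an alternative branch destination for @insn.
 */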
938 static int add_jump_table(struct objtool_file *file, struct instruction *insn,
939 			    struct rela *table)
940 {
941 	struct rela *rela = table;
942 	struct instruction *dest_insn;
943 	struct alternative *alt;
944 	struct symbol *pfunc = insn->func->pfunc;
945 	unsigned int prev_offset = 0;
946 
947 	/*
948 	 * Each @rela is a switch table relocation which points to the target
949 	 * instruction.
950 	 */
951 	list_for_each_entry_from(rela, &table->sec->rela_list, list) {
952 
953 		/* Check for the end of the table: */
954 		if (rela != table && rela->jump_table_start)
955 			break;
956 
957 		/* Make sure the table entries are consecutive: */
958 		if (prev_offset && rela->offset != prev_offset + 8)
959 			break;
960 
961 		/* Detect function pointers from contiguous objects: */
962 		if (rela->sym->sec == pfunc->sec &&
963 		    rela->addend == pfunc->offset)
964 			break;
965 
966 		dest_insn = find_insn(file, rela->sym->sec, rela->addend);
967 		if (!dest_insn)
968 			break;
969 
970 		/* Make sure the destination is in the same function: */
971 		if (!dest_insn->func || dest_insn->func->pfunc != pfunc)
972 			break;
973 
974 		alt = malloc(sizeof(*alt));
975 		if (!alt) {
976 			WARN("malloc failed");
977 			return -1;
978 		}
979 
980 		alt->insn = dest_insn;
981 		list_add_tail(&alt->list, &insn->alts);
982 		prev_offset = rela->offset;
983 	}
984 
985 	if (!prev_offset) {
986 		WARN_FUNC("can't find switch jump table",
987 			  insn->sec, insn->offset);
988 		return -1;
989 	}
990 
991 	return 0;
992 }
993 
994 /*
995  * find_jump_table() - Given a dynamic jump, find the switch jump table in
996  * .rodata associated with it.
997  *
998  * There are 3 basic patterns:
999  *
1000  * 1. jmpq *[rodata addr](,%reg,8)
1001  *
1002  *    This is the most common case by far.  It jumps to an address in a simple
1003  *    jump table which is stored in .rodata.
1004  *
1005  * 2. jmpq *[rodata addr](%rip)
1006  *
1007  *    This is caused by a rare GCC quirk, currently only seen in three driver
1008  *    functions in the kernel, only with certain obscure non-distro configs.
1009  *
1010  *    As part of an optimization, GCC makes a copy of an existing switch jump
1011  *    table, modifies it, and then hard-codes the jump (albeit with an indirect
1012  *    jump) to use a single entry in the table.  The rest of the jump table and
1013  *    some of its jump targets remain as dead code.
1014  *
1015  *    In such a case we can just crudely ignore all unreachable instruction
1016  *    warnings for the entire object file.  Ideally we would just ignore them
1017  *    for the function, but that would require redesigning the code quite a
1018  *    bit.  And honestly that's just not worth doing: unreachable instruction
1019  *    warnings are of questionable value anyway, and this is such a rare issue.
1020  *
1021  * 3. mov [rodata addr],%reg1
1022  *    ... some instructions ...
1023  *    jmpq *(%reg1,%reg2,8)
1024  *
1025  *    This is a fairly uncommon pattern which is new for GCC 6.  As of this
1026  *    writing, there are 11 occurrences of it in the allmodconfig kernel.
1027  *
1028  *    As of GCC 7 there are quite a few more of these and the 'in between' code
1029  *    is significant. Esp. with KASAN enabled some of the code between the mov
1030  *    and jmpq uses .rodata itself, which can confuse things.
1031  *
1032  *    TODO: Once we have DWARF CFI and smarter instruction decoding logic,
1033  *    ensure the same register is used in the mov and jump instructions.
1034  *
1035  *    NOTE: RETPOLINE made it harder still to decode dynamic jumps.
1036  */
1037 static struct rela *find_jump_table(struct objtool_file *file,
1038 				      struct symbol *func,
1039 				      struct instruction *insn)
1040 {
1041 	struct rela *text_rela, *table_rela;
1042 	struct instruction *dest_insn, *orig_insn = insn;
1043 	struct section *table_sec;
1044 	unsigned long table_offset;
1045 
1046 	/*
1047 	 * Backward search using the @first_jump_src links: these help avoid
1048 	 * much of the 'in between' code and keep us from getting confused by
1049 	 * it.
1050 	 */
1051 	for (;
1052 	     &insn->list != &file->insn_list &&
1053 	     insn->sec == func->sec &&
1054 	     insn->offset >= func->offset;
1055 
1056 	     insn = insn->first_jump_src ?: list_prev_entry(insn, list)) {
1057 
1058 		if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
1059 			break;
1060 
1061 		/* allow small jumps within the range */
1062 		if (insn->type == INSN_JUMP_UNCONDITIONAL &&
1063 		    insn->jump_dest &&
1064 		    (insn->jump_dest->offset <= insn->offset ||
1065 		     insn->jump_dest->offset > orig_insn->offset))
1066 		    break;
1067 
1068 		/* look for a relocation which references .rodata */
1069 		text_rela = find_rela_by_dest_range(file->elf, insn->sec,
1070 						    insn->offset, insn->len);
1071 		if (!text_rela || text_rela->sym->type != STT_SECTION ||
1072 		    !text_rela->sym->sec->rodata)
1073 			continue;
1074 
1075 		table_offset = text_rela->addend;
1076 		table_sec = text_rela->sym->sec;
1077 
1078 		if (text_rela->type == R_X86_64_PC32)
1079 			table_offset += 4;
1080 
1081 		/*
1082 		 * Make sure the .rodata address isn't associated with a
1083 		 * symbol.  GCC jump tables are anonymous data.
1084 		 *
1085 		 * Also support C jump tables which are in the same format as
1086 		 * switch jump tables.  For objtool to recognize them, they
1087 		 * need to be placed in the C_JUMP_TABLE_SECTION section.  They
1088 		 * have symbols associated with them.
1089 		 */
1090 		if (find_symbol_containing(table_sec, table_offset) &&
1091 		    strcmp(table_sec->name, C_JUMP_TABLE_SECTION))
1092 			continue;
1093 
1094 		/*
1095 		 * Each table entry has a rela associated with it.  The rela
1096 		 * should reference text in the same function as the original
1097 		 * instruction.
1098 		 */
1099 		table_rela = find_rela_by_dest(file->elf, table_sec, table_offset);
1100 		if (!table_rela)
1101 			continue;
1102 		dest_insn = find_insn(file, table_rela->sym->sec, table_rela->addend);
1103 		if (!dest_insn || !dest_insn->func || dest_insn->func->pfunc != func)
1104 			continue;
1105 
1106 		/*
1107 		 * Use of RIP-relative switch jumps is quite rare, and
1108 		 * indicates a rare GCC quirk/bug which can leave dead code
1109 		 * behind.
1110 		 */
1111 		if (text_rela->type == R_X86_64_PC32)
1112 			file->ignore_unreachables = true;
1113 
1114 		return table_rela;
1115 	}
1116 
1117 	return NULL;
1118 }
1119 
1120 /*
1121  * First pass: Mark the head of each jump table so that in the next pass,
1122  * we know when a given jump table ends and the next one starts.
1123  */
1124 static void mark_func_jump_tables(struct objtool_file *file,
1125 				    struct symbol *func)
1126 {
1127 	struct instruction *insn, *last = NULL;
1128 	struct rela *rela;
1129 
1130 	func_for_each_insn(file, func, insn) {
1131 		if (!last)
1132 			last = insn;
1133 
1134 		/*
1135 		 * Store back-pointers for unconditional forward jumps such
1136 		 * that find_jump_table() can back-track using those and
1137 		 * avoid some potentially confusing code.
1138 		 */
1139 		if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
1140 		    insn->offset > last->offset &&
1141 		    insn->jump_dest->offset > insn->offset &&
1142 		    !insn->jump_dest->first_jump_src) {
1143 
1144 			insn->jump_dest->first_jump_src = insn;
1145 			last = insn->jump_dest;
1146 		}
1147 
1148 		if (insn->type != INSN_JUMP_DYNAMIC)
1149 			continue;
1150 
1151 		rela = find_jump_table(file, func, insn);
1152 		if (rela) {
1153 			rela->jump_table_start = true;
1154 			insn->jump_table = rela;
1155 		}
1156 	}
1157 }
1158 
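/*
 * Second pass: for each dynamic jump which was associated with a jump table
 * by mark_func_jump_tables(), add the table's destinations as alternatives.
 */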
1159 static int add_func_jump_tables(struct objtool_file *file,
1160 				  struct symbol *func)
1161 {
1162 	struct instruction *insn;
1163 	int ret;
1164 
1165 	func_for_each_insn(file, func, insn) {
1166 		if (!insn->jump_table)
1167 			continue;
1168 
1169 		ret = add_jump_table(file, insn, insn->jump_table);
1170 		if (ret)
1171 			return ret;
1172 	}
1173 
1174 	return 0;
1175 }
1176 
1177 /*
1178  * For some switch statements, gcc generates a jump table in the .rodata
1179  * section which contains a list of addresses within the function to jump to.
1180  * This finds these jump tables and adds them to the insn->alts lists.
1181  */
1182 static int add_jump_table_alts(struct objtool_file *file)
1183 {
1184 	struct section *sec;
1185 	struct symbol *func;
1186 	int ret;
1187 
1188 	if (!file->rodata)
1189 		return 0;
1190 
1191 	for_each_sec(file, sec) {
1192 		list_for_each_entry(func, &sec->symbol_list, list) {
1193 			if (func->type != STT_FUNC)
1194 				continue;
1195 
1196 			mark_func_jump_tables(file, func);
1197 			ret = add_func_jump_tables(file, func);
1198 			if (ret)
1199 				return ret;
1200 		}
1201 	}
1202 
1203 	return 0;
1204 }
1205 
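/*
 * Read the unwind hint annotations in .discard.unwind_hints (typically emitted
 * by the UNWIND_HINT_* asm macros) and apply them to the corresponding
 * instructions' unwind state.
 */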
1206 static int read_unwind_hints(struct objtool_file *file)
1207 {
1208 	struct section *sec, *relasec;
1209 	struct rela *rela;
1210 	struct unwind_hint *hint;
1211 	struct instruction *insn;
1212 	struct cfi_reg *cfa;
1213 	int i;
1214 
1215 	sec = find_section_by_name(file->elf, ".discard.unwind_hints");
1216 	if (!sec)
1217 		return 0;
1218 
1219 	relasec = sec->rela;
1220 	if (!relasec) {
1221 		WARN("missing .rela.discard.unwind_hints section");
1222 		return -1;
1223 	}
1224 
1225 	if (sec->len % sizeof(struct unwind_hint)) {
1226 		WARN("struct unwind_hint size mismatch");
1227 		return -1;
1228 	}
1229 
1230 	file->hints = true;
1231 
1232 	for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
1233 		hint = (struct unwind_hint *)sec->data->d_buf + i;
1234 
1235 		rela = find_rela_by_dest(file->elf, sec, i * sizeof(*hint));
1236 		if (!rela) {
1237 			WARN("can't find rela for unwind_hints[%d]", i);
1238 			return -1;
1239 		}
1240 
1241 		insn = find_insn(file, rela->sym->sec, rela->addend);
1242 		if (!insn) {
1243 			WARN("can't find insn for unwind_hints[%d]", i);
1244 			return -1;
1245 		}
1246 
1247 		cfa = &insn->state.cfa;
1248 
1249 		if (hint->type == UNWIND_HINT_TYPE_SAVE) {
1250 			insn->save = true;
1251 			continue;
1252 
1253 		} else if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
1254 			insn->restore = true;
1255 			insn->hint = true;
1256 			continue;
1257 		}
1258 
1259 		insn->hint = true;
1260 
1261 		switch (hint->sp_reg) {
1262 		case ORC_REG_UNDEFINED:
1263 			cfa->base = CFI_UNDEFINED;
1264 			break;
1265 		case ORC_REG_SP:
1266 			cfa->base = CFI_SP;
1267 			break;
1268 		case ORC_REG_BP:
1269 			cfa->base = CFI_BP;
1270 			break;
1271 		case ORC_REG_SP_INDIRECT:
1272 			cfa->base = CFI_SP_INDIRECT;
1273 			break;
1274 		case ORC_REG_R10:
1275 			cfa->base = CFI_R10;
1276 			break;
1277 		case ORC_REG_R13:
1278 			cfa->base = CFI_R13;
1279 			break;
1280 		case ORC_REG_DI:
1281 			cfa->base = CFI_DI;
1282 			break;
1283 		case ORC_REG_DX:
1284 			cfa->base = CFI_DX;
1285 			break;
1286 		default:
1287 			WARN_FUNC("unsupported unwind_hint sp base reg %d",
1288 				  insn->sec, insn->offset, hint->sp_reg);
1289 			return -1;
1290 		}
1291 
1292 		cfa->offset = hint->sp_offset;
1293 		insn->state.type = hint->type;
1294 		insn->state.end = hint->end;
1295 	}
1296 
1297 	return 0;
1298 }
1299 
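/*
 * Apply ANNOTATE_RETPOLINE_SAFE annotations (.discard.retpoline_safe): mark
 * the referenced indirect jumps/calls as not needing a retpoline.
 */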
1300 static int read_retpoline_hints(struct objtool_file *file)
1301 {
1302 	struct section *sec;
1303 	struct instruction *insn;
1304 	struct rela *rela;
1305 
1306 	sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
1307 	if (!sec)
1308 		return 0;
1309 
1310 	list_for_each_entry(rela, &sec->rela_list, list) {
1311 		if (rela->sym->type != STT_SECTION) {
1312 			WARN("unexpected relocation symbol type in %s", sec->name);
1313 			return -1;
1314 		}
1315 
1316 		insn = find_insn(file, rela->sym->sec, rela->addend);
1317 		if (!insn) {
1318 			WARN("bad .discard.retpoline_safe entry");
1319 			return -1;
1320 		}
1321 
1322 		if (insn->type != INSN_JUMP_DYNAMIC &&
1323 		    insn->type != INSN_CALL_DYNAMIC) {
1324 			WARN_FUNC("retpoline_safe hint not an indirect jump/call",
1325 				  insn->sec, insn->offset);
1326 			return -1;
1327 		}
1328 
1329 		insn->retpoline_safe = true;
1330 	}
1331 
1332 	return 0;
1333 }
1334 
1335 static void mark_rodata(struct objtool_file *file)
1336 {
1337 	struct section *sec;
1338 	bool found = false;
1339 
1340 	/*
1341 	 * Search for the following rodata sections, each of which can
1342 	 * potentially contain jump tables:
1343 	 *
1344 	 * - .rodata: can contain GCC switch tables
1345 	 * - .rodata.<func>: same, if -fdata-sections is being used
1346 	 * - .rodata..c_jump_table: contains C annotated jump tables
1347 	 *
1348 	 * .rodata.str1.* sections are ignored; they don't contain jump tables.
1349 	 */
1350 	for_each_sec(file, sec) {
1351 		if ((!strncmp(sec->name, ".rodata", 7) && !strstr(sec->name, ".str1.")) ||
1352 		    !strcmp(sec->name, C_JUMP_TABLE_SECTION)) {
1353 			sec->rodata = true;
1354 			found = true;
1355 		}
1356 	}
1357 
1358 	file->rodata = found;
1359 }
1360 
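/*
 * Run all the decoding and annotation passes in order: instructions, dead
 * ends, ignores, alternatives, jump/call destinations, jump tables and hints.
 */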
1361 static int decode_sections(struct objtool_file *file)
1362 {
1363 	int ret;
1364 
1365 	mark_rodata(file);
1366 
1367 	ret = decode_instructions(file);
1368 	if (ret)
1369 		return ret;
1370 
1371 	ret = add_dead_ends(file);
1372 	if (ret)
1373 		return ret;
1374 
1375 	add_ignores(file);
1376 	add_uaccess_safe(file);
1377 
1378 	ret = add_ignore_alternatives(file);
1379 	if (ret)
1380 		return ret;
1381 
1382 	ret = add_jump_destinations(file);
1383 	if (ret)
1384 		return ret;
1385 
1386 	ret = add_special_section_alts(file);
1387 	if (ret)
1388 		return ret;
1389 
1390 	ret = add_call_destinations(file);
1391 	if (ret)
1392 		return ret;
1393 
1394 	ret = add_jump_table_alts(file);
1395 	if (ret)
1396 		return ret;
1397 
1398 	ret = read_unwind_hints(file);
1399 	if (ret)
1400 		return ret;
1401 
1402 	ret = read_retpoline_hints(file);
1403 	if (ret)
1404 		return ret;
1405 
1406 	return 0;
1407 }
1408 
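/* Is this a call to the compiler-generated __fentry__ ftrace hook? */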
1409 static bool is_fentry_call(struct instruction *insn)
1410 {
1411 	if (insn->type == INSN_CALL &&
1412 	    insn->call_dest->type == STT_NOTYPE &&
1413 	    !strcmp(insn->call_dest->name, "__fentry__"))
1414 		return true;
1415 
1416 	return false;
1417 }
1418 
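/*
 * Has the stack state diverged from the initial function-entry frame (CFA,
 * stack size, saved registers or DRAP)?
 */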
1419 static bool has_modified_stack_frame(struct insn_state *state)
1420 {
1421 	int i;
1422 
1423 	if (state->cfa.base != initial_func_cfi.cfa.base ||
1424 	    state->cfa.offset != initial_func_cfi.cfa.offset ||
1425 	    state->stack_size != initial_func_cfi.cfa.offset ||
1426 	    state->drap)
1427 		return true;
1428 
1429 	for (i = 0; i < CFI_NUM_REGS; i++)
1430 		if (state->regs[i].base != initial_func_cfi.regs[i].base ||
1431 		    state->regs[i].offset != initial_func_cfi.regs[i].offset)
1432 			return true;
1433 
1434 	return false;
1435 }
1436 
1437 static bool has_valid_stack_frame(struct insn_state *state)
1438 {
1439 	if (state->cfa.base == CFI_BP && state->regs[CFI_BP].base == CFI_CFA &&
1440 	    state->regs[CFI_BP].offset == -16)
1441 		return true;
1442 
1443 	if (state->drap && state->regs[CFI_BP].base == CFI_BP)
1444 		return true;
1445 
1446 	return false;
1447 }
1448 
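/*
 * For regs-hinted code (ORC_TYPE_REGS*), only track how pushes, pops and
 * immediate adds to the stack pointer move the CFA offset.
 */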
1449 static int update_insn_state_regs(struct instruction *insn, struct insn_state *state)
1450 {
1451 	struct cfi_reg *cfa = &state->cfa;
1452 	struct stack_op *op = &insn->stack_op;
1453 
1454 	if (cfa->base != CFI_SP)
1455 		return 0;
1456 
1457 	/* push */
1458 	if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
1459 		cfa->offset += 8;
1460 
1461 	/* pop */
1462 	if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
1463 		cfa->offset -= 8;
1464 
1465 	/* add immediate to sp */
1466 	if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
1467 	    op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
1468 		cfa->offset -= op->src.offset;
1469 
1470 	return 0;
1471 }
1472 
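/*
 * Record where a callee-saved register was stored, unless it's already being
 * tracked; restore_reg() below forgets it again when it's restored.
 */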
1473 static void save_reg(struct insn_state *state, unsigned char reg, int base,
1474 		     int offset)
1475 {
1476 	if (arch_callee_saved_reg(reg) &&
1477 	    state->regs[reg].base == CFI_UNDEFINED) {
1478 		state->regs[reg].base = base;
1479 		state->regs[reg].offset = offset;
1480 	}
1481 }
1482 
1483 static void restore_reg(struct insn_state *state, unsigned char reg)
1484 {
1485 	state->regs[reg].base = CFI_UNDEFINED;
1486 	state->regs[reg].offset = 0;
1487 }
1488 
1489 /*
1490  * A note about DRAP stack alignment:
1491  *
1492  * GCC has the concept of a DRAP register, which is used to help keep track of
1493  * the stack pointer when aligning the stack.  r10 or r13 is used as the DRAP
1494  * register.  The typical DRAP pattern is:
1495  *
1496  *   4c 8d 54 24 08		lea    0x8(%rsp),%r10
1497  *   48 83 e4 c0		and    $0xffffffffffffffc0,%rsp
1498  *   41 ff 72 f8		pushq  -0x8(%r10)
1499  *   55				push   %rbp
1500  *   48 89 e5			mov    %rsp,%rbp
1501  *				(more pushes)
1502  *   41 52			push   %r10
1503  *				...
1504  *   41 5a			pop    %r10
1505  *				(more pops)
1506  *   5d				pop    %rbp
1507  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1508  *   c3				retq
1509  *
1510  * There are some variations in the epilogues, like:
1511  *
1512  *   5b				pop    %rbx
1513  *   41 5a			pop    %r10
1514  *   41 5c			pop    %r12
1515  *   41 5d			pop    %r13
1516  *   41 5e			pop    %r14
1517  *   c9				leaveq
1518  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1519  *   c3				retq
1520  *
1521  * and:
1522  *
1523  *   4c 8b 55 e8		mov    -0x18(%rbp),%r10
1524  *   48 8b 5d e0		mov    -0x20(%rbp),%rbx
1525  *   4c 8b 65 f0		mov    -0x10(%rbp),%r12
1526  *   4c 8b 6d f8		mov    -0x8(%rbp),%r13
1527  *   c9				leaveq
1528  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1529  *   c3				retq
1530  *
1531  * Sometimes r13 is used as the DRAP register, in which case it's saved and
1532  * restored beforehand:
1533  *
1534  *   41 55			push   %r13
1535  *   4c 8d 6c 24 10		lea    0x10(%rsp),%r13
1536  *   48 83 e4 f0		and    $0xfffffffffffffff0,%rsp
1537  *				...
1538  *   49 8d 65 f0		lea    -0x10(%r13),%rsp
1539  *   41 5d			pop    %r13
1540  *   c3				retq
1541  */
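/*
 * Apply the effects of a single stack-related instruction (insn->stack_op) to
 * the given insn_state: CFA base/offset, callee-saved register locations and
 * DRAP tracking.
 */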
1542 static int update_insn_state(struct instruction *insn, struct insn_state *state)
1543 {
1544 	struct stack_op *op = &insn->stack_op;
1545 	struct cfi_reg *cfa = &state->cfa;
1546 	struct cfi_reg *regs = state->regs;
1547 
1548 	/* stack operations don't make sense with an undefined CFA */
1549 	if (cfa->base == CFI_UNDEFINED) {
1550 		if (insn->func) {
1551 			WARN_FUNC("undefined stack state", insn->sec, insn->offset);
1552 			return -1;
1553 		}
1554 		return 0;
1555 	}
1556 
1557 	if (state->type == ORC_TYPE_REGS || state->type == ORC_TYPE_REGS_IRET)
1558 		return update_insn_state_regs(insn, state);
1559 
1560 	switch (op->dest.type) {
1561 
1562 	case OP_DEST_REG:
1563 		switch (op->src.type) {
1564 
1565 		case OP_SRC_REG:
1566 			if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
1567 			    cfa->base == CFI_SP &&
1568 			    regs[CFI_BP].base == CFI_CFA &&
1569 			    regs[CFI_BP].offset == -cfa->offset) {
1570 
1571 				/* mov %rsp, %rbp */
1572 				cfa->base = op->dest.reg;
1573 				state->bp_scratch = false;
1574 			}
1575 
1576 			else if (op->src.reg == CFI_SP &&
1577 				 op->dest.reg == CFI_BP && state->drap) {
1578 
1579 				/* drap: mov %rsp, %rbp */
1580 				regs[CFI_BP].base = CFI_BP;
1581 				regs[CFI_BP].offset = -state->stack_size;
1582 				state->bp_scratch = false;
1583 			}
1584 
1585 			else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1586 
1587 				/*
1588 				 * mov %rsp, %reg
1589 				 *
1590 				 * This is needed for the rare case where GCC
1591 				 * does:
1592 				 *
1593 				 *   mov    %rsp, %rax
1594 				 *   ...
1595 				 *   mov    %rax, %rsp
1596 				 */
1597 				state->vals[op->dest.reg].base = CFI_CFA;
1598 				state->vals[op->dest.reg].offset = -state->stack_size;
1599 			}
1600 
1601 			else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
1602 				 cfa->base == CFI_BP) {
1603 
1604 				/*
1605 				 * mov %rbp, %rsp
1606 				 *
1607 				 * Restore the original stack pointer (Clang).
1608 				 */
1609 				state->stack_size = -state->regs[CFI_BP].offset;
1610 			}
1611 
1612 			else if (op->dest.reg == cfa->base) {
1613 
1614 				/* mov %reg, %rsp */
1615 				if (cfa->base == CFI_SP &&
1616 				    state->vals[op->src.reg].base == CFI_CFA) {
1617 
1618 					/*
1619 					 * This is needed for the rare case
1620 					 * where GCC does something dumb like:
1621 					 *
1622 					 *   lea    0x8(%rsp), %rcx
1623 					 *   ...
1624 					 *   mov    %rcx, %rsp
1625 					 */
1626 					cfa->offset = -state->vals[op->src.reg].offset;
1627 					state->stack_size = cfa->offset;
1628 
1629 				} else {
1630 					cfa->base = CFI_UNDEFINED;
1631 					cfa->offset = 0;
1632 				}
1633 			}
1634 
1635 			break;
1636 
1637 		case OP_SRC_ADD:
1638 			if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {
1639 
1640 				/* add imm, %rsp */
1641 				state->stack_size -= op->src.offset;
1642 				if (cfa->base == CFI_SP)
1643 					cfa->offset -= op->src.offset;
1644 				break;
1645 			}
1646 
1647 			if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
1648 
1649 				/* lea disp(%rbp), %rsp */
1650 				state->stack_size = -(op->src.offset + regs[CFI_BP].offset);
1651 				break;
1652 			}
1653 
1654 			if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1655 
1656 				/* drap: lea disp(%rsp), %drap */
1657 				state->drap_reg = op->dest.reg;
1658 
1659 				/*
1660 				 * lea disp(%rsp), %reg
1661 				 *
1662 				 * This is needed for the rare case where GCC
1663 				 * does something dumb like:
1664 				 *
1665 				 *   lea    0x8(%rsp), %rcx
1666 				 *   ...
1667 				 *   mov    %rcx, %rsp
1668 				 */
1669 				state->vals[op->dest.reg].base = CFI_CFA;
1670 				state->vals[op->dest.reg].offset = \
1671 					-state->stack_size + op->src.offset;
1672 
1673 				break;
1674 			}
1675 
1676 			if (state->drap && op->dest.reg == CFI_SP &&
1677 			    op->src.reg == state->drap_reg) {
1678 
1679 				 /* drap: lea disp(%drap), %rsp */
1680 				cfa->base = CFI_SP;
1681 				cfa->offset = state->stack_size = -op->src.offset;
1682 				state->drap_reg = CFI_UNDEFINED;
1683 				state->drap = false;
1684 				break;
1685 			}
1686 
1687 			if (op->dest.reg == state->cfa.base) {
1688 				WARN_FUNC("unsupported stack register modification",
1689 					  insn->sec, insn->offset);
1690 				return -1;
1691 			}
1692 
1693 			break;
1694 
1695 		case OP_SRC_AND:
1696 			if (op->dest.reg != CFI_SP ||
1697 			    (state->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
1698 			    (state->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
1699 				WARN_FUNC("unsupported stack pointer realignment",
1700 					  insn->sec, insn->offset);
1701 				return -1;
1702 			}
1703 
1704 			if (state->drap_reg != CFI_UNDEFINED) {
1705 				/* drap: and imm, %rsp */
1706 				cfa->base = state->drap_reg;
1707 				cfa->offset = state->stack_size = 0;
1708 				state->drap = true;
1709 			}
1710 
1711 			/*
1712 			 * Older versions of GCC (4.8ish) realign the stack
1713 			 * without DRAP, with a frame pointer.
1714 			 */
1715 
1716 			break;
1717 
1718 		case OP_SRC_POP:
1719 		case OP_SRC_POPF:
1720 			if (!state->drap && op->dest.type == OP_DEST_REG &&
1721 			    op->dest.reg == cfa->base) {
1722 
1723 				/* pop %rbp */
1724 				cfa->base = CFI_SP;
1725 			}
1726 
1727 			if (state->drap && cfa->base == CFI_BP_INDIRECT &&
1728 			    op->dest.type == OP_DEST_REG &&
1729 			    op->dest.reg == state->drap_reg &&
1730 			    state->drap_offset == -state->stack_size) {
1731 
1732 				/* drap: pop %drap */
1733 				cfa->base = state->drap_reg;
1734 				cfa->offset = 0;
1735 				state->drap_offset = -1;
1736 
1737 			} else if (regs[op->dest.reg].offset == -state->stack_size) {
1738 
1739 				/* pop %reg */
1740 				restore_reg(state, op->dest.reg);
1741 			}
1742 
1743 			state->stack_size -= 8;
1744 			if (cfa->base == CFI_SP)
1745 				cfa->offset -= 8;
1746 
1747 			break;
1748 
1749 		case OP_SRC_REG_INDIRECT:
1750 			if (state->drap && op->src.reg == CFI_BP &&
1751 			    op->src.offset == state->drap_offset) {
1752 
1753 				/* drap: mov disp(%rbp), %drap */
1754 				cfa->base = state->drap_reg;
1755 				cfa->offset = 0;
1756 				state->drap_offset = -1;
1757 			}
1758 
1759 			if (state->drap && op->src.reg == CFI_BP &&
1760 			    op->src.offset == regs[op->dest.reg].offset) {
1761 
1762 				/* drap: mov disp(%rbp), %reg */
1763 				restore_reg(state, op->dest.reg);
1764 
1765 			} else if (op->src.reg == cfa->base &&
1766 			    op->src.offset == regs[op->dest.reg].offset + cfa->offset) {
1767 
1768 				/* mov disp(%rbp), %reg */
1769 				/* mov disp(%rsp), %reg */
1770 				restore_reg(state, op->dest.reg);
1771 			}
1772 
1773 			break;
1774 
1775 		default:
1776 			WARN_FUNC("unknown stack-related instruction",
1777 				  insn->sec, insn->offset);
1778 			return -1;
1779 		}
1780 
1781 		break;
1782 
1783 	case OP_DEST_PUSH:
1784 	case OP_DEST_PUSHF:
1785 		state->stack_size += 8;
1786 		if (cfa->base == CFI_SP)
1787 			cfa->offset += 8;
1788 
1789 		if (op->src.type != OP_SRC_REG)
1790 			break;
1791 
1792 		if (state->drap) {
1793 			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1794 
1795 				/* drap: push %drap */
1796 				cfa->base = CFI_BP_INDIRECT;
1797 				cfa->offset = -state->stack_size;
1798 
1799 				/* save drap so we know when to restore it */
1800 				state->drap_offset = -state->stack_size;
1801 
1802 			} else if (op->src.reg == CFI_BP && cfa->base == state->drap_reg) {
1803 
1804 				/* drap: push %rbp */
1805 				state->stack_size = 0;
1806 
1807 			} else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1808 
1809 				/* drap: push %reg */
1810 				save_reg(state, op->src.reg, CFI_BP, -state->stack_size);
1811 			}
1812 
1813 		} else {
1814 
1815 			/* push %reg */
1816 			save_reg(state, op->src.reg, CFI_CFA, -state->stack_size);
1817 		}
1818 
1819 		/* detect when asm code uses rbp as a scratch register */
1820 		if (!no_fp && insn->func && op->src.reg == CFI_BP &&
1821 		    cfa->base != CFI_BP)
1822 			state->bp_scratch = true;
1823 		break;
1824 
1825 	case OP_DEST_REG_INDIRECT:
1826 
1827 		if (state->drap) {
1828 			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1829 
1830 				/* drap: mov %drap, disp(%rbp) */
1831 				cfa->base = CFI_BP_INDIRECT;
1832 				cfa->offset = op->dest.offset;
1833 
1834 				/* save drap offset so we know when to restore it */
1835 				state->drap_offset = op->dest.offset;
1836 			}
1837 
1838 			else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1839 
1840 				/* drap: mov reg, disp(%rbp) */
1841 				save_reg(state, op->src.reg, CFI_BP, op->dest.offset);
1842 			}
1843 
1844 		} else if (op->dest.reg == cfa->base) {
1845 
1846 			/* mov reg, disp(%rbp) */
1847 			/* mov reg, disp(%rsp) */
1848 			save_reg(state, op->src.reg, CFI_CFA,
1849 				 op->dest.offset - state->cfa.offset);
1850 		}
1851 
1852 		break;
1853 
1854 	case OP_DEST_LEAVE:
1855 		if ((!state->drap && cfa->base != CFI_BP) ||
1856 		    (state->drap && cfa->base != state->drap_reg)) {
1857 			WARN_FUNC("leave instruction with modified stack frame",
1858 				  insn->sec, insn->offset);
1859 			return -1;
1860 		}
1861 
1862 		/* leave (mov %rbp, %rsp; pop %rbp) */
1863 
1864 		state->stack_size = -state->regs[CFI_BP].offset - 8;
1865 		restore_reg(state, CFI_BP);
1866 
1867 		if (!state->drap) {
1868 			cfa->base = CFI_SP;
1869 			cfa->offset -= 8;
1870 		}
1871 
1872 		break;
1873 
1874 	case OP_DEST_MEM:
1875 		if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
1876 			WARN_FUNC("unknown stack-related memory operation",
1877 				  insn->sec, insn->offset);
1878 			return -1;
1879 		}
1880 
1881 		/* pop mem */
1882 		state->stack_size -= 8;
1883 		if (cfa->base == CFI_SP)
1884 			cfa->offset -= 8;
1885 
1886 		break;
1887 
1888 	default:
1889 		WARN_FUNC("unknown stack-related instruction",
1890 			  insn->sec, insn->offset);
1891 		return -1;
1892 	}
1893 
1894 	return 0;
1895 }
1896 
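/*
 * Compare the previously recorded state at @insn with the newly computed
 * @state; warn and return false on any mismatch (CFA, saved registers, ORC
 * type or DRAP state).
 */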
1897 static bool insn_state_match(struct instruction *insn, struct insn_state *state)
1898 {
1899 	struct insn_state *state1 = &insn->state, *state2 = state;
1900 	int i;
1901 
1902 	if (memcmp(&state1->cfa, &state2->cfa, sizeof(state1->cfa))) {
1903 		WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
1904 			  insn->sec, insn->offset,
1905 			  state1->cfa.base, state1->cfa.offset,
1906 			  state2->cfa.base, state2->cfa.offset);
1907 
1908 	} else if (memcmp(&state1->regs, &state2->regs, sizeof(state1->regs))) {
1909 		for (i = 0; i < CFI_NUM_REGS; i++) {
1910 			if (!memcmp(&state1->regs[i], &state2->regs[i],
1911 				    sizeof(struct cfi_reg)))
1912 				continue;
1913 
1914 			WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
1915 				  insn->sec, insn->offset,
1916 				  i, state1->regs[i].base, state1->regs[i].offset,
1917 				  i, state2->regs[i].base, state2->regs[i].offset);
1918 			break;
1919 		}
1920 
1921 	} else if (state1->type != state2->type) {
1922 		WARN_FUNC("stack state mismatch: type1=%d type2=%d",
1923 			  insn->sec, insn->offset, state1->type, state2->type);
1924 
1925 	} else if (state1->drap != state2->drap ||
1926 		 (state1->drap && state1->drap_reg != state2->drap_reg) ||
1927 		 (state1->drap && state1->drap_offset != state2->drap_offset)) {
1928 		WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
1929 			  insn->sec, insn->offset,
1930 			  state1->drap, state1->drap_reg, state1->drap_offset,
1931 			  state2->drap, state2->drap_reg, state2->drap_offset);
1932 
1933 	} else
1934 		return true;
1935 
1936 	return false;
1937 }
1938 
1939 static inline bool func_uaccess_safe(struct symbol *func)
1940 {
1941 	if (func)
1942 		return func->uaccess_safe;
1943 
1944 	return false;
1945 }
1946 
1947 static inline const char *call_dest_name(struct instruction *insn)
1948 {
1949 	if (insn->call_dest)
1950 		return insn->call_dest->name;
1951 
1952 	return "{dynamic}";
1953 }
1954 
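/*
 * Calls are not allowed while UACCESS (AC) is enabled, unless the callee is
 * on the uaccess-safe whitelist, and never while DF is set.
 */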
1955 static int validate_call(struct instruction *insn, struct insn_state *state)
1956 {
1957 	if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
1958 		WARN_FUNC("call to %s() with UACCESS enabled",
1959 				insn->sec, insn->offset, call_dest_name(insn));
1960 		return 1;
1961 	}
1962 
1963 	if (state->df) {
1964 		WARN_FUNC("call to %s() with DF set",
1965 				insn->sec, insn->offset, call_dest_name(insn));
1966 		return 1;
1967 	}
1968 
1969 	return 0;
1970 }
1971 
1972 static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
1973 {
1974 	if (has_modified_stack_frame(state)) {
1975 		WARN_FUNC("sibling call from callable instruction with modified stack frame",
1976 				insn->sec, insn->offset);
1977 		return 1;
1978 	}
1979 
1980 	return validate_call(insn, state);
1981 }
1982 
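/*
 * At a return, UACCESS must be off (unless the function itself is
 * uaccess-safe, in which case it must still be on), DF must be clear, the
 * stack frame must be back in its original state, and BP must not have been
 * used as a scratch register.
 */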
1983 static int validate_return(struct symbol *func, struct instruction *insn, struct insn_state *state)
1984 {
1985 	if (state->uaccess && !func_uaccess_safe(func)) {
1986 		WARN_FUNC("return with UACCESS enabled",
1987 			  insn->sec, insn->offset);
1988 		return 1;
1989 	}
1990 
1991 	if (!state->uaccess && func_uaccess_safe(func)) {
1992 		WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function",
1993 			  insn->sec, insn->offset);
1994 		return 1;
1995 	}
1996 
1997 	if (state->df) {
1998 		WARN_FUNC("return with DF set",
1999 			  insn->sec, insn->offset);
2000 		return 1;
2001 	}
2002 
2003 	if (func && has_modified_stack_frame(state)) {
2004 		WARN_FUNC("return with modified stack frame",
2005 			  insn->sec, insn->offset);
2006 		return 1;
2007 	}
2008 
2009 	if (state->bp_scratch) {
2010 		WARN("%s uses BP as a scratch register",
2011 		     func->name);
2012 		return 1;
2013 	}
2014 
2015 	return 0;
2016 }
2017 
2018 /*
2019  * Follow the branch starting at the given instruction, and recursively follow
2020  * any other branches (jumps).  Meanwhile, track the frame pointer state at
2021  * each instruction and validate all the rules described in
2022  * tools/objtool/Documentation/stack-validation.txt.
2023  */
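/*
 * The state is passed by value, so every recursive call (alternatives,
 * conditional jump targets) works on its own copy and can't corrupt the
 * caller's view of the stack.
 */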
2024 static int validate_branch(struct objtool_file *file, struct symbol *func,
2025 			   struct instruction *first, struct insn_state state)
2026 {
2027 	struct alternative *alt;
2028 	struct instruction *insn, *next_insn;
2029 	struct section *sec;
2030 	u8 visited;
2031 	int ret;
2032 
2033 	insn = first;
2034 	sec = insn->sec;
2035 
2036 	if (insn->alt_group && list_empty(&insn->alts)) {
2037 		WARN_FUNC("don't know how to handle branch to middle of alternative instruction group",
2038 			  sec, insn->offset);
2039 		return 1;
2040 	}
2041 
2042 	while (1) {
2043 		next_insn = next_insn_same_sec(file, insn);
2044 
2045 		if (file->c_file && func && insn->func && func != insn->func->pfunc) {
2046 			WARN("%s() falls through to next function %s()",
2047 			     func->name, insn->func->name);
2048 			return 1;
2049 		}
2050 
2051 		if (func && insn->ignore) {
2052 			WARN_FUNC("BUG: why am I validating an ignored function?",
2053 				  sec, insn->offset);
2054 			return 1;
2055 		}
2056 
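		/*
		 * Each instruction can be visited once per uaccess state:
		 * bit 0 covers the walk with uaccess off, bit 1 with it on.
		 */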
2057 		visited = 1 << state.uaccess;
2058 		if (insn->visited) {
2059 			if (!insn->hint && !insn_state_match(insn, &state))
2060 				return 1;
2061 
2062 			if (insn->visited & visited)
2063 				return 0;
2064 		}
2065 
2066 		if (insn->hint) {
2067 			if (insn->restore) {
2068 				struct instruction *save_insn, *i;
2069 
2070 				i = insn;
2071 				save_insn = NULL;
2072 				sym_for_each_insn_continue_reverse(file, func, i) {
2073 					if (i->save) {
2074 						save_insn = i;
2075 						break;
2076 					}
2077 				}
2078 
2079 				if (!save_insn) {
2080 					WARN_FUNC("no corresponding CFI save for CFI restore",
2081 						  sec, insn->offset);
2082 					return 1;
2083 				}
2084 
2085 				if (!save_insn->visited) {
2086 					/*
2087 					 * Oops, no state to copy yet.
2088 					 * Hopefully we can reach this
2089 					 * instruction from another branch
2090 					 * after the save insn has been
2091 					 * visited.
2092 					 */
2093 					if (insn == first)
2094 						return 0;
2095 
2096 					WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
2097 						  sec, insn->offset);
2098 					return 1;
2099 				}
2100 
2101 				insn->state = save_insn->state;
2102 			}
2103 
2104 			state = insn->state;
2105 
2106 		} else
2107 			insn->state = state;
2108 
2109 		insn->visited |= visited;
2110 
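		/*
		 * Validate each alternative replacement as its own branch,
		 * starting from a copy of the current state.  If any
		 * alternative is marked skip_orig, the original instruction
		 * stream isn't followed any further from here.
		 */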
2111 		if (!insn->ignore_alts) {
2112 			bool skip_orig = false;
2113 
2114 			list_for_each_entry(alt, &insn->alts, list) {
2115 				if (alt->skip_orig)
2116 					skip_orig = true;
2117 
2118 				ret = validate_branch(file, func, alt->insn, state);
2119 				if (ret) {
2120 					if (backtrace)
2121 						BT_FUNC("(alt)", insn);
2122 					return ret;
2123 				}
2124 			}
2125 
2126 			if (skip_orig)
2127 				return 0;
2128 		}
2129 
2130 		switch (insn->type) {
2131 
2132 		case INSN_RETURN:
2133 			return validate_return(func, insn, &state);
2134 
2135 		case INSN_CALL:
2136 		case INSN_CALL_DYNAMIC:
2137 			ret = validate_call(insn, &state);
2138 			if (ret)
2139 				return ret;
2140 
2141 			if (!no_fp && func && !is_fentry_call(insn) &&
2142 			    !has_valid_stack_frame(&state)) {
2143 				WARN_FUNC("call without frame pointer save/setup",
2144 					  sec, insn->offset);
2145 				return 1;
2146 			}
2147 
2148 			if (dead_end_function(file, insn->call_dest))
2149 				return 0;
2150 
2151 			break;
2152 
2153 		case INSN_JUMP_CONDITIONAL:
2154 		case INSN_JUMP_UNCONDITIONAL:
2155 			if (func && is_sibling_call(insn)) {
2156 				ret = validate_sibling_call(insn, &state);
2157 				if (ret)
2158 					return ret;
2159 
2160 			} else if (insn->jump_dest) {
2161 				ret = validate_branch(file, func,
2162 						      insn->jump_dest, state);
2163 				if (ret) {
2164 					if (backtrace)
2165 						BT_FUNC("(branch)", insn);
2166 					return ret;
2167 				}
2168 			}
2169 
2170 			if (insn->type == INSN_JUMP_UNCONDITIONAL)
2171 				return 0;
2172 
2173 			break;
2174 
2175 		case INSN_JUMP_DYNAMIC:
2176 		case INSN_JUMP_DYNAMIC_CONDITIONAL:
2177 			if (func && is_sibling_call(insn)) {
2178 				ret = validate_sibling_call(insn, &state);
2179 				if (ret)
2180 					return ret;
2181 			}
2182 
2183 			if (insn->type == INSN_JUMP_DYNAMIC)
2184 				return 0;
2185 
2186 			break;
2187 
2188 		case INSN_CONTEXT_SWITCH:
2189 			if (func && (!next_insn || !next_insn->hint)) {
2190 				WARN_FUNC("unsupported instruction in callable function",
2191 					  sec, insn->offset);
2192 				return 1;
2193 			}
2194 			return 0;
2195 
2196 		case INSN_STACK:
2197 			if (update_insn_state(insn, &state))
2198 				return 1;
2199 
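			/*
			 * uaccess_stack is a small bit stack: PUSHF shifts it
			 * left and records the current uaccess flag in bit 0,
			 * with a lone set bit acting as the bottom-of-stack
			 * sentinel.  POPF pops the top bit back into
			 * state.uaccess.  For example:
			 *
			 *   STAC	uaccess = 1
			 *   PUSHF	uaccess_stack = 0b11
			 *   CLAC	uaccess = 0
			 *   POPF	uaccess = 1, uaccess_stack = 0 (empty)
			 */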
2200 			if (insn->stack_op.dest.type == OP_DEST_PUSHF) {
2201 				if (!state.uaccess_stack) {
2202 					state.uaccess_stack = 1;
2203 				} else if (state.uaccess_stack >> 31) {
2204 					WARN_FUNC("PUSHF stack exhausted", sec, insn->offset);
2205 					return 1;
2206 				}
2207 				state.uaccess_stack <<= 1;
2208 				state.uaccess_stack  |= state.uaccess;
2209 			}
2210 
2211 			if (insn->stack_op.src.type == OP_SRC_POPF) {
2212 				if (state.uaccess_stack) {
2213 					state.uaccess = state.uaccess_stack & 1;
2214 					state.uaccess_stack >>= 1;
2215 					if (state.uaccess_stack == 1)
2216 						state.uaccess_stack = 0;
2217 				}
2218 			}
2219 
2220 			break;
2221 
2222 		case INSN_STAC:
2223 			if (state.uaccess) {
2224 				WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
2225 				return 1;
2226 			}
2227 
2228 			state.uaccess = true;
2229 			break;
2230 
2231 		case INSN_CLAC:
2232 			if (!state.uaccess && func) {
2233 				WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
2234 				return 1;
2235 			}
2236 
2237 			if (func_uaccess_safe(func) && !state.uaccess_stack) {
2238 				WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
2239 				return 1;
2240 			}
2241 
2242 			state.uaccess = false;
2243 			break;
2244 
2245 		case INSN_STD:
2246 			if (state.df)
2247 				WARN_FUNC("recursive STD", sec, insn->offset);
2248 
2249 			state.df = true;
2250 			break;
2251 
2252 		case INSN_CLD:
2253 			if (!state.df && func)
2254 				WARN_FUNC("redundant CLD", sec, insn->offset);
2255 
2256 			state.df = false;
2257 			break;
2258 
2259 		default:
2260 			break;
2261 		}
2262 
2263 		if (insn->dead_end)
2264 			return 0;
2265 
2266 		if (!next_insn) {
2267 			if (state.cfa.base == CFI_UNDEFINED)
2268 				return 0;
2269 			WARN("%s: unexpected end of section", sec->name);
2270 			return 1;
2271 		}
2272 
2273 		insn = next_insn;
2274 	}
2275 
2276 	return 0;
2277 }
2278 
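/*
 * Validate asm code annotated with unwind hints, starting a separate branch
 * walk from each hinted instruction the normal function walk didn't reach.
 * The initial CFI state comes from the hint itself rather than from the
 * default function-entry state.
 */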
2279 static int validate_unwind_hints(struct objtool_file *file)
2280 {
2281 	struct instruction *insn;
2282 	int ret, warnings = 0;
2283 	struct insn_state state;
2284 
2285 	if (!file->hints)
2286 		return 0;
2287 
2288 	clear_insn_state(&state);
2289 
2290 	for_each_insn(file, insn) {
2291 		if (insn->hint && !insn->visited) {
2292 			ret = validate_branch(file, insn->func, insn, state);
2293 			if (ret && backtrace)
2294 				BT_FUNC("<=== (hint)", insn);
2295 			warnings += ret;
2296 		}
2297 	}
2298 
2299 	return warnings;
2300 }
2301 
2302 static int validate_retpoline(struct objtool_file *file)
2303 {
2304 	struct instruction *insn;
2305 	int warnings = 0;
2306 
2307 	for_each_insn(file, insn) {
2308 		if (insn->type != INSN_JUMP_DYNAMIC &&
2309 		    insn->type != INSN_CALL_DYNAMIC)
2310 			continue;
2311 
2312 		if (insn->retpoline_safe)
2313 			continue;
2314 
2315 		/*
2316 		 * .init.text code is run before userspace and thus doesn't
2317 		 * strictly need retpolines, except in modules, which are
2318 		 * loaded late; their .init.text very much does need
2319 		 * retpolines.
2320 		 */
2321 		if (!strcmp(insn->sec->name, ".init.text") && !module)
2322 			continue;
2323 
2324 		WARN_FUNC("indirect %s found in RETPOLINE build",
2325 			  insn->sec, insn->offset,
2326 			  insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
2327 
2328 		warnings++;
2329 	}
2330 
2331 	return warnings;
2332 }
2333 
2334 static bool is_kasan_insn(struct instruction *insn)
2335 {
2336 	return (insn->type == INSN_CALL &&
2337 		!strcmp(insn->call_dest->name, "__asan_handle_no_return"));
2338 }
2339 
2340 static bool is_ubsan_insn(struct instruction *insn)
2341 {
2342 	return (insn->type == INSN_CALL &&
2343 		!strcmp(insn->call_dest->name,
2344 			"__ubsan_handle_builtin_unreachable"));
2345 }
2346 
2347 static bool ignore_unreachable_insn(struct instruction *insn)
2348 {
2349 	int i;
2350 
2351 	if (insn->ignore || insn->type == INSN_NOP)
2352 		return true;
2353 
2354 	/*
2355 	 * Ignore any unused exceptions.  This can happen when a whitelisted
2356 	 * function has an exception table entry.
2357 	 *
2358 	 * Also ignore alternative replacement instructions.  This can happen
2359 	 * when a whitelisted function uses one of the ALTERNATIVE macros.
2360 	 */
2361 	if (!strcmp(insn->sec->name, ".fixup") ||
2362 	    !strcmp(insn->sec->name, ".altinstr_replacement") ||
2363 	    !strcmp(insn->sec->name, ".altinstr_aux"))
2364 		return true;
2365 
2366 	/*
2367 	 * Check if this (or a subsequent) instruction is related to
2368 	 * CONFIG_UBSAN or CONFIG_KASAN.
2369 	 *
2370 	 * End the search at 5 instructions to avoid going into the weeds.
2371 	 */
2372 	if (!insn->func)
2373 		return false;
2374 	for (i = 0; i < 5; i++) {
2375 
2376 		if (is_kasan_insn(insn) || is_ubsan_insn(insn))
2377 			return true;
2378 
2379 		if (insn->type == INSN_JUMP_UNCONDITIONAL) {
2380 			if (insn->jump_dest &&
2381 			    insn->jump_dest->func == insn->func) {
2382 				insn = insn->jump_dest;
2383 				continue;
2384 			}
2385 
2386 			break;
2387 		}
2388 
2389 		if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
2390 			break;
2391 
2392 		insn = list_next_entry(insn, list);
2393 	}
2394 
2395 	return false;
2396 }
2397 
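/*
 * Walk every STT_FUNC symbol in the section with the architecture's initial
 * CFI state, skipping aliases and cold subfunctions (which are reached via
 * their parent) as well as anything already visited or explicitly ignored.
 */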
2398 static int validate_section(struct objtool_file *file, struct section *sec)
2399 {
2400 	struct symbol *func;
2401 	struct instruction *insn;
2402 	struct insn_state state;
2403 	int ret, warnings = 0;
2404 
2405 	clear_insn_state(&state);
2406 
2407 	state.cfa = initial_func_cfi.cfa;
2408 	memcpy(&state.regs, &initial_func_cfi.regs,
2409 	       CFI_NUM_REGS * sizeof(struct cfi_reg));
2410 	state.stack_size = initial_func_cfi.cfa.offset;
2411 
2412 	list_for_each_entry(func, &sec->symbol_list, list) {
2413 		if (func->type != STT_FUNC)
2414 			continue;
2415 
2416 		if (!func->len) {
2417 			WARN("%s() is missing an ELF size annotation",
2418 			     func->name);
2419 			warnings++;
2420 		}
2421 
2422 		if (func->pfunc != func || func->alias != func)
2423 			continue;
2424 
2425 		insn = find_insn(file, sec, func->offset);
2426 		if (!insn || insn->ignore || insn->visited)
2427 			continue;
2428 
2429 		state.uaccess = func->uaccess_safe;
2430 
2431 		ret = validate_branch(file, func, insn, state);
2432 		if (ret && backtrace)
2433 			BT_FUNC("<=== (func)", insn);
2434 		warnings += ret;
2435 	}
2436 
2437 	return warnings;
2438 }
2439 
2440 static int validate_functions(struct objtool_file *file)
2441 {
2442 	struct section *sec;
2443 	int warnings = 0;
2444 
2445 	for_each_sec(file, sec)
2446 		warnings += validate_section(file, sec);
2447 
2448 	return warnings;
2449 }
2450 
2451 static int validate_reachable_instructions(struct objtool_file *file)
2452 {
2453 	struct instruction *insn;
2454 
2455 	if (file->ignore_unreachables)
2456 		return 0;
2457 
2458 	for_each_insn(file, insn) {
2459 		if (insn->visited || ignore_unreachable_insn(insn))
2460 			continue;
2461 
2462 		WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
2463 		return 1;
2464 	}
2465 
2466 	return 0;
2467 }
2468 
2469 static struct objtool_file file;
2470 
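/*
 * Top-level entry point: decode the object file, run the retpoline,
 * stack/frame, unwind-hint and reachability checks, and (when requested)
 * generate the ORC unwind tables and write them back to the file.  A negative
 * return is a fatal error that fails the build; warnings are counted but
 * don't change the exit code.
 */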
2471 int check(const char *_objname, bool orc)
2472 {
2473 	int ret, warnings = 0;
2474 
2475 	objname = _objname;
2476 
2477 	file.elf = elf_read(objname, orc ? O_RDWR : O_RDONLY);
2478 	if (!file.elf)
2479 		return 1;
2480 
2481 	INIT_LIST_HEAD(&file.insn_list);
2482 	hash_init(file.insn_hash);
2483 	file.c_file = find_section_by_name(file.elf, ".comment");
2484 	file.ignore_unreachables = no_unreachable;
2485 	file.hints = false;
2486 
2487 	arch_initial_func_cfi_state(&initial_func_cfi);
2488 
2489 	ret = decode_sections(&file);
2490 	if (ret < 0)
2491 		goto out;
2492 	warnings += ret;
2493 
2494 	if (list_empty(&file.insn_list))
2495 		goto out;
2496 
2497 	if (retpoline) {
2498 		ret = validate_retpoline(&file);
2499 		if (ret < 0)
2500 			return ret;
2501 		warnings += ret;
2502 	}
2503 
2504 	ret = validate_functions(&file);
2505 	if (ret < 0)
2506 		goto out;
2507 	warnings += ret;
2508 
2509 	ret = validate_unwind_hints(&file);
2510 	if (ret < 0)
2511 		goto out;
2512 	warnings += ret;
2513 
2514 	if (!warnings) {
2515 		ret = validate_reachable_instructions(&file);
2516 		if (ret < 0)
2517 			goto out;
2518 		warnings += ret;
2519 	}
2520 
2521 	if (orc) {
2522 		ret = create_orc(&file);
2523 		if (ret < 0)
2524 			goto out;
2525 
2526 		ret = create_orc_sections(&file);
2527 		if (ret < 0)
2528 			goto out;
2529 
2530 		ret = elf_write(file.elf);
2531 		if (ret < 0)
2532 			goto out;
2533 	}
2534 
2535 out:
2536 	if (ret < 0) {
2537 		/*
2538 		 *  Fatal error.  The binary is corrupt or otherwise broken in
2539 		 *  some way, or objtool itself is broken.  Fail the kernel
2540 		 *  build.
2541 		 */
2542 		return ret;
2543 	}
2544 
2545 	return 0;
2546 }
2547