1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
4 */
5
6 #include <stdio.h>
7 #include <stdlib.h>
8
9 #define unlikely(cond) (cond)
10 #include <asm/insn.h>
11 #include "../../../arch/x86/lib/inat.c"
12 #include "../../../arch/x86/lib/insn.c"
13
14 #define CONFIG_64BIT 1
15 #include <asm/nops.h>
16
17 #include <asm/orc_types.h>
18 #include <objtool/check.h>
19 #include <objtool/elf.h>
20 #include <objtool/arch.h>
21 #include <objtool/warn.h>
22 #include <objtool/endianness.h>
23 #include <objtool/builtin.h>
24 #include <arch/elf.h>
25
/* On x86, every ftrace call site targets the __fentry__ symbol. */
int arch_ftrace_match(char *name)
{
	return strcmp(name, "__fentry__") == 0;
}
30
is_x86_64(const struct elf * elf)31 static int is_x86_64(const struct elf *elf)
32 {
33 switch (elf->ehdr.e_machine) {
34 case EM_X86_64:
35 return 1;
36 case EM_386:
37 return 0;
38 default:
39 ERROR("unexpected ELF machine type %d", elf->ehdr.e_machine);
40 return -1;
41 }
42 }
43
arch_callee_saved_reg(unsigned char reg)44 bool arch_callee_saved_reg(unsigned char reg)
45 {
46 switch (reg) {
47 case CFI_BP:
48 case CFI_BX:
49 case CFI_R12:
50 case CFI_R13:
51 case CFI_R14:
52 case CFI_R15:
53 return true;
54
55 case CFI_AX:
56 case CFI_CX:
57 case CFI_DX:
58 case CFI_SI:
59 case CFI_DI:
60 case CFI_SP:
61 case CFI_R8:
62 case CFI_R9:
63 case CFI_R10:
64 case CFI_R11:
65 case CFI_RA:
66 default:
67 return false;
68 }
69 }
70
/*
 * A 4-byte PC-relative field is relative to the end of the instruction,
 * which lies 4 bytes beyond the relocation itself; fold that into the
 * addend to get the destination offset.
 */
unsigned long arch_dest_reloc_offset(int addend)
{
	const int pcrel_adjust = 4;

	return (unsigned long)(addend + pcrel_adjust);
}
75
arch_jump_destination(struct instruction * insn)76 unsigned long arch_jump_destination(struct instruction *insn)
77 {
78 return insn->offset + insn->len + insn->immediate;
79 }
80
arch_pc_relative_reloc(struct reloc * reloc)81 bool arch_pc_relative_reloc(struct reloc *reloc)
82 {
83 /*
84 * All relocation types where P (the address of the target)
85 * is included in the computation.
86 */
87 switch (reloc_type(reloc)) {
88 case R_X86_64_PC8:
89 case R_X86_64_PC16:
90 case R_X86_64_PC32:
91 case R_X86_64_PC64:
92
93 case R_X86_64_PLT32:
94 case R_X86_64_GOTPC32:
95 case R_X86_64_GOTPCREL:
96 return true;
97
98 default:
99 break;
100 }
101
102 return false;
103 }
104
/*
 * ADD_OP(op) - allocate a zeroed struct stack_op, append it to the
 * instruction's stack-ops list (via the local 'ops_list' cursor), and run
 * the braced statement that follows exactly once with @op pointing at the
 * new entry.  The else/for construct makes "ADD_OP(op) { ... }" behave as
 * a single statement; on allocation failure it returns -1 from the
 * *enclosing* function.
 */
#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (*ops_list = op, ops_list = &op->next; op; op = NULL)

/*
 * Helpers to decode ModRM/SIB:
 *
 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
 * Mod+----------------+-----+-----+---------+
 *  00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
 *  01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
 *  10 |  [r/m + d32]   |[S+D]|  [r/m + d32]  |
 *  11 |            r/ m               |
 */

#define mod_is_mem()	(modrm_mod != 3)
#define mod_is_reg()	(modrm_mod == 3)

/* mod=00, r/m=101 selects RIP-relative addressing (64-bit mode). */
#define is_RIP()	((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
/* r/m=100 with a memory mod means a SIB byte follows. */
#define have_SIB()	((modrm_rm & 7) == CFI_SP && mod_is_mem())

/*
 * Check the ModRM register. If there is a SIB byte then check with
 * the SIB base register. But if the SIB base is 5 (i.e. CFI_BP) and
 * ModRM mod is 0 then there is no base register.
 */
#define rm_is(reg)	(have_SIB() ? \
			 sib_base == (reg) && sib_index == CFI_SP && \
			 (sib_base != CFI_BP || modrm_mod != 0) : \
			 modrm_rm == (reg))

#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))
140
has_notrack_prefix(struct insn * insn)141 static bool has_notrack_prefix(struct insn *insn)
142 {
143 int i;
144
145 for (i = 0; i < insn->prefixes.nbytes; i++) {
146 if (insn->prefixes.bytes[i] == 0x3e)
147 return true;
148 }
149
150 return false;
151 }
152
/*
 * arch_decode_instruction() - decode one x86 instruction for objtool.
 * @file:   objtool file context (used for pv_ops bookkeeping)
 * @sec:    section containing the instruction
 * @offset: offset of the instruction within @sec
 * @maxlen: maximum number of bytes available to the decoder
 * @insn:   objtool instruction to fill in (len, type, immediate, stack_ops)
 *
 * Decodes via the kernel instruction decoder (insn_decode()), then
 * classifies the opcode and records any effect on the stack / frame
 * registers as a list of stack_ops hung off @insn.
 *
 * Returns 0 on success (including "uninteresting instruction"), -1 on
 * decode failure or internal error.
 */
int arch_decode_instruction(struct objtool_file *file, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    struct instruction *insn)
{
	struct stack_op **ops_list = &insn->stack_ops;
	const struct elf *elf = file->elf;
	struct insn ins;
	int x86_64, ret;
	unsigned char op1, op2, op3, prefix,
		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;
	u64 imm;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	ret = insn_decode(&ins, sec->data->d_buf + offset, maxlen,
			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
	if (ret < 0) {
		ERROR("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	insn->len = ins.length;
	insn->type = INSN_OTHER;

	/* VEX-encoded instructions are not classified further. */
	if (ins.vex_prefix.nbytes)
		return 0;

	prefix = ins.prefixes.bytes[0];

	op1 = ins.opcode.bytes[0];
	op2 = ins.opcode.bytes[1];
	op3 = ins.opcode.bytes[2];

	/*
	 * XXX hack, decoder is buggered and thinks 0xea is 7 bytes long.
	 */
	if (op1 == 0xea) {
		insn->len = 1;
		insn->type = INSN_BUG;
		return 0;
	}

	/* Unpack the REX prefix into its W/R/X/B extension bits. */
	if (ins.rex_prefix.nbytes) {
		rex = ins.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	/* ModRM reg/rm fields, extended to 4 bits by REX.R / REX.B. */
	if (ins.modrm.nbytes) {
		modrm = ins.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
		modrm_rm  = X86_MODRM_RM(modrm) + 8*rex_b;
	}

	/* SIB index/base, extended by REX.X / REX.B (scale unused). */
	if (ins.sib.nbytes) {
		sib = ins.sib.bytes[0];
		/* sib_scale = X86_SIB_SCALE(sib); */
		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
		sib_base  = X86_SIB_BASE(sib) + 8*rex_b;
	}

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && rm_is_reg(CFI_SP)) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = (op1 & 0x7) + 8*rex_b;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = (op1 & 0x7) + 8*rex_b;
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x80 ... 0x83:
		/*
		 * 1000 00sw : mod OP r/m : immediate
		 *
		 * s - sign extend immediate
		 * w - imm8 / imm32
		 *
		 * OP: 000 ADD    100 AND
		 *     001 OR     101 SUB
		 *     010 ADC    110 XOR
		 *     011 SBB    111 CMP
		 */

		/* 64bit only */
		if (!rex_w)
			break;

		/* %rsp target only */
		if (!rm_is_reg(CFI_SP))
			break;

		imm = ins.immediate.value;
		if (op1 & 2) { /* sign extend */
			if (op1 & 1) { /* imm32 */
				imm <<= 32;
				imm = (s64)imm >> 32;
			} else { /* imm8 */
				imm <<= 56;
				imm = (s64)imm >> 56;
			}
		}

		switch (modrm_reg & 7) {
		case 5:
			/* SUB becomes an ADD of the negated immediate. */
			imm = -imm;
			fallthrough;
		case 0:
			/* add/sub imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = imm;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		case 4:
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = ins.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		default:
			/* ERROR ? */
			break;
		}

		break;

	case 0x89:
		if (!rex_w)
			break;

		if (modrm_reg == CFI_SP) {

			if (mod_is_reg()) {
				/* mov %rsp, reg */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG;
					op->dest.reg = modrm_rm;
				}
				break;

			} else {
				/* skip RIP relative displacement */
				if (is_RIP())
					break;

				/* skip nontrivial SIB */
				if (have_SIB()) {
					modrm_rm = sib_base;
					if (sib_index != CFI_SP)
						break;
				}

				/* mov %rsp, disp(%reg) */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG_INDIRECT;
					op->dest.reg = modrm_rm;
					op->dest.offset = ins.displacement.value;
				}
				break;
			}

			break;
		}

		if (rm_is_reg(CFI_SP)) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		fallthrough;
	case 0x88:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = ins.displacement.value;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = ins.displacement.value;
			}
			break;
		}

		break;

	case 0x8b:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = ins.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = ins.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		break;

	case 0x8d:
		if (mod_is_reg()) {
			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
			break;
		}

		/* skip non 64bit ops */
		if (!rex_w)
			break;

		/* skip nontrivial SIB */
		if (have_SIB()) {
			modrm_rm = sib_base;
			if (sib_index != CFI_SP)
				break;
		}

		/* lea disp(%rip), %dst */
		if (is_RIP()) {
			insn->type = INSN_LEA_RIP;
			break;
		}

		/* lea disp(%src), %dst */
		ADD_OP(op) {
			op->src.offset = ins.displacement.value;
			if (!op->src.offset) {
				/* lea (%src), %dst */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%src), %dst */
				op->src.type = OP_SRC_ADD;
			}
			op->src.reg = modrm_rm;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = modrm_reg;
		}
		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		insn->type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:

		if (op2 == 0x01) {

			switch (insn_last_prefix_id(&ins)) {
			case INAT_PFX_REPE:
			case INAT_PFX_REPNE:
				if (modrm == 0xca)
					/* eretu/erets */
					insn->type = INSN_SYSRET;
				break;
			default:
				if (modrm == 0xca)
					insn->type = INSN_CLAC;
				else if (modrm == 0xcb)
					insn->type = INSN_STAC;
				break;
			}
		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			insn->type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x34) {

			/* syscall, sysenter */
			insn->type = INSN_SYSCALL;

		} else if (op2 == 0x07 || op2 == 0x35) {

			/* sysret, sysexit */
			insn->type = INSN_SYSRET;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			insn->type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			insn->type = INSN_NOP;

		} else if (op2 == 0x1e) {

			/* f3 0f 1e fa/fb: endbr64/endbr32 */
			if (prefix == 0xf3 && (modrm == 0xfa || modrm == 0xfb))
				insn->type = INSN_ENDBR;


		} else if (op2 == 0x38 && op3 == 0xf8) {
			if (ins.prefixes.nbytes == 1 &&
			    ins.prefixes.bytes[0] == 0xf2) {
				/* ENQCMD cannot be used in the kernel. */
				WARN("ENQCMD instruction at %s:%lx", sec->name, offset);
			}

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_BP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_BP;
		}
		break;

	case 0xcc:
		/* int3 */
		insn->type = INSN_TRAP;
		break;

	case 0xe3:
		/* jecxz/jrcxz */
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		insn->type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		insn->type = INSN_RETURN;
		break;

	case 0xc7: /* mov imm, r/m */
		if (!opts.noinstr)
			break;

		/*
		 * Look for "mov $imm32, disp32(%rip)" writes to pv_ops[] in
		 * .init.text: opcode+modrm+disp32+imm32 == 3+4+4 bytes.
		 */
		if (ins.length == 3+4+4 && !strncmp(sec->name, ".init.text", 10)) {
			struct reloc *immr, *disp;
			struct symbol *func;
			int idx;

			immr = find_reloc_by_dest(elf, (void *)sec, offset+3);
			disp = find_reloc_by_dest(elf, (void *)sec, offset+7);

			if (!immr || strcmp(immr->sym->name, "pv_ops"))
				break;

			idx = (reloc_addend(immr) + 8) / sizeof(void *);

			/*
			 * NOTE(review): 'disp' is dereferenced without a NULL
			 * check — this relies on a pv_ops immr reloc always
			 * being paired with a displacement reloc; verify.
			 */
			func = disp->sym;
			if (disp->sym->type == STT_SECTION)
				func = find_symbol_by_offset(disp->sym->sec, reloc_addend(disp));
			if (!func) {
				ERROR("no func for pv_ops[]");
				return -1;
			}

			objtool_pv_add(file, idx, func);
		}

		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		fallthrough;

	case 0xca: /* retf */
	case 0xcb: /* retf */
		insn->type = INSN_SYSRET;
		break;

	case 0xe0: /* loopne */
	case 0xe1: /* loope */
	case 0xe2: /* loop */
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe8:
		insn->type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0xfc:
		insn->type = INSN_CLD;
		break;

	case 0xfd:
		insn->type = INSN_STD;
		break;

	case 0xff:
		if (modrm_reg == 2 || modrm_reg == 3) {

			insn->type = INSN_CALL_DYNAMIC;
			if (has_notrack_prefix(&ins))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 4) {

			insn->type = INSN_JUMP_DYNAMIC;
			if (has_notrack_prefix(&ins))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 5) {

			/* jmpf */
			insn->type = INSN_SYSRET;

		} else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

	/*
	 * Stash the immediate (or, lacking one, the displacement) for
	 * consumers such as arch_jump_destination().
	 */
	if (ins.immediate.nbytes)
		insn->immediate = ins.immediate.value;
	else if (ins.displacement.nbytes)
		insn->immediate = ins.displacement.value;

	return 0;
}
761
arch_initial_func_cfi_state(struct cfi_init_state * state)762 void arch_initial_func_cfi_state(struct cfi_init_state *state)
763 {
764 int i;
765
766 for (i = 0; i < CFI_NUM_REGS; i++) {
767 state->regs[i].base = CFI_UNDEFINED;
768 state->regs[i].offset = 0;
769 }
770
771 /* initial CFA (call frame address) */
772 state->cfa.base = CFI_SP;
773 state->cfa.offset = 8;
774
775 /* initial RA (return address) */
776 state->regs[CFI_RA].base = CFI_CFA;
777 state->regs[CFI_RA].offset = -8;
778 }
779
arch_nop_insn(int len)780 const char *arch_nop_insn(int len)
781 {
782 static const char nops[5][5] = {
783 { BYTES_NOP1 },
784 { BYTES_NOP2 },
785 { BYTES_NOP3 },
786 { BYTES_NOP4 },
787 { BYTES_NOP5 },
788 };
789
790 if (len < 1 || len > 5) {
791 ERROR("invalid NOP size: %d\n", len);
792 return NULL;
793 }
794
795 return nops[len-1];
796 }
797
798 #define BYTE_RET 0xC3
799
arch_ret_insn(int len)800 const char *arch_ret_insn(int len)
801 {
802 static const char ret[5][5] = {
803 { BYTE_RET },
804 { BYTE_RET, 0xcc },
805 { BYTE_RET, 0xcc, BYTES_NOP1 },
806 { BYTE_RET, 0xcc, BYTES_NOP2 },
807 { BYTE_RET, 0xcc, BYTES_NOP3 },
808 };
809
810 if (len < 1 || len > 5) {
811 ERROR("invalid RET size: %d\n", len);
812 return NULL;
813 }
814
815 return ret[len-1];
816 }
817
/*
 * Translate an ORC hint base-register encoding into the corresponding
 * CFI register in *@base.  Returns 0 on success, -1 for an unknown
 * encoding (in which case *@base is left untouched).
 */
int arch_decode_hint_reg(u8 sp_reg, int *base)
{
	int reg;

	switch (sp_reg) {
	case ORC_REG_UNDEFINED:
		reg = CFI_UNDEFINED;
		break;
	case ORC_REG_SP:
		reg = CFI_SP;
		break;
	case ORC_REG_BP:
		reg = CFI_BP;
		break;
	case ORC_REG_SP_INDIRECT:
		reg = CFI_SP_INDIRECT;
		break;
	case ORC_REG_R10:
		reg = CFI_R10;
		break;
	case ORC_REG_R13:
		reg = CFI_R13;
		break;
	case ORC_REG_DI:
		reg = CFI_DI;
		break;
	case ORC_REG_DX:
		reg = CFI_DX;
		break;
	default:
		return -1;
	}

	*base = reg;
	return 0;
}
851
arch_is_retpoline(struct symbol * sym)852 bool arch_is_retpoline(struct symbol *sym)
853 {
854 return !strncmp(sym->name, "__x86_indirect_", 15) ||
855 !strncmp(sym->name, "__pi___x86_indirect_", 20);
856 }
857
arch_is_rethunk(struct symbol * sym)858 bool arch_is_rethunk(struct symbol *sym)
859 {
860 return !strcmp(sym->name, "__x86_return_thunk") ||
861 !strcmp(sym->name, "__pi___x86_return_thunk");
862 }
863
arch_is_embedded_insn(struct symbol * sym)864 bool arch_is_embedded_insn(struct symbol *sym)
865 {
866 return !strcmp(sym->name, "retbleed_return_thunk") ||
867 !strcmp(sym->name, "srso_alias_safe_ret") ||
868 !strcmp(sym->name, "srso_safe_ret");
869 }
870
arch_reloc_size(struct reloc * reloc)871 unsigned int arch_reloc_size(struct reloc *reloc)
872 {
873 switch (reloc_type(reloc)) {
874 case R_X86_64_32:
875 case R_X86_64_32S:
876 case R_X86_64_PC32:
877 case R_X86_64_PLT32:
878 return 4;
879 default:
880 return 8;
881 }
882 }
883