/*
 * arch/arm64/kernel/probes/decode-insn.c
 *
 * Copyright (C) 2013 Linaro Limited.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 */

#include <linux/kernel.h>
#include <linux/kprobes.h>
#include <linux/module.h>
#include <asm/kprobes.h>
#include <asm/insn.h>
#include <asm/sections.h>

#include "decode-insn.h"

static bool __kprobes aarch64_insn_is_steppable(u32 insn)
{
        /*
         * Branch instructions will write a new value into the PC which is
         * likely to be relative to the XOL address and therefore invalid.
         * Deliberate generation of an exception during stepping is also not
         * currently safe. Lastly, MSR instructions can do any number of nasty
         * things we can't handle during single-stepping.
         */
        if (aarch64_get_insn_class(insn) == AARCH64_INSN_CLS_BR_SYS) {
                if (aarch64_insn_is_branch(insn) ||
                    aarch64_insn_is_msr_imm(insn) ||
                    aarch64_insn_is_msr_reg(insn) ||
                    aarch64_insn_is_exception(insn) ||
                    aarch64_insn_is_eret(insn))
                        return false;

                /*
                 * The MRS instruction may not return a correct value when
                 * executing in the single-stepping environment; in particular,
                 * a read of the DAIF bits would reflect the stepping context
                 * rather than the probed one, so that single case is rejected.
                 */
                if (aarch64_insn_is_mrs(insn))
                        return aarch64_insn_extract_system_reg(insn)
                             != AARCH64_INSN_SPCLREG_DAIF;

                /*
                 * The HINT instruction is problematic when single-stepping,
                 * except for the NOP case.
                 */
                if (aarch64_insn_is_hint(insn))
                        return aarch64_insn_is_nop(insn);

                return true;
        }

        /*
         * Instructions which load PC relative literals are not going to work
         * when executed from an XOL slot. Instructions doing an exclusive
         * load/store are not going to complete successfully when single-step
         * exception handling happens in the middle of the sequence.
         */
        if (aarch64_insn_uses_literal(insn) ||
            aarch64_insn_is_exclusive(insn))
                return false;

        return true;
}
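
/*
 * A few illustrative encodings and how the checks above classify them. This
 * is a sketch for orientation only, not an exhaustive list; the operands are
 * arbitrary and not taken from this file:
 *
 *      add   x0, x1, x2        ordinary data-processing: steppable
 *      nop                     HINT that is a NOP: steppable
 *      mrs   x0, midr_el1      MRS of a register other than DAIF: steppable
 *      mrs   x0, daif          MRS of DAIF: rejected
 *      b     <label>           branch (PC-relative target): rejected
 *      svc   #0                exception generation: rejected
 *      ldr   x0, <literal>     PC-relative literal load: rejected
 *      ldxr  w0, [x1]          exclusive load: rejected
 */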
/* Return:
 *   INSN_REJECTED     If the instruction cannot be probed,
 *   INSN_GOOD         If the instruction is supported and uses an instruction slot,
 */
static enum kprobe_insn __kprobes
arm_probe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi)
{
        /*
         * Instructions reading or modifying the PC won't work from the XOL
         * slot.
         */
        if (aarch64_insn_is_steppable(insn))
                return INSN_GOOD;
        else
                return INSN_REJECTED;
}

static bool __kprobes
is_probed_address_atomic(kprobe_opcode_t *scan_start, kprobe_opcode_t *scan_end)
{
        while (scan_start > scan_end) {
                /*
                 * An atomic region starts with an exclusive load and ends
                 * with an exclusive store.
                 */
                if (aarch64_insn_is_store_ex(le32_to_cpu(*scan_start)))
                        return false;
                else if (aarch64_insn_is_load_ex(le32_to_cpu(*scan_start)))
                        return true;
                scan_start--;
        }

        return false;
}
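
/*
 * For illustration only, a minimal LL/SC loop of the kind the backward scan
 * above guards against (registers and the increment are arbitrary):
 *
 *      1:      ldxr    w0, [x2]        opens the atomic region
 *              add     w0, w0, #1
 *              stxr    w1, w0, [x2]    closes the atomic region
 *              cbnz    w1, 1b
 *
 * Probing the instruction between the exclusive pair would single-step it
 * out of line; with single-step exception handling in the middle of the
 * sequence the store-exclusive keeps failing, so the loop never completes.
 */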
enum kprobe_insn __kprobes
arm_kprobe_decode_insn(kprobe_opcode_t *addr, struct arch_specific_insn *asi)
{
        enum kprobe_insn decoded;
        kprobe_opcode_t insn = le32_to_cpu(*addr);
        kprobe_opcode_t *scan_start = addr - 1;
        kprobe_opcode_t *scan_end = addr - MAX_ATOMIC_CONTEXT_SIZE;
#if defined(CONFIG_MODULES) && defined(MODULES_VADDR)
        struct module *mod;
#endif

        if (addr >= (kprobe_opcode_t *)_text &&
            scan_end < (kprobe_opcode_t *)_text)
                scan_end = (kprobe_opcode_t *)_text;
#if defined(CONFIG_MODULES) && defined(MODULES_VADDR)
        else {
                preempt_disable();
                mod = __module_address((unsigned long)addr);
                if (mod && within_module_init((unsigned long)addr, mod) &&
                    !within_module_init((unsigned long)scan_end, mod))
                        scan_end = (kprobe_opcode_t *)mod->init_layout.base;
                else if (mod && within_module_core((unsigned long)addr, mod) &&
                         !within_module_core((unsigned long)scan_end, mod))
                        scan_end = (kprobe_opcode_t *)mod->core_layout.base;
                preempt_enable();
        }
#endif
        decoded = arm_probe_decode_insn(insn, asi);

        if (decoded == INSN_REJECTED ||
            is_probed_address_atomic(scan_start, scan_end))
                return INSN_REJECTED;

        return decoded;
}
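
/*
 * A rough sketch of how this decoder is expected to be driven by the arch
 * kprobes setup code. The struct kprobe fields and the error value below are
 * assumptions made for illustration, not a copy of the real caller:
 *
 *      switch (arm_kprobe_decode_insn(p->addr, &p->ainsn)) {
 *      case INSN_REJECTED:
 *              return -EINVAL;         probe point is not safe
 *      case INSN_GOOD:
 *              break;                  copy insn to an XOL slot and step it
 *      }
 */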