Searched refs: esr (results 1 – 25 of 68), sorted by relevance

/linux/arch/arm64/mm/
fault.c
48 int (*fn)(unsigned long far, unsigned long esr,
57 static inline const struct fault_info *esr_to_fault_info(unsigned long esr) in esr_to_fault_info() argument
59 return fault_info + (esr & ESR_ELx_FSC); in esr_to_fault_info()
62 static void data_abort_decode(unsigned long esr) in data_abort_decode() argument
64 unsigned long iss2 = ESR_ELx_ISS2(esr); in data_abort_decode()
68 if (esr & ESR_ELx_ISV) { in data_abort_decode()
70 1U << ((esr & ESR_ELx_SAS) >> ESR_ELx_SAS_SHIFT)); in data_abort_decode()
72 (esr & ESR_ELx_SSE) >> ESR_ELx_SSE_SHIFT, in data_abort_decode()
73 (esr & ESR_ELx_SRT_MASK) >> ESR_ELx_SRT_SHIFT); in data_abort_decode()
75 (esr & ESR_ELx_SF) >> ESR_ELx_SF_SHIFT, in data_abort_decode()
[all …]
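
The data_abort_decode() hits above show the data-abort ISS fields being pulled out of ESR_ELx with mask-and-shift macros. As a rough standalone sketch (not kernel code) of the same extraction — the bit positions follow the ESR_ELx layout in esr.h and the example value is made up:

#include <stdio.h>

/* Data-abort ISS field positions (ESR_ELx layout). */
#define ESR_ELx_ISV        (1UL << 24)                    /* syndrome valid */
#define ESR_ELx_SAS_SHIFT  22                             /* access size */
#define ESR_ELx_SAS        (3UL << ESR_ELx_SAS_SHIFT)
#define ESR_ELx_SRT_SHIFT  16                             /* transfer register */
#define ESR_ELx_SRT_MASK   (0x1fUL << ESR_ELx_SRT_SHIFT)

int main(void)
{
	unsigned long esr = 0x97c00045;  /* hypothetical data-abort syndrome */

	if (esr & ESR_ELx_ISV) {
		printf("access size: %u bytes\n",
		       1U << ((esr & ESR_ELx_SAS) >> ESR_ELx_SAS_SHIFT));
		printf("transfer register: x%lu\n",
		       (esr & ESR_ELx_SRT_MASK) >> ESR_ELx_SRT_SHIFT);
	}
	return 0;
}
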
/linux/arch/arm64/kernel/
entry-common.c
146 unsigned long esr) in __panic_unhandled() argument
153 vector, smp_processor_id(), esr, in __panic_unhandled()
154 esr_get_class_string(esr)); in __panic_unhandled()
296 static void noinstr el1_abort(struct pt_regs *regs, unsigned long esr) in el1_abort() argument
303 do_mem_abort(far, esr, regs); in el1_abort()
308 static void noinstr el1_pc(struct pt_regs *regs, unsigned long esr) in el1_pc() argument
315 do_sp_pc_abort(far, esr, regs); in el1_pc()
320 static void noinstr el1_undef(struct pt_regs *regs, unsigned long esr) in el1_undef() argument
326 do_el1_undef(regs, esr); in el1_undef()
331 static void noinstr el1_bti(struct pt_regs *regs, unsigned long esr) in el1_bti() argument
[all …]
debug-monitors.c
181 void do_el0_softstep(unsigned long esr, struct pt_regs *regs) in do_el0_softstep() argument
183 if (uprobe_single_step_handler(regs, esr) == DBG_HOOK_HANDLED) in do_el0_softstep()
196 void do_el1_softstep(unsigned long esr, struct pt_regs *regs) in do_el1_softstep() argument
198 if (kgdb_single_step_handler(regs, esr) == DBG_HOOK_HANDLED) in do_el1_softstep()
210 static int call_el1_break_hook(struct pt_regs *regs, unsigned long esr) in call_el1_break_hook() argument
212 if (esr_brk_comment(esr) == BUG_BRK_IMM) in call_el1_break_hook()
213 return bug_brk_handler(regs, esr); in call_el1_break_hook()
215 if (IS_ENABLED(CONFIG_CFI) && esr_is_cfi_brk(esr)) in call_el1_break_hook()
216 return cfi_brk_handler(regs, esr); in call_el1_break_hook()
218 if (esr_brk_comment(esr) == FAULT_BRK_IMM) in call_el1_break_hook()
[all …]
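
call_el1_break_hook() above routes a BRK exception by the 16-bit "comment" immediate carried in the low bits of the ESR (esr_brk_comment()). A minimal standalone sketch of that dispatch pattern follows; the immediate values are placeholders, not the real constants from asm/brk-imm.h.

#include <stdio.h>

#define BRK64_COMMENT_MASK  0xffffUL  /* ISS[15:0] holds the BRK immediate */
#define DEMO_BUG_IMM        0x0800    /* placeholder immediate */
#define DEMO_FAULT_IMM      0x0100    /* placeholder immediate */

static unsigned long brk_comment(unsigned long esr)
{
	return esr & BRK64_COMMENT_MASK;
}

int main(void)
{
	unsigned long esr = 0xf2000800;   /* hypothetical BRK64 syndrome */

	switch (brk_comment(esr)) {
	case DEMO_BUG_IMM:
		printf("BUG() breakpoint\n");
		break;
	case DEMO_FAULT_IMM:
		printf("fault-injection breakpoint\n");
		break;
	default:
		printf("unclaimed BRK immediate %#lx\n", brk_comment(esr));
		break;
	}
	return 0;
}
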
kgdb.c
237 int kgdb_brk_handler(struct pt_regs *regs, unsigned long esr) in kgdb_brk_handler() argument
244 int kgdb_compiled_brk_handler(struct pt_regs *regs, unsigned long esr) in NOKPROBE_SYMBOL()
253 int kgdb_single_step_handler(struct pt_regs *regs, unsigned long esr) in kgdb_single_step_handler() argument
/linux/arch/arm64/include/asm/
exception.h
19 unsigned long esr = ESR_ELx_EC_SERROR << ESR_ELx_EC_SHIFT; in disr_to_esr() local
22 esr |= (disr & DISR_EL1_ESR_MASK); in disr_to_esr()
24 esr |= (disr & ESR_ELx_ISS_MASK); in disr_to_esr()
26 return esr; in disr_to_esr()
55 void do_mem_abort(unsigned long far, unsigned long esr, struct pt_regs *regs);
56 void do_el0_undef(struct pt_regs *regs, unsigned long esr);
57 void do_el1_undef(struct pt_regs *regs, unsigned long esr);
59 void do_el1_bti(struct pt_regs *regs, unsigned long esr);
60 void do_el0_gcs(struct pt_regs *regs, unsigned long esr);
61 void do_el1_gcs(struct pt_regs *regs, unsigned long esr);
[all …]
uprobes.h
31 int uprobe_brk_handler(struct pt_regs *regs, unsigned long esr);
33 int uprobe_single_step_handler(struct pt_regs *regs, unsigned long esr);
36 unsigned long esr) in uprobe_single_step_handler() argument
kprobes.h
46 unsigned long esr);
48 unsigned long esr);
50 unsigned long esr);
/linux/tools/arch/arm64/include/asm/
esr.h
73 #define ESR_ELx_EC(esr) (((esr) & ESR_ELx_EC_MASK) >> ESR_ELx_EC_SHIFT) argument
78 #define ESR_ELx_ISS(esr) ((esr) & ESR_ELx_ISS_MASK) argument
81 #define ESR_ELx_ISS2(esr) (((esr) & ESR_ELx_ISS2_MASK) >> ESR_ELx_ISS2_SHIFT) argument
226 #define ESR_ELx_SYS64_ISS_RT(esr) \ argument
227 (((esr) & ESR_ELx_SYS64_ISS_RT_MASK) >> ESR_ELx_SYS64_ISS_RT_SHIFT)
386 #define ESR_ELx_MOPS_ISS_DESTREG(esr) (((esr) & (UL(0x1f) << 10)) >> 10) argument
387 #define ESR_ELx_MOPS_ISS_SRCREG(esr) (((esr) & (UL(0x1f) << 5)) >> 5) argument
388 #define ESR_ELx_MOPS_ISS_SIZEREG(esr) (((esr) & (UL(0x1f) << 0)) >> 0) argument
393 static inline unsigned long esr_brk_comment(unsigned long esr) in esr_brk_comment() argument
395 return esr & ESR_ELx_BRK64_ISS_COMMENT_MASK; in esr_brk_comment()
[all …]
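
The macros in this header are the canonical way to slice an ESR_ELx value: the exception class (EC) sits in bits [31:26] and the ISS in bits [24:0]. A minimal standalone sketch of the EC/ISS split, using the same shifts and masks as ESR_ELx_EC()/ESR_ELx_ISS() above (the example value is made up):

#include <stdio.h>

#define ESR_ELx_EC_SHIFT   26
#define ESR_ELx_EC_MASK    (0x3fUL << ESR_ELx_EC_SHIFT)
#define ESR_ELx_EC(esr)    (((esr) & ESR_ELx_EC_MASK) >> ESR_ELx_EC_SHIFT)
#define ESR_ELx_ISS_MASK   0x01ffffffUL
#define ESR_ELx_ISS(esr)   ((esr) & ESR_ELx_ISS_MASK)

int main(void)
{
	unsigned long esr = 0x96000010;   /* hypothetical syndrome value */

	printf("EC=%#04lx ISS=%#09lx\n", ESR_ELx_EC(esr), ESR_ELx_ISS(esr));
	return 0;
}
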
/linux/arch/arm64/kvm/hyp/include/hyp/
fault.h
15 static inline bool __fault_safe_to_translate(u64 esr) in __fault_safe_to_translate() argument
17 u64 fsc = esr & ESR_ELx_FSC; in __fault_safe_to_translate()
19 if (esr_fsc_is_sea_ttw(esr) || esr_fsc_is_secc_ttw(esr)) in __fault_safe_to_translate()
22 return !(fsc == ESR_ELx_FSC_EXTABT && (esr & ESR_ELx_FnV)); in __fault_safe_to_translate()
60 static inline bool __hpfar_valid(u64 esr) in __hpfar_valid() argument
70 esr_fsc_is_translation_fault(esr)) in __hpfar_valid()
73 if (esr_fsc_is_translation_fault(esr) || esr_fsc_is_access_flag_fault(esr)) in __hpfar_valid()
76 if ((esr & ESR_ELx_S1PTW) && esr_fsc_is_permission_fault(esr)) in __hpfar_valid()
79 return esr_fsc_is_addr_sz_fault(esr); in __hpfar_valid()
82 static inline bool __get_fault_info(u64 esr, struct kvm_vcpu_fault_info *fault) in __get_fault_info() argument
[all …]
/linux/arch/arm64/kvm/
handle_exit.c
32 static void kvm_handle_guest_serror(struct kvm_vcpu *vcpu, u64 esr) in kvm_handle_guest_serror() argument
34 if (!arm64_is_ras_serror(esr) || arm64_is_fatal_ras_serror(NULL, esr)) in kvm_handle_guest_serror()
132 u64 esr = kvm_vcpu_get_esr(vcpu); in kvm_handle_wfx() local
133 bool is_wfe = !!(esr & ESR_ELx_WFx_ISS_WFE); in kvm_handle_wfx()
146 if (esr & ESR_ELx_WFx_ISS_WFxT) { in kvm_handle_wfx()
147 if (esr & ESR_ELx_WFx_ISS_RV) { in kvm_handle_wfx()
162 esr &= ~ESR_ELx_WFx_ISS_WFxT; in kvm_handle_wfx()
166 if (esr & ESR_ELx_WFx_ISS_WFE) { in kvm_handle_wfx()
169 if (esr & ESR_ELx_WFx_ISS_WFxT) in kvm_handle_wfx()
194 u64 esr = kvm_vcpu_get_esr(vcpu); in kvm_handle_guest_debug() local
[all …]
sys_regs.h
37 #define esr_sys64_to_params(esr) \ argument
38 ((struct sys_reg_params){ .Op0 = ((esr) >> 20) & 3, \
39 .Op1 = ((esr) >> 14) & 0x7, \
40 .CRn = ((esr) >> 10) & 0xf, \
41 .CRm = ((esr) >> 1) & 0xf, \
42 .Op2 = ((esr) >> 17) & 0x7, \
43 .is_write = !((esr) & 1) })
45 #define esr_cp1x_32_to_params(esr) \ argument
46 ((struct sys_reg_params){ .Op1 = ((esr) >> 14) & 0x7, \
47 .CRn = ((esr) >> 10) & 0xf, \
[all …]
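
esr_sys64_to_params() above unpacks a trapped MSR/MRS access into its Op0/Op1/CRn/CRm/Op2 encoding plus a read/write direction bit. A standalone sketch (not kernel code) using the same shifts; the syndrome value is made up:

#include <stdio.h>

struct sysreg_params {
	unsigned int op0, op1, crn, crm, op2;
	int is_write;
};

/* Mirrors the field positions used by esr_sys64_to_params() above. */
static struct sysreg_params decode_sys64(unsigned long esr)
{
	return (struct sysreg_params){
		.op0      = (esr >> 20) & 0x3,
		.op1      = (esr >> 14) & 0x7,
		.crn      = (esr >> 10) & 0xf,
		.crm      = (esr >> 1)  & 0xf,
		.op2      = (esr >> 17) & 0x7,
		.is_write = !(esr & 1),
	};
}

int main(void)
{
	struct sysreg_params p = decode_sys64(0x62340c81UL);  /* hypothetical */

	printf("Op0=%u Op1=%u CRn=%u CRm=%u Op2=%u (%s)\n",
	       p.op0, p.op1, p.crn, p.crm, p.op2,
	       p.is_write ? "write" : "read");
	return 0;
}
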
nested.c
143 u32 esr; in esr_s2_fault() local
145 esr = kvm_vcpu_get_esr(vcpu) & ~ESR_ELx_FSC; in esr_s2_fault()
146 esr |= compute_fsc(level, fsc); in esr_s2_fault()
147 return esr; in esr_s2_fault()
281 out->esr = compute_fsc(level, ESR_ELx_FSC_ADDRSZ); in walk_nested_s2_pgd()
303 out->esr = compute_fsc(level, ESR_ELx_FSC_FAULT); in walk_nested_s2_pgd()
312 out->esr = compute_fsc(level, ESR_ELx_FSC_FAULT); in walk_nested_s2_pgd()
322 out->esr = compute_fsc(level, ESR_ELx_FSC_ADDRSZ); in walk_nested_s2_pgd()
334 out->esr = compute_fsc(level, ESR_ELx_FSC_FAULT); in walk_nested_s2_pgd()
340 out->esr = compute_fsc(level, ESR_ELx_FSC_ADDRSZ); in walk_nested_s2_pgd()
[all …]
mmu.c
1951 static bool host_owns_sea(struct kvm_vcpu *vcpu, u64 esr) in host_owns_sea() argument
1962 if (is_hyp_ctxt(vcpu) && !kvm_vcpu_trap_is_iabt(vcpu) && (esr & ESR_ELx_VNCR)) in host_owns_sea()
1972 return (esr_fsc_is_sea_ttw(esr) && (esr & ESR_ELx_S1PTW)); in host_owns_sea()
1979 u64 esr = kvm_vcpu_get_esr(vcpu); in kvm_handle_guest_sea() local
1997 if (host_owns_sea(vcpu, esr) || in kvm_handle_guest_sea()
2012 run->arm_sea.esr = esr & esr_mask; in kvm_handle_guest_sea()
2014 if (!(esr & ESR_ELx_FnV)) in kvm_handle_guest_sea()
2040 unsigned long esr; in kvm_handle_guest_abort() local
2052 esr = kvm_vcpu_get_esr(vcpu); in kvm_handle_guest_abort()
2064 if (esr_fsc_is_translation_fault(esr)) { in kvm_handle_guest_abort()
[all …]
trace_arm.h
140 TP_PROTO(unsigned long vcpu_pc, unsigned long esr,
142 TP_ARGS(vcpu_pc, esr, far, ipa),
146 __field( unsigned long, esr )
153 __entry->esr = esr;
159 __entry->ipa, __entry->esr,
pauth.c
159 u64 esr = kvm_vcpu_get_esr(vcpu); in kvm_auth_eretax() local
166 if (esr_iss_is_eretab(esr)) { in kvm_auth_eretax()
/linux/tools/testing/selftests/kvm/arm64/
sea_to_user.c
159 u64 esr = read_sysreg(esr_el1); in expect_sea_handler() local
164 GUEST_PRINTF("ESR_EL1=%#lx, FAR_EL1=%#lx\n", esr, far); in expect_sea_handler()
166 GUEST_ASSERT_EQ(ESR_ELx_EC(esr), ESR_ELx_EC_DABT_CUR); in expect_sea_handler()
167 GUEST_ASSERT_EQ(esr & ESR_ELx_FSC_TYPE, ESR_ELx_FSC_EXTABT); in expect_sea_handler()
170 GUEST_ASSERT_EQ(esr & ESR_ELx_FnV, ESR_ELx_FnV); in expect_sea_handler()
173 GUEST_ASSERT_EQ(esr & ESR_ELx_FnV, 0); in expect_sea_handler()
193 u64 esr; in run_vm() local
201 run->arm_sea.esr, run->arm_sea.flags); in run_vm()
207 esr = run->arm_sea.esr; in run_vm()
208 TEST_ASSERT_EQ(ESR_ELx_EC(esr), ESR_ELx_EC_DABT_LOW); in run_vm()
[all …]
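
The selftest handler above checks that the guest saw a synchronous external abort: the exception class must be a data abort at the current EL and the fault status code class must be external abort. The same check, reduced to a standalone sketch (constants follow the ESR_ELx FSC encoding; the esr value is made up):

#include <stdbool.h>
#include <stdio.h>

#define ESR_ELx_EC_SHIFT     26
#define ESR_ELx_EC(esr)      (((esr) >> ESR_ELx_EC_SHIFT) & 0x3f)
#define ESR_ELx_EC_DABT_CUR  0x25   /* data abort taken at the current EL */
#define ESR_ELx_FSC_TYPE     0x3c   /* fault status code class bits */
#define ESR_ELx_FSC_EXTABT   0x10   /* synchronous external abort */

static bool is_current_el_sea(unsigned long esr)
{
	return ESR_ELx_EC(esr) == ESR_ELx_EC_DABT_CUR &&
	       (esr & ESR_ELx_FSC_TYPE) == ESR_ELx_FSC_EXTABT;
}

int main(void)
{
	unsigned long esr = 0x96000010;   /* hypothetical SEA syndrome */

	printf("SEA at current EL: %s\n", is_current_el_sea(esr) ? "yes" : "no");
	return 0;
}
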
external_aborts.c
17 u64 esr = read_sysreg(esr_el1); in expect_sea_handler() local
20 GUEST_ASSERT_EQ(ESR_ELx_EC(esr), ESR_ELx_EC_DABT_CUR); in expect_sea_handler()
21 GUEST_ASSERT_EQ(esr & ESR_ELx_FSC_TYPE, ESR_ELx_FSC_EXTABT); in expect_sea_handler()
220 u64 esr = read_sysreg(esr_el1); in expect_serror_handler() local
222 GUEST_ASSERT_EQ(ESR_ELx_EC(esr), ESR_ELx_EC_SERROR); in expect_serror_handler()
224 GUEST_ASSERT_EQ(ESR_ELx_ISS(esr), EXPECTED_SERROR_ISS); in expect_serror_handler()
255 u64 esr = read_sysreg(esr_el1); in expect_sea_s1ptw_handler() local
258 GUEST_ASSERT_EQ(ESR_ELx_EC(esr), ESR_ELx_EC_DABT_CUR); in expect_sea_s1ptw_handler()
259 GUEST_ASSERT_EQ((esr & ESR_ELx_FSC), ESR_ELx_FSC_SEA_TTW(3)); in expect_sea_s1ptw_handler()
/linux/arch/sh/boards/mach-dreamcast/
irq.c
90 __u32 esr = ESR_BASE + (LEVEL(irq) << 2); in mask_ack_systemasic_irq() local
92 outl((1 << EVENT_BIT(irq)), esr); in mask_ack_systemasic_irq()
107 __u32 emr, esr, status, level; in systemasic_irq_demux() local
124 esr = ESR_BASE + (level << 2); in systemasic_irq_demux()
127 status = inl(esr); in systemasic_irq_demux()
/linux/drivers/net/can/
bxcan.c
158 u32 esr; /* 0x18 - error status */ member
466 static void bxcan_handle_state_change(struct net_device *ndev, u32 esr) in bxcan_handle_state_change() argument
476 if (!(esr & (BXCAN_ESR_EWGF | BXCAN_ESR_EPVF | BXCAN_ESR_BOFF))) in bxcan_handle_state_change()
479 bec.txerr = FIELD_GET(BXCAN_ESR_TEC_MASK, esr); in bxcan_handle_state_change()
480 bec.rxerr = FIELD_GET(BXCAN_ESR_REC_MASK, esr); in bxcan_handle_state_change()
482 if (esr & BXCAN_ESR_BOFF) in bxcan_handle_state_change()
484 else if (esr & BXCAN_ESR_EPVF) in bxcan_handle_state_change()
486 else if (esr & BXCAN_ESR_EWGF) in bxcan_handle_state_change()
517 static void bxcan_handle_bus_err(struct net_device *ndev, u32 esr) in bxcan_handle_bus_err() argument
524 lec_code = FIELD_GET(BXCAN_ESR_LEC_MASK, esr); in bxcan_handle_bus_err()
[all …]
/linux/arch/arm64/kvm/hyp/vhe/
switch.c
263 u64 esr, val; in kvm_hyp_handle_timer() local
274 esr = kvm_vcpu_get_esr(vcpu); in kvm_hyp_handle_timer()
275 if ((esr & ESR_ELx_SYS64_ISS_DIR_MASK) != ESR_ELx_SYS64_ISS_DIR_READ) in kvm_hyp_handle_timer()
278 switch (esr_sys64_to_sysreg(esr)) { in kvm_hyp_handle_timer()
339 u64 esr = kvm_vcpu_get_esr(vcpu); in kvm_hyp_handle_eret() local
375 if (esr_iss_is_eretax(esr)) { in kvm_hyp_handle_eret()
441 u64 esr = kvm_vcpu_get_esr(vcpu); in kvm_hyp_handle_cpacr_el1() local
444 if (!is_hyp_ctxt(vcpu) || esr_sys64_to_sysreg(esr) != SYS_CPACR_EL1) in kvm_hyp_handle_cpacr_el1()
449 if ((esr & ESR_ELx_SYS64_ISS_DIR_MASK) == ESR_ELx_SYS64_ISS_DIR_READ) { in kvm_hyp_handle_cpacr_el1()
/linux/drivers/net/ethernet/ibm/emac/
mal.c
222 u32 esr = get_mal_dcrn(mal, MAL_ESR); in mal_serr() local
225 set_mal_dcrn(mal, MAL_ESR, esr); in mal_serr()
227 MAL_DBG(mal, "SERR %08x" NL, esr); in mal_serr()
229 if (esr & MAL_ESR_EVB) { in mal_serr()
230 if (esr & MAL_ESR_DE) { in mal_serr()
237 if (esr & MAL_ESR_PEIN) { in mal_serr()
245 mal->index, esr); in mal_serr()
255 mal->index, esr); in mal_serr()
355 u32 esr = get_mal_dcrn(mal, MAL_ESR); in mal_int() local
357 if (esr & MAL_ESR_EVB) { in mal_int()
[all …]
/linux/arch/powerpc/platforms/44x/
machine_check.c
14 unsigned long reason = regs->esr; in machine_check_4xx()
29 unsigned long reason = regs->esr; in machine_check_440A()
66 unsigned long reason = regs->esr; in machine_check_47x()
/linux/arch/mips/include/asm/octeon/
cvmx-sli-defs.h
107 __BITFIELD_FIELD(uint64_t esr:2,
119 __BITFIELD_FIELD(uint64_t esr:2,
/linux/arch/arm64/kernel/probes/
kprobes.c
311 kprobe_brk_handler(struct pt_regs *regs, unsigned long esr) in kprobe_brk_handler() argument
355 kprobe_ss_brk_handler(struct pt_regs *regs, unsigned long esr) in kprobe_ss_brk_handler() argument
374 kretprobe_brk_handler(struct pt_regs *regs, unsigned long esr) in kretprobe_brk_handler() argument
/linux/arch/arm64/kvm/hyp/
vgic-v3-sr.c
548 u64 esr = kvm_vcpu_get_esr(vcpu); in __vgic_v3_get_group() local
549 u8 crm = (esr & ESR_ELx_SYS64_ISS_CRM_MASK) >> ESR_ELx_SYS64_ISS_CRM_SHIFT; in __vgic_v3_get_group()
1163 u64 esr; in __vgic_v3_perform_cpuif_access() local
1172 esr = kvm_vcpu_get_esr(vcpu); in __vgic_v3_perform_cpuif_access()
1179 sysreg = esr_cp15_to_sysreg(esr); in __vgic_v3_perform_cpuif_access()
1181 sysreg = esr_sys64_to_sysreg(esr); in __vgic_v3_perform_cpuif_access()
1184 is_read = (esr & ESR_ELx_SYS64_ISS_DIR_MASK) == ESR_ELx_SYS64_ISS_DIR_READ; in __vgic_v3_perform_cpuif_access()
