| /linux/arch/arm64/kvm/hyp/vhe/ |
| H A D | sysreg-sr.c |
    90   write_sysreg(__vcpu_sys_reg(vcpu, PAR_EL1), par_el1);   in __sysreg_restore_vel2_state()
    91   write_sysreg(__vcpu_sys_reg(vcpu, TPIDR_EL1), tpidr_el1);   in __sysreg_restore_vel2_state()
    94   write_sysreg(__vcpu_sys_reg(vcpu, MPIDR_EL1), vmpidr_el2);   in __sysreg_restore_vel2_state()
    95   write_sysreg_el1(__vcpu_sys_reg(vcpu, MAIR_EL2), SYS_MAIR);   in __sysreg_restore_vel2_state()
    96   write_sysreg_el1(__vcpu_sys_reg(vcpu, VBAR_EL2), SYS_VBAR);   in __sysreg_restore_vel2_state()
    97   write_sysreg_el1(__vcpu_sys_reg(vcpu, CONTEXTIDR_EL2), SYS_CONTEXTIDR);   in __sysreg_restore_vel2_state()
    98   write_sysreg_el1(__vcpu_sys_reg(vcpu, AMAIR_EL2), SYS_AMAIR);   in __sysreg_restore_vel2_state()
    105  write_sysreg_el1(__vcpu_sys_reg(vcpu, SCTLR_EL2), SYS_SCTLR);   in __sysreg_restore_vel2_state()
    106  write_sysreg_el1(__vcpu_sys_reg(vcpu, CPTR_EL2), SYS_CPACR);   in __sysreg_restore_vel2_state()
    107  write_sysreg_el1(__vcpu_sys_reg(vcpu, TTBR0_EL2), SYS_TTBR0);   in __sysreg_restore_vel2_state()
    [all …]
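
The vhe/sysreg-sr.c hits above all follow one pattern: __sysreg_restore_vel2_state() pulls the guest hypervisor's shadow copy of an EL2 register out of the vCPU context and installs it in the corresponding hardware register, either directly via write_sysreg() or via write_sysreg_el1() for registers that VHE redirects. The sketch below is a minimal, self-contained model of that flush direction; the shadow[]/hw[] arrays, the enum, and the macros are stand-ins for illustration only, not the kernel's data structures.

/* Hypothetical model of the "read shadow copy, write hardware register"
 * restore pattern seen in __sysreg_restore_vel2_state(). All names below
 * are illustrative stand-ins, not kernel definitions. */
#include <stdint.h>
#include <stdio.h>

enum { PAR_EL1, TPIDR_EL1, MAIR_EL2, VBAR_EL2, NR_REGS };

static uint64_t shadow[NR_REGS];   /* per-vCPU shadow register file  */
static uint64_t hw[NR_REGS];       /* stands in for the real sysregs */

/* Models __vcpu_sys_reg(vcpu, r): indexes the shadow register file. */
#define VCPU_SYS_REG(r)     (shadow[(r)])
/* Models write_sysreg()/write_sysreg_el1(): installs a value in "hardware". */
#define WRITE_SYSREG(v, r)  (hw[(r)] = (v))

static void restore_vel2_state(void)
{
	WRITE_SYSREG(VCPU_SYS_REG(PAR_EL1),   PAR_EL1);
	WRITE_SYSREG(VCPU_SYS_REG(TPIDR_EL1), TPIDR_EL1);
	WRITE_SYSREG(VCPU_SYS_REG(MAIR_EL2),  MAIR_EL2);
	WRITE_SYSREG(VCPU_SYS_REG(VBAR_EL2),  VBAR_EL2);
}

int main(void)
{
	shadow[MAIR_EL2] = 0x00000000004404ffULL;   /* arbitrary test value */
	restore_vel2_state();
	printf("hw MAIR_EL2 = %#llx\n", (unsigned long long)hw[MAIR_EL2]);
	return 0;
}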
|
| H A D | switch.c |
    87   guest_hcr = __vcpu_sys_reg(vcpu, HCR_EL2);   in __compute_hcr()
    92   va |= __vcpu_sys_reg(vcpu, VNCR_EL2) & GENMASK(PAGE_SHIFT - 1, 0);   in __compute_hcr()
    129  val = __vcpu_sys_reg(vcpu, CNTP_CVAL_EL0);   in __activate_traps()
    131  val = __vcpu_sys_reg(vcpu, CNTHP_CVAL_EL2);   in __activate_traps()
    242  cval = __vcpu_sys_reg(vcpu, CNTP_CVAL_EL0);   in compute_emulated_cntx_ctl_el0()
    243  ctl = __vcpu_sys_reg(vcpu, CNTP_CTL_EL0);   in compute_emulated_cntx_ctl_el0()
    247  cval = __vcpu_sys_reg(vcpu, CNTV_CVAL_EL0);   in compute_emulated_cntx_ctl_el0()
    248  ctl = __vcpu_sys_reg(vcpu, CNTV_CTL_EL0);   in compute_emulated_cntx_ctl_el0()
    289  val = __vcpu_sys_reg(vcpu, CNTP_CVAL_EL0);   in kvm_hyp_handle_timer()
    298  val = __vcpu_sys_reg(vcpu, CNTP_CVAL_EL0);   in kvm_hyp_handle_timer()
    [all …]
|
| /linux/arch/arm64/kvm/vgic/ |
| H A D | vgic-v3-nested.c |
    123  xmo = __vcpu_sys_reg(vcpu, HCR_EL2) & (HCR_IMO | HCR_FMO);   in vgic_state_is_nested()
    149  u64 lr = __vcpu_sys_reg(vcpu, ICH_LRN(i));   in vgic_compute_mi_state()
    184  hcr = __vcpu_sys_reg(vcpu, ICH_HCR_EL2);   in vgic_v3_get_misr()
    185  vmcr = __vcpu_sys_reg(vcpu, ICH_VMCR_EL2);   in vgic_v3_get_misr()
    192  if (__vcpu_sys_reg(vcpu, ICH_HCR_EL2) & ICH_HCR_EL2_UIE) {   in vgic_v3_get_misr()
    257  u64 lr = __vcpu_sys_reg(vcpu, ICH_LRN(i));   in vgic_v3_create_shadow_lr()
    273  u64 val = __vcpu_sys_reg(vcpu, ICH_HCR_EL2);   in vgic_v3_flush_nested()
    289  lr = __vcpu_sys_reg(vcpu, ICH_LRN(i));   in vgic_v3_sync_nested()
    329  s_cpu_if->vgic_hcr = __vcpu_sys_reg(vcpu, ICH_HCR_EL2);   in vgic_v3_create_shadow_state()
    330  s_cpu_if->vgic_vmcr = __vcpu_sys_reg(vcpu, ICH_VMCR_EL2);   in vgic_v3_create_shadow_state()
    [all …]
|
| /linux/arch/arm64/kvm/ |
| H A D | pauth.c |
    40   mod = __vcpu_sys_reg(vcpu, SP_EL2);   in compute_pac()
    170  ikey.lo = __vcpu_sys_reg(vcpu, APIBKEYLO_EL1);   in kvm_auth_eretax()
    171  ikey.hi = __vcpu_sys_reg(vcpu, APIBKEYHI_EL1);   in kvm_auth_eretax()
    176  ikey.lo = __vcpu_sys_reg(vcpu, APIAKEYLO_EL1);   in kvm_auth_eretax()
    177  ikey.hi = __vcpu_sys_reg(vcpu, APIAKEYHI_EL1);   in kvm_auth_eretax()
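
The pauth.c hits show kvm_auth_eretax() assembling a 128-bit pointer-authentication key from two 64-bit shadow registers, picking either the IA or the IB key halves. The sketch below models that assembly in self-contained C; the struct layout, the selection flag, and the register indices are illustrative stand-ins, not the kernel's definitions.

/* Hypothetical model of selecting a pointer-auth key from its two 64-bit
 * shadow halves, as in kvm_auth_eretax(); all names are stand-ins. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum { APIAKEYLO, APIAKEYHI, APIBKEYLO, APIBKEYHI, NR_REGS };

static uint64_t shadow[NR_REGS];          /* per-vCPU shadow register file */
#define VCPU_SYS_REG(r)  (shadow[(r)])    /* models __vcpu_sys_reg()       */

struct pauth_key { uint64_t lo, hi; };    /* illustrative 128-bit key      */

static struct pauth_key select_key(bool use_b_key)
{
	struct pauth_key ikey;

	if (use_b_key) {
		ikey.lo = VCPU_SYS_REG(APIBKEYLO);
		ikey.hi = VCPU_SYS_REG(APIBKEYHI);
	} else {
		ikey.lo = VCPU_SYS_REG(APIAKEYLO);
		ikey.hi = VCPU_SYS_REG(APIAKEYHI);
	}
	return ikey;
}

int main(void)
{
	shadow[APIBKEYLO] = 0x1111222233334444ULL;
	shadow[APIBKEYHI] = 0x5555666677778888ULL;

	struct pauth_key k = select_key(true);
	printf("IB key = %016llx%016llx\n",
	       (unsigned long long)k.hi, (unsigned long long)k.lo);
	return 0;
}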
|
| H A D | pmu-emul.c |
    101  return __vcpu_sys_reg(vcpu, MDCR_EL2) & MDCR_EL2_HLP;   in kvm_pmc_has_64bit_overflow()
    125  return __vcpu_sys_reg(kvm_pmc_to_vcpu(pmc), counter_index_to_evtreg(pmc->idx));   in kvm_pmc_read_evtreg()
    134  counter = __vcpu_sys_reg(vcpu, reg);   in kvm_pmu_get_pmc_value()
    177  val = __vcpu_sys_reg(vcpu, reg) & GENMASK(63, 32);   in kvm_pmu_set_pmc_value()
    282  hpmn = SYS_FIELD_GET(MDCR_EL2, HPMN, __vcpu_sys_reg(vcpu, MDCR_EL2));   in kvm_pmu_hyp_counter_mask()
    375  u64 reg = __vcpu_sys_reg(vcpu, PMOVSSET_EL0);   in kvm_pmu_overflow_status()
    377  reg &= __vcpu_sys_reg(vcpu, PMINTENSET_EL1);   in kvm_pmu_overflow_status()
    490  mask &= __vcpu_sys_reg(vcpu, PMCNTENSET_EL0);   in kvm_pmu_counter_increment()
    497  type = __vcpu_sys_reg(vcpu, counter_index_to_evtreg(i));   in kvm_pmu_counter_increment()
    503  reg = __vcpu_sys_reg(vcpu, counter_index_to_reg(i)) + 1;   in kvm_pmu_counter_increment()
    [all …]
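
Two of the pmu-emul.c hits (lines 375 and 377) show how the emulated PMU derives its overflow interrupt status: the set of overflowed counters (PMOVSSET_EL0) is intersected with the set of counters whose interrupt is enabled (PMINTENSET_EL1). Below is a small, self-contained model of that intersection using stand-in bitmaps rather than the kernel's vCPU state.

/* Hypothetical model of kvm_pmu_overflow_status(): a counter contributes a
 * pending interrupt only if its bit is set in both the overflow-set and the
 * interrupt-enable-set bitmaps. The globals are stand-ins, not kernel state. */
#include <stdint.h>
#include <stdio.h>

static uint64_t pmovsset;    /* models the PMOVSSET_EL0 shadow value   */
static uint64_t pmintenset;  /* models the PMINTENSET_EL1 shadow value */

static uint64_t overflow_status(void)
{
	uint64_t reg = pmovsset;

	reg &= pmintenset;   /* only counters with the IRQ enabled matter */
	return reg;
}

int main(void)
{
	pmovsset   = 0x0000000000000005ULL;   /* counters 0 and 2 overflowed */
	pmintenset = 0x0000000000000004ULL;   /* only counter 2 has IRQ on   */

	printf("pending overflow IRQs: %#llx\n",
	       (unsigned long long)overflow_status());   /* prints 0x4 */
	return 0;
}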
|
| H A D | arch_timer.c |
    73   return __vcpu_sys_reg(vcpu, CNTV_CTL_EL0);   in timer_get_ctl()
    75   return __vcpu_sys_reg(vcpu, CNTP_CTL_EL0);   in timer_get_ctl()
    77   return __vcpu_sys_reg(vcpu, CNTHV_CTL_EL2);   in timer_get_ctl()
    79   return __vcpu_sys_reg(vcpu, CNTHP_CTL_EL2);   in timer_get_ctl()
    92   return __vcpu_sys_reg(vcpu, CNTV_CVAL_EL0);   in timer_get_cval()
    94   return __vcpu_sys_reg(vcpu, CNTP_CVAL_EL0);   in timer_get_cval()
    96   return __vcpu_sys_reg(vcpu, CNTHV_CVAL_EL2);   in timer_get_cval()
    98   return __vcpu_sys_reg(vcpu, CNTHP_CVAL_EL2);   in timer_get_cval()
    825  u64 val = __vcpu_sys_reg(vcpu, CNTHCTL_EL2);   in timer_set_traps()
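
The arch_timer.c hits show the accessor pattern behind timer_get_ctl() and timer_get_cval(): each emulated timer (virtual, physical, and their EL2 counterparts) keeps its CTL and CVAL values in a distinct shadow system register, and the accessor simply selects the right one. The sketch below models that selection standalone; the enum and array stand in for the kernel's timer contexts and are assumptions for illustration.

/* Hypothetical model of timer_get_ctl()-style selection: map a timer
 * identity to the shadow register backing its CTL value. */
#include <stdint.h>
#include <stdio.h>

enum timer_id { TIMER_VTIMER, TIMER_PTIMER, TIMER_HVTIMER, TIMER_HPTIMER };
enum { CNTV_CTL_EL0, CNTP_CTL_EL0, CNTHV_CTL_EL2, CNTHP_CTL_EL2, NR_REGS };

static uint64_t shadow[NR_REGS];
#define VCPU_SYS_REG(r)  (shadow[(r)])   /* models __vcpu_sys_reg() */

static uint64_t timer_get_ctl(enum timer_id id)
{
	switch (id) {
	case TIMER_VTIMER:  return VCPU_SYS_REG(CNTV_CTL_EL0);
	case TIMER_PTIMER:  return VCPU_SYS_REG(CNTP_CTL_EL0);
	case TIMER_HVTIMER: return VCPU_SYS_REG(CNTHV_CTL_EL2);
	case TIMER_HPTIMER: return VCPU_SYS_REG(CNTHP_CTL_EL2);
	}
	return 0;
}

int main(void)
{
	shadow[CNTP_CTL_EL0] = 1;   /* example: ENABLE bit set */
	printf("PTIMER CTL = %#llx\n",
	       (unsigned long long)timer_get_ctl(TIMER_PTIMER));
	return 0;
}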
|
| H A D | handle_exit.c |
    320  u64 hcrx = __vcpu_sys_reg(vcpu, HCRX_EL2);   in handle_other()
    354  fwd &= (__vcpu_sys_reg(vcpu, HFGITR2_EL2) & HFGITR2_EL2_TSBCSYNC);   in handle_other()
    358  fwd &= (__vcpu_sys_reg(vcpu, HFGITR_EL2) & HFGITR_EL2_PSBCSYNC);   in handle_other()
|
| H A D | sys_regs.c |
    315   val |= __vcpu_sys_reg(vcpu, reg) & ~CNTKCTL_VALID_BITS;   in vcpu_read_sys_reg()
    338   return __vcpu_sys_reg(vcpu, reg);   in vcpu_read_sys_reg()
    733   p->regval = __vcpu_sys_reg(vcpu, r->reg);   in trap_oslsr_el1()
    941   return __vcpu_sys_reg(vcpu, r->reg);   in reset_pmu_reg()
    949   return __vcpu_sys_reg(vcpu, r->reg);   in reset_pmevcntr()
    961   return __vcpu_sys_reg(vcpu, r->reg);   in reset_pmevtyper()
    969   return __vcpu_sys_reg(vcpu, r->reg);   in reset_pmselr()
    985   return __vcpu_sys_reg(vcpu, r->reg);   in reset_pmcr()
    990   u64 reg = __vcpu_sys_reg(vcpu, PMUSERENR_EL0);   in check_pmu_access_disabled()
    1058  p->regval = __vcpu_sys_reg(vcpu, PMSELR_EL0)   in access_pmselr()
    [all …]
|
| H A D | sys_regs.h |
    141  return __vcpu_sys_reg(vcpu, r->reg);   in reset_unknown()
    149  return __vcpu_sys_reg(vcpu, r->reg);   in reset_val()
|
| H A D | trace_arm.h |
    352  __entry->hcr_el2 = __vcpu_sys_reg(vcpu, HCR_EL2);
    382  __entry->hcr_el2 = __vcpu_sys_reg(vcpu, HCR_EL2);
|
| H A D | emulate-nested.c |
    493   u64 val = __vcpu_sys_reg(vcpu, CNTHCTL_EL2);   in get_sanitized_cnthctl()
    521   val = __vcpu_sys_reg(vcpu, HCR_EL2);   in is_nested_nv2_guest()
    528   !(__vcpu_sys_reg(vcpu, CNTHCTL_EL2) & CNTHCTL_EL1NVPCT))   in check_cnthctl_el1nvpct()
    537   !(__vcpu_sys_reg(vcpu, CNTHCTL_EL2) & CNTHCTL_EL1NVVCT))   in check_cnthctl_el1nvvct()
    545   u64 val = __vcpu_sys_reg(vcpu, CPTR_EL2);   in check_cptr_tta()
    570   __vcpu_sys_reg(vcpu, PMSELR_EL0));   in check_mdcr_hpmn()
    2380  val = __vcpu_sys_reg(vcpu, tb->index);   in get_behaviour()
    2447  val = __vcpu_sys_reg(vcpu, sr);   in check_fgt_bit()
    2540  tmp = __vcpu_sys_reg(vcpu, HCRX_EL2);   in triage_sysreg_trap()
    2609  (__vcpu_sys_reg(vcpu, reg) & control_bit)) {   in __forward_traps()
    [all …]
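
Several emulate-nested.c hits (check_cnthctl_el1nvpct(), check_fgt_bit(), __forward_traps()) share one shape: read the guest hypervisor's shadow copy of an EL2 control register and test a single control bit to decide whether a trap should be forwarded to the guest's virtual EL2. Below is a self-contained model of that decision; the register index is invented and HCR_TSC is used only as an example trap bit.

/* Hypothetical model of the trap-forwarding test at line 2609: forward the
 * exception to virtual EL2 when the guest hypervisor has set the relevant
 * control bit in its shadow EL2 register. Names below are illustrative. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum { SHADOW_HCR_EL2, NR_REGS };

static uint64_t shadow[NR_REGS];
#define VCPU_SYS_REG(r)  (shadow[(r)])   /* models __vcpu_sys_reg() */

static bool should_forward_trap(int reg, uint64_t control_bit)
{
	/* Mirrors "(__vcpu_sys_reg(vcpu, reg) & control_bit)". */
	return (VCPU_SYS_REG(reg) & control_bit) != 0;
}

int main(void)
{
	const uint64_t HCR_TSC = UINT64_C(1) << 19;   /* example trap bit */

	shadow[SHADOW_HCR_EL2] = HCR_TSC;
	printf("forward trap to vEL2: %d\n",
	       should_forward_trap(SHADOW_HCR_EL2, HCR_TSC));
	return 0;
}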
|
| H A D | at.c |
    104  !(__vcpu_sys_reg(vcpu, HCRX_EL2) & HCRX_EL2_TCR2En))   in effective_tcr2()
    148  hcr = __vcpu_sys_reg(vcpu, HCR_EL2);   in setup_s1_walk()
    822  if (__vcpu_sys_reg(vcpu, HCR_EL2) & HCR_FWB) {   in compute_par_s12()
    876  if ((__vcpu_sys_reg(vcpu, HCR_EL2) & HCR_CD) &&   in compute_par_s12()
    908  (__vcpu_sys_reg(vcpu, HCR_EL2) & HCR_DC)) {   in compute_par_s1()
|
| H A D | nested.c |
    786   if (__vcpu_sys_reg(vcpu, HCR_EL2) & HCR_NV)   in kvm_vcpu_load_hw_mmu()
    1216  return (u64)sign_extend64(__vcpu_sys_reg(vcpu, VNCR_EL2), 48);   in read_vncr_el2()
    1905  u64 guest_mdcr = __vcpu_sys_reg(vcpu, MDCR_EL2);   in kvm_nested_setup_mdcr_el2()
|
| H A D | vgic-sys-reg-v3.c |
    310  *val = __vcpu_sys_reg(vcpu, r->reg);   in get_gic_ich_reg()
|
| H A D | config.c |
    1474  u64 nested = __vcpu_sys_reg(vcpu, reg);   in __compute_fgt()
|
| H A D | arm.c |
    544  val = __vcpu_sys_reg(vcpu, HCR_EL2);   in vcpu_set_pauth_traps()
|
| /linux/arch/arm64/kvm/hyp/include/hyp/ |
| H A D | switch.h |
    323  u64 val = __vcpu_sys_reg(vcpu, HCRX_EL2);   in __activate_traps_common()
    381  vsesr = __vcpu_sys_reg(vcpu, VSESR_EL2);   in ___activate_traps()
    451  sve_cond_update_zcr_vq(__vcpu_sys_reg(vcpu, ZCR_EL2), SYS_ZCR_EL2);   in __hyp_sve_restore_guest()
    453  write_sysreg_el1(__vcpu_sys_reg(vcpu, vcpu_sve_zcr_elx(vcpu)), SYS_ZCR);   in __hyp_sve_restore_guest()
    477  zcr_el2 = __vcpu_sys_reg(vcpu, ZCR_EL2);   in fpsimd_lazy_switch_to_guest()
    483  zcr_el1 = __vcpu_sys_reg(vcpu, vcpu_sve_zcr_elx(vcpu));   in fpsimd_lazy_switch_to_guest()
    599  write_sysreg_s(__vcpu_sys_reg(vcpu, FPMR), SYS_FPMR);   in kvm_hyp_handle_fpsimd()
    603  write_sysreg(__vcpu_sys_reg(vcpu, FPEXC32_EL2), fpexc32_el2);   in kvm_hyp_handle_fpsimd()
    714  val = __vcpu_sys_reg(vcpu, CNTHCTL_EL2);   in kvm_handle_cntxct()
    728  val = __vcpu_sys_reg(vcpu, CNTHCTL_EL2);   in kvm_handle_cntxct()
|
| H A D | sysreg-sr.h |
    370  write_sysreg(__vcpu_sys_reg(vcpu, DACR32_EL2), dacr32_el2);   in __sysreg32_restore_state()
    371  write_sysreg(__vcpu_sys_reg(vcpu, IFSR32_EL2), ifsr32_el2);   in __sysreg32_restore_state()
    374  write_sysreg(__vcpu_sys_reg(vcpu, DBGVCR32_EL2), dbgvcr32_el2);   in __sysreg32_restore_state()
|
| /linux/arch/arm64/kvm/hyp/ |
| H A D | vgic-v3-sr.c |
    1099  ich_hcr = __vcpu_sys_reg(vcpu, ICH_HCR_EL2);   in __vgic_v3_check_trap_forwarding()
    1104  (__vcpu_sys_reg(vcpu, HFGRTR_EL2) & HFGRTR_EL2_ICC_IGRPENn_EL1))   in __vgic_v3_check_trap_forwarding()
    1108  (__vcpu_sys_reg(vcpu, HFGWTR_EL2) & HFGWTR_EL2_ICC_IGRPENn_EL1))   in __vgic_v3_check_trap_forwarding()
    1125  (__vcpu_sys_reg(vcpu, HFGRTR_EL2) & HFGRTR_EL2_ICC_IGRPENn_EL1))   in __vgic_v3_check_trap_forwarding()
    1129  (__vcpu_sys_reg(vcpu, HFGWTR_EL2) & HFGWTR_EL2_ICC_IGRPENn_EL1))   in __vgic_v3_check_trap_forwarding()
|
| H A D | exception.c |
    28  return __vcpu_sys_reg(vcpu, reg);   in __vcpu_read_sys_reg()
|
| /linux/arch/arm64/include/asm/ |
| H A D | kvm_host.h |
    1163  #define __vcpu_sys_reg(v,r) \   macro
    1388  (!!(__vcpu_sys_reg(vcpu, OSLSR_EL1) & OSLSR_EL1_OSLK))
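
The kvm_host.h entry is where __vcpu_sys_reg() itself is defined (the listing truncates the macro body at the line continuation) together with a typical consumer at line 1388, which turns the OS Lock bit of the shadow OSLSR_EL1 value into a boolean. Since the expansion is elided here, the sketch below uses an invented array-backed stand-in for the accessor purely to show how such a predicate reads; it is not the kernel's actual definition.

/* Illustrative stand-in for an array-backed per-vCPU sysreg accessor and an
 * "is the OS Lock set?" predicate built on top of it. The macro body, the
 * struct, and the bit value are assumptions for this sketch only. */
#include <stdint.h>
#include <stdio.h>

enum { OSLSR_EL1, NR_SYS_REGS };

struct vcpu { uint64_t sys_regs[NR_SYS_REGS]; };

/* Stand-in accessor: the real macro's expansion is elided in the listing. */
#define __vcpu_sys_reg(v, r)   ((v)->sys_regs[(r)])

#define OSLSR_EL1_OSLK  (UINT64_C(1) << 1)   /* OS Lock status bit */

/* Illustrative name; mirrors the predicate shape shown at line 1388. */
#define os_lock_held(vcpu) \
	(!!(__vcpu_sys_reg(vcpu, OSLSR_EL1) & OSLSR_EL1_OSLK))

int main(void)
{
	struct vcpu v = { .sys_regs = { [OSLSR_EL1] = OSLSR_EL1_OSLK } };

	printf("OS lock held: %d\n", os_lock_held(&v));
	return 0;
}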
|