Lines Matching full:cp
20 struct kvm_cpu_context *cp = &vcpu->arch.guest_context; in kvm_sbi_ext_time_handler() local
23 if (cp->a6 != SBI_EXT_TIME_SET_TIMER) { in kvm_sbi_ext_time_handler()
30 next_cycle = ((u64)cp->a1 << 32) | (u64)cp->a0; in kvm_sbi_ext_time_handler()
32 next_cycle = (u64)cp->a0; in kvm_sbi_ext_time_handler()
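The fragments above are from kvm_sbi_ext_time_handler(): it rejects any function id in a6 other than SBI_EXT_TIME_SET_TIMER, then rebuilds the 64-bit timer deadline from the guest's argument registers, using both a0 and a1 on 32-bit targets and only a0 on 64-bit ones. A minimal standalone sketch of that register-assembly step; combine_next_cycle() and the xlen parameter are illustrative names, not kernel symbols:

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative only: rebuild the 64-bit SET_TIMER deadline from the SBI
     * argument registers, mirroring what the handler does with cp->a0/cp->a1.
     * On RV32 the value is split across a0 (low half) and a1 (high half);
     * on RV64 it fits entirely in a0. */
    static uint64_t combine_next_cycle(unsigned long a0, unsigned long a1, int xlen)
    {
            if (xlen == 32)
                    return ((uint64_t)a1 << 32) | (uint64_t)a0;
            return (uint64_t)a0;
    }

    int main(void)
    {
            /* An RV32 guest passing 0x123456789abcdef0 splits it as a1:a0. */
            printf("%llx\n",
                   (unsigned long long)combine_next_cycle(0x9abcdef0UL, 0x12345678UL, 32));
            return 0;
    }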
51 struct kvm_cpu_context *cp = &vcpu->arch.guest_context; in kvm_sbi_ext_ipi_handler() local
52 unsigned long hmask = cp->a0; in kvm_sbi_ext_ipi_handler()
53 unsigned long hbase = cp->a1; in kvm_sbi_ext_ipi_handler()
56 if (cp->a6 != SBI_EXT_IPI_SEND_IPI) { in kvm_sbi_ext_ipi_handler()
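In kvm_sbi_ext_ipi_handler() the guest's a0/a1 carry the SBI hart mask and hart mask base; after checking that a6 is SBI_EXT_IPI_SEND_IPI, the handler walks the vCPUs and injects a software interrupt into those the mask selects. A standalone sketch of just the mask test; hart_selected() is an illustrative name, and the special base value -1UL ("all harts") follows the SBI hart-mask convention:

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative only: decide whether a hart id is targeted by an SBI
     * hart mask (hmask) relative to a base hart id (hbase).  hbase == -1UL
     * means "all harts". */
    static bool hart_selected(unsigned long hart_id,
                              unsigned long hmask, unsigned long hbase)
    {
            if (hbase == -1UL)
                    return true;
            if (hart_id < hbase || hart_id - hbase >= 8 * sizeof(unsigned long))
                    return false;
            return hmask & (1UL << (hart_id - hbase));
    }

    int main(void)
    {
            /* hmask 0b101 with hbase 4 selects harts 4 and 6. */
            for (unsigned long id = 0; id < 8; id++)
                    printf("hart %lu: %d\n", id, hart_selected(id, 0x5UL, 4UL));
            return 0;
    }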
95 struct kvm_cpu_context *cp = &vcpu->arch.guest_context; in kvm_sbi_ext_rfence_handler() local
96 unsigned long hmask = cp->a0; in kvm_sbi_ext_rfence_handler()
97 unsigned long hbase = cp->a1; in kvm_sbi_ext_rfence_handler()
98 unsigned long funcid = cp->a6; in kvm_sbi_ext_rfence_handler()
108 if ((cp->a2 == 0 && cp->a3 == 0) || cp->a3 == -1UL) in kvm_sbi_ext_rfence_handler()
112 cp->a2, cp->a3, PAGE_SHIFT, vmid); in kvm_sbi_ext_rfence_handler()
117 if ((cp->a2 == 0 && cp->a3 == 0) || cp->a3 == -1UL) in kvm_sbi_ext_rfence_handler()
119 cp->a4, vmid); in kvm_sbi_ext_rfence_handler()
121 kvm_riscv_hfence_vvma_asid_gva(vcpu->kvm, hbase, hmask, cp->a2, in kvm_sbi_ext_rfence_handler()
122 cp->a3, PAGE_SHIFT, cp->a4, vmid); in kvm_sbi_ext_rfence_handler()
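The rfence fragments share one pattern: a6 (funcid) selects the fence type, a0/a1 (hmask/hbase) select the target harts, a2/a3 give the start address and size of the region to flush, and a4 carries the ASID for the *_ASID variants such as kvm_riscv_hfence_vvma_asid_gva(). The test at the lines reading "(cp->a2 == 0 && cp->a3 == 0) || cp->a3 == -1UL" treats an empty or all-ones range as "flush the whole address space" rather than a ranged flush. A small standalone predicate capturing that check; full_range_flush() is an illustrative name:

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative only: mirror the range check the rfence handler applies
     * to the guest's a2 (start) and a3 (size) arguments before deciding
     * between a full-address-space flush and a ranged one. */
    static bool full_range_flush(unsigned long start, unsigned long size)
    {
            /* A zero-length request or a size of -1 means "flush everything". */
            return (start == 0 && size == 0) || size == -1UL;
    }

    int main(void)
    {
            printf("%d\n", full_range_flush(0, 0));           /* 1: whole space */
            printf("%d\n", full_range_flush(0x1000, -1UL));   /* 1: whole space */
            printf("%d\n", full_range_flush(0x1000, 0x2000)); /* 0: ranged flush */
            return 0;
    }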
151 struct kvm_cpu_context *cp = &vcpu->arch.guest_context; in kvm_sbi_ext_srst_handler() local
152 unsigned long funcid = cp->a6; in kvm_sbi_ext_srst_handler()
153 u32 reason = cp->a1; in kvm_sbi_ext_srst_handler()
154 u32 type = cp->a0; in kvm_sbi_ext_srst_handler()
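kvm_sbi_ext_srst_handler() reads the function id from a6 and the reset type and reason from a0/a1; for a reset request it maps the type onto a KVM system event reported to userspace. A hedged, kernel-style sketch of that mapping; the SBI_SRST_RESET_TYPE_*, KVM_SYSTEM_EVENT_*, SBI_ERR_* constants and kvm_riscv_vcpu_sbi_system_reset() are assumed names from the surrounding KVM SBI code, not taken from the lines above:

    /* Sketch only, not the verbatim handler; the symbols below are assumed
     * kernel names used for illustration. */
    switch (type) {
    case SBI_SRST_RESET_TYPE_SHUTDOWN:
            /* A guest shutdown request becomes a shutdown exit to userspace. */
            kvm_riscv_vcpu_sbi_system_reset(vcpu, run, KVM_SYSTEM_EVENT_SHUTDOWN,
                                            reason);
            break;
    case SBI_SRST_RESET_TYPE_COLD_REBOOT:
    case SBI_SRST_RESET_TYPE_WARM_REBOOT:
            /* Both reboot flavours map to a reset event. */
            kvm_riscv_vcpu_sbi_system_reset(vcpu, run, KVM_SYSTEM_EVENT_RESET,
                                            reason);
            break;
    default:
            retdata->err_val = SBI_ERR_NOT_SUPPORTED;
            break;
    }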
193 struct kvm_cpu_context *cp = &vcpu->arch.guest_context; in kvm_sbi_ext_dbcn_handler() local
194 unsigned long funcid = cp->a6; in kvm_sbi_ext_dbcn_handler()
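The last two fragments are the start of kvm_sbi_ext_dbcn_handler(), which again takes the guest context and reads the Debug Console function id from a6. KVM does not emulate the console itself; console calls are normally handed to the VMM. A hedged sketch of that dispatch; the SBI_EXT_DBCN_CONSOLE_* ids, kvm_riscv_vcpu_sbi_forward() and retdata->uexit are assumed names from the surrounding KVM SBI code, not from the lines above:

    /* Sketch only; the symbols below are assumed kernel names used for
     * illustration.  The point is that the handler forwards console calls
     * to userspace rather than emulating them. */
    switch (funcid) {
    case SBI_EXT_DBCN_CONSOLE_WRITE:
    case SBI_EXT_DBCN_CONSOLE_READ:
    case SBI_EXT_DBCN_CONSOLE_WRITE_BYTE:
            /* Let the VMM (e.g. QEMU or kvmtool) service the console call. */
            kvm_riscv_vcpu_sbi_forward(vcpu, run);
            retdata->uexit = true;
            break;
    default:
            retdata->err_val = SBI_ERR_NOT_SUPPORTED;
            break;
    }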