Lines matching refs:vcpu
62 extern int kvmppc_vcpu_run(struct kvm_vcpu *vcpu);
63 extern int __kvmppc_vcpu_run(struct kvm_vcpu *vcpu);
66 extern void kvmppc_dump_vcpu(struct kvm_vcpu *vcpu);
67 extern int kvmppc_handle_load(struct kvm_vcpu *vcpu,
70 extern int kvmppc_handle_loads(struct kvm_vcpu *vcpu,
73 extern int kvmppc_handle_vsx_load(struct kvm_vcpu *vcpu,
76 extern int kvmppc_handle_vmx_load(struct kvm_vcpu *vcpu,
78 extern int kvmppc_handle_vmx_store(struct kvm_vcpu *vcpu,
80 extern int kvmppc_handle_store(struct kvm_vcpu *vcpu,
83 extern int kvmppc_handle_vsx_store(struct kvm_vcpu *vcpu,
87 extern int kvmppc_load_last_inst(struct kvm_vcpu *vcpu,
91 extern int kvmppc_ld(struct kvm_vcpu *vcpu, ulong *eaddr, int size, void *ptr,
93 extern int kvmppc_st(struct kvm_vcpu *vcpu, ulong *eaddr, int size, void *ptr,
95 extern int kvmppc_emulate_instruction(struct kvm_vcpu *vcpu);
96 extern int kvmppc_emulate_loadstore(struct kvm_vcpu *vcpu);
97 extern int kvmppc_emulate_mmio(struct kvm_vcpu *vcpu);
98 extern void kvmppc_emulate_dec(struct kvm_vcpu *vcpu);
99 extern u32 kvmppc_get_dec(struct kvm_vcpu *vcpu, u64 tb);
100 extern void kvmppc_decrementer_func(struct kvm_vcpu *vcpu);
101 extern int kvmppc_sanity_check(struct kvm_vcpu *vcpu);
102 extern int kvmppc_subarch_vcpu_init(struct kvm_vcpu *vcpu);
103 extern void kvmppc_subarch_vcpu_uninit(struct kvm_vcpu *vcpu);
107 extern void kvmppc_mmu_map(struct kvm_vcpu *vcpu, u64 gvaddr, gpa_t gpaddr,
109 extern void kvmppc_mmu_switch_pid(struct kvm_vcpu *vcpu, u32 pid);
110 extern int kvmppc_mmu_dtlb_index(struct kvm_vcpu *vcpu, gva_t eaddr);
111 extern int kvmppc_mmu_itlb_index(struct kvm_vcpu *vcpu, gva_t eaddr);
112 extern gpa_t kvmppc_mmu_xlate(struct kvm_vcpu *vcpu, unsigned int gtlb_index,
114 extern void kvmppc_mmu_dtlb_miss(struct kvm_vcpu *vcpu);
115 extern void kvmppc_mmu_itlb_miss(struct kvm_vcpu *vcpu);
116 extern int kvmppc_xlate(struct kvm_vcpu *vcpu, ulong eaddr,
120 extern int kvmppc_core_vcpu_create(struct kvm_vcpu *vcpu);
121 extern void kvmppc_core_vcpu_free(struct kvm_vcpu *vcpu);
122 extern int kvmppc_core_vcpu_setup(struct kvm_vcpu *vcpu);
123 extern int kvmppc_core_vcpu_translate(struct kvm_vcpu *vcpu,
126 extern void kvmppc_core_vcpu_load(struct kvm_vcpu *vcpu, int cpu);
127 extern void kvmppc_core_vcpu_put(struct kvm_vcpu *vcpu);
129 extern int kvmppc_core_prepare_to_enter(struct kvm_vcpu *vcpu);
130 extern int kvmppc_core_pending_dec(struct kvm_vcpu *vcpu);
132 extern void kvmppc_core_queue_machine_check(struct kvm_vcpu *vcpu,
134 extern void kvmppc_core_queue_syscall(struct kvm_vcpu *vcpu);
135 extern void kvmppc_core_queue_program(struct kvm_vcpu *vcpu,
137 extern void kvmppc_core_queue_fpunavail(struct kvm_vcpu *vcpu,
139 extern void kvmppc_core_queue_vec_unavail(struct kvm_vcpu *vcpu,
141 extern void kvmppc_core_queue_vsx_unavail(struct kvm_vcpu *vcpu,
143 extern void kvmppc_core_queue_dec(struct kvm_vcpu *vcpu);
144 extern void kvmppc_core_dequeue_dec(struct kvm_vcpu *vcpu);
145 extern void kvmppc_core_queue_external(struct kvm_vcpu *vcpu,
147 extern void kvmppc_core_dequeue_external(struct kvm_vcpu *vcpu);
148 extern void kvmppc_core_queue_dtlb_miss(struct kvm_vcpu *vcpu,
151 extern void kvmppc_core_queue_data_storage(struct kvm_vcpu *vcpu,
155 extern void kvmppc_core_queue_itlb_miss(struct kvm_vcpu *vcpu);
156 extern void kvmppc_core_queue_inst_storage(struct kvm_vcpu *vcpu,
159 extern void kvmppc_core_flush_tlb(struct kvm_vcpu *vcpu);
160 extern int kvmppc_core_check_requests(struct kvm_vcpu *vcpu);
165 extern int kvmppc_kvm_pv(struct kvm_vcpu *vcpu);
166 extern void kvmppc_map_magic(struct kvm_vcpu *vcpu);
173 extern void kvmppc_map_vrma(struct kvm_vcpu *vcpu,
175 extern int kvmppc_pseries_do_hcall(struct kvm_vcpu *vcpu);
190 extern long kvmppc_h_put_tce(struct kvm_vcpu *vcpu, unsigned long liobn,
192 extern long kvmppc_h_put_tce_indirect(struct kvm_vcpu *vcpu,
195 extern long kvmppc_h_stuff_tce(struct kvm_vcpu *vcpu,
198 extern long kvmppc_h_get_tce(struct kvm_vcpu *vcpu, unsigned long liobn,
222 extern int kvmppc_prepare_to_enter(struct kvm_vcpu *vcpu);
230 int kvm_vcpu_ioctl_interrupt(struct kvm_vcpu *vcpu, struct kvm_interrupt *irq);
233 extern int kvmppc_rtas_hcall(struct kvm_vcpu *vcpu);
243 void kvmppc_core_dequeue_debug(struct kvm_vcpu *vcpu);
244 void kvmppc_core_queue_debug(struct kvm_vcpu *vcpu);
263 int (*get_sregs)(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs);
264 int (*set_sregs)(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs);
265 int (*get_one_reg)(struct kvm_vcpu *vcpu, u64 id,
267 int (*set_one_reg)(struct kvm_vcpu *vcpu, u64 id,
269 void (*vcpu_load)(struct kvm_vcpu *vcpu, int cpu);
270 void (*vcpu_put)(struct kvm_vcpu *vcpu);
271 void (*inject_interrupt)(struct kvm_vcpu *vcpu, int vec, u64 srr1_flags);
272 void (*set_msr)(struct kvm_vcpu *vcpu, u64 msr);
273 int (*vcpu_run)(struct kvm_vcpu *vcpu);
274 int (*vcpu_create)(struct kvm_vcpu *vcpu);
275 void (*vcpu_free)(struct kvm_vcpu *vcpu);
276 int (*check_requests)(struct kvm_vcpu *vcpu);
294 int (*emulate_op)(struct kvm_vcpu *vcpu,
296 int (*emulate_mtspr)(struct kvm_vcpu *vcpu, int sprn, ulong spr_val);
297 int (*emulate_mfspr)(struct kvm_vcpu *vcpu, int sprn, ulong *spr_val);
298 void (*fast_vcpu_kick)(struct kvm_vcpu *vcpu);
310 void (*giveup_ext)(struct kvm_vcpu *vcpu, ulong msr);
312 int (*load_from_eaddr)(struct kvm_vcpu *vcpu, ulong *eaddr, void *ptr,
314 int (*store_to_eaddr)(struct kvm_vcpu *vcpu, ulong *eaddr, void *ptr,
321 int (*create_vcpu_debugfs)(struct kvm_vcpu *vcpu, struct dentry *debugfs_dentry);
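The function pointers listed above are members of the per-VM ops table that callers reach through the vCPU, as the kvmppc_fast_vcpu_kick() inline further down (lines 564/566) shows. A minimal sketch of that dispatch pattern follows; the helper name is invented for illustration, and only the vcpu->kvm->arch.kvm_ops indirection and the vcpu_put member are taken from the listing.

/*
 * Sketch only: how a per-VM callback is dispatched to the flavour-specific
 * (HV or PR) backend.  The wrapper name is hypothetical; the indirection
 * mirrors the kvmppc_fast_vcpu_kick() inline at line 566.
 */
static inline void example_vcpu_put(struct kvm_vcpu *vcpu)
{
	/* Hand the vCPU back to whichever backend owns this VM. */
	vcpu->kvm->arch.kvm_ops->vcpu_put(vcpu);
}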
327 static inline int kvmppc_get_last_inst(struct kvm_vcpu *vcpu, in kvmppc_get_last_inst() argument
335 if (vcpu->arch.last_inst == KVM_INST_FETCH_FAILED) in kvmppc_get_last_inst()
336 ret = kvmppc_load_last_inst(vcpu, type, &vcpu->arch.last_inst); in kvmppc_get_last_inst()
346 if ((vcpu->arch.last_inst >> 32) != 0) { in kvmppc_get_last_inst()
347 u32 prefix = vcpu->arch.last_inst >> 32; in kvmppc_get_last_inst()
348 u32 suffix = vcpu->arch.last_inst; in kvmppc_get_last_inst()
349 if (kvmppc_need_byteswap(vcpu)) { in kvmppc_get_last_inst()
358 fetched_inst = kvmppc_need_byteswap(vcpu) ? in kvmppc_get_last_inst()
359 swab32(vcpu->arch.last_inst) : in kvmppc_get_last_inst()
360 vcpu->arch.last_inst; in kvmppc_get_last_inst()
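The kvmppc_get_last_inst() fragments above only show the lines that mention vcpu. A hedged reconstruction of how they might fit together is sketched below; the *inst out-parameter, the enum instruction_fetch_type argument, the ppc_inst()/ppc_inst_prefix() constructors and the EMULATE_DONE return value are assumptions not confirmed by the listing.

/*
 * Sketch, not the header's exact body: refetch the guest instruction if the
 * exit path could not, then unswap it and package prefixed (8-byte)
 * instructions as a prefix/suffix pair.
 */
static inline int get_last_inst_sketch(struct kvm_vcpu *vcpu,
				       enum instruction_fetch_type type, /* assumed */
				       struct ppc_inst *inst)		  /* assumed out-param */
{
	int ret = EMULATE_DONE;

	/* Load the instruction manually if the exit path failed to do so. */
	if (vcpu->arch.last_inst == KVM_INST_FETCH_FAILED)
		ret = kvmppc_load_last_inst(vcpu, type, &vcpu->arch.last_inst);
	if (ret != EMULATE_DONE)
		return ret;

	if ((vcpu->arch.last_inst >> 32) != 0) {
		/* Prefixed instruction: the upper word holds the prefix. */
		u32 prefix = vcpu->arch.last_inst >> 32;
		u32 suffix = vcpu->arch.last_inst;

		if (kvmppc_need_byteswap(vcpu)) {
			prefix = swab32(prefix);
			suffix = swab32(suffix);
		}
		*inst = ppc_inst_prefix(prefix, suffix);
	} else {
		u32 fetched_inst = kvmppc_need_byteswap(vcpu) ?
			swab32(vcpu->arch.last_inst) :
			vcpu->arch.last_inst;

		*inst = ppc_inst(fetched_inst);
	}
	return EMULATE_DONE;
}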
429 int kvmppc_core_get_sregs(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs);
430 int kvmppc_core_set_sregs(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs);
432 int kvmppc_get_sregs_ivor(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs);
433 int kvmppc_set_sregs_ivor(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs);
435 int kvm_vcpu_ioctl_get_one_reg(struct kvm_vcpu *vcpu, struct kvm_one_reg *reg);
436 int kvm_vcpu_ioctl_set_one_reg(struct kvm_vcpu *vcpu, struct kvm_one_reg *reg);
437 int kvmppc_get_one_reg(struct kvm_vcpu *vcpu, u64 id, union kvmppc_one_reg *);
438 int kvmppc_set_one_reg(struct kvm_vcpu *vcpu, u64 id, union kvmppc_one_reg *);
440 void kvmppc_set_pid(struct kvm_vcpu *vcpu, u32 pid);
564 static inline void kvmppc_fast_vcpu_kick(struct kvm_vcpu *vcpu) in kvmppc_fast_vcpu_kick() argument
566 vcpu->kvm->arch.kvm_ops->fast_vcpu_kick(vcpu); in kvmppc_fast_vcpu_kick()
598 static inline void kvmppc_fast_vcpu_kick(struct kvm_vcpu *vcpu) in kvmppc_fast_vcpu_kick() argument
600 kvm_vcpu_kick(vcpu); in kvmppc_fast_vcpu_kick()
632 static inline int kvmhv_nestedv2_reload_ptregs(struct kvm_vcpu *vcpu, in kvmhv_nestedv2_reload_ptregs() argument
637 static inline int kvmhv_nestedv2_mark_dirty_ptregs(struct kvm_vcpu *vcpu, in kvmhv_nestedv2_mark_dirty_ptregs() argument
643 static inline int kvmhv_nestedv2_mark_dirty(struct kvm_vcpu *vcpu, u16 iden) in kvmhv_nestedv2_mark_dirty() argument
648 static inline int kvmhv_nestedv2_cached_reload(struct kvm_vcpu *vcpu, u16 iden) in kvmhv_nestedv2_cached_reload() argument
656 static inline int kvmppc_xics_enabled(struct kvm_vcpu *vcpu) in kvmppc_xics_enabled() argument
658 return vcpu->arch.irq_type == KVMPPC_IRQ_XICS; in kvmppc_xics_enabled()
672 extern int kvmppc_xics_rm_complete(struct kvm_vcpu *vcpu, u32 hcall);
673 extern void kvmppc_xics_free_icp(struct kvm_vcpu *vcpu);
674 extern int kvmppc_xics_hcall(struct kvm_vcpu *vcpu, u32 cmd);
675 extern int kvmppc_xive_xics_hcall(struct kvm_vcpu *vcpu, u32 req);
676 extern u64 kvmppc_xics_get_icp(struct kvm_vcpu *vcpu);
677 extern int kvmppc_xics_set_icp(struct kvm_vcpu *vcpu, u64 icpval);
679 struct kvm_vcpu *vcpu, u32 cpu);
685 extern long kvmppc_deliver_irq_passthru(struct kvm_vcpu *vcpu, __be32 xirr,
701 static inline int kvmppc_xics_rm_complete(struct kvm_vcpu *vcpu, u32 hcall) in kvmppc_xics_rm_complete() argument
703 static inline int kvmppc_xics_enabled(struct kvm_vcpu *vcpu) in kvmppc_xics_enabled() argument
705 static inline void kvmppc_xics_free_icp(struct kvm_vcpu *vcpu) { } in kvmppc_xics_free_icp() argument
706 static inline int kvmppc_xics_hcall(struct kvm_vcpu *vcpu, u32 cmd) in kvmppc_xics_hcall() argument
708 static inline int kvmppc_xive_xics_hcall(struct kvm_vcpu *vcpu, u32 req) in kvmppc_xive_xics_hcall() argument
729 struct kvm_vcpu *vcpu, u32 cpu);
730 extern void kvmppc_xive_cleanup_vcpu(struct kvm_vcpu *vcpu);
735 extern u64 kvmppc_xive_get_icp(struct kvm_vcpu *vcpu);
736 extern int kvmppc_xive_set_icp(struct kvm_vcpu *vcpu, u64 icpval);
740 extern void kvmppc_xive_push_vcpu(struct kvm_vcpu *vcpu);
741 extern void kvmppc_xive_pull_vcpu(struct kvm_vcpu *vcpu);
742 extern bool kvmppc_xive_rearm_escalation(struct kvm_vcpu *vcpu);
744 static inline int kvmppc_xive_enabled(struct kvm_vcpu *vcpu) in kvmppc_xive_enabled() argument
746 return vcpu->arch.irq_type == KVMPPC_IRQ_XIVE; in kvmppc_xive_enabled()
750 struct kvm_vcpu *vcpu, u32 cpu);
751 extern void kvmppc_xive_native_cleanup_vcpu(struct kvm_vcpu *vcpu);
752 extern int kvmppc_xive_native_get_vp(struct kvm_vcpu *vcpu,
754 extern int kvmppc_xive_native_set_vp(struct kvm_vcpu *vcpu,
767 struct kvm_vcpu *vcpu, u32 cpu) { return -EBUSY; } in kvmppc_xive_connect_vcpu() argument
768 static inline void kvmppc_xive_cleanup_vcpu(struct kvm_vcpu *vcpu) { } in kvmppc_xive_cleanup_vcpu() argument
773 static inline u64 kvmppc_xive_get_icp(struct kvm_vcpu *vcpu) { return 0; } in kvmppc_xive_get_icp() argument
774 static inline int kvmppc_xive_set_icp(struct kvm_vcpu *vcpu, u64 icpval) { return -ENOENT; } in kvmppc_xive_set_icp() argument
778 static inline void kvmppc_xive_push_vcpu(struct kvm_vcpu *vcpu) { } in kvmppc_xive_push_vcpu() argument
779 static inline void kvmppc_xive_pull_vcpu(struct kvm_vcpu *vcpu) { } in kvmppc_xive_pull_vcpu() argument
780 static inline bool kvmppc_xive_rearm_escalation(struct kvm_vcpu *vcpu) { return true; } in kvmppc_xive_rearm_escalation() argument
782 static inline int kvmppc_xive_enabled(struct kvm_vcpu *vcpu) in kvmppc_xive_enabled() argument
785 struct kvm_vcpu *vcpu, u32 cpu) { return -EBUSY; } in kvmppc_xive_native_connect_vcpu() argument
786 static inline void kvmppc_xive_native_cleanup_vcpu(struct kvm_vcpu *vcpu) { } in kvmppc_xive_native_cleanup_vcpu() argument
787 static inline int kvmppc_xive_native_get_vp(struct kvm_vcpu *vcpu, in kvmppc_xive_native_get_vp() argument
790 static inline int kvmppc_xive_native_set_vp(struct kvm_vcpu *vcpu, in kvmppc_xive_native_set_vp() argument
812 long kvmppc_rm_h_put_tce(struct kvm_vcpu *vcpu, unsigned long liobn,
814 long kvmppc_rm_h_put_tce_indirect(struct kvm_vcpu *vcpu,
817 long kvmppc_rm_h_stuff_tce(struct kvm_vcpu *vcpu,
820 long int kvmppc_rm_h_confer(struct kvm_vcpu *vcpu, int target,
822 long kvmppc_rm_h_random(struct kvm_vcpu *vcpu);
824 void kvmppc_realmode_machine_check(struct kvm_vcpu *vcpu);
828 long kvmppc_p9_realmode_hmi_handler(struct kvm_vcpu *vcpu);
829 long kvmppc_h_enter(struct kvm_vcpu *vcpu, unsigned long flags,
831 long kvmppc_h_remove(struct kvm_vcpu *vcpu, unsigned long flags,
833 long kvmppc_h_bulk_remove(struct kvm_vcpu *vcpu);
834 long kvmppc_h_protect(struct kvm_vcpu *vcpu, unsigned long flags,
836 long kvmppc_h_read(struct kvm_vcpu *vcpu, unsigned long flags,
838 long kvmppc_h_clear_ref(struct kvm_vcpu *vcpu, unsigned long flags,
840 long kvmppc_h_clear_mod(struct kvm_vcpu *vcpu, unsigned long flags,
842 long kvmppc_rm_h_page_init(struct kvm_vcpu *vcpu, unsigned long flags,
844 long kvmppc_hpte_hv_fault(struct kvm_vcpu *vcpu, unsigned long addr,
846 void kvmppc_guest_entry_inject_int(struct kvm_vcpu *vcpu);
870 void (*vcpu_kick)(struct kvm_vcpu *vcpu);
875 static inline unsigned long kvmppc_get_epr(struct kvm_vcpu *vcpu) in kvmppc_get_epr() argument
880 return vcpu->arch.epr; in kvmppc_get_epr()
886 static inline void kvmppc_set_epr(struct kvm_vcpu *vcpu, u32 epr) in kvmppc_set_epr() argument
891 vcpu->arch.epr = epr; in kvmppc_set_epr()
897 void kvmppc_mpic_set_epr(struct kvm_vcpu *vcpu);
898 int kvmppc_mpic_connect_vcpu(struct kvm_device *dev, struct kvm_vcpu *vcpu,
900 void kvmppc_mpic_disconnect_vcpu(struct openpic *opp, struct kvm_vcpu *vcpu);
904 static inline void kvmppc_mpic_set_epr(struct kvm_vcpu *vcpu) in kvmppc_mpic_set_epr() argument
909 struct kvm_vcpu *vcpu, u32 cpu) in kvmppc_mpic_connect_vcpu() argument
915 struct kvm_vcpu *vcpu) in kvmppc_mpic_disconnect_vcpu() argument
921 int kvm_vcpu_ioctl_config_tlb(struct kvm_vcpu *vcpu,
923 int kvm_vcpu_ioctl_dirty_tlb(struct kvm_vcpu *vcpu,
952 static inline bool kvmppc_shared_big_endian(struct kvm_vcpu *vcpu) in kvmppc_shared_big_endian() argument
956 return vcpu->arch.shared_big_endian; in kvmppc_shared_big_endian()
966 static inline ulong kvmppc_get_##reg(struct kvm_vcpu *vcpu) \
972 static inline void kvmppc_set_##reg(struct kvm_vcpu *vcpu, ulong val) \
978 static inline u##size kvmppc_get_##reg(struct kvm_vcpu *vcpu) \
981 WARN_ON(kvmhv_nestedv2_cached_reload(vcpu, iden) < 0); \
982 if (kvmppc_shared_big_endian(vcpu)) \
983 return be##size##_to_cpu((__be##size __force)vcpu->arch.shared->reg); \
985 return le##size##_to_cpu((__le##size __force)vcpu->arch.shared->reg); \
989 static inline void kvmppc_set_##reg(struct kvm_vcpu *vcpu, u##size val) \
991 if (kvmppc_shared_big_endian(vcpu)) \
992 vcpu->arch.shared->reg = (u##size __force)cpu_to_be##size(val); \
994 vcpu->arch.shared->reg = (u##size __force)cpu_to_le##size(val); \
997 kvmhv_nestedv2_mark_dirty(vcpu, iden); \
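The two macro bodies above generate endian-aware accessors for fields of the vcpu->arch.shared page and mark the corresponding register dirty for the nested-v2 host. A sketch of what one generated pair might expand to, with size = 64; the srr0 field and the KVMPPC_GSID_SRR0 identifier are illustrative stand-ins, not taken from the listing.

/* Illustrative expansion only; the field name and iden are placeholders. */
static inline u64 example_get_srr0(struct kvm_vcpu *vcpu)
{
	/* Pull the cached value from the L0 host before reading it. */
	WARN_ON(kvmhv_nestedv2_cached_reload(vcpu, KVMPPC_GSID_SRR0) < 0);
	if (kvmppc_shared_big_endian(vcpu))
		return be64_to_cpu((__be64 __force)vcpu->arch.shared->srr0);
	else
		return le64_to_cpu((__le64 __force)vcpu->arch.shared->srr0);
}

static inline void example_set_srr0(struct kvm_vcpu *vcpu, u64 val)
{
	if (kvmppc_shared_big_endian(vcpu))
		vcpu->arch.shared->srr0 = (u64 __force)cpu_to_be64(val);
	else
		vcpu->arch.shared->srr0 = (u64 __force)cpu_to_le64(val);

	/* Tell the nested-v2 host this register must be pushed back. */
	kvmhv_nestedv2_mark_dirty(vcpu, KVMPPC_GSID_SRR0);
}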
1030 static inline void kvmppc_set_msr_fast(struct kvm_vcpu *vcpu, u64 val) in kvmppc_set_msr_fast() argument
1032 if (kvmppc_shared_big_endian(vcpu)) in kvmppc_set_msr_fast()
1033 vcpu->arch.shared->msr = cpu_to_be64(val); in kvmppc_set_msr_fast()
1035 vcpu->arch.shared->msr = cpu_to_le64(val); in kvmppc_set_msr_fast()
1036 kvmhv_nestedv2_mark_dirty(vcpu, KVMPPC_GSID_MSR); in kvmppc_set_msr_fast()
1045 static inline u32 kvmppc_get_sr(struct kvm_vcpu *vcpu, int nr) in kvmppc_get_sr() argument
1047 if (kvmppc_shared_big_endian(vcpu)) in kvmppc_get_sr()
1048 return be32_to_cpu(vcpu->arch.shared->sr[nr]); in kvmppc_get_sr()
1050 return le32_to_cpu(vcpu->arch.shared->sr[nr]); in kvmppc_get_sr()
1053 static inline void kvmppc_set_sr(struct kvm_vcpu *vcpu, int nr, u32 val) in kvmppc_set_sr() argument
1055 if (kvmppc_shared_big_endian(vcpu)) in kvmppc_set_sr()
1056 vcpu->arch.shared->sr[nr] = cpu_to_be32(val); in kvmppc_set_sr()
1058 vcpu->arch.shared->sr[nr] = cpu_to_le32(val); in kvmppc_set_sr()
1095 static inline ulong kvmppc_get_ea_indexed(struct kvm_vcpu *vcpu, int ra, int rb) in kvmppc_get_ea_indexed() argument
1100 ea = kvmppc_get_gpr(vcpu, rb); in kvmppc_get_ea_indexed()
1102 ea += kvmppc_get_gpr(vcpu, ra); in kvmppc_get_ea_indexed()
1110 if (!(kvmppc_get_msr(vcpu) & msr_64bit)) in kvmppc_get_ea_indexed()
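The final fragment computes an indexed effective address from GPRs ra and rb; the lines that choose msr_64bit and return the result do not match vcpu and so are missing here. A hedged completion follows, assuming MSR_SF as the 64-bit mode bit (Book3E would use MSR_CM instead).

/*
 * Sketch of the full helper: EA = (ra ? GPR[ra] : 0) + GPR[rb], truncated
 * to 32 bits when the guest is not in 64-bit mode.  The msr_64bit choice
 * is an assumption based on the visible MSR check.
 */
static inline ulong get_ea_indexed_sketch(struct kvm_vcpu *vcpu, int ra, int rb)
{
	ulong ea;
	ulong msr_64bit = MSR_SF;	/* assumed; MSR_CM on Book3E */

	ea = kvmppc_get_gpr(vcpu, rb);
	if (ra)
		ea += kvmppc_get_gpr(vcpu, ra);

	if (!(kvmppc_get_msr(vcpu) & msr_64bit))
		ea = (u32)ea;

	return ea;
}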