/linux/virt/kvm/
  pfncache.c
      75  struct kvm_memslots *slots = kvm_memslots(gpc->kvm);   in kvm_gpc_check()
     293  struct kvm_memslots *slots = kvm_memslots(gpc->kvm);   in __kvm_gpc_refresh()
|
  kvm_main.c
     571  struct kvm_memslots *slots;   in kvm_handle_hva_range()
     967  static void kvm_free_memslots(struct kvm *kvm, struct kvm_memslots *slots)   in kvm_free_memslots()
    1118  struct kvm_memslots *slots;   in kvm_create_vm()
    1455  static struct kvm_memslots *kvm_get_inactive_memslots(struct kvm *kvm, int as_id)   in kvm_get_inactive_memslots()
    1457  struct kvm_memslots *active = __kvm_memslots(kvm, as_id);   in kvm_get_inactive_memslots()
    1483  static void kvm_insert_gfn_node(struct kvm_memslots *slots,   in kvm_insert_gfn_node()
    1508  static void kvm_erase_gfn_node(struct kvm_memslots *slots,   in kvm_erase_gfn_node()
    1514  static void kvm_replace_gfn_node(struct kvm_memslots *slots,   in kvm_replace_gfn_node()
    1540  struct kvm_memslots *slots = kvm_get_inactive_memslots(kvm, as_id);   in kvm_replace_memslot()
    1625  struct kvm_memslots *slots = kvm_get_inactive_memslots(kvm, as_id);   in kvm_swap_active_memslots()
    [all …]
|
/linux/include/linux/
  kvm_host.h
     752  struct kvm_memslots {
     789  struct kvm_memslots __memslots[KVM_MAX_NR_ADDRESS_SPACES][2];
     791  struct kvm_memslots __rcu *memslots[KVM_MAX_NR_ADDRESS_SPACES];
    1072  static inline struct kvm_memslots *__kvm_memslots(struct kvm *kvm, int as_id)   in __kvm_memslots()
    1080  static inline struct kvm_memslots *kvm_memslots(struct kvm *kvm)   in kvm_memslots()
    1085  static inline struct kvm_memslots *kvm_vcpu_memslots(struct kvm_vcpu *vcpu)   in kvm_vcpu_memslots()
    1092  static inline bool kvm_memslots_empty(struct kvm_memslots *slots)   in kvm_memslots_empty()
    1105  struct kvm_memory_slot *id_to_memslot(struct kvm_memslots *slots, int id)   in id_to_memslot()
    1120  struct kvm_memslots *slots;
    1135  struct kvm_memslots *slots,   in kvm_memslot_iter_start()
    [all …]
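The kvm_host.h hits above are the definition of struct kvm_memslots and its accessors. As a minimal usage sketch (not code from any file indexed here), this is how kvm_memslots() and kvm_for_each_memslot() are commonly combined; it assumes the caller already holds kvm->srcu for read, which is what protects the RCU-published kvm->memslots[] pointer behind kvm_memslots():

  #include <linux/kvm_host.h>

  /*
   * Illustrative sketch only. Walks the active memslots of the default
   * address space; the srcu read-side lock is assumed to be held by the
   * caller, as in the arch code listed below.
   */
  static void walk_default_memslots(struct kvm *kvm)
  {
  	struct kvm_memslots *slots = kvm_memslots(kvm);
  	struct kvm_memory_slot *memslot;
  	int bkt;

  	kvm_for_each_memslot(memslot, bkt, slots)
  		pr_info("slot %d: base_gfn 0x%llx, %lu pages\n",
  			memslot->id, memslot->base_gfn, memslot->npages);
  }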
|
/linux/arch/powerpc/kvm/
  book3s_hv_uvmem.c
     467  struct kvm_memslots *slots;   in kvmppc_h_svm_init_start()
     488  slots = kvm_memslots(kvm);   in kvmppc_h_svm_init_start()
     496  slots = kvm_memslots(kvm);   in kvmppc_h_svm_init_start()
     676  kvm_for_each_memslot(memslot, bkt, kvm_memslots(kvm))   in kvmppc_h_svm_init_abort()
     832  struct kvm_memslots *slots;   in kvmppc_h_svm_init_done()
     842  slots = kvm_memslots(kvm);   in kvmppc_h_svm_init_done()
|
  book3s_64_vio.c
     358  memslot = __gfn_to_memslot(kvm_memslots(kvm), gfn);   in kvmppc_tce_to_ua()
|
/linux/arch/x86/kvm/
  x86.h
      60  (sizeof(((struct kvm_memslots *)0)->id_hash) * 2 * KVM_MAX_NR_ADDRESS_SPACES)
     351  u64 gen = kvm_memslots(vcpu->kvm)->generation;   in vcpu_cache_mmio_info()
     368  return vcpu->arch.mmio_gen == kvm_memslots(vcpu->kvm)->generation;   in vcpu_match_mmio_gen()
|
  debugfs.c
      94  struct kvm_memslots *slots;   in kvm_mmu_rmaps_stat_show()
|
  x86.c
    3737  struct kvm_memslots *slots;   in record_steal_time()
    3753  slots = kvm_memslots(vcpu->kvm);   in record_steal_time()
    5227  struct kvm_memslots *slots;   in kvm_steal_time_set_preempted()
    5254  slots = kvm_memslots(vcpu->kvm);   in kvm_steal_time_set_preempted()
   13261  struct kvm_memslots *slots = kvm_memslots(kvm);   in __x86_set_memory_region()
|
/linux/arch/arm64/kvm/
  mmu.c
     369  struct kvm_memslots *slots;   in stage2_flush_vm()
     376  slots = kvm_memslots(kvm);   in stage2_flush_vm()
    1078  struct kvm_memslots *slots;   in stage2_unmap_vm()
    1086  slots = kvm_memslots(kvm);   in stage2_unmap_vm()
    1240  struct kvm_memslots *slots = kvm_memslots(kvm);   in kvm_mmu_wp_memory_region()
    1268  struct kvm_memslots *slots;   in kvm_mmu_split_memory_region()
    1274  slots = kvm_memslots(kvm);   in kvm_mmu_split_memory_region()
|
/linux/arch/riscv/kvm/
  mmu.c
      21  struct kvm_memslots *slots = kvm_memslots(kvm);   in mmu_wp_memory_region()
|
/linux/arch/x86/kvm/mmu/
  page_track.c
     171  struct kvm_memslots *slots;   in kvm_enable_external_write_tracking()
|
  mmu.c
     751  struct kvm_memslots *slots;   in account_shadowed()
     810  struct kvm_memslots *slots;   in unaccount_shadowed()
    1186  struct kvm_memslots *slots;   in rmap_remove()
    3991  struct kvm_memslots *slots;   in mmu_first_shadow_root_alloc()
    6889  struct kvm_memslots *slots;   in kvm_rmap_zap_gfn_range()
|
/linux/Documentation/virt/kvm/x86/
  mmu.rst
     197  determines which of the kvm_memslots array was used to build this
     476  kvm_memslots(kvm)->generation, and increased whenever guest memory info
     487  Unfortunately, a single memory access might access kvm_memslots(kvm) multiple
     493  returns; thus, bit 63 of kvm_memslots(kvm)->generation set to 1 only during a
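The generation scheme mmu.rst describes is what the x86.h hits above rely on: a value cached together with kvm_memslots(kvm)->generation is only trusted while the generation has not changed. A one-function sketch of that check, modelled on vcpu_match_mmio_gen() from arch/x86/kvm/x86.h (the helper name here is illustrative, not from the sources):

  #include <linux/kvm_host.h>

  /*
   * Sketch of the generation check described in mmu.rst: cached
   * translation info is revalidated by comparing the generation it was
   * tagged with against the current memslots generation.
   */
  static bool cached_info_still_valid(struct kvm_vcpu *vcpu, u64 cached_gen)
  {
  	return cached_gen == kvm_memslots(vcpu->kvm)->generation;
  }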
|
/linux/arch/s390/kvm/
  kvm-s390.h
     257  static inline unsigned long kvm_s390_get_gfn_end(struct kvm_memslots *slots)   in kvm_s390_get_gfn_end()
|
  pv.c
     243  npages = kvm_s390_get_gfn_end(kvm_memslots(kvm));   in kvm_s390_pv_alloc_vm()
|
  kvm-s390.c
    1186  struct kvm_memslots *slots;   in kvm_s390_vm_start_migration()
    1193  slots = kvm_memslots(kvm);   in kvm_s390_vm_start_migration()
    2261  static struct kvm_memory_slot *gfn_to_memslot_approx(struct kvm_memslots *slots,   in gfn_to_memslot_approx()
    2267  static unsigned long kvm_s390_next_dirty_cmma(struct kvm_memslots *slots,   in kvm_s390_next_dirty_cmma()
    2299  struct kvm_memslots *slots = kvm_memslots(kvm);   in kvm_s390_get_cmma()
|
/linux/arch/loongarch/kvm/
  vcpu.c
     165  struct kvm_memslots *slots;   in kvm_update_stolen_time()
     175  slots = kvm_memslots(vcpu->kvm);   in kvm_update_stolen_time()
|
/linux/arch/x86/kvm/vmx/
  vmx.c
    6860  struct kvm_memslots *slots = kvm_memslots(kvm);   in vmx_set_apic_access_page_addr()
|