Searched refs:mmu_lock (Results 1 – 25 of 28) sorted by relevance

/linux/virt/kvm/
kvm_mm.h
14 #define KVM_MMU_LOCK_INIT(kvm) rwlock_init(&(kvm)->mmu_lock)
15 #define KVM_MMU_LOCK(kvm) write_lock(&(kvm)->mmu_lock)
16 #define KVM_MMU_UNLOCK(kvm) write_unlock(&(kvm)->mmu_lock)
18 #define KVM_MMU_LOCK_INIT(kvm) spin_lock_init(&(kvm)->mmu_lock)
19 #define KVM_MMU_LOCK(kvm) spin_lock(&(kvm)->mmu_lock)
20 #define KVM_MMU_UNLOCK(kvm) spin_unlock(&(kvm)->mmu_lock)
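
The adjacent source line numbers (14-16 vs. 18-20) indicate that these macros are the two arms of a single build-time conditional; a sketch of the presumed surrounding structure, keyed off KVM_HAVE_MMU_RWLOCK (the symbol an architecture defines to get an rwlock-backed mmu_lock, with a spinlock as the fallback):

    #ifdef KVM_HAVE_MMU_RWLOCK
    #define KVM_MMU_LOCK_INIT(kvm)  rwlock_init(&(kvm)->mmu_lock)
    #define KVM_MMU_LOCK(kvm)       write_lock(&(kvm)->mmu_lock)
    #define KVM_MMU_UNLOCK(kvm)     write_unlock(&(kvm)->mmu_lock)
    #else
    #define KVM_MMU_LOCK_INIT(kvm)  spin_lock_init(&(kvm)->mmu_lock)
    #define KVM_MMU_LOCK(kvm)       spin_lock(&(kvm)->mmu_lock)
    #define KVM_MMU_UNLOCK(kvm)     spin_unlock(&(kvm)->mmu_lock)
    #endif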
/linux/arch/x86/kvm/mmu/
page_track.c
93 lockdep_assert_held_write(&kvm->mmu_lock); in __kvm_write_track_add_gfn()
116 lockdep_assert_held_write(&kvm->mmu_lock); in __kvm_write_track_remove_gfn()
239 write_lock(&kvm->mmu_lock); in kvm_page_track_register_notifier()
241 write_unlock(&kvm->mmu_lock); in kvm_page_track_register_notifier()
257 write_lock(&kvm->mmu_lock); in kvm_page_track_unregister_notifier()
259 write_unlock(&kvm->mmu_lock); in kvm_page_track_unregister_notifier()
335 write_lock(&kvm->mmu_lock); in kvm_write_track_add_gfn()
337 write_unlock(&kvm->mmu_lock); in kvm_write_track_add_gfn()
365 write_lock(&kvm->mmu_lock); in kvm_write_track_remove_gfn()
367 write_unlock(&kvm->mmu_lock); in kvm_write_track_remove_gfn()
paging_tmpl.h
831 write_lock(&vcpu->kvm->mmu_lock); in FNAME()
843 write_unlock(&vcpu->kvm->mmu_lock); in FNAME()
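
The page_track.c hits show a recurring KVM split: double-underscore helpers only assert that mmu_lock is already held for write, while the exported entry points take and drop the lock around them. A minimal sketch of the pattern with invented names (track_one_gfn is not a kernel function):

    /* Internal helper: caller must hold kvm->mmu_lock for write. */
    static void __track_one_gfn(struct kvm *kvm, gfn_t gfn)
    {
            lockdep_assert_held_write(&kvm->mmu_lock);
            /* ... update write-tracking state for gfn ... */
    }

    /* Exported wrapper: takes the lock itself. */
    void track_one_gfn(struct kvm *kvm, gfn_t gfn)
    {
            write_lock(&kvm->mmu_lock);
            __track_one_gfn(kvm, gfn);
            write_unlock(&kvm->mmu_lock);
    }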
/linux/arch/riscv/kvm/
mmu.c
29 spin_lock(&kvm->mmu_lock); in mmu_wp_memory_region()
31 spin_unlock(&kvm->mmu_lock); in mmu_wp_memory_region()
68 spin_lock(&kvm->mmu_lock); in kvm_riscv_mmu_ioremap()
70 spin_unlock(&kvm->mmu_lock); in kvm_riscv_mmu_ioremap()
88 spin_lock(&kvm->mmu_lock); in kvm_riscv_mmu_iounmap()
90 spin_unlock(&kvm->mmu_lock); in kvm_riscv_mmu_iounmap()
134 spin_lock(&kvm->mmu_lock); in kvm_arch_flush_shadow_memslot()
136 spin_unlock(&kvm->mmu_lock); in kvm_arch_flush_shadow_memslot()
239 mmu_locked = spin_trylock(&kvm->mmu_lock); in kvm_unmap_gfn_range()
244 spin_unlock(&kvm->mmu_lock); in kvm_unmap_gfn_range()
[all …]
/linux/arch/powerpc/kvm/
book3s_hv_nested.c
783 spin_lock(&kvm->mmu_lock); in kvmhv_remove_nested()
789 spin_unlock(&kvm->mmu_lock); in kvmhv_remove_nested()
808 spin_lock(&kvm->mmu_lock); in kvmhv_release_all_nested()
818 spin_unlock(&kvm->mmu_lock); in kvmhv_release_all_nested()
835 spin_lock(&kvm->mmu_lock); in kvmhv_flush_nested()
837 spin_unlock(&kvm->mmu_lock); in kvmhv_flush_nested()
852 spin_lock(&kvm->mmu_lock); in kvmhv_get_nested()
856 spin_unlock(&kvm->mmu_lock); in kvmhv_get_nested()
870 spin_lock(&kvm->mmu_lock); in kvmhv_get_nested()
879 spin_unlock(&kvm->mmu_lock); in kvmhv_get_nested()
[all …]
book3s_64_mmu_radix.c
647 spin_lock(&kvm->mmu_lock); in kvmppc_create_pte()
783 spin_unlock(&kvm->mmu_lock); in kvmppc_create_pte()
852 spin_lock(&kvm->mmu_lock); in kvmppc_book3s_instantiate_page()
857 spin_unlock(&kvm->mmu_lock); in kvmppc_book3s_instantiate_page()
990 spin_lock(&kvm->mmu_lock); in kvmppc_book3s_radix_page_fault()
994 spin_unlock(&kvm->mmu_lock); in kvmppc_book3s_radix_page_fault()
1100 spin_lock(&kvm->mmu_lock); in kvm_radix_test_clear_dirty()
1112 spin_unlock(&kvm->mmu_lock); in kvm_radix_test_clear_dirty()
1127 spin_unlock(&kvm->mmu_lock); in kvm_radix_test_clear_dirty()
1172 spin_lock(&kvm->mmu_lock); in kvmppc_radix_flush_memslot()
[all …]
book3s_hv_rm_mmu.c
248 arch_spin_lock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_enter()
263 arch_spin_unlock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_enter()
277 arch_spin_unlock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_enter()
938 arch_spin_lock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_page_init_zero()
950 arch_spin_unlock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_page_init_zero()
966 arch_spin_lock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_page_init_copy()
981 arch_spin_unlock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_page_init_copy()
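
book3s_hv_rm_mmu.c runs in real mode, where the instrumented spin_lock() wrappers (lockdep, tracing) are not safe, which is presumably why it reaches through the spinlock_t layering and takes the architecture lock directly. A sketch of just that unwrapping (helper names invented; on powerpc mmu_lock is the spinlock variant):

    /* spinlock_t embeds a raw_spinlock (.rlock), which embeds the
     * arch-level lock (.raw_lock); real-mode code operates on the
     * innermost layer to bypass lockdep/tracing hooks. */
    static void realmode_mmu_lock(struct kvm *kvm)
    {
            arch_spin_lock(&kvm->mmu_lock.rlock.raw_lock);
    }

    static void realmode_mmu_unlock(struct kvm *kvm)
    {
            arch_spin_unlock(&kvm->mmu_lock.rlock.raw_lock);
    }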
book3s_64_mmu_hv.c
615 spin_lock(&kvm->mmu_lock); in kvmppc_book3s_hv_page_fault()
620 spin_unlock(&kvm->mmu_lock); in kvmppc_book3s_hv_page_fault()
749 spin_lock(&kvm->mmu_lock); in kvmppc_rmap_reset()
756 spin_unlock(&kvm->mmu_lock); in kvmppc_rmap_reset()
1377 spin_lock(&kvm->mmu_lock); in resize_hpt_pivot()
1384 spin_unlock(&kvm->mmu_lock); in resize_hpt_pivot()
e500_mmu_host.c
363 spin_lock(&kvm->mmu_lock); in kvmppc_e500_shadow_map()
465 spin_unlock(&kvm->mmu_lock); in kvmppc_e500_shadow_map()
/linux/arch/loongarch/kvm/
mmu.c
304 spin_lock(&kvm->mmu_lock); in kvm_flush_range()
307 spin_unlock(&kvm->mmu_lock); in kvm_flush_range()
477 spin_lock(&kvm->mmu_lock); in kvm_arch_commit_memory_region()
481 spin_unlock(&kvm->mmu_lock); in kvm_arch_commit_memory_region()
560 spin_lock(&kvm->mmu_lock); in kvm_map_page_fast()
597 spin_unlock(&kvm->mmu_lock); in kvm_map_page_fast()
604 spin_unlock(&kvm->mmu_lock); in kvm_map_page_fast()
830 spin_lock(&kvm->mmu_lock); in kvm_map_page()
837 spin_unlock(&kvm->mmu_lock); in kvm_map_page()
905 spin_unlock(&kvm->mmu_lock); in kvm_map_page()
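
The loongarch fault path is split into kvm_map_page_fast(), which tries to resolve the fault entirely under mmu_lock (e.g. an existing mapping that only needs a permission or dirty-bit update), and kvm_map_page(), which does the heavy lifting with the lock dropped and only retakes it to install the result. A schematic sketch under that assumption (helpers and bodies invented):

    static int fault_fast(struct kvm *kvm, gfn_t gfn)
    {
            int ret = -EFAULT;

            spin_lock(&kvm->mmu_lock);
            /* ... succeed only if the mapping already exists ... */
            spin_unlock(&kvm->mmu_lock);
            return ret;
    }

    static int fault_slow(struct kvm *kvm, gfn_t gfn)
    {
            /* ... allocate and pin the page without holding the lock ... */
            spin_lock(&kvm->mmu_lock);
            /* ... install the new mapping ... */
            spin_unlock(&kvm->mmu_lock);
            return 0;
    }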
/linux/arch/arm64/kvm/
mmu.c
82 cond_resched_rwlock_write(&kvm->mmu_lock); in stage2_apply_range()
111 if (need_resched() || rwlock_needbreak(&kvm->mmu_lock)) in need_split_memcache_topup_or_resched()
128 lockdep_assert_held_write(&kvm->mmu_lock); in kvm_mmu_split_huge_pages()
140 write_unlock(&kvm->mmu_lock); in kvm_mmu_split_huge_pages()
146 write_lock(&kvm->mmu_lock); in kvm_mmu_split_huge_pages()
334 lockdep_assert_held_write(&kvm->mmu_lock); in __unmap_stage2_range()
377 write_lock(&kvm->mmu_lock); in stage2_flush_vm()
385 write_unlock(&kvm->mmu_lock); in stage2_flush_vm()
1085 write_lock(&kvm->mmu_lock); in stage2_unmap_vm()
1093 write_unlock(&kvm->mmu_lock); in stage2_unmap_vm()
[all …]
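
stage2_apply_range() at mmu.c:82 shows the lock-break idiom for long walks under a contended rwlock: periodically yield mmu_lock when the scheduler or a waiting locker needs it, rather than holding it across the whole range. A schematic version (function, chunking, and body invented):

    static void walk_stage2(struct kvm *kvm, u64 start, u64 end)
    {
            u64 addr, next;

            write_lock(&kvm->mmu_lock);
            for (addr = start; addr < end; addr = next) {
                    next = addr + SZ_2M;    /* chunk size: illustrative only */
                    /* ... process one chunk of the stage-2 range ... */
                    if (need_resched() || rwlock_needbreak(&kvm->mmu_lock))
                            cond_resched_rwlock_write(&kvm->mmu_lock);
            }
            write_unlock(&kvm->mmu_lock);
    }

kvm_mmu_split_huge_pages() (lines 140/146) uses the heavier variant: drop the lock entirely, top up its memory cache, and retake it.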
nested.c
519 lockdep_assert_held_write(&kvm_s2_mmu_to_kvm(mmu)->mmu_lock); in get_guest_mapping_ttl()
643 write_lock(&kvm->mmu_lock); in kvm_s2_mmu_iterate_by_vmid()
655 write_unlock(&kvm->mmu_lock); in kvm_s2_mmu_iterate_by_vmid()
664 lockdep_assert_held_write(&kvm->mmu_lock); in lookup_s2_mmu()
713 lockdep_assert_held_write(&vcpu->kvm->mmu_lock); in get_s2_mmu_nested()
791 scoped_guard(write_lock, &vcpu->kvm->mmu_lock) in kvm_vcpu_load_hw_mmu()
905 lockdep_assert_held_write(&kvm->mmu_lock); in kvm_invalidate_vncr_ipa()
959 lockdep_assert_held_write(&kvm->mmu_lock); in invalidate_vncr_va()
1130 guard(write_lock)(&vcpu->kvm->mmu_lock); in kvm_handle_s1e2_tlbi()
1138 lockdep_assert_held_write(&kvm->mmu_lock); in kvm_nested_s2_wp()
[all …]
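
nested.c mixes classic write_lock/write_unlock pairs with the scope-based guards from linux/cleanup.h: guard(write_lock)(...) holds the lock to the end of the enclosing function, scoped_guard(write_lock, ...) to the end of the attached block, and both release it automatically on every exit path. A hypothetical illustration of the two forms:

    #include <linux/cleanup.h>
    #include <linux/kvm_host.h>

    static void guard_examples(struct kvm *kvm)
    {
            /* Block-scoped: dropped when the block exits, including
             * on early returns from within it. */
            scoped_guard(read_lock, &kvm->mmu_lock) {
                    /* ... read-side inspection ... */
            }

            /* Function-scoped: held from here to the end of the function. */
            guard(write_lock)(&kvm->mmu_lock);
            /* ... write-side updates ... */
    }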
at.c
1341 guard(write_lock_irqsave)(&vcpu->kvm->mmu_lock); in __kvm_at_s1e01_fast()
1477 scoped_guard(write_lock_irqsave, &vcpu->kvm->mmu_lock) { in __kvm_at_s1e2()
/linux/arch/s390/kvm/
gmap.c
141 scoped_guard(write_lock, &gmap->kvm->mmu_lock) in gmap_set_limit()
436 lockdep_assert_held_write(&gmap->kvm->mmu_lock); in gmap_unmap_gfn_range()
518 lockdep_assert_held(&gmap->kvm->mmu_lock); in gmap_sync_dirty_log()
599 lockdep_assert_held(&gmap->kvm->mmu_lock); in gmap_try_fixup_minor()
707 lockdep_assert_held(&gmap->kvm->mmu_lock); in gmap_link()
796 scoped_guard(read_lock, &gmap->kvm->mmu_lock) { in gmap_ucas_translate()
802 scoped_guard(write_lock, &gmap->kvm->mmu_lock) { in gmap_ucas_translate()
833 scoped_guard(write_lock, &gmap->kvm->mmu_lock) in gmap_ucas_map()
866 guard(read_lock)(&gmap->kvm->mmu_lock); in gmap_ucas_unmap()
905 scoped_guard(read_lock, &gmap->kvm->mmu_lock) in gmap_split_huge_pages()
[all …]
gmap.h
173 lockdep_assert_held(&gmap->kvm->mmu_lock); in _gmap_ptep_xchg()
210 lockdep_assert_held(&gmap->kvm->mmu_lock); in _gmap_crstep_xchg_atomic()
gaccess.c
659 scoped_guard(read_lock, &kvm->mmu_lock) in vm_check_access_key_gpa()
718 scoped_guard(read_lock, &vcpu->kvm->mmu_lock) in vcpu_check_access_key_gpa()
1429 lockdep_assert_held(&sg->kvm->mmu_lock); in _do_shadow_pte()
1471 lockdep_assert_held(&sg->kvm->mmu_lock); in _do_shadow_crste()
1513 lockdep_assert_held(&sg->kvm->mmu_lock); in _gaccess_do_shadow()
1595 scoped_guard(read_lock, &vcpu->kvm->mmu_lock) { in _gaccess_shadow_fault()
kvm-s390.c
564 guard(read_lock)(&kvm->mmu_lock); in kvm_s390_keyop()
725 scoped_guard(read_lock, &kvm->mmu_lock) in kvm_arch_sync_dirty_log()
2117 scoped_guard(read_lock, &kvm->mmu_lock) { in kvm_s390_get_skeys()
2184 scoped_guard(read_lock, &kvm->mmu_lock) { in kvm_s390_set_skeys()
2238 scoped_guard(read_lock, &kvm->mmu_lock) { in kvm_s390_get_cmma_bits()
2300 scoped_guard(read_lock, &kvm->mmu_lock) { in kvm_s390_set_cmma_bits()
4260 scoped_guard(write_lock, &vcpu->kvm->mmu_lock) in kvm_s390_fixup_prefix()
5713 scoped_guard(write_lock, &kvm->mmu_lock) { in kvm_arch_commit_memory_region()
5749 scoped_guard(read_lock, &kvm->mmu_lock) in kvm_test_age_gfn()
5763 scoped_guard(read_lock, &kvm->mmu_lock) in kvm_age_gfn()
/linux/drivers/accel/habanalabs/common/
command_buffer.c
41 mutex_lock(&hdev->mmu_lock); in cb_map_mem()
53 mutex_unlock(&hdev->mmu_lock); in cb_map_mem()
62 mutex_unlock(&hdev->mmu_lock); in cb_map_mem()
72 mutex_lock(&hdev->mmu_lock); in cb_unmap_mem()
75 mutex_unlock(&hdev->mmu_lock); in cb_unmap_mem()
memory.c
1185 mutex_lock(&hdev->mmu_lock); in map_device_va()
1191 mutex_unlock(&hdev->mmu_lock); in map_device_va()
1197 mutex_unlock(&hdev->mmu_lock); in map_device_va()
1352 mutex_lock(&hdev->mmu_lock); in unmap_device_va()
1365 mutex_unlock(&hdev->mmu_lock); in unmap_device_va()
2780 mutex_lock(&hdev->mmu_lock); in hl_vm_ctx_fini()
2786 mutex_unlock(&hdev->mmu_lock); in hl_vm_ctx_fini()
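
In contrast to the KVM sites above, the habanalabs driver serializes device-MMU updates with a mutex: hl_mmu_init() creates hdev->mmu_lock (see the mmu/ group below), and paths such as cb_map_mem() and map_device_va() sleep-lock it around page-table changes, presumably because those paths can block on allocations and device I/O. Reduced to its shape (body invented):

    static int update_device_mmu(struct hl_device *hdev)
    {
            int rc = 0;

            /* May sleep: not usable from atomic context, unlike the
             * spinlock-protected KVM paths above. */
            mutex_lock(&hdev->mmu_lock);
            /* ... update device page tables and flush TLBs ... */
            mutex_unlock(&hdev->mmu_lock);
            return rc;
    }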
/linux/arch/x86/kvm/
debugfs.c
112 write_lock(&kvm->mmu_lock); in kvm_mmu_rmaps_stat_show()
132 write_unlock(&kvm->mmu_lock); in kvm_mmu_rmaps_stat_show()
/linux/drivers/accel/habanalabs/common/mmu/
mmu.c
51 mutex_init(&hdev->mmu_lock); in hl_mmu_init()
95 mutex_destroy(&hdev->mmu_lock); in hl_mmu_fini()
573 mutex_lock(&hdev->mmu_lock); in hl_mmu_get_tlb_info()
575 mutex_unlock(&hdev->mmu_lock); in hl_mmu_get_tlb_info()
681 mutex_lock(&hdev->mmu_lock); in hl_mmu_prefetch_work_function()
685 mutex_unlock(&hdev->mmu_lock); in hl_mmu_prefetch_work_function()
/linux/Documentation/virt/kvm/
locking.rst
57 - kvm->arch.mmu_lock is an rwlock; critical sections for
59 also take kvm->arch.mmu_lock
270 ``kvm->mmu_lock``
/linux/arch/mips/kvm/
mips.c
196 spin_lock(&kvm->mmu_lock); in kvm_arch_flush_shadow_memslot()
201 spin_unlock(&kvm->mmu_lock); in kvm_arch_flush_shadow_memslot()
231 spin_lock(&kvm->mmu_lock); in kvm_arch_commit_memory_region()
237 spin_unlock(&kvm->mmu_lock); in kvm_arch_commit_memory_region()
/linux/include/linux/
kvm_host.h
772 rwlock_t mmu_lock; member
774 spinlock_t mmu_lock;
1288 lockdep_assert_once(lockdep_is_held(&kvm->mmu_lock) || unused); in kvm_release_faultin_page()
2146 lockdep_assert_held(&kvm->mmu_lock); in mmu_invalidate_retry_gfn()
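
The two declarations at kvm_host.h:772/774 are presumably the arms of the same KVM_HAVE_MMU_RWLOCK conditional sketched for kvm_mm.h above:

    struct kvm {
            /* ... */
    #ifdef KVM_HAVE_MMU_RWLOCK
            rwlock_t mmu_lock;
    #else
            spinlock_t mmu_lock;
    #endif
            /* ... */
    };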
/linux/drivers/accel/habanalabs/gaudi2/
gaudi2.c
11062 mutex_lock(&hdev->mmu_lock); in gaudi2_debugfs_read_dma()
11078 mutex_unlock(&hdev->mmu_lock); in gaudi2_debugfs_read_dma()
11107 mutex_lock(&hdev->mmu_lock); in gaudi2_debugfs_read_dma()
11117 mutex_unlock(&hdev->mmu_lock); in gaudi2_debugfs_read_dma()
11169 mutex_lock(&hdev->mmu_lock); in gaudi2_internal_cb_pool_init()
11180 mutex_unlock(&hdev->mmu_lock); in gaudi2_internal_cb_pool_init()
11187 mutex_unlock(&hdev->mmu_lock); in gaudi2_internal_cb_pool_init()
11205 mutex_lock(&hdev->mmu_lock); in gaudi2_internal_cb_pool_fini()
11209 mutex_unlock(&hdev->mmu_lock); in gaudi2_internal_cb_pool_fini()
