Lines matching the full identifier mmu (cross-reference listing)

12 #include <asm/mmu.h>
150 #define kvm_phys_shift(mmu) VTCR_EL2_IPA((mmu)->vtcr) argument
151 #define kvm_phys_size(mmu) (_AC(1, ULL) << kvm_phys_shift(mmu)) argument
152 #define kvm_phys_mask(mmu) (kvm_phys_size(mmu) - _AC(1, ULL)) argument
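These three macros derive the guest IPA geometry from the VTCR_EL2 value cached in the stage-2 MMU: VTCR_EL2_IPA() recovers the IPA width from the T0SZ field (64 - T0SZ), and the size and mask follow by shifting. A minimal user-space sketch of the same arithmetic, assuming T0SZ sits in the low VTCR bits as on arm64 (the real field masks live in asm/kvm_arm.h):

#include <stdint.h>
#include <stdio.h>

/* Assumption: T0SZ occupies VTCR_EL2[5:0], as on arm64. */
#define VTCR_T0SZ_MASK	0x3fULL
#define VTCR_IPA(vtcr)	(64 - ((vtcr) & VTCR_T0SZ_MASK))

int main(void)
{
	uint64_t vtcr  = 24;			/* T0SZ = 24 -> 40-bit IPA space */
	uint64_t shift = VTCR_IPA(vtcr);	/* 40 */
	uint64_t size  = 1ULL << shift;		/* 1 TiB of guest physical space */
	uint64_t mask  = size - 1;		/* 0xff_ffff_ffff */

	printf("shift=%llu size=%#llx mask=%#llx\n",
	       (unsigned long long)shift,
	       (unsigned long long)size,
	       (unsigned long long)mask);
	return 0;
}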
171 void kvm_stage2_unmap_range(struct kvm_s2_mmu *mmu, phys_addr_t start,
173 void kvm_stage2_flush_range(struct kvm_s2_mmu *mmu, phys_addr_t addr, phys_addr_t end);
174 void kvm_stage2_wp_range(struct kvm_s2_mmu *mmu, phys_addr_t addr, phys_addr_t end);
177 int kvm_init_stage2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mmu, unsigned long type);
179 void kvm_free_stage2_pgd(struct kvm_s2_mmu *mmu);
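Taken together, these declarations cover the lifetime of a stage-2 page table: kvm_init_stage2_mmu() sets one up, the *_range() helpers unmap, flush or write-protect a window of guest physical addresses, and kvm_free_stage2_pgd() tears the table down. A hedged sketch of how a caller inside arm64 KVM might write-protect one memslot for dirty logging; the function name is hypothetical, and the in-tree logic in arch/arm64/kvm/mmu.c additionally flushes the stage-2 TLBs afterwards:

#include <linux/kvm_host.h>
#include <asm/kvm_mmu.h>

/* Hypothetical helper: write-protect every stage-2 mapping of a memslot. */
static void example_wp_memslot(struct kvm *kvm, struct kvm_memory_slot *slot)
{
	phys_addr_t start = slot->base_gfn << PAGE_SHIFT;
	phys_addr_t end   = (slot->base_gfn + slot->npages) << PAGE_SHIFT;

	write_lock(&kvm->mmu_lock);
	kvm_stage2_wp_range(&kvm->arch.mmu, start, end);
	write_unlock(&kvm->mmu_lock);

	/* The real code follows this with a stage-2 TLB invalidation. */
}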
303 static __always_inline u64 kvm_get_vttbr(struct kvm_s2_mmu *mmu) in kvm_get_vttbr() argument
305 struct kvm_vmid *vmid = &mmu->vmid; in kvm_get_vttbr()
309 baddr = mmu->pgd_phys; in kvm_get_vttbr()
319 static __always_inline void __load_stage2(struct kvm_s2_mmu *mmu, in __load_stage2() argument
322 write_sysreg(mmu->vtcr, vtcr_el2); in __load_stage2()
323 write_sysreg(kvm_get_vttbr(mmu), vttbr_el2); in __load_stage2()
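kvm_get_vttbr() packs the stage-2 page-table base address, the VMID and (when supported) the CnP hint into a single VTTBR_EL2 value, which __load_stage2() then programs together with VTCR_EL2 to switch the CPU onto that guest's stage-2 tables. A minimal sketch of the packing, using the architectural field positions (VMID at bits [63:48], CnP at bit 0); the real helper also reads the VMID atomically and masks it to the implemented VMID width:

#include <stdint.h>

#define VTTBR_VMID_SHIFT	48
#define VTTBR_CNP_BIT		(1ULL << 0)

/* Sketch of the packing done by kvm_get_vttbr(). */
uint64_t example_make_vttbr(uint64_t pgd_phys, uint16_t vmid, int have_cnp)
{
	uint64_t baddr = pgd_phys & ~VTTBR_CNP_BIT;	/* bit 0 is reused for CnP */
	uint64_t vmid_field = (uint64_t)vmid << VTTBR_VMID_SHIFT;

	return baddr | vmid_field | (have_cnp ? VTTBR_CNP_BIT : 0);
}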
333 static inline struct kvm *kvm_s2_mmu_to_kvm(struct kvm_s2_mmu *mmu) in kvm_s2_mmu_to_kvm() argument
335 return container_of(mmu->arch, struct kvm, arch); in kvm_s2_mmu_to_kvm()
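kvm_s2_mmu_to_kvm() walks back from a stage-2 MMU to its owning struct kvm: mmu->arch points at the kvm_arch embedded inside struct kvm, and container_of() subtracts that member's offset. A self-contained illustration of the same pattern with stand-in types (the kernel's container_of in linux/container_of.h adds type checking on top of this offsetof arithmetic):

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Stand-in types mirroring the kvm / kvm_arch nesting. */
struct example_arch { int vmid_bits; };
struct example_vm   { long id; struct example_arch arch; };

int main(void)
{
	struct example_vm vm = { .id = 42 };
	struct example_arch *arch = &vm.arch;

	/* Recover the enclosing VM from a pointer to its embedded arch member. */
	struct example_vm *back = container_of(arch, struct example_vm, arch);

	printf("id=%ld same=%d\n", back->id, back == &vm);
	return 0;
}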
344 static inline bool kvm_s2_mmu_valid(struct kvm_s2_mmu *mmu) in kvm_s2_mmu_valid() argument
346 return !(mmu->tlb_vttbr & VTTBR_CNP_BIT); in kvm_s2_mmu_valid()
349 static inline bool kvm_is_nested_s2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mmu) in kvm_is_nested_s2_mmu() argument
352 * Be careful, mmu may not be fully initialised so do look at in kvm_is_nested_s2_mmu()
355 return &kvm->arch.mmu != mmu; in kvm_is_nested_s2_mmu()
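kvm_s2_mmu_valid() reuses the CnP bit (bit 0) of the cached VTTBR as a cheap "this slot is unused" marker, while kvm_is_nested_s2_mmu() distinguishes a shadow stage-2 MMU from the VM's canonical one by a plain pointer comparison against &kvm->arch.mmu. A hedged sketch of how a lookup over an array of shadow MMUs could combine a validity check with a key match; the structure, field names and loop are hypothetical, and the in-tree lookup in arch/arm64/kvm/nested.c matches on additional guest state:

#include <stdint.h>
#include <stddef.h>

#define VTTBR_CNP_BIT	(1ULL << 0)

/* Stand-in for struct kvm_s2_mmu: bit 0 of tlb_vttbr marks a free slot. */
struct s2_mmu_slot {
	uint64_t tlb_vttbr;
	uint64_t guest_vttbr;	/* hypothetical key used to match a slot */
};

static int slot_valid(const struct s2_mmu_slot *mmu)
{
	return !(mmu->tlb_vttbr & VTTBR_CNP_BIT);
}

/* Find the shadow MMU tracking a given guest VTTBR, if any. */
struct s2_mmu_slot *lookup_slot(struct s2_mmu_slot *slots, size_t n,
				uint64_t guest_vttbr)
{
	for (size_t i = 0; i < n; i++) {
		if (slot_valid(&slots[i]) &&
		    slots[i].guest_vttbr == guest_vttbr)
			return &slots[i];
	}
	return NULL;
}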