Lines matching refs: pvo
314 moea64_pte_synch_native(struct pvo_entry *pvo) in moea64_pte_synch_native() argument
316 volatile struct lpte *pt = moea64_pteg_table + pvo->pvo_pte.slot; in moea64_pte_synch_native()
319 PMAP_LOCK_ASSERT(pvo->pvo_pmap, MA_OWNED); in moea64_pte_synch_native()
321 pvo_ptevpn = moea64_pte_vpn_from_pvo_vpn(pvo); in moea64_pte_synch_native()
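The synch_native entries above show the preamble nearly every helper in this file repeats: pvo->pvo_pte.slot indexes the hash page table, the routine turns it into a pointer into moea64_pteg_table, the caller must already hold the pmap lock (hence PMAP_LOCK_ASSERT), and the hardware entry is only believed if its VPN field still matches the VPN derived from the pvo. A minimal user-space sketch of that preamble follows; the struct layouts, bit values, and helper names are reduced stand-ins, not the kernel definitions, and the kernel's eviction lock, PTESYNC, and big-endian conversions are omitted.

#include <stdint.h>

/* Reduced stand-ins for the kernel's struct lpte and struct pvo_entry. */
struct lpte { uint64_t pte_hi; uint64_t pte_lo; };
struct pvo_entry { uint64_t pvo_vpn; struct { uint64_t slot; } pvo_pte; };

#define LPTE_VALID      0x0000000000000001ULL   /* illustrative bit values */
#define LPTE_AVPN_MASK  0xFFFFFFFFFFFFFF80ULL
#define LPTE_REF        0x0000000000000100ULL
#define LPTE_CHG        0x0000000000000080ULL

static struct lpte pteg_table[8 * 1024];        /* stand-in for moea64_pteg_table */

/* Stand-in for moea64_pte_vpn_from_pvo_vpn(): the comparable AVPN bits. */
static uint64_t
vpn_from_pvo(const struct pvo_entry *pvo)
{
    return (pvo->pvo_vpn & LPTE_AVPN_MASK);
}

/*
 * Sketch of the synch pattern: locate the PTE by its recorded slot, bail
 * out if the entry was evicted (VPN mismatch), otherwise report the
 * REF/CHG bits currently set in the hardware entry.
 */
static int64_t
pte_synch_sketch(struct pvo_entry *pvo)
{
    volatile struct lpte *pt = pteg_table + pvo->pvo_pte.slot;
    uint64_t pvo_ptevpn = vpn_from_pvo(pvo);

    if ((pt->pte_hi & LPTE_VALID) == 0 ||
        (pt->pte_hi & LPTE_AVPN_MASK) != pvo_ptevpn)
        return (-1);        /* slot has been reused for another mapping */
    return ((int64_t)(pt->pte_lo & (LPTE_REF | LPTE_CHG)));
}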
339 moea64_pte_clear_native(struct pvo_entry *pvo, uint64_t ptebit) in moea64_pte_clear_native() argument
341 volatile struct lpte *pt = moea64_pteg_table + pvo->pvo_pte.slot; in moea64_pte_clear_native()
345 PMAP_LOCK_ASSERT(pvo->pvo_pmap, MA_OWNED); in moea64_pte_clear_native()
347 moea64_pte_from_pvo(pvo, &properpt); in moea64_pte_clear_native()
373 TLBIE(pvo->pvo_vpn, properpt.pte_hi); in moea64_pte_clear_native()
377 ptelo = moea64_pte_unset_native(pvo); in moea64_pte_clear_native()
378 moea64_pte_insert_native(pvo); in moea64_pte_clear_native()
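The clear_native entries show two routes: a fast one that touches the live entry and then issues TLBIE with the pvo's VPN, and a fallback that unsets the entry and re-inserts it from the canonical PTE image that moea64_pte_from_pvo() builds into properpt. A rough sketch of that split, with hypothetical stubs in place of the kernel helpers; the real routine takes the in-place path only for the reference bit and wraps it in an eviction lock, a narrow store, PTESYNC, and endian conversion, none of which appears here.

#include <stdint.h>

struct lpte { uint64_t pte_hi; uint64_t pte_lo; };
struct pvo_entry { uint64_t pvo_vpn; struct { uint64_t slot; } pvo_pte; };

#define LPTE_REF  0x0000000000000100ULL
#define LPTE_CHG  0x0000000000000080ULL

static struct lpte pteg_table[8 * 1024];        /* stand-in hash page table */

/* Hypothetical stubs standing in for the kernel helpers on the clear path. */
static void build_pte_from_pvo(struct pvo_entry *pvo, struct lpte *out)
{ out->pte_hi = pvo->pvo_vpn; out->pte_lo = 0; }
static uint64_t pte_unset_stub(struct pvo_entry *pvo)
{ return (pteg_table[pvo->pvo_pte.slot].pte_lo); }
static void pte_insert_stub(struct pvo_entry *pvo) { (void)pvo; }
static void tlbie_stub(uint64_t vpn, uint64_t pte_hi) { (void)vpn; (void)pte_hi; }

/*
 * Sketch of the clear pattern: the reference bit can be cleared on the
 * live PTE followed by a TLB invalidate of that VPN; clearing anything
 * else takes the slow path, removing the entry and re-inserting the
 * pvo's canonical PTE image.
 */
static int64_t
pte_clear_sketch(struct pvo_entry *pvo, uint64_t ptebit)
{
    struct lpte properpt;
    uint64_t ptelo;

    build_pte_from_pvo(pvo, &properpt);

    if (ptebit == LPTE_REF) {
        volatile struct lpte *pt = pteg_table + pvo->pvo_pte.slot;

        ptelo = pt->pte_lo;
        pt->pte_lo = ptelo & ~ptebit;           /* clear in place */
        tlbie_stub(pvo->pvo_vpn, properpt.pte_hi);
    } else {
        ptelo = pte_unset_stub(pvo);            /* tear down ... */
        pte_insert_stub(pvo);                   /* ... and rebuild */
    }
    return ((int64_t)(ptelo & (LPTE_REF | LPTE_CHG)));
}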
410 moea64_pte_unset_native(struct pvo_entry *pvo) in moea64_pte_unset_native() argument
412 volatile struct lpte *pt = moea64_pteg_table + pvo->pvo_pte.slot; in moea64_pte_unset_native()
416 pvo_ptevpn = moea64_pte_vpn_from_pvo_vpn(pvo); in moea64_pte_unset_native()
425 ret = moea64_pte_unset_locked(pt, pvo->pvo_vpn); in moea64_pte_unset_native()
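The unset_native entry delegates the actual teardown to moea64_pte_unset_locked(), passing pvo->pvo_vpn so the stale translation can be flushed. A compressed sketch of what that step amounts to; the stub name is a placeholder, and the kernel's brief PTE lock, PTESYNC, statistics, and endianness handling are left out.

#include <stdint.h>

struct lpte { uint64_t pte_hi; uint64_t pte_lo; };

#define LPTE_VALID  0x0000000000000001ULL
#define LPTE_REF    0x0000000000000100ULL
#define LPTE_CHG    0x0000000000000080ULL

/* Hypothetical TLB-invalidate stand-in for the TLBIE step. */
static void tlbie_stub(uint64_t vpn) { (void)vpn; }

/*
 * Sketch of the unset step: invalidate the hardware PTE, flush any cached
 * translation for that VPN, and hand the old REF/CHG bits back so the
 * caller can fold them into the page's referenced/modified state.
 */
static int64_t
pte_unset_locked_sketch(volatile struct lpte *pt, uint64_t vpn)
{
    uint64_t ptelo = pt->pte_lo;

    pt->pte_hi &= ~LPTE_VALID;      /* entry no longer usable by hardware */
    tlbie_stub(vpn);                /* shoot down any cached translation */
    return ((int64_t)(ptelo & (LPTE_REF | LPTE_CHG)));
}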
433 moea64_pte_replace_inval_native(struct pvo_entry *pvo, in moea64_pte_replace_inval_native() argument
439 moea64_pte_from_pvo(pvo, &properpt); in moea64_pte_replace_inval_native()
459 TLBIE(pvo->pvo_vpn, ptehi); in moea64_pte_replace_inval_native()
473 moea64_pte_replace_native(struct pvo_entry *pvo, int flags) in moea64_pte_replace_native() argument
475 volatile struct lpte *pt = moea64_pteg_table + pvo->pvo_pte.slot; in moea64_pte_replace_native()
481 moea64_pte_from_pvo(pvo, &properpt); in moea64_pte_replace_native()
494 ptelo = moea64_pte_replace_inval_native(pvo, pt); in moea64_pte_replace_native()
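The replace_native entry hands the hard case to the _inval variant. The split is between changes that can simply be written over the live entry and changes that require invalidating and flushing the old translation before the new image goes in. A compressed sketch of that shape; the helper names, the fast-path condition, and the exact field updates are illustrative stand-ins, not the kernel's logic.

#include <stdint.h>
#include <stdbool.h>

struct lpte { uint64_t pte_hi; uint64_t pte_lo; };
struct pvo_entry { uint64_t pvo_vpn; struct { uint64_t slot; } pvo_pte; };

static struct lpte pteg_table[8 * 1024];        /* stand-in hash page table */

/* Hypothetical stubs for the kernel helpers. */
static void build_pte_from_pvo(struct pvo_entry *pvo, struct lpte *out)
{ out->pte_hi = pvo->pvo_vpn; out->pte_lo = 0; }
static void tlbie_stub(uint64_t vpn) { (void)vpn; }

/* Slow path: invalidate the old entry, flush its VPN, install the new image. */
static int64_t
pte_replace_inval_sketch(struct pvo_entry *pvo, volatile struct lpte *pt)
{
    struct lpte properpt;
    uint64_t ptelo;

    build_pte_from_pvo(pvo, &properpt);
    ptelo = pt->pte_lo;                 /* old bits, including REF/CHG */
    pt->pte_hi = 0;                     /* invalidate before rewriting */
    tlbie_stub(pvo->pvo_vpn);
    pt->pte_lo = properpt.pte_lo;
    pt->pte_hi = properpt.pte_hi;
    return ((int64_t)ptelo);
}

/* Fast path only when nothing visible to the MMU translation changes. */
static int64_t
pte_replace_sketch(struct pvo_entry *pvo, bool needs_invalidate)
{
    volatile struct lpte *pt = pteg_table + pvo->pvo_pte.slot;
    struct lpte properpt;

    if (!needs_invalidate) {
        build_pte_from_pvo(pvo, &properpt);
        pt->pte_hi = properpt.pte_hi;   /* overwrite in place */
        return ((int64_t)pt->pte_lo);
    }
    return (pte_replace_inval_sketch(pvo, pt));
}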
820 moea64_pte_insert_locked(struct pvo_entry *pvo, struct lpte *insertpt, in moea64_pte_insert_locked() argument
828 slot = moea64_insert_to_pteg_native(insertpt, pvo->pvo_pte.slot, in moea64_pte_insert_locked()
831 pvo->pvo_pte.slot = slot; in moea64_pte_insert_locked()
838 pvo->pvo_vaddr ^= PVO_HID; in moea64_pte_insert_locked()
840 pvo->pvo_pte.slot ^= (moea64_pteg_mask << 3); in moea64_pte_insert_locked()
841 slot = moea64_insert_to_pteg_native(insertpt, pvo->pvo_pte.slot, in moea64_pte_insert_locked()
844 pvo->pvo_pte.slot = slot; in moea64_pte_insert_locked()
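The insert_locked entries show the two-way hashed insert: try the group recorded in pvo_pte.slot first; if it is full, toggle PVO_HID in pvo_vaddr, move to the secondary group by XOR-ing the slot with (moea64_pteg_mask << 3), and record whichever slot finally took the entry. A self-contained miniature model of that arithmetic with a toy table; the sizes, flag values, and free-slot search are illustrative, and the real moea64_insert_to_pteg_native() additionally handles eviction, PTE locking, and byte order.

#include <stdint.h>
#include <stdbool.h>

/* Miniature hashed page table: PTEG_COUNT groups of 8 slots each. */
#define PTEG_COUNT   16                 /* must be a power of two */
#define PTEG_MASK    (PTEG_COUNT - 1)   /* stand-in for moea64_pteg_mask */

struct lpte { uint64_t pte_hi; uint64_t pte_lo; };
struct pvo_entry {
    uint64_t pvo_vaddr;                 /* carries the PVO_HID flag bit */
    struct { uint64_t slot; } pvo_pte;
};

#define PVO_HID      0x1ULL             /* illustrative flag value */
#define LPTE_VALID   0x1ULL

static struct lpte pteg_table[PTEG_COUNT * 8];

/* Try to place insertpt in the 8-slot group whose base slot is "base". */
static int64_t
insert_to_group(struct lpte *insertpt, uint64_t base)
{
    for (int i = 0; i < 8; i++) {
        struct lpte *pt = &pteg_table[base + i];

        if ((pt->pte_hi & LPTE_VALID) == 0) {
            *pt = *insertpt;
            pt->pte_hi |= LPTE_VALID;
            return ((int64_t)(base + i));
        }
    }
    return (-1);                        /* group full */
}

/*
 * Two-way insert: primary group first; on failure flip the HID bit and
 * XOR the slot base with (mask << 3) to land in the secondary group.
 */
static bool
pte_insert_sketch(struct pvo_entry *pvo, struct lpte *insertpt)
{
    int64_t slot;

    slot = insert_to_group(insertpt, pvo->pvo_pte.slot);
    if (slot < 0) {
        /* Primary group full: switch to the secondary hash group. */
        pvo->pvo_vaddr ^= PVO_HID;
        pvo->pvo_pte.slot ^= ((uint64_t)PTEG_MASK << 3);
        slot = insert_to_group(insertpt, pvo->pvo_pte.slot);
        if (slot < 0)
            return (false);             /* both full; the real code evicts */
    }
    pvo->pvo_pte.slot = (uint64_t)slot;
    return (true);
}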
852 moea64_pte_insert_native(struct pvo_entry *pvo) in moea64_pte_insert_native() argument
858 moea64_pte_from_pvo(pvo, &insertpt); in moea64_pte_insert_native()
863 pvo->pvo_pte.slot &= ~7ULL; /* Base slot address */ in moea64_pte_insert_native()
864 ret = moea64_pte_insert_locked(pvo, &insertpt, LPTE_VALID); in moea64_pte_insert_native()
876 ret = moea64_pte_insert_locked(pvo, &insertpt, LPTE_BIG); in moea64_pte_insert_native()
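The insert_native entries also clear the low three bits of the slot before trying an insertion (the "Base slot address" comment). A PTE group holds eight entries, so a slot number encodes (group << 3) | entry: masking with ~7 recovers the group's base slot, and the XOR with (moea64_pteg_mask << 3) shown above then flips only the group part. Two small helpers making that arithmetic explicit (names are illustrative):

#include <stdint.h>

/* A slot number encodes (group << 3) | entry; eight PTEs per group. */
static inline uint64_t
pteg_base_slot(uint64_t slot)
{
    return (slot & ~7ULL);              /* drop the entry index, keep the group */
}

static inline uint64_t
pteg_group(uint64_t slot)
{
    return (slot >> 3);                 /* group index alone */
}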
921 moea64_pte_unset_sp_locked(struct pvo_entry *pvo) in moea64_pte_unset_sp_locked() argument
928 eva = PVO_VADDR(pvo) + HPT_SP_SIZE; in moea64_pte_unset_sp_locked()
930 for (; pvo != NULL && PVO_VADDR(pvo) < eva; in moea64_pte_unset_sp_locked()
931 pvo = RB_NEXT(pvo_tree, &pvo->pvo_pmap->pmap_pvo, pvo)) { in moea64_pte_unset_sp_locked()
932 pt = moea64_pteg_table + pvo->pvo_pte.slot; in moea64_pte_unset_sp_locked()
935 moea64_pte_vpn_from_pvo_vpn(pvo)) { in moea64_pte_unset_sp_locked()
938 vpn = moea64_vpn_from_pte(ptehi, pvo->pvo_pte.slot); in moea64_pte_unset_sp_locked()
944 vpn = pvo->pvo_vpn; in moea64_pte_unset_sp_locked()
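The unset_sp_locked entries walk every pvo inside one superpage: compute eva as the aligned base plus HPT_SP_SIZE, then advance through the pmap's pvo tree with RB_NEXT() until the next pvo falls outside the range, accumulating whatever each per-entry unset returns. A user-space sketch of the same walk; a sorted singly linked list stands in for the red-black tree, the size constant is illustrative, and the per-entry work is a stub.

#include <stdint.h>
#include <stddef.h>

#define HPT_SP_SIZE  (16UL * 1024 * 1024)   /* illustrative superpage span */

/*
 * Stand-in pvo: in the kernel these live in a per-pmap red-black tree and
 * the walk uses RB_NEXT(); a sorted singly linked list models that here.
 */
struct pvo_entry {
    uint64_t          pvo_vaddr;        /* base VA of this constituent mapping */
    uint64_t          refchg;           /* pretend REF/CHG harvested on unset */
    struct pvo_entry *next;             /* next-higher VA in the same pmap */
};

#define PVO_VADDR(p)  ((p)->pvo_vaddr)

/* Hypothetical per-pvo teardown; returns the REF/CHG bits it collected. */
static uint64_t
pte_unset_one(struct pvo_entry *pvo)
{
    return (pvo->refchg);
}

/*
 * Superpage unset walk: starting from the first pvo of an aligned
 * superpage, visit every pvo whose VA is below base + HPT_SP_SIZE and
 * OR together the REF/CHG bits from each entry torn down.
 */
static uint64_t
pte_unset_sp_sketch(struct pvo_entry *pvo)
{
    uint64_t refchg = 0;
    uint64_t eva = PVO_VADDR(pvo) + HPT_SP_SIZE;

    for (; pvo != NULL && PVO_VADDR(pvo) < eva; pvo = pvo->next)
        refchg |= pte_unset_one(pvo);

    return (refchg);
}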
953 moea64_pte_unset_sp_native(struct pvo_entry *pvo) in moea64_pte_unset_sp_native() argument
957 PMAP_LOCK_ASSERT(pvo->pvo_pmap, MA_OWNED); in moea64_pte_unset_sp_native()
958 KASSERT((PVO_VADDR(pvo) & HPT_SP_MASK) == 0, in moea64_pte_unset_sp_native()
959 ("%s: va %#jx unaligned", __func__, (uintmax_t)PVO_VADDR(pvo))); in moea64_pte_unset_sp_native()
962 refchg = moea64_pte_unset_sp_locked(pvo); in moea64_pte_unset_sp_native()
969 moea64_pte_insert_sp_locked(struct pvo_entry *pvo) in moea64_pte_insert_sp_locked() argument
975 eva = PVO_VADDR(pvo) + HPT_SP_SIZE; in moea64_pte_insert_sp_locked()
977 for (; pvo != NULL && PVO_VADDR(pvo) < eva; in moea64_pte_insert_sp_locked()
978 pvo = RB_NEXT(pvo_tree, &pvo->pvo_pmap->pmap_pvo, pvo)) { in moea64_pte_insert_sp_locked()
979 moea64_pte_from_pvo(pvo, &insertpt); in moea64_pte_insert_sp_locked()
980 pvo->pvo_pte.slot &= ~7ULL; /* Base slot address */ in moea64_pte_insert_sp_locked()
982 ret = moea64_pte_insert_locked(pvo, &insertpt, LPTE_VALID); in moea64_pte_insert_sp_locked()
990 ret = moea64_pte_insert_locked(pvo, &insertpt, in moea64_pte_insert_sp_locked()
1003 moea64_pte_insert_sp_native(struct pvo_entry *pvo) in moea64_pte_insert_sp_native() argument
1005 PMAP_LOCK_ASSERT(pvo->pvo_pmap, MA_OWNED); in moea64_pte_insert_sp_native()
1006 KASSERT((PVO_VADDR(pvo) & HPT_SP_MASK) == 0, in moea64_pte_insert_sp_native()
1007 ("%s: va %#jx unaligned", __func__, (uintmax_t)PVO_VADDR(pvo))); in moea64_pte_insert_sp_native()
1010 moea64_pte_insert_sp_locked(pvo); in moea64_pte_insert_sp_native()
1017 moea64_pte_replace_sp_native(struct pvo_entry *pvo) in moea64_pte_replace_sp_native() argument
1021 PMAP_LOCK_ASSERT(pvo->pvo_pmap, MA_OWNED); in moea64_pte_replace_sp_native()
1022 KASSERT((PVO_VADDR(pvo) & HPT_SP_MASK) == 0, in moea64_pte_replace_sp_native()
1023 ("%s: va %#jx unaligned", __func__, (uintmax_t)PVO_VADDR(pvo))); in moea64_pte_replace_sp_native()
1026 refchg = moea64_pte_unset_sp_locked(pvo); in moea64_pte_replace_sp_native()
1027 moea64_pte_insert_sp_locked(pvo); in moea64_pte_replace_sp_native()
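The replace_sp_native entries compose the two superpage helpers: assert that the base VA is superpage-aligned, unset the whole range while collecting REF/CHG, then re-insert it from the same pvo chain, all under the pmap lock. A compact sketch of that composition with stand-in helpers and an illustrative alignment mask; the locking is omitted.

#include <stdint.h>
#include <assert.h>

#define HPT_SP_MASK   ((16UL * 1024 * 1024) - 1)    /* illustrative alignment mask */

struct pvo_entry { uint64_t pvo_vaddr; };
#define PVO_VADDR(p)  ((p)->pvo_vaddr)

/* Hypothetical stand-ins for the locked superpage helpers. */
static uint64_t pte_unset_sp(struct pvo_entry *pvo)  { (void)pvo; return (0); }
static void     pte_insert_sp(struct pvo_entry *pvo) { (void)pvo; }

/* Replace = unset the whole superpage, then rebuild it; hand back REF/CHG. */
static int64_t
pte_replace_sp_sketch(struct pvo_entry *pvo)
{
    int64_t refchg;

    assert((PVO_VADDR(pvo) & HPT_SP_MASK) == 0);    /* superpage-aligned start */
    refchg = (int64_t)pte_unset_sp(pvo);
    pte_insert_sp(pvo);
    return (refchg);
}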