Lines matching full:l3 — search hits over the vmmpmap code, one per line; each shows the source line number, the matching line, and the enclosing function.
76 pt_entry_t *l3 __diagused; in vmmpmap_release_l3()
80 l3 = (pd_entry_t *)PHYS_TO_DMAP(l2e & ~ATTR_MASK); in vmmpmap_release_l3()
82 KASSERT(l3[i] == 0, ("%s: l3 still mapped: %p %lx", __func__, in vmmpmap_release_l3()
83 &l3[i], l3[i])); in vmmpmap_release_l3()
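The three hits above all fall in vmmpmap_release_l3(): a release routine that checks every slot of an L3 page-table page is already clear before the page is freed. One wrinkle worth noting: the cast at line 80 says pd_entry_t * while the declaration at line 76 says pt_entry_t *; both are typedefs for uint64_t on arm64, so this is harmless. A minimal sketch of the whole function, assuming standard FreeBSD arm64 pmap conventions (PHYS_TO_DMAP, Ln_ENTRIES, ATTR_MASK); the vmm_pt_free() helper is hypothetical, standing in for whatever actually frees the page:

    static void
    vmmpmap_release_l3(pd_entry_t l2e)
    {
        pt_entry_t *l3 __diagused;
        int i;

        /* The L2 entry carries the physical address of the L3 table;
         * strip the attribute bits and read it through the direct map. */
        l3 = (pt_entry_t *)PHYS_TO_DMAP(l2e & ~ATTR_MASK);

        /* Under INVARIANTS, verify every entry was removed first.
         * __diagused keeps non-INVARIANTS builds, where KASSERT is a
         * no-op and l3 goes unread, from warning about it. */
        for (i = 0; i < Ln_ENTRIES; i++) {
            KASSERT(l3[i] == 0, ("%s: l3 still mapped: %p %lx",
                __func__, &l3[i], l3[i]));
        }

        vmm_pt_free(l2e & ~ATTR_MASK);    /* hypothetical page-free helper */
    }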
242 pt_entry_t new_l2e, l2e, *l2, *l3; in vmmpmap_l3_table() local
278 l3 = (pt_entry_t *)PHYS_TO_DMAP(l2e & ~ATTR_MASK); in vmmpmap_l3_table()
279 return (l3); in vmmpmap_l3_table()
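vmmpmap_l3_table() walks down to the L3 table covering va and returns its direct-map address; the new_l2e local at line 242 suggests it can also install a missing L2 entry along the way. A sketch of the tail that the hits at lines 278-279 show, with the upper-level walk and the allocation path reduced to a hypothetical vmmpmap_l2_table() helper (pmap_l2_index() here mirrors the pmap_l3_index() seen in the hits); the ATTR_DESCR_VALID test is an assumption, not taken from the matches:

    static pt_entry_t *
    vmmpmap_l3_table(vm_offset_t va)
    {
        pt_entry_t l2e, *l2, *l3;

        /* Hypothetical: find (or create, via new_l2e in the real code)
         * the L2 table that covers va. */
        l2 = vmmpmap_l2_table(va);
        if (l2 == NULL)
            return (NULL);

        l2e = atomic_load_64(&l2[pmap_l2_index(va)]);
        if ((l2e & ATTR_DESCR_VALID) == 0)
            return (NULL);

        /* The L2 entry points at the L3 table; hand back its DMAP alias. */
        l3 = (pt_entry_t *)PHYS_TO_DMAP(l2e & ~ATTR_MASK);
        return (l3);
    }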
288 pd_entry_t l3e, *l3; in vmmpmap_enter() local
312 l3 = vmmpmap_l3_table(va); in vmmpmap_enter()
313 if (l3 == NULL) in vmmpmap_enter()
318 * Ensure no other threads can write to l3 between the KASSERT in vmmpmap_enter()
323 KASSERT(atomic_load_64(&l3[pmap_l3_index(va)]) == 0, in vmmpmap_enter()
326 atomic_store_64(&l3[pmap_l3_index(va)], l3e | pa); in vmmpmap_enter()
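vmmpmap_enter() pairs a KASSERT on the current entry with an atomic_store_64() of the new one, and the truncated comment at line 318 says something must keep other writers away from l3 between the two. A sketch of that check-then-store for a single page; the vmmpmap_mtx lock, the signature, and the attribute bits are assumptions, with only the load/assert/store shape taken from lines 312-326:

    bool
    vmmpmap_enter(vm_offset_t va, vm_paddr_t pa, pt_entry_t attr)
    {
        pd_entry_t l3e, *l3;

        /* Leaf attributes plus the L3 page descriptor type (assumed). */
        l3e = attr | L3_PAGE;

        mtx_lock(&vmmpmap_mtx);            /* hypothetical lock */
        l3 = vmmpmap_l3_table(va);
        if (l3 == NULL) {
            mtx_unlock(&vmmpmap_mtx);
            return (false);
        }

        /*
         * Ensure no other threads can write to l3 between the KASSERT
         * and the store: the lock is held across both, so a slot seen
         * empty stays empty until we fill it.
         */
        KASSERT(atomic_load_64(&l3[pmap_l3_index(va)]) == 0,
            ("%s: VA already mapped", __func__));
        atomic_store_64(&l3[pmap_l3_index(va)], l3e | pa);

        mtx_unlock(&vmmpmap_mtx);
        return (true);
    }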
343 pd_entry_t *l3, l3e, **l3_list; in vmmpmap_remove() local
390 l3 = (pd_entry_t *)PHYS_TO_DMAP(l2e & ~ATTR_MASK); in vmmpmap_remove()
392 l3e = atomic_load_64(&l3[pmap_l3_index(va)]); in vmmpmap_remove()
400 atomic_store_64(&l3[pmap_l3_index(va)], l3e); in vmmpmap_remove()
402 l3_list[i] = &l3[pmap_l3_index(va)]; in vmmpmap_remove()
409 atomic_store_64(&l3[pmap_l3_index(va)], 0); in vmmpmap_remove()
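vmmpmap_remove() shows two different stores: line 400 writes a modified l3e back, line 409 writes plain 0, and line 402 saves the entry's address in l3_list. That reads like a break-before-make sequence: when stale TLB entries must be flushed, each PTE is first made invalid in place and remembered, the TLB is flushed, and only then are the remembered entries zeroed; otherwise an entry is zeroed directly. A sketch along those lines; the invalidate flag, the walk helper, the TLB-flush call, and the assumption that every page in the range is mapped are all mine, not the source's:

    void
    vmmpmap_remove(vm_offset_t va, vm_size_t size, bool invalidate)
    {
        pd_entry_t l2e, *l3, l3e, **l3_list;
        vm_offset_t sva, eva;
        size_t i, npages;

        npages = size / PAGE_SIZE;
        l3_list = NULL;
        if (invalidate)
            l3_list = malloc(npages * sizeof(l3_list[0]), M_TEMP, M_WAITOK);

        i = 0;
        eva = va + size;
        for (sva = va; sva < eva; sva += PAGE_SIZE) {
            l2e = vmmpmap_l2_entry(sva);    /* hypothetical walk helper */
            l3 = (pd_entry_t *)PHYS_TO_DMAP(l2e & ~ATTR_MASK);
            l3e = atomic_load_64(&l3[pmap_l3_index(sva)]);
            if (invalidate) {
                /* Break: clear the valid bit so the hardware walker
                 * stops using the entry, but remember where it lives. */
                l3e &= ~ATTR_DESCR_VALID;
                atomic_store_64(&l3[pmap_l3_index(sva)], l3e);
                l3_list[i++] = &l3[pmap_l3_index(sva)];
            } else {
                /* No flush needed: the entry can be zeroed outright. */
                atomic_store_64(&l3[pmap_l3_index(sva)], 0);
            }
        }

        if (invalidate) {
            /* Flush stale translations, then finish zeroing. */
            vmm_invalidate_range(va, eva);    /* hypothetical */
            for (i = 0; i < npages; i++)
                atomic_store_64(l3_list[i], 0);
            free(l3_list, M_TEMP);
        }
    }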