Lines matching refs:xa — every line in the FreeBSD LinuxKPI xarray implementation that references the struct xarray pointer xa, shown with its source line number and enclosing function ("argument" marks the lines where xa is a function parameter).

49 __xa_erase(struct xarray *xa, uint32_t index)  in __xa_erase()  argument
53 XA_ASSERT_LOCKED(xa); in __xa_erase()
55 retval = radix_tree_delete(&xa->xa_head, index); in __xa_erase()
63 xa_erase(struct xarray *xa, uint32_t index) in xa_erase() argument
67 xa_lock(xa); in xa_erase()
68 retval = __xa_erase(xa, index); in xa_erase()
69 xa_unlock(xa); in xa_erase()
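
The matches at lines 49-69 outline the usual split between a locked primitive and its self-locking wrapper: __xa_erase() asserts that the caller already holds the xarray lock and deletes the entry from the embedded radix tree, while xa_erase() brackets that call with xa_lock()/xa_unlock(). A minimal sketch of that shape follows; the local declarations and the final return of the deleted entry are assumptions, since the listing only shows lines that mention xa.

```c
void *
__xa_erase(struct xarray *xa, uint32_t index)
{
	void *retval;

	XA_ASSERT_LOCKED(xa);

	/* The entries live in the radix tree embedded in the xarray. */
	retval = radix_tree_delete(&xa->xa_head, index);

	return (retval);
}

void *
xa_erase(struct xarray *xa, uint32_t index)
{
	void *retval;

	/* Self-locking wrapper around the primitive above. */
	xa_lock(xa);
	retval = __xa_erase(xa, index);
	xa_unlock(xa);

	return (retval);
}
```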
79 xa_load(struct xarray *xa, uint32_t index) in xa_load() argument
83 xa_lock(xa); in xa_load()
84 retval = radix_tree_lookup(&xa->xa_head, index); in xa_load()
85 xa_unlock(xa); in xa_load()
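
xa_load() (lines 79-85) is the read side: a plain radix_tree_lookup() serialized by the xarray lock. Only the declaration and the return statement below are filled in; everything else comes straight from the matched lines.

```c
void *
xa_load(struct xarray *xa, uint32_t index)
{
	void *retval;

	/* Look the entry up under the xarray lock. */
	xa_lock(xa);
	retval = radix_tree_lookup(&xa->xa_head, index);
	xa_unlock(xa);

	return (retval);
}
```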
98 xa_vm_wait_locked(struct xarray *xa) in xa_vm_wait_locked() argument
100 xa_unlock(xa); in xa_vm_wait_locked()
102 xa_lock(xa); in xa_vm_wait_locked()
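
xa_vm_wait_locked() (lines 98-102) is the helper the allocating paths use when the radix tree runs out of memory: it drops the xarray lock, waits for the VM to free pages, and takes the lock again so the caller can retry. The wait itself sits on an unmatched line; vm_wait(NULL) is an assumption about what that line does.

```c
static void
xa_vm_wait_locked(struct xarray *xa)
{
	xa_unlock(xa);
	vm_wait(NULL);		/* assumed: sleep until free pages are available */
	xa_lock(xa);
}
```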
118 __xa_alloc(struct xarray *xa, uint32_t *pindex, void *ptr, uint32_t mask, gfp_t gfp) in __xa_alloc() argument
122 XA_ASSERT_LOCKED(xa); in __xa_alloc()
125 MPASS(mask > ((xa->xa_flags & XA_FLAGS_ALLOC1) != 0 ? 1 : 0)); in __xa_alloc()
130 *pindex = (xa->xa_flags & XA_FLAGS_ALLOC1) != 0 ? 1 : 0; in __xa_alloc()
134 retval = radix_tree_insert(&xa->xa_head, *pindex, ptr); in __xa_alloc()
146 xa_vm_wait_locked(xa); in __xa_alloc()
157 xa_alloc(struct xarray *xa, uint32_t *pindex, void *ptr, uint32_t mask, gfp_t gfp) in xa_alloc() argument
164 xa_lock(xa); in xa_alloc()
165 retval = __xa_alloc(xa, pindex, ptr, mask, gfp); in xa_alloc()
166 xa_unlock(xa); in xa_alloc()
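
__xa_alloc() (lines 118-146) searches for a free index: it starts at 0, or at 1 when the array was created with XA_FLAGS_ALLOC1 (line 130), and tries radix_tree_insert() until a slot accepts the pointer. The matched lines show the starting point, the mask assertion, and the call to xa_vm_wait_locked() under memory pressure; the retry loop, the -EEXIST/-ENOMEM handling, and the gfp-to-M_WAITOK test below are assumptions sketched in to make the flow concrete. xa_alloc() (lines 157-166) is once again just the lock/call/unlock wrapper.

```c
int
__xa_alloc(struct xarray *xa, uint32_t *pindex, void *ptr, uint32_t mask,
    gfp_t gfp)
{
	int retval;

	XA_ASSERT_LOCKED(xa);

	/* The mask must leave room for at least one allocatable index. */
	MPASS(mask > ((xa->xa_flags & XA_FLAGS_ALLOC1) != 0 ? 1 : 0));

	/* Index 0 is reserved when XA_FLAGS_ALLOC1 is set. */
	*pindex = (xa->xa_flags & XA_FLAGS_ALLOC1) != 0 ? 1 : 0;
retry:
	retval = radix_tree_insert(&xa->xa_head, *pindex, ptr);
	switch (retval) {
	case -EEXIST:
		/* Slot taken: move on to the next index (assumed). */
		if (*pindex != mask) {
			(*pindex)++;
			goto retry;
		}
		retval = -ENOMEM;
		break;
	case -ENOMEM:
		/* No memory: sleep and retry if the gfp flags allow it (assumed). */
		if ((gfp & M_WAITOK) != 0) {
			xa_vm_wait_locked(xa);
			goto retry;
		}
		break;
	default:
		break;
	}
	return (retval);
}
```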
178 __xa_alloc_cyclic(struct xarray *xa, uint32_t *pindex, void *ptr, uint32_t mask, in __xa_alloc_cyclic() argument
184 XA_ASSERT_LOCKED(xa); in __xa_alloc_cyclic()
187 MPASS(mask > ((xa->xa_flags & XA_FLAGS_ALLOC1) != 0 ? 1 : 0)); in __xa_alloc_cyclic()
192 *pnext_index = (xa->xa_flags & XA_FLAGS_ALLOC1) != 0 ? 1 : 0; in __xa_alloc_cyclic()
196 retval = radix_tree_insert(&xa->xa_head, *pnext_index, ptr); in __xa_alloc_cyclic()
206 if (*pnext_index == 0 && (xa->xa_flags & XA_FLAGS_ALLOC1) != 0) in __xa_alloc_cyclic()
211 xa_vm_wait_locked(xa); in __xa_alloc_cyclic()
224 xa_alloc_cyclic(struct xarray *xa, uint32_t *pindex, void *ptr, uint32_t mask, in xa_alloc_cyclic() argument
229 xa_lock(xa); in xa_alloc_cyclic()
230 retval = __xa_alloc_cyclic(xa, pindex, ptr, mask, pnext_index, gfp); in xa_alloc_cyclic()
231 xa_unlock(xa); in xa_alloc_cyclic()
237 xa_alloc_cyclic_irq(struct xarray *xa, uint32_t *pindex, void *ptr, in xa_alloc_cyclic_irq() argument
242 xa_lock_irq(xa); in xa_alloc_cyclic_irq()
243 retval = __xa_alloc_cyclic(xa, pindex, ptr, mask, pnext_index, gfp); in xa_alloc_cyclic_irq()
244 xa_unlock_irq(xa); in xa_alloc_cyclic_irq()
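
The cyclic variants (lines 178-244) reuse the same search but resume from *pnext_index instead of always restarting at the bottom, which is why they take the extra pnext_index output parameter visible in the calls at lines 230 and 243. Line 206 shows the wrap-around rule: when the candidate index wraps to 0 on an array with XA_FLAGS_ALLOC1 set, it is bumped to 1 so the reserved index stays unused. xa_alloc_cyclic() and xa_alloc_cyclic_irq() differ only in which lock variant they take; a sketch of the IRQ-safe wrapper, with the full six-argument signature assumed from the call at line 243:

```c
int
xa_alloc_cyclic_irq(struct xarray *xa, uint32_t *pindex, void *ptr,
    uint32_t mask, uint32_t *pnext_index, gfp_t gfp)
{
	int retval;

	/* Same as xa_alloc_cyclic(), but with the IRQ-safe lock variants. */
	xa_lock_irq(xa);
	retval = __xa_alloc_cyclic(xa, pindex, ptr, mask, pnext_index, gfp);
	xa_unlock_irq(xa);

	return (retval);
}
```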
257 __xa_insert(struct xarray *xa, uint32_t index, void *ptr, gfp_t gfp) in __xa_insert() argument
261 XA_ASSERT_LOCKED(xa); in __xa_insert()
265 retval = radix_tree_insert(&xa->xa_head, index, ptr); in __xa_insert()
270 xa_vm_wait_locked(xa); in __xa_insert()
281 xa_insert(struct xarray *xa, uint32_t index, void *ptr, gfp_t gfp) in xa_insert() argument
285 xa_lock(xa); in xa_insert()
286 retval = __xa_insert(xa, index, ptr, gfp); in xa_insert()
287 xa_unlock(xa); in xa_insert()
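
__xa_insert() (lines 257-270) differs from __xa_alloc() in that the caller picks the index, so it only has to cope with radix_tree_insert() failing for lack of memory, in which case it waits via xa_vm_wait_locked() and retries. The insert call and the wait call are in the matches; the retry structure and the M_WAITOK test are assumptions. xa_insert() (lines 281-287) is the locking wrapper.

```c
int
__xa_insert(struct xarray *xa, uint32_t index, void *ptr, gfp_t gfp)
{
	int retval;

	XA_ASSERT_LOCKED(xa);
retry:
	retval = radix_tree_insert(&xa->xa_head, index, ptr);
	/* Wait for memory and retry when allowed to sleep (assumed). */
	if (retval == -ENOMEM && (gfp & M_WAITOK) != 0) {
		xa_vm_wait_locked(xa);
		goto retry;
	}
	return (retval);
}
```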
301 __xa_store(struct xarray *xa, uint32_t index, void *ptr, gfp_t gfp) in __xa_store() argument
305 XA_ASSERT_LOCKED(xa); in __xa_store()
309 retval = radix_tree_store(&xa->xa_head, index, &ptr); in __xa_store()
318 xa_vm_wait_locked(xa); in __xa_store()
331 xa_store(struct xarray *xa, uint32_t index, void *ptr, gfp_t gfp) in xa_store() argument
335 xa_lock(xa); in xa_store()
336 retval = __xa_store(xa, index, ptr, gfp); in xa_store()
337 xa_unlock(xa); in xa_store()
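
__xa_store() (lines 301-318) passes &ptr into radix_tree_store(), which suggests the helper exchanges the slot contents through that pointer so the previous entry can be handed back to the caller. The exchange semantics, the failure return, and the M_WAITOK retry below are assumptions; only the store call and the wait call appear in the matches. xa_store() (lines 331-337) wraps it with the lock as usual.

```c
void *
__xa_store(struct xarray *xa, uint32_t index, void *ptr, gfp_t gfp)
{
	int retval;

	XA_ASSERT_LOCKED(xa);
retry:
	/* Assumed: on success, ptr now holds the entry that was replaced. */
	retval = radix_tree_store(&xa->xa_head, index, &ptr);
	if (retval == -ENOMEM && (gfp & M_WAITOK) != 0) {
		xa_vm_wait_locked(xa);
		goto retry;
	}

	return (retval == 0 ? ptr : NULL);	/* failure convention assumed */
}
```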
346 xa_init_flags(struct xarray *xa, uint32_t flags) in xa_init_flags() argument
348 memset(xa, 0, sizeof(*xa)); in xa_init_flags()
350 mtx_init(&xa->xa_lock, "lkpi-xarray", NULL, MTX_DEF | MTX_RECURSE); in xa_init_flags()
351 xa->xa_head.gfp_mask = GFP_NOWAIT; in xa_init_flags()
352 xa->xa_flags = flags; in xa_init_flags()
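
xa_init_flags() (lines 346-352) is visible almost in full: the structure is zeroed, the backing mutex is created as a recursive MTX_DEF lock named "lkpi-xarray", the embedded radix tree is told never to sleep on its own (GFP_NOWAIT), and the caller's flags are recorded. Only the return type and the braces are filled in below.

```c
void
xa_init_flags(struct xarray *xa, uint32_t flags)
{
	memset(xa, 0, sizeof(*xa));

	/* Recursive mutex backing xa_lock()/xa_unlock(). */
	mtx_init(&xa->xa_lock, "lkpi-xarray", NULL, MTX_DEF | MTX_RECURSE);
	/* The radix tree itself must not sleep; waiting is done by this layer. */
	xa->xa_head.gfp_mask = GFP_NOWAIT;
	xa->xa_flags = flags;
}
```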
360 xa_destroy(struct xarray *xa) in xa_destroy() argument
365 xa_lock(xa); in xa_destroy()
366 radix_tree_for_each_slot(ppslot, &xa->xa_head, &iter, 0) in xa_destroy()
367 radix_tree_iter_delete(&xa->xa_head, &iter, ppslot); in xa_destroy()
368 xa_unlock(xa); in xa_destroy()
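
xa_destroy() (lines 360-368) empties the array by walking every occupied slot with radix_tree_for_each_slot() and deleting it through the iterator, all under the lock. The iterator and slot declarations below are assumptions; whether the backing mutex is also torn down is not visible in the matches, so that step is left out.

```c
void
xa_destroy(struct xarray *xa)
{
	struct radix_tree_iter iter;
	void **ppslot;

	xa_lock(xa);
	/* Delete every remaining entry via the radix tree iterator. */
	radix_tree_for_each_slot(ppslot, &xa->xa_head, &iter, 0)
		radix_tree_iter_delete(&xa->xa_head, &iter, ppslot);
	xa_unlock(xa);
}
```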
385 __xa_empty(struct xarray *xa) in __xa_empty() argument
390 XA_ASSERT_LOCKED(xa); in __xa_empty()
392 return (!radix_tree_iter_find(&xa->xa_head, &iter, &temp)); in __xa_empty()
396 xa_empty(struct xarray *xa) in xa_empty() argument
400 xa_lock(xa); in xa_empty()
401 retval = __xa_empty(xa); in xa_empty()
402 xa_unlock(xa); in xa_empty()
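
__xa_empty() (lines 385-392) answers the emptiness question by asking the iterator for a first occupied slot: if radix_tree_iter_find() finds nothing, the array is empty. The declarations and the bool return type are assumptions; xa_empty() (lines 396-402) is the locked wrapper in the now-familiar pattern.

```c
bool
__xa_empty(struct xarray *xa)
{
	struct radix_tree_iter iter = { .index = 0 };
	void **temp;

	XA_ASSERT_LOCKED(xa);

	/* Empty exactly when no first slot can be found. */
	return (!radix_tree_iter_find(&xa->xa_head, &iter, &temp));
}
```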
413 __xa_next(struct xarray *xa, unsigned long *pindex, bool not_first) in __xa_next() argument
420 XA_ASSERT_LOCKED(xa); in __xa_next()
429 found = radix_tree_iter_find(&xa->xa_head, &iter, &ppslot); in __xa_next()
442 xa_next(struct xarray *xa, unsigned long *pindex, bool not_first) in xa_next() argument
446 xa_lock(xa); in xa_next()
447 retval = __xa_next(xa, pindex, not_first); in xa_next()
448 xa_unlock(xa); in xa_next()
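
__xa_next() and xa_next() (lines 413-448) are the iteration primitives: starting from *pindex (and skipping past it when not_first is set, i.e. on every call after the first), they locate the next occupied slot with radix_tree_iter_find(), return its entry, and write the entry's index back through pindex. The sketch below fills in that surrounding logic as an assumption; only the lock assertion, the iter_find() call, and the locking wrapper appear in the matches. An xa_for_each()-style loop would call xa_next() repeatedly, passing not_first as false on the first pass and true afterwards.

```c
void *
__xa_next(struct xarray *xa, unsigned long *pindex, bool not_first)
{
	struct radix_tree_iter iter = { .index = *pindex };
	void **ppslot;
	bool found;

	XA_ASSERT_LOCKED(xa);

	/* Continue after the previous index, guarding against wrap (assumed). */
	if (not_first) {
		if (iter.index == (unsigned long)-1)
			return (NULL);
		iter.index++;
	}

	found = radix_tree_iter_find(&xa->xa_head, &iter, &ppslot);
	if (!found)
		return (NULL);

	*pindex = iter.index;
	return (*ppslot);
}

void *
xa_next(struct xarray *xa, unsigned long *pindex, bool not_first)
{
	void *retval;

	xa_lock(xa);
	retval = __xa_next(xa, pindex, not_first);
	xa_unlock(xa);

	return (retval);
}
```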