/linux/drivers/dma-buf/

st-dma-resv.c
    40  struct dma_resv resv;  in sanitycheck() local
    53  dma_resv_init(&resv);  in sanitycheck()
    54  r = dma_resv_lock(&resv, NULL);  in sanitycheck()
    58  dma_resv_unlock(&resv);  in sanitycheck()
    59  dma_resv_fini(&resv);  in sanitycheck()
    66  struct dma_resv resv;  in test_signaling() local
    76  dma_resv_init(&resv);  in test_signaling()
    77  r = dma_resv_lock(&resv, NULL);  in test_signaling()
    83  r = dma_resv_reserve_fences(&resv, 1);  in test_signaling()
    89  dma_resv_add_fence(&resv, f, usage);  in test_signaling()
    [all …]
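
The selftest matches above walk the whole reservation-object lifecycle. As a minimal sketch of that sequence (not the selftest itself; the fence `f` comes from the caller and error handling is trimmed):

#include <linux/dma-resv.h>
#include <linux/dma-fence.h>

/* Init, lock, make room, publish a fence, then tear down. */
static int resv_attach_fence(struct dma_resv *resv, struct dma_fence *f)
{
	int r;

	dma_resv_init(resv);

	r = dma_resv_lock(resv, NULL);	/* NULL: no ww_acquire_ctx needed */
	if (r)
		goto out_fini;

	/* Reserve a slot first; dma_resv_add_fence() itself cannot fail. */
	r = dma_resv_reserve_fences(resv, 1);
	if (!r)
		dma_resv_add_fence(resv, f, DMA_RESV_USAGE_WRITE);

	dma_resv_unlock(resv);
out_fini:
	dma_resv_fini(resv);
	return r;
}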
|
/linux/fs/xfs/scrub/

newbt.c
    100  enum xfs_ag_resv_type resv)  in xrep_newbt_init_ag() argument
    106  xnr->resv = resv;  in xrep_newbt_init_ag()
    195  struct xrep_newbt_resv *resv;  in xrep_newbt_add_blocks() local
    198  resv = kmalloc(sizeof(struct xrep_newbt_resv), XCHK_GFP_FLAGS);  in xrep_newbt_add_blocks()
    199  if (!resv)  in xrep_newbt_add_blocks()
    202  INIT_LIST_HEAD(&resv->list);  in xrep_newbt_add_blocks()
    203  resv->agbno = XFS_FSB_TO_AGBNO(mp, args->fsbno);  in xrep_newbt_add_blocks()
    204  resv->len = args->len;  in xrep_newbt_add_blocks()
    205  resv->used = 0;  in xrep_newbt_add_blocks()
    206  resv->pag = xfs_perag_hold(pag);  in xrep_newbt_add_blocks()
    [all …]
|
alloc_repair.c
    533  struct xrep_newbt_resv *resv)  in xrep_abt_dispose_one() argument
    537  xfs_agblock_t free_agbno = resv->agbno + resv->used;  in xrep_abt_dispose_one()
    538  xfs_extlen_t free_aglen = resv->len - resv->used;  in xrep_abt_dispose_one()
    541  ASSERT(pag == resv->pag);  in xrep_abt_dispose_one()
    544  if (resv->used > 0)  in xrep_abt_dispose_one()
    546  xfs_agbno_to_fsb(pag, resv->agbno), resv->used,  in xrep_abt_dispose_one()
    557  trace_xrep_newbt_free_blocks(resv->pag, free_agbno, free_aglen,  in xrep_abt_dispose_one()
    560  error = __xfs_free_extent(sc->tp, resv->pag, free_agbno, free_aglen,  in xrep_abt_dispose_one()
    579  struct xrep_newbt_resv *resv, *n;  in xrep_abt_dispose_reservations() local
    584  list_for_each_entry_safe(resv, n, &ra->new_bnobt.resv_list, list) {  in xrep_abt_dispose_reservations()
    [all …]
|
/linux/fs/xfs/libxfs/

xfs_ag_resv.c
    135  struct xfs_ag_resv *resv;  in __xfs_ag_resv_free() local
    140  resv = xfs_perag_resv(pag, type);  in __xfs_ag_resv_free()
    142  pag_mount(pag)->m_ag_max_usable += resv->ar_asked;  in __xfs_ag_resv_free()
    149  oldresv = resv->ar_orig_reserved;  in __xfs_ag_resv_free()
    151  oldresv = resv->ar_reserved;  in __xfs_ag_resv_free()
    153  resv->ar_reserved = 0;  in __xfs_ag_resv_free()
    154  resv->ar_asked = 0;  in __xfs_ag_resv_free()
    155  resv->ar_orig_reserved = 0;  in __xfs_ag_resv_free()
    175  struct xfs_ag_resv *resv;  in __xfs_ag_resv_init() local
    226  resv = xfs_perag_resv(pag, type);  in __xfs_ag_resv_init()
    [all …]
|
/linux/drivers/gpu/drm/ttm/

ttm_bo.c
    81  dma_resv_assert_held(bo->base.resv);  in ttm_bo_move_to_lru_tail()
    105  dma_resv_assert_held(bo->base.resv);  in ttm_bo_set_bulk_move()
    153  ret = dma_resv_reserve_fences(bo->base.resv, 1);  in ttm_bo_handle_move_mem()
    195  if (bo->base.resv == &bo->base._resv)  in ttm_bo_individualize_resv()
    200  r = dma_resv_copy_fences(&bo->base._resv, bo->base.resv);  in ttm_bo_individualize_resv()
    211  bo->base.resv = &bo->base._resv;  in ttm_bo_individualize_resv()
    220  struct dma_resv *resv = &bo->base._resv;  in ttm_bo_flush_all_fences() local
    224  dma_resv_iter_begin(&cursor, resv, DMA_RESV_USAGE_BOOKKEEP);  in ttm_bo_flush_all_fences()
    244  dma_resv_lock(bo->base.resv, NULL);  in ttm_bo_delayed_delete()
    246  dma_resv_unlock(bo->base.resv);  in ttm_bo_delayed_delete()
    [all …]
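
ttm_bo_flush_all_fences() above iterates every fence on the object. A hedged sketch of that iterator pattern (the function name here is illustrative, not the TTM helper):

#include <linux/dma-resv.h>
#include <linux/dma-fence.h>

/* Walk all fences up to BOOKKEEP usage without holding the resv lock;
 * the iterator restarts itself if the fence list changes underneath. */
static void flush_all_fences(struct dma_resv *resv)
{
	struct dma_resv_iter cursor;
	struct dma_fence *fence;

	dma_resv_iter_begin(&cursor, resv, DMA_RESV_USAGE_BOOKKEEP);
	dma_resv_for_each_fence_unlocked(&cursor, fence)
		dma_fence_enable_sw_signaling(fence);
	dma_resv_iter_end(&cursor);
}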
|
ttm_bo_vm.c
    51  if (dma_resv_test_signaled(bo->base.resv, DMA_RESV_USAGE_KERNEL))  in ttm_bo_vm_fault_idle()
    65  (void)dma_resv_wait_timeout(bo->base.resv,  in ttm_bo_vm_fault_idle()
    68  dma_resv_unlock(bo->base.resv);  in ttm_bo_vm_fault_idle()
    76  err = dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_KERNEL, true,  in ttm_bo_vm_fault_idle()
    127  if (unlikely(!dma_resv_trylock(bo->base.resv))) {  in ttm_bo_vm_reserve()
    137  if (!dma_resv_lock_interruptible(bo->base.resv,  in ttm_bo_vm_reserve()
    139  dma_resv_unlock(bo->base.resv);  in ttm_bo_vm_reserve()
    146  if (dma_resv_lock_interruptible(bo->base.resv, NULL))  in ttm_bo_vm_reserve()
    156  dma_resv_unlock(bo->base.resv);  in ttm_bo_vm_reserve()
    345  dma_resv_unlock(bo->base.resv);  in ttm_bo_vm_fault()
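
The fault-idle path checks cheaply before sleeping. A sketch of that check-then-wait pattern, with an illustrative timeout rather than TTM's actual one:

#include <linux/dma-resv.h>
#include <linux/jiffies.h>

/* Return 0 when all KERNEL-usage fences have signaled, -EBUSY on
 * timeout, or a negative error such as -ERESTARTSYS. */
static int wait_bo_idle(struct dma_resv *resv)
{
	long lret;

	if (dma_resv_test_signaled(resv, DMA_RESV_USAGE_KERNEL))
		return 0;			/* fast path: already idle */

	lret = dma_resv_wait_timeout(resv, DMA_RESV_USAGE_KERNEL,
				     true /* interruptible */,
				     msecs_to_jiffies(100));
	if (lret < 0)
		return lret;
	return lret ? 0 : -EBUSY;		/* 0 jiffies left == timeout */
}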
|
ttm_bo_util.c
    254  fbo->base.base.resv = &fbo->base.base._resv;  in ttm_buffer_object_transfer()
    505  dma_resv_assert_held(bo->base.resv);  in ttm_bo_vmap()
    573  dma_resv_assert_held(bo->base.resv);  in ttm_bo_vunmap()
    593  ret = dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_BOOKKEEP,  in ttm_bo_wait_free_node()
    717  dma_resv_add_fence(bo->base.resv, fence, DMA_RESV_USAGE_KERNEL);  in ttm_bo_move_accel_cleanup()
    776  if (dma_resv_test_signaled(bo->base.resv, DMA_RESV_USAGE_BOOKKEEP)) {  in ttm_bo_pipeline_gutting()
    810  ret = dma_resv_copy_fences(&ghost->base._resv, bo->base.resv);  in ttm_bo_pipeline_gutting()
    813  dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_BOOKKEEP,  in ttm_bo_pipeline_gutting()
    834  if (dma_resv_trylock(bo->base.resv)) {  in ttm_lru_walk_trylock()
    839  if (bo->base.resv == ctx->resv && ctx->allow_res_evict) {  in ttm_lru_walk_trylock()
    [all …]
|
/linux/drivers/gpu/drm/

drm_gem_shmem_helper.c
    193  dma_resv_lock(shmem->base.resv, NULL);  in drm_gem_shmem_release()
    209  dma_resv_unlock(shmem->base.resv);  in drm_gem_shmem_release()
    235  dma_resv_assert_held(shmem->base.resv);  in drm_gem_shmem_get_pages_locked()
    274  dma_resv_assert_held(shmem->base.resv);  in drm_gem_shmem_put_pages_locked()
    294  dma_resv_assert_held(shmem->base.resv);  in drm_gem_shmem_pin_locked()
    311  dma_resv_assert_held(shmem->base.resv);  in drm_gem_shmem_unpin_locked()
    338  ret = dma_resv_lock_interruptible(shmem->base.resv, NULL);  in drm_gem_shmem_pin()
    342  dma_resv_unlock(shmem->base.resv);  in drm_gem_shmem_pin()
    364  dma_resv_lock(shmem->base.resv, NULL);  in drm_gem_shmem_unpin()
    366  dma_resv_unlock(shmem->base.resv);  in drm_gem_shmem_unpin()
    [all …]
/linux/drivers/gpu/drm/virtio/

virtgpu_prime.c
    134  exp_info.resv = obj->resv;  in virtgpu_gem_prime_export()
    155  dma_resv_assert_held(attach->dmabuf->resv);  in virtgpu_dma_buf_import_sgt()
    157  ret = dma_resv_wait_timeout(attach->dmabuf->resv,  in virtgpu_dma_buf_import_sgt()
    190  dma_resv_assert_held(attach->dmabuf->resv);  in virtgpu_dma_buf_unmap()
    212  dma_resv_lock(dmabuf->resv, NULL);  in virtgpu_dma_buf_free_obj()
    214  dma_resv_unlock(dmabuf->resv);  in virtgpu_dma_buf_free_obj()
    234  struct dma_resv *resv = attach->dmabuf->resv;  in virtgpu_dma_buf_init_obj() local
    245  dma_resv_lock(resv, NULL);  in virtgpu_dma_buf_init_obj()
    265  dma_resv_unlock(resv);  in virtgpu_dma_buf_init_obj()
    272  dma_resv_unlock(resv);  in virtgpu_dma_buf_init_obj()
    [all …]
|
/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_dma_buf.c
    114  r = dma_resv_lock(bo->tbo.base.resv, NULL);  in amdgpu_dma_buf_attach()
    120  dma_resv_unlock(bo->tbo.base.resv);  in amdgpu_dma_buf_attach()
    138  dma_resv_assert_held(dmabuf->resv);  in amdgpu_dma_buf_pin()
    416  struct dma_resv *resv = dma_buf->resv;  in amdgpu_dma_buf_create_obj() local
    423  dma_resv_lock(resv, NULL);  in amdgpu_dma_buf_create_obj()
    436  ttm_bo_type_sg, resv, &gobj, 0);  in amdgpu_dma_buf_create_obj()
    444  dma_resv_unlock(resv);  in amdgpu_dma_buf_create_obj()
    448  dma_resv_unlock(resv);  in amdgpu_dma_buf_create_obj()
    464  struct ww_acquire_ctx *ticket = dma_resv_locking_ctx(obj->resv);  in amdgpu_dma_buf_move_notify()
    487  struct dma_resv *resv = vm->root.bo->tbo.base.resv;  in amdgpu_dma_buf_move_notify() local
    [all …]
|
amdgpu_eviction_fence.c
    201  struct dma_resv *resv = bo->tbo.base.resv;  in amdgpu_eviction_fence_attach() local
    204  if (!resv)  in amdgpu_eviction_fence_attach()
    207  ret = dma_resv_reserve_fences(resv, 1);  in amdgpu_eviction_fence_attach()
    216  dma_resv_add_fence(resv, &ev_fence->base, DMA_RESV_USAGE_BOOKKEEP);  in amdgpu_eviction_fence_attach()
    227  dma_resv_replace_fences(bo->tbo.base.resv, evf_mgr->ev_fence_ctx,  in amdgpu_eviction_fence_detach()
|
/linux/drivers/gpu/drm/ttm/tests/

ttm_bo_validate_test.c
    66  struct dma_resv *resv,  in dma_resv_kunit_active_fence_init() argument
    74  dma_resv_lock(resv, NULL);  in dma_resv_kunit_active_fence_init()
    75  dma_resv_reserve_fences(resv, 1);  in dma_resv_kunit_active_fence_init()
    76  dma_resv_add_fence(resv, fence, usage);  in dma_resv_kunit_active_fence_init()
    77  dma_resv_unlock(resv);  in dma_resv_kunit_active_fence_init()
    127  dma_resv_unlock(bo->base.resv);  in ttm_bo_init_reserved_sys_man()
    139  KUNIT_EXPECT_NOT_NULL(test, (void *)bo->base.resv->fences);  in ttm_bo_init_reserved_sys_man()
    176  dma_resv_unlock(bo->base.resv);  in ttm_bo_init_reserved_mock_man()
    202  struct dma_resv resv;  in ttm_bo_init_reserved_resv() local
    212  dma_resv_init(&resv);  in ttm_bo_init_reserved_resv()
    [all …]
|
ttm_tt_test.c
    161  dma_resv_lock(bo->base.resv, NULL);  in ttm_tt_create_basic()
    163  dma_resv_unlock(bo->base.resv);  in ttm_tt_create_basic()
    180  dma_resv_lock(bo->base.resv, NULL);  in ttm_tt_create_invalid_bo_type()
    182  dma_resv_unlock(bo->base.resv);  in ttm_tt_create_invalid_bo_type()
    204  dma_resv_lock(bo->base.resv, NULL);  in ttm_tt_create_ttm_exists()
    206  dma_resv_unlock(bo->base.resv);  in ttm_tt_create_ttm_exists()
    234  dma_resv_lock(bo->base.resv, NULL);  in ttm_tt_create_failed()
    236  dma_resv_unlock(bo->base.resv);  in ttm_tt_create_failed()
    249  dma_resv_lock(bo->base.resv, NULL);  in ttm_tt_destroy_basic()
    251  dma_resv_unlock(bo->base.resv);  in ttm_tt_destroy_basic()
|
/linux/tools/include/uapi/linux/

io_uring.h
    493  __u32 resv[3];  member
    577  __u32 resv;  member
    597  __u32 resv;  member
    603  __u32 resv;  member
    617  __u8 resv;  member
    625  __u16 resv;  member
    637  __u8 resv;  member
    645  __u16 resv;  member
    684  __u64 resv[3];  member
    733  __u64 resv;  member
|
/linux/drivers/gpu/drm/radeon/

radeon_prime.c
    49  struct dma_resv *resv = attach->dmabuf->resv;  in radeon_gem_prime_import_sg_table() local
    54  dma_resv_lock(resv, NULL);  in radeon_gem_prime_import_sg_table()
    56  RADEON_GEM_DOMAIN_GTT, 0, sg, resv, &bo);  in radeon_gem_prime_import_sg_table()
    57  dma_resv_unlock(resv);  in radeon_gem_prime_import_sg_table()
|
radeon_benchmark.c
    38  struct dma_resv *resv)  in radeon_benchmark_do_move() argument
    51  resv);  in radeon_benchmark_do_move()
    56  resv);  in radeon_benchmark_do_move()
    125  dobj->tbo.base.resv);  in radeon_benchmark_move()
    136  dobj->tbo.base.resv);  in radeon_benchmark_move()
|
/linux/kernel/irq/

affinity.c
    112  unsigned int resv = affd->pre_vectors + affd->post_vectors;  in irq_calc_affinity_vectors() local
    115  if (resv > minvec)  in irq_calc_affinity_vectors()
    119  set_vecs = maxvec - resv;  in irq_calc_affinity_vectors()
    126  return resv + min(set_vecs, maxvec - resv);  in irq_calc_affinity_vectors()
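
irq_calc_affinity_vectors() sets aside the pre/post vectors before spreading the rest across CPUs. A user-space re-derivation of that arithmetic (here `set_vecs` stands in for the CPU-derived value the kernel computes):

#include <stdio.h>

static unsigned int calc(unsigned int minvec, unsigned int maxvec,
			 unsigned int pre, unsigned int post,
			 unsigned int set_vecs)
{
	unsigned int resv = pre + post;	/* vectors exempt from spreading */

	if (resv > minvec)
		return 0;		/* can't honor even the minimum */

	/* spreadable vectors are capped by what maxvec leaves over */
	if (set_vecs > maxvec - resv)
		set_vecs = maxvec - resv;
	return resv + set_vecs;
}

int main(void)
{
	/* 1 pre + 1 post vector, at most 32 total, 16 CPUs to cover */
	printf("%u\n", calc(2, 32, 1, 1, 16));	/* prints 18 */
	return 0;
}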
|
/linux/drivers/infiniband/core/

umem_dmabuf.c
    24  dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_map_pages()
    75  ret = dma_resv_wait_timeout(umem_dmabuf->attach->dmabuf->resv,  in ib_umem_dmabuf_map_pages()
    88  dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_unmap_pages()
    216  dma_resv_lock(umem_dmabuf->attach->dmabuf->resv, NULL);  in ib_umem_dmabuf_get_pinned_with_dma_device()
    225  dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_get_pinned_with_dma_device()
    232  dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_get_pinned_with_dma_device()
    252  dma_resv_lock(dmabuf->resv, NULL);  in ib_umem_dmabuf_revoke()
    262  dma_resv_unlock(dmabuf->resv);  in ib_umem_dmabuf_revoke()
|
/linux/include/uapi/linux/

io_uring.h
    602  __u32 resv[3];  member
    719  __u32 resv;  member
    764  __u32 resv;  member
    770  __u32 resv;  member
    784  __u8 resv;  member
    792  __u16 resv;  member
    804  __u8 resv;  member
    831  __u16 resv;  member
    878  __u64 resv[3];  member
    885  __u32 resv[8];  member
    [all …]
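
These `resv` members are reserved UAPI padding: io_uring rejects calls whose reserved fields are nonzero (e.g. io_uring_setup() returns -EINVAL), which keeps the bits free for future flags. A user-space sketch of the usual zero-the-struct idiom (the wrapper name is illustrative):

#include <string.h>
#include <unistd.h>
#include <sys/syscall.h>
#include <linux/io_uring.h>

/* memset() clears p.resv[] along with everything else, so the kernel's
 * reserved-field checks pass. */
static int setup_ring(unsigned int entries)
{
	struct io_uring_params p;

	memset(&p, 0, sizeof(p));
	return (int)syscall(__NR_io_uring_setup, entries, &p);
}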
|
/linux/mm/

hugetlb.c
    470  get_file_region_entry_from_cache(struct resv_map *resv, long from, long to)  in get_file_region_entry_from_cache() argument
    474  VM_BUG_ON(resv->region_cache_count <= 0);  in get_file_region_entry_from_cache()
    476  resv->region_cache_count--;  in get_file_region_entry_from_cache()
    477  nrg = list_first_entry(&resv->region_cache, struct file_region, link);  in get_file_region_entry_from_cache()
    500  struct resv_map *resv,  in record_hugetlb_cgroup_uncharge_info() argument
    519  if (!resv->pages_per_hpage)  in record_hugetlb_cgroup_uncharge_info()
    520  resv->pages_per_hpage = pages_per_huge_page(h);  in record_hugetlb_cgroup_uncharge_info()
    524  VM_BUG_ON(resv->pages_per_hpage != pages_per_huge_page(h));  in record_hugetlb_cgroup_uncharge_info()
    552  static void coalesce_file_region(struct resv_map *resv, struct file_region *rg)  in coalesce_file_region() argument
    557  if (&prg->link != &resv->regions && prg->to == rg->from &&  in coalesce_file_region()
    [all …]
|
/linux/drivers/vfio/pci/

vfio_pci_dmabuf.c
    43  dma_resv_assert_held(priv->dmabuf->resv);  in vfio_pci_dma_buf_map()
    101  dma_resv_assert_held(attachment->dmabuf->resv);  in vfio_pci_dma_buf_iommufd_map()
    281  dma_resv_lock(priv->dmabuf->resv, NULL);  in vfio_pci_core_feature_dma_buf()
    284  dma_resv_unlock(priv->dmabuf->resv);  in vfio_pci_core_feature_dma_buf()
    321  dma_resv_lock(priv->dmabuf->resv, NULL);  in vfio_pci_dma_buf_move()
    324  dma_resv_unlock(priv->dmabuf->resv);  in vfio_pci_dma_buf_move()
    340  dma_resv_lock(priv->dmabuf->resv, NULL);  in vfio_pci_dma_buf_cleanup()
    345  dma_resv_unlock(priv->dmabuf->resv);  in vfio_pci_dma_buf_cleanup()
|
/linux/include/linux/sunrpc/

svc.h
    518  struct kvec *resv = buf->head;  in svcxdr_init_encode() local
    523  xdr->iov = resv;  in svcxdr_init_encode()
    524  xdr->p = resv->iov_base + resv->iov_len;  in svcxdr_init_encode()
    525  xdr->end = resv->iov_base + PAGE_SIZE;  in svcxdr_init_encode()
    526  buf->len = resv->iov_len;  in svcxdr_init_encode()
    565  struct kvec *resv = buf->head;  in svcxdr_set_auth_slack() local
    572  WARN_ON(xdr->iov != resv);  in svcxdr_set_auth_slack()
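
svcxdr_init_encode() above positions the encode cursor just past the bytes already in the head kvec and bounds it by the end of the head page. A stripped-down, user-space analogue of that pointer arithmetic (struct names here are illustrative, not the sunrpc types):

#include <stddef.h>

#define PAGE_SIZE 4096

struct kvec { void *iov_base; size_t iov_len; };
struct enc_cursor { unsigned char *p, *end; };

/* Continue encoding where the head buffer ends, never past its page. */
static void enc_init(struct enc_cursor *c, const struct kvec *head)
{
	c->p   = (unsigned char *)head->iov_base + head->iov_len;
	c->end = (unsigned char *)head->iov_base + PAGE_SIZE;
}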
|
/linux/drivers/net/ethernet/netronome/nfp/crypto/

fw.h
    17  u8 resv[2];  member
    37  u8 resv[3];  member
    86  u8 resv[3];  member
|
/linux/Documentation/gpu/

drm-vm-bind-locking.rst
    96  * The ``gpu_vm->resv`` lock. Protects the gpu_vm's list of gpu_vmas needing
    166  dma_resv_lock(gpu_vm->resv);
    186  add_dependencies(&gpu_job, &gpu_vm->resv);
    189  add_dma_fence(job_dma_fence, &gpu_vm->resv);
    190  dma_resv_unlock(gpu_vm->resv);
    206  dma_resv_lock(obj->resv);
    210  add_dependencies(&eviction_job, &obj->resv);
    212  add_dma_fence(&obj->resv, job_dma_fence);
    214  dma_resv_unlock(&obj->resv);
    218  dma_resv lock such that ``obj->resv == gpu_vm->resv``.
    [all …]
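
The excerpt's point about ``obj->resv == gpu_vm->resv`` is that VM-private objects can share the VM's reservation object, so a single lock covers the VM and all of those objects. A hedged sketch of what that buys (the ``my_`` types are illustrative, not helpers the document defines):

#include <linux/dma-resv.h>
#include <linux/dma-fence.h>

struct my_vm { struct dma_resv resv; /* ... */ };

/* With every VM-private BO pointing its resv at &vm->resv, locking the
 * VM once is enough to publish a job fence for all of those BOs. */
static int my_vm_publish_job_fence(struct my_vm *vm, struct dma_fence *f)
{
	int ret;

	ret = dma_resv_lock(&vm->resv, NULL);
	if (ret)
		return ret;

	ret = dma_resv_reserve_fences(&vm->resv, 1);
	if (!ret)
		dma_resv_add_fence(&vm->resv, f, DMA_RESV_USAGE_BOOKKEEP);

	dma_resv_unlock(&vm->resv);
	return ret;
}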
|
/linux/include/linux/

hugetlb_cgroup.h
    146  extern void hugetlb_cgroup_uncharge_counter(struct resv_map *resv,
    150  extern void hugetlb_cgroup_uncharge_file_region(struct resv_map *resv,
    160  static inline void hugetlb_cgroup_uncharge_file_region(struct resv_map *resv,  in hugetlb_cgroup_uncharge_file_region() argument
    255  static inline void hugetlb_cgroup_uncharge_counter(struct resv_map *resv,  in hugetlb_cgroup_uncharge_counter() argument
|