/linux/drivers/dma-buf/

st-dma-resv.c
     40  struct dma_resv resv;  in sanitycheck() (local)
     53  dma_resv_init(&resv);  in sanitycheck()
     54  r = dma_resv_lock(&resv, NULL);  in sanitycheck()
     58  dma_resv_unlock(&resv);  in sanitycheck()
     59  dma_resv_fini(&resv);  in sanitycheck()
     66  struct dma_resv resv;  in test_signaling() (local)
     76  dma_resv_init(&resv);  in test_signaling()
     77  r = dma_resv_lock(&resv, NULL);  in test_signaling()
     83  r = dma_resv_reserve_fences(&resv, 1);  in test_signaling()
     89  dma_resv_add_fence(&resv, f, usage);  in test_signaling()
     [all …]

dma-buf.c
    185  if (dmabuf->resv == (struct dma_resv *)&dmabuf[1])  in dma_buf_release()
    186  dma_resv_fini(dmabuf->resv);  in dma_buf_release()
    318  static bool dma_buf_poll_add_cb(struct dma_resv *resv, bool write,  in dma_buf_poll_add_cb() (argument)
    325  dma_resv_for_each_fence(&cursor, resv, dma_resv_usage_rw(write),  in dma_buf_poll_add_cb()
    340  struct dma_resv *resv;  in dma_buf_poll() (local)
    344  if (!dmabuf || !dmabuf->resv)  in dma_buf_poll()
    347  resv = dmabuf->resv;  in dma_buf_poll()
    355  dma_resv_lock(resv, NULL);  in dma_buf_poll()
    372  if (!dma_buf_poll_add_cb(resv, true, dcb))  in dma_buf_poll()
    395  if (!dma_buf_poll_add_cb(resv, false, dcb))  in dma_buf_poll()
    [all …]

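The st-dma-resv.c hits above walk the whole dma_resv lifecycle. A minimal sketch of that pattern; the fence and usage value are caller-supplied placeholders, not taken from the selftest:

#include <linux/dma-resv.h>
#include <linux/dma-fence.h>

/* Sketch of the init/lock/reserve/add/unlock/fini sequence. */
static int resv_attach_fence_sketch(struct dma_fence *f,
				    enum dma_resv_usage usage)
{
	struct dma_resv resv;
	int r;

	dma_resv_init(&resv);

	r = dma_resv_lock(&resv, NULL);		/* NULL: no ww_acquire_ctx */
	if (r)
		goto out_fini;

	/* Make room for one fence slot; this may allocate and can fail. */
	r = dma_resv_reserve_fences(&resv, 1);
	if (!r)
		dma_resv_add_fence(&resv, f, usage);

	dma_resv_unlock(&resv);
out_fini:
	dma_resv_fini(&resv);
	return r;
}
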
/linux/fs/xfs/scrub/

newbt.c
    100  enum xfs_ag_resv_type resv)  in xrep_newbt_init_ag() (argument)
    106  xnr->resv = resv;  in xrep_newbt_init_ag()
    195  struct xrep_newbt_resv *resv;  in xrep_newbt_add_blocks() (local)
    198  resv = kmalloc_obj(struct xrep_newbt_resv, XCHK_GFP_FLAGS);  in xrep_newbt_add_blocks()
    199  if (!resv)  in xrep_newbt_add_blocks()
    202  INIT_LIST_HEAD(&resv->list);  in xrep_newbt_add_blocks()
    203  resv->agbno = XFS_FSB_TO_AGBNO(mp, args->fsbno);  in xrep_newbt_add_blocks()
    204  resv->len = args->len;  in xrep_newbt_add_blocks()
    205  resv->used = 0;  in xrep_newbt_add_blocks()
    206  resv->pag = xfs_perag_hold(pag);  in xrep_newbt_add_blocks()
    [all …]

alloc_repair.c
    533  struct xrep_newbt_resv *resv)  in xrep_abt_dispose_one() (argument)
    537  xfs_agblock_t free_agbno = resv->agbno + resv->used;  in xrep_abt_dispose_one()
    538  xfs_extlen_t free_aglen = resv->len - resv->used;  in xrep_abt_dispose_one()
    541  ASSERT(pag == resv->pag);  in xrep_abt_dispose_one()
    544  if (resv->used > 0)  in xrep_abt_dispose_one()
    546  xfs_agbno_to_fsb(pag, resv->agbno), resv->used,  in xrep_abt_dispose_one()
    557  trace_xrep_newbt_free_blocks(resv->pag, free_agbno, free_aglen,  in xrep_abt_dispose_one()
    560  error = __xfs_free_extent(sc->tp, resv->pag, free_agbno, free_aglen,  in xrep_abt_dispose_one()
    579  struct xrep_newbt_resv *resv, *n;  in xrep_abt_dispose_reservations() (local)
    584  list_for_each_entry_safe(resv, n, &ra->new_bnobt.resv_list, list) {  in xrep_abt_dispose_reservations()
    [all …]

/linux/drivers/gpu/drm/ttm/tests/

ttm_bo_test.c
     70  dma_resv_unlock(bo->base.resv);  in ttm_bo_reserve_optimistic_no_ticket()
     83  dma_resv_lock(bo->base.resv, NULL);  in ttm_bo_reserve_locked_no_sleep()
     86  dma_resv_unlock(bo->base.resv);  in ttm_bo_reserve_locked_no_sleep()
    126  dma_resv_unlock(bo->base.resv);  in ttm_bo_reserve_double_resv()
    151  ww_mutex_base_lock(&bo2->base.resv->lock.base);  in ttm_bo_reserve_deadlock()
    154  lock_release(&bo2->base.resv->lock.base.dep_map, 1);  in ttm_bo_reserve_deadlock()
    156  bo2->base.resv->lock.ctx = &ctx2;  in ttm_bo_reserve_deadlock()
    166  dma_resv_unlock(bo1->base.resv);  in ttm_bo_reserve_deadlock()
    225  dma_resv_lock(bo->base.resv, NULL);  in ttm_bo_reserve_interrupted()
    231  dma_resv_unlock(bo->base.resv);  in ttm_bo_reserve_interrupted()
    [all …]

ttm_tt_test.c
    161  dma_resv_lock(bo->base.resv, NULL);  in ttm_tt_create_basic()
    163  dma_resv_unlock(bo->base.resv);  in ttm_tt_create_basic()
    180  dma_resv_lock(bo->base.resv, NULL);  in ttm_tt_create_invalid_bo_type()
    182  dma_resv_unlock(bo->base.resv);  in ttm_tt_create_invalid_bo_type()
    204  dma_resv_lock(bo->base.resv, NULL);  in ttm_tt_create_ttm_exists()
    206  dma_resv_unlock(bo->base.resv);  in ttm_tt_create_ttm_exists()
    234  dma_resv_lock(bo->base.resv, NULL);  in ttm_tt_create_failed()
    236  dma_resv_unlock(bo->base.resv);  in ttm_tt_create_failed()
    249  dma_resv_lock(bo->base.resv, NULL);  in ttm_tt_destroy_basic()
    251  dma_resv_unlock(bo->base.resv);  in ttm_tt_destroy_basic()

/linux/drivers/gpu/drm/

drm_gem_shmem_helper.c
    175  dma_resv_lock(shmem->base.resv, NULL);  in drm_gem_shmem_release()
    191  dma_resv_unlock(shmem->base.resv);  in drm_gem_shmem_release()
    217  dma_resv_assert_held(shmem->base.resv);  in drm_gem_shmem_get_pages_locked()
    256  dma_resv_assert_held(shmem->base.resv);  in drm_gem_shmem_put_pages_locked()
    276  dma_resv_assert_held(shmem->base.resv);  in drm_gem_shmem_pin_locked()
    293  dma_resv_assert_held(shmem->base.resv);  in drm_gem_shmem_unpin_locked()
    320  ret = dma_resv_lock_interruptible(shmem->base.resv, NULL);  in drm_gem_shmem_pin()
    324  dma_resv_unlock(shmem->base.resv);  in drm_gem_shmem_pin()
    346  dma_resv_lock(shmem->base.resv, NULL);  in drm_gem_shmem_unpin()
    348  dma_resv_unlock(shmem->base.resv);  in drm_gem_shmem_unpin()
    [all …]

drm_gem.c
    232  if (!obj->resv)  in drm_gem_private_object_init()
    233  obj->resv = &obj->_resv;  in drm_gem_private_object_init()
    899  ret = dma_resv_wait_timeout(obj->resv, dma_resv_usage_rw(wait_all),  in drm_gem_dma_resv_wait()
   1382  dma_resv_assert_held(obj->resv);  in drm_gem_vmap_locked()
   1399  dma_resv_assert_held(obj->resv);  in drm_gem_vunmap_locked()
   1414  dma_resv_lock(obj->resv, NULL);  in drm_gem_lock()
   1420  dma_resv_unlock(obj->resv);  in drm_gem_unlock()
   1428  dma_resv_lock(obj->resv, NULL);  in drm_gem_vmap()
   1430  dma_resv_unlock(obj->resv);  in drm_gem_vmap()
   1438  dma_resv_lock(obj->resv, NULL);  in drm_gem_vunmap()
    [all …]

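The drm_gem.c hits include drm_gem_dma_resv_wait(), which waits on an object's implicit fences. A sketch of that style of wait; the timeout handling and error mapping here are illustrative, not copied from the helper:

#include <linux/dma-resv.h>
#include <drm/drm_gem.h>

static int gem_wait_idle_sketch(struct drm_gem_object *obj, bool wait_all,
				unsigned long timeout_jiffies)
{
	long ret;

	/*
	 * dma_resv_usage_rw(wait_all) selects the usage class that covers
	 * all fences when wait_all is true, or only the write/exclusive
	 * fences otherwise.
	 */
	ret = dma_resv_wait_timeout(obj->resv, dma_resv_usage_rw(wait_all),
				    true, timeout_jiffies);
	if (ret == 0)
		return -ETIME;		/* timed out, fences still pending */
	if (ret < 0)
		return ret;		/* e.g. -ERESTARTSYS */
	return 0;
}
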
/linux/drivers/gpu/drm/ttm/

ttm_bo.c
     81  dma_resv_assert_held(bo->base.resv);  in ttm_bo_move_to_lru_tail()
    105  dma_resv_assert_held(bo->base.resv);  in ttm_bo_set_bulk_move()
    153  ret = dma_resv_reserve_fences(bo->base.resv, 1);  in ttm_bo_handle_move_mem()
    195  if (bo->base.resv == &bo->base._resv)  in ttm_bo_individualize_resv()
    200  r = dma_resv_copy_fences(&bo->base._resv, bo->base.resv);  in ttm_bo_individualize_resv()
    211  bo->base.resv = &bo->base._resv;  in ttm_bo_individualize_resv()
    220  struct dma_resv *resv = &bo->base._resv;  in ttm_bo_flush_all_fences() (local)
    224  dma_resv_iter_begin(&cursor, resv, DMA_RESV_USAGE_BOOKKEEP);  in ttm_bo_flush_all_fences()
    244  dma_resv_lock(bo->base.resv, NULL);  in ttm_bo_delayed_delete()
    246  dma_resv_unlock(bo->base.resv);  in ttm_bo_delayed_delete()
    [all …]

ttm_bo_util.c
    254  fbo->base.base.resv = &fbo->base.base._resv;  in ttm_buffer_object_transfer()
    503  dma_resv_assert_held(bo->base.resv);  in ttm_bo_vmap()
    568  dma_resv_assert_held(bo->base.resv);  in ttm_bo_vunmap()
    588  ret = dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_BOOKKEEP,  in ttm_bo_wait_free_node()
    711  dma_resv_add_fence(bo->base.resv, fence, DMA_RESV_USAGE_KERNEL);  in ttm_bo_move_accel_cleanup()
    770  if (dma_resv_test_signaled(bo->base.resv, DMA_RESV_USAGE_BOOKKEEP)) {  in ttm_bo_pipeline_gutting()
    804  ret = dma_resv_copy_fences(&ghost->base._resv, bo->base.resv);  in ttm_bo_pipeline_gutting()
    807  dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_BOOKKEEP,  in ttm_bo_pipeline_gutting()
    828  if (dma_resv_trylock(bo->base.resv)) {  in ttm_lru_walk_trylock()
    833  if (bo->base.resv == ctx->resv && ctx->allow_res_evict) {  in ttm_lru_walk_trylock()
    [all …]

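The ttm_bo_flush_all_fences() hits above use the unlocked fence iterator. A sketch of that iteration pattern over every fence attached to a reservation object, including bookkeeping fences:

#include <linux/dma-resv.h>
#include <linux/dma-fence.h>

/* Kick software signaling for every fence still attached to @resv. */
static void flush_all_fences_sketch(struct dma_resv *resv)
{
	struct dma_resv_iter cursor;
	struct dma_fence *fence;

	dma_resv_iter_begin(&cursor, resv, DMA_RESV_USAGE_BOOKKEEP);
	dma_resv_for_each_fence_unlocked(&cursor, fence) {
		if (!fence->ops->signaled)
			dma_fence_enable_sw_signaling(fence);
	}
	dma_resv_iter_end(&cursor);
}
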
/linux/drivers/gpu/drm/virtio/

virtgpu_prime.c
    134  exp_info.resv = obj->resv;  in virtgpu_gem_prime_export()
    155  dma_resv_assert_held(attach->dmabuf->resv);  in virtgpu_dma_buf_import_sgt()
    157  ret = dma_resv_wait_timeout(attach->dmabuf->resv,  in virtgpu_dma_buf_import_sgt()
    188  dma_resv_assert_held(attach->dmabuf->resv);  in virtgpu_dma_buf_unmap()
    210  dma_resv_lock(dmabuf->resv, NULL);  in virtgpu_dma_buf_free_obj()
    212  dma_resv_unlock(dmabuf->resv);  in virtgpu_dma_buf_free_obj()
    232  struct dma_resv *resv = attach->dmabuf->resv;  in virtgpu_dma_buf_init_obj() (local)
    243  dma_resv_lock(resv, NULL);  in virtgpu_dma_buf_init_obj()
    263  dma_resv_unlock(resv);  in virtgpu_dma_buf_init_obj()
    270  dma_resv_unlock(resv);  in virtgpu_dma_buf_init_obj()
    [all …]

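The virtgpu_gem_prime_export() hit assigns exp_info.resv = obj->resv so the exported dma-buf shares the GEM object's reservation object. A sketch of an exporter doing the same; the ops table and flags are caller-supplied placeholders:

#include <linux/dma-buf.h>
#include <drm/drm_gem.h>

static struct dma_buf *gem_prime_export_sketch(struct drm_gem_object *obj,
					       const struct dma_buf_ops *ops,
					       int flags)
{
	DEFINE_DMA_BUF_EXPORT_INFO(exp_info);

	exp_info.ops = ops;
	exp_info.size = obj->size;
	exp_info.flags = flags;
	exp_info.priv = obj;
	exp_info.resv = obj->resv;	/* importer and GEM object share one resv */

	return dma_buf_export(&exp_info);
}
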
/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_dma_buf.c
    103  r = dma_resv_lock(bo->tbo.base.resv, NULL);  in amdgpu_dma_buf_attach()
    109  dma_resv_unlock(bo->tbo.base.resv);  in amdgpu_dma_buf_attach()
    127  dma_resv_assert_held(dmabuf->resv);  in amdgpu_dma_buf_pin()
    422  struct dma_resv *resv = dma_buf->resv;  in amdgpu_dma_buf_create_obj() (local)
    429  dma_resv_lock(resv, NULL);  in amdgpu_dma_buf_create_obj()
    442  ttm_bo_type_sg, resv, &gobj, 0);  in amdgpu_dma_buf_create_obj()
    450  dma_resv_unlock(resv);  in amdgpu_dma_buf_create_obj()
    454  dma_resv_unlock(resv);  in amdgpu_dma_buf_create_obj()
    470  struct ww_acquire_ctx *ticket = dma_resv_locking_ctx(obj->resv);  in amdgpu_dma_buf_move_notify()
    493  struct dma_resv *resv = vm->root.bo->tbo.base.resv;  in amdgpu_dma_buf_move_notify() (local)
    [all …]

amdgpu_eviction_fence.c
    201  struct dma_resv *resv = bo->tbo.base.resv;  in amdgpu_eviction_fence_attach() (local)
    204  if (!resv)  in amdgpu_eviction_fence_attach()
    207  ret = dma_resv_reserve_fences(resv, 1);  in amdgpu_eviction_fence_attach()
    216  dma_resv_add_fence(resv, &ev_fence->base, DMA_RESV_USAGE_BOOKKEEP);  in amdgpu_eviction_fence_attach()
    227  dma_resv_replace_fences(bo->tbo.base.resv, evf_mgr->ev_fence_ctx,  in amdgpu_eviction_fence_detach()

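The amdgpu_eviction_fence_detach() hit is truncated, but it calls dma_resv_replace_fences() with the eviction-fence context. One plausible shape of such a call, assuming a signaled stub fence is an acceptable replacement (an assumption, not visible in the hit):

#include <linux/dma-resv.h>
#include <linux/dma-fence.h>

/* Replace every fence added from @context; the caller holds the resv lock. */
static void drop_context_fences_sketch(struct dma_resv *resv, u64 context)
{
	dma_resv_assert_held(resv);
	dma_resv_replace_fences(resv, context, dma_fence_get_stub(),
				DMA_RESV_USAGE_BOOKKEEP);
}
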
/linux/drivers/gpu/drm/vgem/

vgem_fence.c
    116  struct dma_resv *resv;  in vgem_fence_attach_ioctl() (local)
    139  resv = obj->resv;  in vgem_fence_attach_ioctl()
    141  if (!dma_resv_test_signaled(resv, usage)) {  in vgem_fence_attach_ioctl()
    147  dma_resv_lock(resv, NULL);  in vgem_fence_attach_ioctl()
    148  ret = dma_resv_reserve_fences(resv, 1);  in vgem_fence_attach_ioctl()
    150  dma_resv_add_fence(resv, fence, arg->flags & VGEM_FENCE_WRITE ?  in vgem_fence_attach_ioctl()
    152  dma_resv_unlock(resv);  in vgem_fence_attach_ioctl()

/linux/tools/include/uapi/linux/

io_uring.h
    493  __u32 resv[3];  (member)
    577  __u32 resv;  (member)
    597  __u32 resv;  (member)
    603  __u32 resv;  (member)
    617  __u8 resv;  (member)
    625  __u16 resv;  (member)
    637  __u8 resv;  (member)
    645  __u16 resv;  (member)
    684  __u64 resv[3];  (member)
    733  __u64 resv;  (member)

/linux/drivers/gpu/drm/radeon/

radeon_prime.c
     49  struct dma_resv *resv = attach->dmabuf->resv;  in radeon_gem_prime_import_sg_table() (local)
     54  dma_resv_lock(resv, NULL);  in radeon_gem_prime_import_sg_table()
     56  RADEON_GEM_DOMAIN_GTT, 0, sg, resv, &bo);  in radeon_gem_prime_import_sg_table()
     57  dma_resv_unlock(resv);  in radeon_gem_prime_import_sg_table()

radeon_benchmark.c
     38  struct dma_resv *resv)  in radeon_benchmark_do_move() (argument)
     51  resv);  in radeon_benchmark_do_move()
     56  resv);  in radeon_benchmark_do_move()
    125  dobj->tbo.base.resv);  in radeon_benchmark_move()
    136  dobj->tbo.base.resv);  in radeon_benchmark_move()

/linux/include/uapi/linux/

io_uring.h
    615  __u32 resv[3];  (member)
    735  __u32 resv;  (member)
    780  __u32 resv;  (member)
    786  __u32 resv;  (member)
    800  __u8 resv;  (member)
    808  __u16 resv;  (member)
    820  __u8 resv;  (member)
    827  __u32 resv[3];  (member)
    854  __u16 resv;  (member)
    901  __u64 resv[3];  (member)
    [all …]

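All of the io_uring.h hits above are reserved padding fields in uapi structs. As a rule of thumb the kernel rejects setup and register calls whose resv fields are non-zero, so userspace zeroes the structs before filling them in; a minimal userspace sketch, treating the exact set of checked fields as unspecified:

#include <string.h>
#include <unistd.h>
#include <sys/syscall.h>
#include <linux/io_uring.h>

int setup_ring_sketch(unsigned int entries)
{
	struct io_uring_params p;

	memset(&p, 0, sizeof(p));	/* clears p.resv[] along with everything else */
	return (int)syscall(__NR_io_uring_setup, entries, &p);
}
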
/linux/kernel/irq/

affinity.c
    112  unsigned int resv = affd->pre_vectors + affd->post_vectors;  in irq_calc_affinity_vectors() (local)
    115  if (resv > minvec)  in irq_calc_affinity_vectors()
    119  set_vecs = maxvec - resv;  in irq_calc_affinity_vectors()
    126  return resv + min(set_vecs, maxvec - resv);  in irq_calc_affinity_vectors()

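The irq_calc_affinity_vectors() hits show the reserved-vector arithmetic. A stand-alone sketch of that calculation; set_vecs is passed in here, whereas in the kernel it is derived from the affinity descriptor or the possible-CPU count:

#include <linux/minmax.h>

static unsigned int calc_affinity_vectors_sketch(unsigned int minvec,
						 unsigned int maxvec,
						 unsigned int pre_vectors,
						 unsigned int post_vectors,
						 unsigned int set_vecs)
{
	unsigned int resv = pre_vectors + post_vectors;

	/* The reserved (non-spread) vectors alone do not fit: give up. */
	if (resv > minvec)
		return 0;

	/* Reserved vectors plus as many spread vectors as maxvec allows. */
	return resv + min(set_vecs, maxvec - resv);
}

/* Example: pre=1, post=0, maxvec=32, 8 possible CPUs -> 1 + min(8, 31) = 9. */
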
/linux/drivers/infiniband/core/

umem_dmabuf.c
     24  dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_map_pages()
     75  ret = dma_resv_wait_timeout(umem_dmabuf->attach->dmabuf->resv,  in ib_umem_dmabuf_map_pages()
     88  dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_unmap_pages()
    213  dma_resv_lock(umem_dmabuf->attach->dmabuf->resv, NULL);  in ib_umem_dmabuf_get_pinned_with_dma_device()
    222  dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_get_pinned_with_dma_device()
    227  dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_get_pinned_with_dma_device()
    247  dma_resv_lock(dmabuf->resv, NULL);  in ib_umem_dmabuf_revoke()
    257  dma_resv_unlock(dmabuf->resv);  in ib_umem_dmabuf_revoke()

uverbs_std_types_dmabuf.c
     31  dma_resv_assert_held(priv->dmabuf->resv);  in uverbs_dmabuf_map()
     52  dma_resv_assert_held(priv->dmabuf->resv);  in uverbs_dmabuf_unmap()
    168  dma_resv_lock(uverbs_dmabuf->dmabuf->resv, NULL);  in uverbs_dmabuf_fd_destroy_uobj()
    173  dma_resv_wait_timeout(uverbs_dmabuf->dmabuf->resv,  in uverbs_dmabuf_fd_destroy_uobj()
    178  dma_resv_unlock(uverbs_dmabuf->dmabuf->resv);  in uverbs_dmabuf_fd_destroy_uobj()

/linux/mm/

hugetlb.c
    460  get_file_region_entry_from_cache(struct resv_map *resv, long from, long to)  in get_file_region_entry_from_cache() (argument)
    464  VM_BUG_ON(resv->region_cache_count <= 0);  in get_file_region_entry_from_cache()
    466  resv->region_cache_count--;  in get_file_region_entry_from_cache()
    467  nrg = list_first_entry(&resv->region_cache, struct file_region, link);  in get_file_region_entry_from_cache()
    490  struct resv_map *resv,  in record_hugetlb_cgroup_uncharge_info() (argument)
    509  if (!resv->pages_per_hpage)  in record_hugetlb_cgroup_uncharge_info()
    510  resv->pages_per_hpage = pages_per_huge_page(h);  in record_hugetlb_cgroup_uncharge_info()
    514  VM_BUG_ON(resv->pages_per_hpage != pages_per_huge_page(h));  in record_hugetlb_cgroup_uncharge_info()
    542  static void coalesce_file_region(struct resv_map *resv, struct file_region *rg)  in coalesce_file_region() (argument)
    547  if (&prg->link != &resv->regions && prg->to == rg->from &&  in coalesce_file_region()
    [all …]

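The hugetlb.c hits around coalesce_file_region() merge adjacent reservation regions in a sorted list. An illustrative, simplified sketch of that merge step; the struct name is invented and the cgroup-accounting checks the real code also performs are omitted:

#include <linux/list.h>
#include <linux/slab.h>

struct region_sketch {
	long from;
	long to;
	struct list_head link;
};

/* Fold @rg into its predecessor when the two intervals are adjacent. */
static void coalesce_region_sketch(struct list_head *regions,
				   struct region_sketch *rg)
{
	struct region_sketch *prg = list_prev_entry(rg, link);

	if (&prg->link != regions && prg->to == rg->from) {
		prg->to = rg->to;	/* extend the previous region */
		list_del(&rg->link);
		kfree(rg);
	}
}
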
/linux/drivers/vfio/pci/

vfio_pci_dmabuf.c
     53  dma_resv_assert_held(priv->dmabuf->resv);  in vfio_pci_dma_buf_map()
    113  dma_resv_assert_held(attachment->dmabuf->resv);  in vfio_pci_dma_buf_iommufd_map()
    292  dma_resv_lock(priv->dmabuf->resv, NULL);  in vfio_pci_core_feature_dma_buf()
    295  dma_resv_unlock(priv->dmabuf->resv);  in vfio_pci_core_feature_dma_buf()
    331  dma_resv_lock(priv->dmabuf->resv, NULL);  in vfio_pci_dma_buf_move()
    334  dma_resv_unlock(priv->dmabuf->resv);  in vfio_pci_dma_buf_move()
    350  dma_resv_lock(priv->dmabuf->resv, NULL);  in vfio_pci_dma_buf_cleanup()
    355  dma_resv_unlock(priv->dmabuf->resv);  in vfio_pci_dma_buf_cleanup()

/linux/include/linux/sunrpc/

svc.h
    525  struct kvec *resv = buf->head;  in svcxdr_init_encode()
    530  xdr->iov = resv;  in svcxdr_init_encode()
    531  xdr->p = resv->iov_base + resv->iov_len;
    532  xdr->end = resv->iov_base + PAGE_SIZE;
    533  buf->len = resv->iov_len;
    572  struct kvec *resv = buf->head;  in svcxdr_set_auth_slack()
    579  WARN_ON(xdr->iov != resv);
    518  struct kvec *resv = buf->head;  svcxdr_init_encode() (local)
    565  struct kvec *resv = buf->head;  svcxdr_set_auth_slack() (local)

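The svc.h hits show svcxdr_init_encode() pointing the reply xdr_stream just past the data already in the head kvec, with the end of the first page as the initial limit. A sketch assembled only from those hits; fields not visible there (scratch buffer, page pointers, auth slack) are omitted:

#include <linux/sunrpc/xdr.h>

static void svcxdr_init_encode_sketch(struct xdr_stream *xdr,
				      struct xdr_buf *buf)
{
	struct kvec *resv = buf->head;

	xdr->buf = buf;
	xdr->iov = resv;
	xdr->p = resv->iov_base + resv->iov_len;	/* append after the header */
	xdr->end = resv->iov_base + PAGE_SIZE;		/* first-page limit */
	buf->len = resv->iov_len;
}
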
/linux/Documentation/gpu/

drm-vm-bind-locking.rst
     96  * The ``gpu_vm->resv`` lock. Protects the gpu_vm's list of gpu_vmas needing
    166  dma_resv_lock(gpu_vm->resv);
    186  add_dependencies(&gpu_job, &gpu_vm->resv);
    189  add_dma_fence(job_dma_fence, &gpu_vm->resv);
    190  dma_resv_unlock(gpu_vm->resv);
    206  dma_resv_lock(obj->resv);
    210  add_dependencies(&eviction_job, &obj->resv);
    212  add_dma_fence(&obj->resv, job_dma_fence);
    214  dma_resv_unlock(&obj->resv);
    218  dma_resv lock such that ``obj->resv == gpu_vm->resv``.
    [all …]

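The last drm-vm-bind-locking.rst hit mentions sharing one lock so that obj->resv == gpu_vm->resv. Combined with the drm_gem.c hits (drm_gem_private_object_init() only falls back to the embedded _resv when obj->resv is NULL), a VM-private object can be set up roughly as below; gpu_vm and its resv pointer are assumed to be driver-specific:

#include <drm/drm_gem.h>
#include <linux/dma-resv.h>

static void init_vm_local_bo_sketch(struct drm_device *dev,
				    struct drm_gem_object *obj,
				    size_t size,
				    struct dma_resv *vm_resv)
{
	obj->resv = vm_resv;	/* share the VM's lock and fence container */
	drm_gem_private_object_init(dev, obj, size);
}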