/linux/drivers/gpu/drm/xe/

xe_hmm.c
  126  struct xe_userptr *userptr = &uvma->userptr;  in xe_hmm_userptr_free_sg() (local)
  133  xe_assert(xe, userptr->sg);  in xe_hmm_userptr_free_sg()
  134  dma_unmap_sgtable(dev, userptr->sg,  in xe_hmm_userptr_free_sg()
  137  sg_free_table(userptr->sg);  in xe_hmm_userptr_free_sg()
  138  userptr->sg = NULL;  in xe_hmm_userptr_free_sg()
  170  struct xe_userptr *userptr;  in xe_hmm_userptr_populate_range() (local)
  181  userptr = &uvma->userptr;  in xe_hmm_userptr_populate_range()
  184  mmap_assert_locked(userptr->notifier.mm);  in xe_hmm_userptr_populate_range()
  189  notifier_seq = mmu_interval_read_begin(&userptr->notifier);  in xe_hmm_userptr_populate_range()
  190  if (notifier_seq == userptr->notifier_seq)  in xe_hmm_userptr_populate_range()
  [all …]
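
Taken together, these xe_hmm.c hits outline the userptr sg-table lifecycle: teardown unmaps and frees the table, and repopulation is gated on the mmu_interval notifier sequence number so unchanged ranges are left alone. A minimal sketch of that pattern, assuming a simplified my_userptr struct rather than the real xe_userptr:

    #include <linux/dma-mapping.h>
    #include <linux/mm.h>
    #include <linux/mmu_notifier.h>
    #include <linux/scatterlist.h>

    struct my_userptr {
            struct mmu_interval_notifier notifier;
            unsigned long notifier_seq;     /* seq the current pages were built at */
            struct sg_table *sg;            /* DMA mapping of the pinned pages */
    };

    /* Teardown: undo the DMA mapping and free the sg table. */
    static void my_userptr_free_sg(struct device *dev, struct my_userptr *up)
    {
            dma_unmap_sgtable(dev, up->sg, DMA_BIDIRECTIONAL, 0);
            sg_free_table(up->sg);
            up->sg = NULL;
    }

    /* Repopulate: skip the work if the notifier has not fired since last time. */
    static int my_userptr_populate(struct my_userptr *up)
    {
            unsigned long seq;

            mmap_assert_locked(up->notifier.mm);

            seq = mmu_interval_read_begin(&up->notifier);
            if (seq == up->notifier_seq)
                    return 0;       /* pages are still valid */

            /*
             * ... re-pin the pages and rebuild up->sg here; a real driver
             * re-checks with mmu_interval_read_retry() under its notifier
             * lock before using the pages ...
             */
            up->notifier_seq = seq;
            return 0;
    }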

xe_vm.c
  62   return mmu_interval_check_retry(&uvma->userptr.notifier,  in xe_vma_userptr_check_repin()
  63   uvma->userptr.notifier_seq) ?  in xe_vma_userptr_check_repin()
  250  down_read(&vm->userptr.notifier_lock);  in xe_vm_add_compute_exec_queue()
  264  up_read(&vm->userptr.notifier_lock);  in xe_vm_add_compute_exec_queue()
  312  lockdep_assert_held_read(&vm->userptr.notifier_lock);  in __xe_vm_userptr_needs_repin()
  314  return (list_empty(&vm->userptr.repin_list) &&  in __xe_vm_userptr_needs_repin()
  315  list_empty(&vm->userptr.invalidated)) ? 0 : -EAGAIN;  in __xe_vm_userptr_needs_repin()
  546  down_read(&vm->userptr.notifier_lock);  in preempt_rebind_work_func()
  548  up_read(&vm->userptr.notifier_lock);  in preempt_rebind_work_func()
  562  up_read(&vm->userptr.notifier_lock);  in preempt_rebind_work_func()
  [all …]
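
The xe_vm.c hits pair a per-VMA mmu_interval_check_retry() against the saved sequence number with a per-VM check that the repin and invalidated lists are empty, all under userptr.notifier_lock held for read. A hedged sketch of both checks, with hypothetical my_vm/my_uvma types standing in for the xe structures:

    #include <linux/errno.h>
    #include <linux/list.h>
    #include <linux/lockdep.h>
    #include <linux/mmu_notifier.h>
    #include <linux/rwsem.h>

    struct my_vm {
            struct rw_semaphore notifier_lock;      /* userptr notifier lock */
            struct list_head repin_list;
            struct list_head invalidated;
    };

    struct my_uvma {
            struct mmu_interval_notifier notifier;
            unsigned long notifier_seq;
    };

    /* Per-VMA: has the CPU mapping changed since the pages were pinned? */
    static int my_uvma_check_repin(struct my_uvma *uvma)
    {
            return mmu_interval_check_retry(&uvma->notifier,
                                            uvma->notifier_seq) ? -EAGAIN : 0;
    }

    /* Per-VM: is anything queued for repinning or already invalidated? */
    static int my_vm_userptr_needs_repin(struct my_vm *vm)
    {
            lockdep_assert_held_read(&vm->notifier_lock);

            return (list_empty(&vm->repin_list) &&
                    list_empty(&vm->invalidated)) ? 0 : -EAGAIN;
    }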

xe_pt.c
  675   xe_res_first_sg(to_userptr_vma(vma)->userptr.sg, 0,  in xe_pt_stage_bind()
  1183  u32 divisor = uvma->userptr.divisor ? uvma->userptr.divisor : 2;  in xe_pt_userptr_inject_eagain()
  1187  uvma->userptr.divisor = divisor << 1;  in xe_pt_userptr_inject_eagain()
  1209  lockdep_assert_held_read(&vm->userptr.notifier_lock);  in vma_check_userptr()
  1215  notifier_seq = uvma->userptr.notifier_seq;  in vma_check_userptr()
  1217  if (uvma->userptr.initial_bind && !xe_vm_in_fault_mode(vm))  in vma_check_userptr()
  1220  if (!mmu_interval_read_retry(&uvma->userptr.notifier,  in vma_check_userptr()
  1228  spin_lock(&vm->userptr.invalidated_lock);  in vma_check_userptr()
  1229  list_move_tail(&uvma->userptr.invalidate_link,  in vma_check_userptr()
  1230  &vm->userptr.invalidated);  in vma_check_userptr()
  [all …]

xe_vm_types.h
  133  struct xe_userptr userptr;  (member)
  234  } userptr;  (member)

xe_exec.c
  291  err = down_read_interruptible(&vm->userptr.notifier_lock);  in xe_exec_ioctl()
  333  up_read(&vm->userptr.notifier_lock);  in xe_exec_ioctl()

/linux/drivers/gpu/drm/i915/gem/

i915_gem_userptr.c
  74   return mmu_interval_notifier_insert(&obj->userptr.notifier, current->mm,  in i915_gem_userptr_init__mmu_notifier()
  75   obj->userptr.ptr, obj->base.size,  in i915_gem_userptr_init__mmu_notifier()
  85   if (!--obj->userptr.page_ref) {  in i915_gem_object_userptr_drop_ref()
  86   pvec = obj->userptr.pvec;  in i915_gem_object_userptr_drop_ref()
  87   obj->userptr.pvec = NULL;  in i915_gem_object_userptr_drop_ref()
  89   GEM_BUG_ON(obj->userptr.page_ref < 0);  in i915_gem_object_userptr_drop_ref()
  115  if (!obj->userptr.page_ref) {  in i915_gem_userptr_get_pages()
  120  obj->userptr.page_ref++;  in i915_gem_userptr_get_pages()
  121  pvec = obj->userptr.pvec;  in i915_gem_userptr_get_pages()
  241  if (obj->userptr.notifier.mm != current->mm)  in i915_gem_object_userptr_submit_init()
  [all …]
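
The i915 hits show the notifier being registered over the user range in the creating process's mm and a reference-counted page vector that is only torn down when page_ref reaches zero. The sketch below illustrates the drop-ref half with a hypothetical object layout; it is not the i915 implementation, and the unpin call is an assumption about how the pages were pinned:

    #include <linux/bug.h>
    #include <linux/mm.h>
    #include <linux/slab.h>

    struct my_userptr_obj {
            unsigned long ptr;              /* start of the user range */
            unsigned long num_pages;
            int page_ref;                   /* users of pvec */
            struct page **pvec;             /* pinned pages, or NULL */
    };

    /* Drop one reference; the last one unpins and frees the page vector. */
    static void my_userptr_drop_ref(struct my_userptr_obj *obj)
    {
            if (!--obj->page_ref) {
                    struct page **pvec = obj->pvec;

                    obj->pvec = NULL;
                    unpin_user_pages(pvec, obj->num_pages); /* assumes FOLL_PIN was used */
                    kvfree(pvec);
            }
            WARN_ON(obj->page_ref < 0);
    }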

i915_gem_object_types.h
  721  } userptr;  (member)

/linux/drivers/accel/habanalabs/common/

memory.c
  228  struct hl_userptr *userptr;  in dma_map_host_va() (local)
  231  userptr = kzalloc(sizeof(*userptr), GFP_KERNEL);  in dma_map_host_va()
  232  if (!userptr) {  in dma_map_host_va()
  237  rc = hl_pin_host_memory(hdev, addr, size, userptr);  in dma_map_host_va()
  241  userptr->dma_mapped = true;  in dma_map_host_va()
  242  userptr->dir = DMA_BIDIRECTIONAL;  in dma_map_host_va()
  243  userptr->vm_type = VM_TYPE_USERPTR;  in dma_map_host_va()
  245  *p_userptr = userptr;  in dma_map_host_va()
  247  rc = hl_dma_map_sgtable(hdev, userptr->sgt, DMA_BIDIRECTIONAL);  in dma_map_host_va()
  256  hl_unpin_host_memory(hdev, userptr);  in dma_map_host_va()
  [all …]
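
dma_map_host_va() allocates a tracking struct, pins the user range (hl_pin_host_memory), DMA-maps the resulting sg table (hl_dma_map_sgtable) and unwinds the pin on failure. The generic kernel equivalent of that pin-then-map sequence, written with core MM/DMA APIs rather than the habanalabs helpers, might look like the sketch below; all names and the exact error handling are illustrative:

    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/mm.h>
    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /*
     * Pin a user buffer and DMA-map it.  On success *ppages holds the pinned
     * pages so the caller can unpin them after dma_unmap_sgtable() and
     * sg_free_table() on teardown.
     */
    static int my_map_host_va(struct device *dev, unsigned long addr, size_t size,
                              struct sg_table *sgt, struct page ***ppages,
                              enum dma_data_direction dir)
    {
            unsigned long first = addr >> PAGE_SHIFT;
            unsigned long last = (addr + size - 1) >> PAGE_SHIFT;
            long npages = last - first + 1;         /* sketch: assumes this fits in int */
            struct page **pages;
            long pinned;
            int rc;

            pages = kvmalloc_array(npages, sizeof(*pages), GFP_KERNEL);
            if (!pages)
                    return -ENOMEM;

            pinned = pin_user_pages_fast(addr & PAGE_MASK, npages,
                                         FOLL_WRITE | FOLL_LONGTERM, pages);
            if (pinned < 0) {
                    rc = pinned;
                    goto free_pages;
            }
            if (pinned != npages) {
                    rc = -EFAULT;
                    goto unpin;
            }

            rc = sg_alloc_table_from_pages(sgt, pages, npages,
                                           offset_in_page(addr), size, GFP_KERNEL);
            if (rc)
                    goto unpin;

            rc = dma_map_sgtable(dev, sgt, dir, 0);
            if (rc)
                    goto free_table;

            *ppages = pages;
            return 0;

    free_table:
            sg_free_table(sgt);
    unpin:
            unpin_user_pages(pages, pinned);
    free_pages:
            kvfree(pages);
            return rc;
    }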

debugfs.c
  207  struct hl_userptr *userptr;  in userptr_show() (local)
  214  list_for_each_entry(userptr, &dev_entry->userptr_list, debugfs_list) {  in userptr_show()
  222  userptr->pid, userptr->addr, userptr->size,  in userptr_show()
  223  dma_dir[userptr->dir]);  in userptr_show()
  242  struct hl_userptr *userptr;  in vm_show() (local)
  267  userptr = hnode->ptr;  in vm_show()
  270  hnode->vaddr, userptr->size);  in vm_show()
  354  struct hl_userptr *userptr;  in userptr_lookup_show() (local)
  362  list_for_each_entry(userptr, &dev_entry->userptr_list, debugfs_list) {  in userptr_lookup_show()
  363  if (dev_entry->userptr_lookup >= userptr->addr &&  in userptr_lookup_show()
  [all …]

/linux/Documentation/gpu/

drm-vm-bind-locking.rst
  8    including the userptr mmu_notifier locking. It also discusses some
  9    optimizations to get rid of the looping through of all userptr mappings and
  19   in this document. In particular, it is currently lacking a userptr
  39   * ``userptr gpu_vma or just userptr``: A gpu_vma, whose backing store
  81   gpu_vm's list of userptr gpu_vmas. With a CPU mm analogy this would
  86   userptr gpu_vma on the gpu_vm's userptr list, and in write mode during mmu
  103  invalidation. The userptr notifier lock is per gpu_vm.
  193  might be userptr gpu_vmas that are not mapping a buffer object that
  384  userptr gpu_vmas
  387  A userptr gpu_vma is a gpu_vma that, instead of mapping a buffer object to a
  [all …]
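
The locking document describes taking the per-gpu_vm userptr notifier lock in read mode while validating userptr gpu_vmas and in write mode from the mmu notifier during invalidation. A hedged sketch of the invalidation side of that scheme, with hypothetical my_gpuvm/my_userptr_vma types (real drivers also zap GPU page-table entries and queue the vma for repinning where the comment indicates):

    #include <linux/kernel.h>
    #include <linux/mmu_notifier.h>
    #include <linux/rwsem.h>

    struct my_gpuvm {
            struct rw_semaphore notifier_lock;      /* the userptr notifier lock */
    };

    struct my_userptr_vma {
            struct mmu_interval_notifier notifier;
            struct my_gpuvm *gpuvm;
    };

    /*
     * Invalidation side: the CPU mapping is changing, so bump the sequence
     * number under the notifier lock held for write.  Validation takes the
     * same lock for read and compares the sequence it sampled earlier.
     */
    static bool my_userptr_invalidate(struct mmu_interval_notifier *mni,
                                      const struct mmu_notifier_range *range,
                                      unsigned long cur_seq)
    {
            struct my_userptr_vma *uvma =
                    container_of(mni, struct my_userptr_vma, notifier);
            struct my_gpuvm *gpuvm = uvma->gpuvm;

            if (!mmu_notifier_range_blockable(range))
                    return false;

            down_write(&gpuvm->notifier_lock);
            mmu_interval_set_seq(mni, cur_seq);
            /* ... zap GPU PTEs and/or queue uvma for repinning here ... */
            up_write(&gpuvm->notifier_lock);

            return true;
    }

    static const struct mmu_interval_notifier_ops my_userptr_notifier_ops = {
            .invalidate = my_userptr_invalidate,
    };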

/linux/drivers/media/v4l2-core/

v4l2-compat-ioctl32.c
  272  compat_long_t userptr;  (member)
  300  compat_long_t userptr;  (member)
  324  compat_long_t userptr;  (member)
  350  m.userptr = (unsigned long)compat_ptr(plane32.m.userptr);  in get_v4l2_plane32()
  387  plane32.m.userptr = (uintptr_t)(p64->m.userptr);  in put_v4l2_plane32()
  431  vb->m.userptr = (unsigned long)compat_ptr(vb32.m.userptr);  in get_v4l2_buffer32()
  475  vb->m.userptr = (unsigned long)compat_ptr(vb32.m.userptr);  in get_v4l2_buffer32_time32()
  518  vb32.m.userptr = (uintptr_t)(vb->m.userptr);  in put_v4l2_buffer32()
  562  vb32.m.userptr = (uintptr_t)(vb->m.userptr);  in put_v4l2_buffer32_time32()
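
These compat helpers only have to fix up the pointer width: a 32-bit process supplies a compat_long_t, which is widened with compat_ptr() on the way in and narrowed back with a uintptr_t cast on the way out. A small sketch of that conversion, using a hypothetical compat plane struct instead of the private one in v4l2-compat-ioctl32.c:

    #include <linux/compat.h>
    #include <linux/videodev2.h>

    /* Hypothetical compat mirror of the userptr part of struct v4l2_plane. */
    struct my_v4l2_plane32 {
            union {
                    compat_long_t userptr;
                    /* mem_offset / fd variants elided */
            } m;
    };

    /* 32-bit userspace -> kernel: widen the pointer with compat_ptr(). */
    static void my_get_plane32(struct v4l2_plane *p64,
                               const struct my_v4l2_plane32 *p32)
    {
            p64->m.userptr = (unsigned long)compat_ptr(p32->m.userptr);
    }

    /* Kernel -> 32-bit userspace: narrow it back for the compat struct. */
    static void my_put_plane32(struct my_v4l2_plane32 *p32,
                               const struct v4l2_plane *p64)
    {
            p32->m.userptr = (uintptr_t)p64->m.userptr;
    }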

/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_ttm.c
  676   uint64_t userptr;  (member)
  698   unsigned long start = gtt->userptr;  in amdgpu_ttm_tt_get_user_pages()
  748   if (gtt && gtt->userptr && range)  in amdgpu_ttm_tt_discard_user_pages()
  763   if (!gtt || !gtt->userptr || !range)  in amdgpu_ttm_tt_get_user_pages_done()
  767   gtt->userptr, ttm->num_pages);  in amdgpu_ttm_tt_get_user_pages_done()
  930   if (gtt->userptr) {  in amdgpu_ttm_backend_bind()
  1057  if (gtt->userptr) {  in amdgpu_ttm_backend_unbind()
  1146  if (gtt->userptr) {  in amdgpu_ttm_tt_populate()
  1186  if (gtt->userptr) {  in amdgpu_ttm_tt_unpopulate()
  1225  *user_addr = gtt->userptr;  in amdgpu_ttm_tt_get_userptr()
  [all …]

/linux/drivers/gpu/drm/exynos/

exynos_drm_g2d.c
  207  unsigned long userptr;  (member)
  415  unsigned long userptr,  in g2d_userptr_get_dma_addr() (argument)
  434  if (g2d_userptr->userptr == userptr) {  in g2d_userptr_get_dma_addr()
  468  start = userptr & PAGE_MASK;  in g2d_userptr_get_dma_addr()
  469  offset = userptr & ~PAGE_MASK;  in g2d_userptr_get_dma_addr()
  470  end = PAGE_ALIGN(userptr + size);  in g2d_userptr_get_dma_addr()
  517  g2d_userptr->userptr = userptr;  in g2d_userptr_get_dma_addr()
  747  g2d_userptr.userptr,  in g2d_map_cmdlist_gem()
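
Before pinning, g2d_userptr_get_dma_addr() turns the unaligned user address into a page-aligned range plus an in-page offset. The helper below sketches that arithmetic in isolation; the function and parameter names are illustrative:

    #include <linux/mm.h>

    /*
     * A userptr range rarely starts or ends on a page boundary: round the
     * start down, the end up, and remember the offset into the first page.
     */
    static void my_userptr_page_range(unsigned long userptr, size_t size,
                                      unsigned long *start, unsigned long *offset,
                                      unsigned long *npages)
    {
            unsigned long end;

            *start = userptr & PAGE_MASK;           /* first page, rounded down */
            *offset = userptr & ~PAGE_MASK;         /* byte offset within it */
            end = PAGE_ALIGN(userptr + size);       /* end, rounded up to a page */
            *npages = (end - *start) >> PAGE_SHIFT;
    }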

/linux/drivers/gpu/drm/msm/

msm_gem_submit.c
  126  void __user *userptr =  in submit_lookup_objects() (local)
  134  if (copy_from_user(&submit_bo, userptr, sizeof(submit_bo))) {  in submit_lookup_objects()
  193  void __user *userptr =  in submit_lookup_cmds() (local)
  196  ret = copy_from_user(&submit_cmd, userptr, sizeof(submit_cmd));  in submit_lookup_cmds()
  226  userptr = u64_to_user_ptr(submit_cmd.relocs);  in submit_lookup_cmds()
  240  ret = copy_from_user(submit->cmd[i].relocs, userptr, sz);  in submit_lookup_cmds()
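
Here userptr is not pinned memory at all, just a transient __user pointer rebuilt from a u64 carried in the submit ioctl and read with copy_from_user(). A sketch of that per-element lookup loop, with a hypothetical payload struct in place of the msm ones:

    #include <linux/errno.h>
    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    /* Hypothetical ioctl payload element; userspace passes the array as a u64. */
    struct my_submit_bo {
            u32 handle;
            u32 flags;
    };

    static int my_lookup_objects(u64 bos, unsigned int nr_bos)
    {
            unsigned int i;

            for (i = 0; i < nr_bos; i++) {
                    struct my_submit_bo bo;
                    /* Rebuild the __user pointer for element i. */
                    void __user *userptr = u64_to_user_ptr(bos + i * sizeof(bo));

                    if (copy_from_user(&bo, userptr, sizeof(bo)))
                            return -EFAULT;

                    /* ... validate bo.handle and bo.flags ... */
            }
            return 0;
    }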

/linux/drivers/gpu/drm/nouveau/

nouveau_drv.h
  192  void __user *userptr = u64_to_user_ptr(user);  in u_memcpya() (local)
  197  return vmemdup_user(userptr, bytes);  in u_memcpya()

/linux/Documentation/userspace-api/media/v4l/

vidioc-prepare-buf.rst
  55   bounds, or no buffers have been allocated yet, or the ``userptr`` or

vidioc-qbuf.rst
  71   ``memory`` field to ``V4L2_MEMORY_USERPTR``, the ``m.userptr`` field to
  73   multi-planar API is used, ``m.userptr`` and ``length`` members of the
  164  bounds, or no buffers have been allocated yet, or the ``userptr`` or
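
For V4L2_MEMORY_USERPTR the application owns the buffer memory and passes its address in m.userptr along with its size in length when queueing. A minimal userspace helper doing exactly that for the single-planar capture case (the fd and buffer bookkeeping are assumed to come from the surrounding application, as in the capture.c example below):

    #include <string.h>
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>

    /* Queue one user-pointer buffer on a single-planar capture stream. */
    static int queue_userptr_buffer(int fd, unsigned int index,
                                    void *start, size_t length)
    {
            struct v4l2_buffer buf;

            memset(&buf, 0, sizeof(buf));
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_USERPTR;
            buf.index = index;
            buf.m.userptr = (unsigned long)start;
            buf.length = length;

            return ioctl(fd, VIDIOC_QBUF, &buf);    /* 0 on success, -1 + errno on error */
    }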

capture.c.rst
  160  if (buf.m.userptr == (unsigned long)buffers[i].start
  166  process_image((void *)buf.m.userptr, buf.bytesused);
  268  buf.m.userptr = (unsigned long)buffers[i].start;

/linux/include/linux/

splice.h
  37   void __user *userptr; /* memory to write to */  (member)

/linux/net/ipv4/netfilter/

ip_tables.c
  812   void __user *userptr)  in copy_entries_to_user() (argument)
  835   if (copy_to_user(userptr + off, e, sizeof(*e))) {  in copy_entries_to_user()
  839   if (copy_to_user(userptr + off  in copy_entries_to_user()
  852   if (xt_match_to_user(m, userptr + off + i)) {  in copy_entries_to_user()
  859   if (xt_target_to_user(t, userptr + off + e->target_offset)) {  in copy_entries_to_user()
  1551  void __user *userptr)  in compat_copy_entries_to_user() (argument)
  1565  pos = userptr;  in compat_copy_entries_to_user()
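
copy_entries_to_user() walks the kernel's rule blob and copies each variable-sized entry to the user buffer at its running offset, with match and target data converted separately. The sketch below shows only the offset-walking skeleton with a hypothetical record type; the real code also translates counters and per-match/target data:

    #include <linux/errno.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    /* Hypothetical variable-sized record; next_offset is its total size. */
    struct my_entry {
            u16 next_offset;
            /* payload follows */
    };

    static int my_copy_entries_to_user(const void *entries,
                                       unsigned int total_size,
                                       void __user *userptr)
    {
            unsigned int off = 0;

            while (off < total_size) {
                    const struct my_entry *e = entries + off;

                    if (e->next_offset < sizeof(*e))
                            return -EINVAL;         /* malformed blob */

                    /* Copy the whole record to its matching user offset. */
                    if (copy_to_user(userptr + off, e, e->next_offset))
                            return -EFAULT;

                    off += e->next_offset;
            }
            return 0;
    }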

arp_tables.c
  672   void __user *userptr)  in copy_entries_to_user() (argument)
  693   if (copy_to_user(userptr + off, e, sizeof(*e))) {  in copy_entries_to_user()
  697   if (copy_to_user(userptr + off  in copy_entries_to_user()
  706   if (xt_target_to_user(t, userptr + off + e->target_offset)) {  in copy_entries_to_user()
  1341  void __user *userptr)  in compat_copy_entries_to_user() (argument)
  1355  pos = userptr;  in compat_copy_entries_to_user()

/linux/net/ipv6/netfilter/

ip6_tables.c
  828   void __user *userptr)  in copy_entries_to_user() (argument)
  851   if (copy_to_user(userptr + off, e, sizeof(*e))) {  in copy_entries_to_user()
  855   if (copy_to_user(userptr + off  in copy_entries_to_user()
  868   if (xt_match_to_user(m, userptr + off + i)) {  in copy_entries_to_user()
  875   if (xt_target_to_user(t, userptr + off + e->target_offset)) {  in copy_entries_to_user()
  1560  void __user *userptr)  in compat_copy_entries_to_user() (argument)
  1574  pos = userptr;  in compat_copy_entries_to_user()

/linux/drivers/cdrom/

cdrom.c
  3279  void __user *userptr = (void __user *)arg;  in mmc_ioctl() (local)
  3289  return mmc_ioctl_cdrom_read_data(cdi, userptr, &cgc, cmd);  in mmc_ioctl()
  3291  return mmc_ioctl_cdrom_read_audio(cdi, userptr);  in mmc_ioctl()
  3293  return mmc_ioctl_cdrom_subchannel(cdi, userptr);  in mmc_ioctl()
  3295  return mmc_ioctl_cdrom_play_msf(cdi, userptr, &cgc);  in mmc_ioctl()
  3297  return mmc_ioctl_cdrom_play_blk(cdi, userptr, &cgc);  in mmc_ioctl()
  3300  return mmc_ioctl_cdrom_volume(cdi, userptr, &cgc, cmd);  in mmc_ioctl()
  3308  return mmc_ioctl_dvd_read_struct(cdi, userptr, &cgc);  in mmc_ioctl()
  3310  return mmc_ioctl_dvd_auth(cdi, userptr);  in mmc_ioctl()
  3312  return mmc_ioctl_cdrom_next_writable(cdi, userptr);  in mmc_ioctl()
  [all …]

/linux/drivers/accel/habanalabs/goya/

goya.c
  3332  struct hl_userptr *userptr;  in goya_pin_memory_before_cs() (local)
  3336  parser->job_userptr_list, &userptr))  in goya_pin_memory_before_cs()
  3339  userptr = kzalloc(sizeof(*userptr), GFP_KERNEL);  in goya_pin_memory_before_cs()
  3340  if (!userptr)  in goya_pin_memory_before_cs()
  3344  userptr);  in goya_pin_memory_before_cs()
  3348  list_add_tail(&userptr->job_node, parser->job_userptr_list);  in goya_pin_memory_before_cs()
  3350  rc = hl_dma_map_sgtable(hdev, userptr->sgt, dir);  in goya_pin_memory_before_cs()
  3356  userptr->dma_mapped = true;  in goya_pin_memory_before_cs()
  3357  userptr->dir = dir;  in goya_pin_memory_before_cs()
  3361  goya_get_dma_desc_list_size(hdev, userptr->sgt);  in goya_pin_memory_before_cs()
  [all …]

/linux/include/uapi/drm/

exynos_drm.h
  119  unsigned long userptr;  (member)