
Searched refs: u64_to_user_ptr (Results 1 – 25 of 100), sorted by relevance
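u64_to_user_ptr() converts a u64 carried in a uAPI structure (an ioctl argument, an io_uring SQE field, a BPF attr, and so on) into a void __user * that the usual uaccess helpers accept. A minimal sketch of the macro, modelled on its definition in include/linux/util_macros.h (see the hit near the end of this page); the exact body in your tree may differ:

#include <linux/typecheck.h>

/*
 * Sketch only: typecheck() flags callers that pass anything other than a
 * u64, and the intermediate (uintptr_t) cast avoids integer-to-pointer
 * size warnings on 32-bit builds.
 */
#define u64_to_user_ptr(x)			\
({						\
	typecheck(u64, (x));			\
	(void __user *)(uintptr_t)(x);		\
})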


/linux/io_uring/
fs.c
61 oldf = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_renameat_prep()
62 newf = u64_to_user_ptr(READ_ONCE(sqe->addr2)); in io_renameat_prep()
120 fname = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_unlinkat_prep()
167 fname = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_mkdirat_prep()
209 oldpath = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_symlinkat_prep()
210 newpath = u64_to_user_ptr(READ_ONCE(sqe->addr2)); in io_symlinkat_prep()
253 oldf = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_linkat_prep()
254 newf = u64_to_user_ptr(READ_ONCE(sqe->addr2)); in io_linkat_prep()
query.c
68 udata = u64_to_user_ptr(hdr.query_data); in io_handle_query_entry()
126 uhdr = u64_to_user_ptr(next_hdr); in io_query()
cmd_net.c
22 optval = u64_to_user_ptr(READ_ONCE(sqe->optval)); in io_uring_cmd_getsockopt()
46 optval = u64_to_user_ptr(READ_ONCE(sqe->optval)); in io_uring_cmd_setsockopt()
147 uaddr = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_uring_cmd_sock()
148 ulen = u64_to_user_ptr(sqe->addr3); in io_uring_cmd_sock()
epoll.c
43 ev = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_epoll_ctl_prep()
75 iew->events = u64_to_user_ptr(READ_ONCE(sqe->addr));
rsrc.c
230 u64 __user *tags = u64_to_user_ptr(up->tags); in __io_sqe_files_update()
231 __s32 __user *fds = u64_to_user_ptr(up->data); in __io_sqe_files_update()
295 u64 __user *tags = u64_to_user_ptr(up->tags); in __io_sqe_buffers_update()
312 uvec = u64_to_user_ptr(user_data); in __io_sqe_buffers_update()
418 return io_sqe_files_register(ctx, u64_to_user_ptr(rr.data), in io_register_rsrc()
419 rr.nr, u64_to_user_ptr(rr.tags)); in io_register_rsrc()
423 return io_sqe_buffers_register(ctx, u64_to_user_ptr(rr.data), in io_register_rsrc()
424 rr.nr, u64_to_user_ptr(rr.tags)); in io_register_rsrc()
450 __s32 __user *fds = u64_to_user_ptr(up->arg); in io_files_update_with_index_alloc()
openclose.c
69 fname = u64_to_user_ptr(READ_ONCE(sqe->addr)); in __io_openat_prep()
105 how = u64_to_user_ptr(READ_ONCE(sqe->addr2)); in io_openat2_prep()
324 p->fds = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_pipe_prep()
rw.c
55 struct compat_iovec __user *uiov = u64_to_user_ptr(rw->addr); in io_iov_compat_buffer_select_prep()
76 uiov = u64_to_user_ptr(rw->addr); in io_iov_buffer_select_prep()
118 sel->addr = u64_to_user_ptr(rw->addr); in __io_import_rw_buffer()
239 if (copy_from_user(&pi_attr, u64_to_user_ptr(attr_ptr), in io_prep_rw_pi()
250 ret = import_ubuf(ddir, u64_to_user_ptr(pi_attr.addr), in io_prep_rw_pi()
422 uvec = u64_to_user_ptr(rw->addr); in io_rw_prep_reg_vec()
722 addr = u64_to_user_ptr(rw->addr); in loop_rw_iter()
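The io_uring hits above share one pattern: user pointers travel through the SQE as plain u64 fields, so the prep handlers fetch them with READ_ONCE() and convert them before any copy. A minimal sketch of that shape; example_prep(), the local names, and the choice of sqe->addr2 are illustrative, not a real opcode:

#include <linux/compiler.h>
#include <linux/openat2.h>
#include <linux/uaccess.h>
#include <uapi/linux/io_uring.h>

/* Illustrative prep handler, not part of io_uring. */
static int example_prep(const struct io_uring_sqe *sqe)
{
	struct open_how how;
	struct open_how __user *uhow;

	/* The SQE field is a u64; convert it once, up front. */
	uhow = u64_to_user_ptr(READ_ONCE(sqe->addr2));
	if (copy_from_user(&how, uhow, sizeof(how)))
		return -EFAULT;

	return 0;
}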
/linux/drivers/accel/amdxdna/
aie2_pci.c
639 if (copy_from_user(&status, u64_to_user_ptr(args->buffer), sizeof(status))) { in aie2_get_aie_status()
650 ret = aie2_query_status(ndev, u64_to_user_ptr(status.buffer), in aie2_get_aie_status()
657 if (copy_to_user(u64_to_user_ptr(args->buffer), &status, sizeof(status))) { in aie2_get_aie_status()
703 if (copy_to_user(u64_to_user_ptr(args->buffer), meta, sizeof(*meta))) in aie2_get_aie_metadata()
721 if (copy_to_user(u64_to_user_ptr(args->buffer), &version, sizeof(version))) in aie2_get_aie_version()
738 if (copy_to_user(u64_to_user_ptr(args->buffer), &version, sizeof(version))) in aie2_get_firmware_version()
754 if (copy_to_user(u64_to_user_ptr(args->buffer), &mode, sizeof(mode))) in aie2_get_power_mode()
779 if (copy_to_user(u64_to_user_ptr(args->buffer), clock, sizeof(*clock))) in aie2_get_clock_metadata()
815 buf = u64_to_user_ptr(array_args->buffer); in aie2_hwctx_status_cb()
869 if (copy_to_user(u64_to_user_ptr(args->buffer), &res_info, sizeof(res_info))) in aie2_query_resource_info()
[all …]
/linux/block/
blk-crypto.c
490 if (copy_from_user(raw_key, u64_to_user_ptr(arg.raw_key_ptr), in blk_crypto_ioctl_import_key()
503 if (copy_to_user(u64_to_user_ptr(arg.lt_key_ptr), lt_key, in blk_crypto_ioctl_import_key()
538 if (copy_to_user(u64_to_user_ptr(arg.lt_key_ptr), lt_key, in blk_crypto_ioctl_generate_key()
568 if (copy_from_user(lt_key, u64_to_user_ptr(arg.lt_key_ptr), in blk_crypto_ioctl_prepare_key()
581 if (copy_to_user(u64_to_user_ptr(arg.eph_key_ptr), eph_key, in blk_crypto_ioctl_prepare_key()
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_userq_fence.c
480 syncobj_handles = memdup_user(u64_to_user_ptr(args->syncobj_handles), in amdgpu_userq_signal_ioctl()
501 bo_handles_read = memdup_user(u64_to_user_ptr(args->bo_read_handles), in amdgpu_userq_signal_ioctl()
524 bo_handles_write = memdup_user(u64_to_user_ptr(args->bo_write_handles), in amdgpu_userq_signal_ioctl()
665 bo_handles_read = memdup_user(u64_to_user_ptr(wait_info->bo_read_handles), in amdgpu_userq_wait_ioctl()
671 bo_handles_write = memdup_user(u64_to_user_ptr(wait_info->bo_write_handles), in amdgpu_userq_wait_ioctl()
679 syncobj_handles = memdup_user(u64_to_user_ptr(wait_info->syncobj_handles), in amdgpu_userq_wait_ioctl()
687 timeline_handles = memdup_user(u64_to_user_ptr(wait_info->syncobj_timeline_handles), in amdgpu_userq_wait_ioctl()
694 timeline_points = memdup_user(u64_to_user_ptr(wait_info->syncobj_timeline_points), in amdgpu_userq_wait_ioctl()
964 if (copy_to_user(u64_to_user_ptr(wait_info->out_fences), in amdgpu_userq_wait_ioctl()
mes_userqueue.c
296 compute_mqd = memdup_user(u64_to_user_ptr(mqd_user->mqd), mqd_user->mqd_size); in mes_userq_mqd_create()
332 mqd_gfx_v11 = memdup_user(u64_to_user_ptr(mqd_user->mqd), mqd_user->mqd_size); in mes_userq_mqd_create()
363 mqd_sdma_v11 = memdup_user(u64_to_user_ptr(mqd_user->mqd), mqd_user->mqd_size); in mes_userq_mqd_create()
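The amdgpu hits above show the variable-length variant: the ioctl argument carries a u64 pointer plus a count, and memdup_user() both allocates and copies. A sketch of that shape; the function name, parameters, and u32 element type are illustrative, not the amdgpu uAPI:

#include <linux/err.h>
#include <linux/overflow.h>
#include <linux/slab.h>
#include <linux/string.h>

/* Copy a user array described by (u64 pointer, element count) into the kernel. */
static u32 *example_copy_handles(u64 uptr, u32 count)
{
	/* memdup_user() returns an ERR_PTR() on failure; the caller kfree()s. */
	return memdup_user(u64_to_user_ptr(uptr),
			   array_size(count, sizeof(u32)));
}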
/linux/drivers/fwctl/
main.c
66 if (clear_user(u64_to_user_ptr(cmd->out_device_data), in fwctl_cmd_info()
75 if (copy_to_user_zero_pad(u64_to_user_ptr(cmd->out_device_data), in fwctl_cmd_info()
120 if (copy_from_user(inbuf, u64_to_user_ptr(cmd->in), cmd->in_len)) in fwctl_cmd_rpc()
133 if (copy_to_user(u64_to_user_ptr(cmd->out), outbuf, in fwctl_cmd_rpc()
/linux/net/bpf/
bpf_dummy_struct_ops.c
43 ctx_in = u64_to_user_ptr(kattr->test.ctx_in); in dummy_ops_init_args()
48 u_state = u64_to_user_ptr(args->args[0]); in dummy_ops_init_args()
63 u_state = u64_to_user_ptr(args->args[0]); in dummy_ops_copy_args()
/linux/drivers/infiniband/core/
uverbs_ioctl.c
144 return ib_is_buffer_cleared(u64_to_user_ptr(uattr->data) + len, in uverbs_is_attr_cleared()
206 ret = copy_from_user(idr_vals, u64_to_user_ptr(uattr->data), in uverbs_process_idrs_array()
304 if (copy_from_user(p, u64_to_user_ptr(uattr->data), in uverbs_process_attr()
728 udata->inbuf = u64_to_user_ptr(in->ptr_attr.data); in uverbs_fill_udata()
735 udata->outbuf = u64_to_user_ptr(out->ptr_attr.data); in uverbs_fill_udata()
753 if (copy_to_user(u64_to_user_ptr(attr->ptr_attr.data), from, min_size)) in uverbs_copy_to()
831 if (clear_user(u64_to_user_ptr(attr->ptr_attr.data) + size, in uverbs_copy_to_struct_or_zero()
/linux/kernel/bpf/
mprog.c
420 uprog_id = u64_to_user_ptr(attr->query.prog_ids); in bpf_mprog_query()
421 uprog_flags = u64_to_user_ptr(attr->query.prog_attach_flags); in bpf_mprog_query()
422 ulink_id = u64_to_user_ptr(attr->query.link_ids); in bpf_mprog_query()
423 ulink_flags = u64_to_user_ptr(attr->query.link_attach_flags); in bpf_mprog_query()
syscall.c
1720 void __user *ukey = u64_to_user_ptr(attr->key); in map_lookup_elem()
1721 void __user *uvalue = u64_to_user_ptr(attr->value); in map_lookup_elem()
1888 void __user *ukey = u64_to_user_ptr(attr->key); in map_get_next_key()
1889 void __user *unext_key = u64_to_user_ptr(attr->next_key); in map_get_next_key()
1946 void __user *keys = u64_to_user_ptr(attr->batch.keys); in generic_map_delete_batch()
2002 void __user *values = u64_to_user_ptr(attr->batch.values); in generic_map_update_batch()
2003 void __user *keys = u64_to_user_ptr(attr->batch.keys); in generic_map_update_batch()
2059 void __user *uobatch = u64_to_user_ptr(attr->batch.out_batch); in generic_map_lookup_batch()
2060 void __user *ubatch = u64_to_user_ptr(attr->batch.in_batch); in generic_map_lookup_batch()
2061 void __user *values = u64_to_user_ptr(attr->batch.values); in generic_map_lookup_batch()
[all …]
/linux/drivers/gpu/drm/virtio/
virtgpu_submit.c
118 u64_to_user_ptr(address), in virtio_gpu_parse_deps()
208 u64_to_user_ptr(address), in virtio_gpu_parse_post_deps()
311 if (copy_from_user(bo_handles, u64_to_user_ptr(exbuf->bo_handles), in virtio_gpu_init_submit_buflist()
416 submit->buf = vmemdup_user(u64_to_user_ptr(exbuf->command), exbuf->size); in virtio_gpu_init_submit()
virtgpu_ioctl.c
123 if (copy_to_user(u64_to_user_ptr(param->value), &value, sizeof(int))) in virtio_gpu_getparam_ioctl()
433 if (copy_to_user(u64_to_user_ptr(args->addr), ptr, size)) in virtio_gpu_get_caps_ioctl()
520 buf = memdup_user(u64_to_user_ptr(rc_blob->cmd), in virtio_gpu_resource_create_blob_ioctl()
596 ctx_set_params = memdup_user(u64_to_user_ptr(args->ctx_set_params), in virtio_gpu_context_init_ioctl()
662 u64_to_user_ptr(value), in virtio_gpu_context_init_ioctl()
/linux/arch/x86/kvm/svm/
sev.c
559 if (copy_from_user(&data, u64_to_user_ptr(argp->data), sizeof(data))) in sev_guest_init2()
608 if (copy_from_user(&params, u64_to_user_ptr(argp->data), sizeof(params))) in sev_launch_start()
652 if (copy_to_user(u64_to_user_ptr(argp->data), &params, sizeof(params))) { in sev_launch_start()
812 if (copy_from_user(&params, u64_to_user_ptr(argp->data), sizeof(params))) in sev_launch_update_data()
1040 void __user *measure = u64_to_user_ptr(argp->data); in sev_launch_measure()
1059 p = u64_to_user_ptr(params.uaddr); in sev_launch_measure()
1130 if (copy_to_user(u64_to_user_ptr(argp->data), &params, sizeof(params))) in sev_guest_status()
1294 if (copy_from_user(&debug, u64_to_user_ptr(argp->data), sizeof(debug))) in sev_dbg_crypt()
1377 if (copy_from_user(&params, u64_to_user_ptr(argp->data), sizeof(params))) in sev_launch_secret()
1441 void __user *report = u64_to_user_ptr(argp->data); in sev_get_attestation_report()
[all …]
/linux/drivers/gpu/drm/i915/
i915_user_extensions.c
57 ext = u64_to_user_ptr(next); in i915_user_extensions()
/linux/drivers/gpu/drm/
drm_property.c
481 values_ptr = u64_to_user_ptr(out_resp->values_ptr); in drm_mode_getproperty_ioctl()
492 enum_ptr = u64_to_user_ptr(out_resp->enum_blob_ptr); in drm_mode_getproperty_ioctl()
828 if (copy_to_user(u64_to_user_ptr(out_resp->data), in drm_mode_getblob_ioctl()
857 u64_to_user_ptr(out_resp->data), in drm_mode_createblob_ioctl()
drm_mode_config.c
113 fb_id = u64_to_user_ptr(card_res->fb_id_ptr); in drm_mode_getresources()
131 crtc_id = u64_to_user_ptr(card_res->crtc_id_ptr); in drm_mode_getresources()
143 encoder_id = u64_to_user_ptr(card_res->encoder_id_ptr); in drm_mode_getresources()
154 connector_id = u64_to_user_ptr(card_res->connector_id_ptr); in drm_mode_getresources()
/linux/include/linux/
util_macros.h
145 #define u64_to_user_ptr(x) \ (macro definition)
bpfptr.h
31 return USER_BPFPTR(u64_to_user_ptr(addr)); in make_bpfptr()
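The bpfptr.h hit is the odd one out: BPF wraps the converted pointer in a bpfptr_t so the same copy helpers can serve both user-supplied and kernel-supplied attributes. Roughly what make_bpfptr() does, as a sketch under that assumption; check the header in your tree for the authoritative version:

#include <linux/bpfptr.h>

static inline bpfptr_t example_make_bpfptr(u64 addr, bool is_kernel)
{
	if (is_kernel)
		return KERNEL_BPFPTR((void *)(uintptr_t)addr);

	/* User-origin attribute: go through u64_to_user_ptr() as usual. */
	return USER_BPFPTR(u64_to_user_ptr(addr));
}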
/linux/drivers/gpu/drm/xe/
xe_exec.c
117 struct drm_xe_sync __user *syncs_user = u64_to_user_ptr(args->syncs); in xe_exec_ioctl()
118 u64 __user *addresses_user = u64_to_user_ptr(args->address); in xe_exec_ioctl()
