/linux/drivers/gpu/drm/i915/
i915_query.c
    25  if (copy_from_user(query_hdr, u64_to_user_ptr(query_item->data_ptr),  in copy_query_item()
    68  if (copy_to_user(u64_to_user_ptr(query_item->data_ptr),  in fill_topology_info()
    72  if (copy_to_user(u64_to_user_ptr(query_item->data_ptr + sizeof(topo)),  in fill_topology_info()
    76  if (intel_sseu_copy_ssmask_to_user(u64_to_user_ptr(query_item->data_ptr +  in fill_topology_info()
    81  if (intel_sseu_copy_eumask_to_user(u64_to_user_ptr(query_item->data_ptr +  in fill_topology_info()
   132  u64_to_user_ptr(query_item->data_ptr);  in query_engine_info()
   200  u32 __user *p = u64_to_user_ptr(user_regs_ptr);  in copy_perf_config_registers_or_number()
   230  u64_to_user_ptr(query_item->data_ptr);  in query_perf_config_data()
   232  u64_to_user_ptr(query_item->data_ptr +  in query_perf_config_data()
   376  u64_to_user_ptr(query_item->data_ptr);  in query_perf_config_list()
   [all …]
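Everything in this file follows one idiom: the i915 query UAPI carries user buffer addresses as u64 fields (here query_item->data_ptr) so the ioctl struct has the same layout for 32- and 64-bit userspace, and u64_to_user_ptr() recovers a __user pointer only at the access site. A minimal sketch of the two-pass shape these matches implement, with illustrative names (my_query_item and fill_info are not the driver's; the real UAPI struct is drm_i915_query_item):

```c
#include <linux/errno.h>
#include <linux/types.h>
#include <linux/uaccess.h>
#include <linux/util_macros.h>	/* u64_to_user_ptr() */

struct my_query_item {
	u64 data_ptr;	/* user buffer address, carried as a u64 */
	s32 length;	/* 0 on the first call: "tell me the size" */
};

static int fill_info(const struct my_query_item *item,
		     const void *payload, s32 payload_len)
{
	/* Pass 1: userspace asks how big the buffer must be. */
	if (item->length == 0)
		return payload_len;

	if (item->length < payload_len)
		return -EINVAL;

	/* Pass 2: copy into the u64-encoded user address. */
	if (copy_to_user(u64_to_user_ptr(item->data_ptr), payload, payload_len))
		return -EFAULT;

	return payload_len;
}
```

Returning the length from the zero-length probe lets userspace allocate an exactly sized buffer before the second call; the xe_query.c and aie2_pci.c listings below follow the same copy-out shape.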
|
i915_user_extensions.c
    57  ext = u64_to_user_ptr(next);  in i915_user_extensions()
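Line 57 is the stepping of a loop over a user-assembled chain: each extension struct embeds the u64 address of the next one, with 0 terminating the chain. A hedged sketch (user_ext mirrors the rough shape of i915_user_extension; the loop bound is an illustrative guard, not the driver's exact policy):

```c
#include <linux/errno.h>
#include <linux/types.h>
#include <linux/uaccess.h>

struct user_ext {
	__u64 next_extension;	/* u64-encoded pointer to the next link, 0 ends the chain */
	__u32 name;
	__u32 flags;
};

static int walk_extensions(u64 first)
{
	struct user_ext __user *ext = u64_to_user_ptr(first);
	int count = 0;

	while (ext) {
		u64 next;

		if (get_user(next, &ext->next_extension))
			return -EFAULT;
		/* ... dispatch on ext->name here ... */
		ext = u64_to_user_ptr(next);

		if (++count > 64)	/* defend against user-built cycles */
			return -E2BIG;
	}
	return 0;
}
```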
|
/linux/io_uring/
fs.c
    61  oldf = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_renameat_prep()
    62  newf = u64_to_user_ptr(READ_ONCE(sqe->addr2));  in io_renameat_prep()
   120  fname = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_unlinkat_prep()
   167  fname = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_mkdirat_prep()
   209  oldpath = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_symlinkat_prep()
   210  newpath = u64_to_user_ptr(READ_ONCE(sqe->addr2));  in io_symlinkat_prep()
   253  oldf = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_linkat_prep()
   254  newf = u64_to_user_ptr(READ_ONCE(sqe->addr2));  in io_linkat_prep()
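All of these prep helpers read the u64 out of the submission queue entry with READ_ONCE() before converting it: the SQE lives in a ring mapped into userspace, so each field must be sampled exactly once. A hedged sketch of that shape (prep_rename is illustrative, not the real io_renameat_prep); the same pattern repeats in mock_file.c, cmd_net.c, openclose.c, and epoll.c below:

```c
#include <linux/compiler.h>	/* READ_ONCE() */
#include <linux/io_uring.h>	/* struct io_uring_sqe (UAPI layout) */
#include <linux/uaccess.h>

static int prep_rename(const struct io_uring_sqe *sqe,
		       const char __user **oldf, const char __user **newf)
{
	/* Sample each shared-memory field once, then convert. */
	*oldf = u64_to_user_ptr(READ_ONCE(sqe->addr));
	*newf = u64_to_user_ptr(READ_ONCE(sqe->addr2));
	return 0;
}
```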
|
mock_file.c
    73  ubuf = u64_to_user_ptr(READ_ONCE(sqe->addr3));  in io_cmd_copy_regbuf()
    74  iovec = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_cmd_copy_regbuf()
   225  uarg = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_create_mock_file()
   296  uarg = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_probe_mock()
|
cmd_net.c
    21  optval = u64_to_user_ptr(READ_ONCE(sqe->optval));  in io_uring_cmd_getsockopt()
    45  optval = u64_to_user_ptr(READ_ONCE(sqe->optval));  in io_uring_cmd_setsockopt()
|
rsrc.c
   227  u64 __user *tags = u64_to_user_ptr(up->tags);  in __io_sqe_files_update()
   228  __s32 __user *fds = u64_to_user_ptr(up->data);  in __io_sqe_files_update()
   292  u64 __user *tags = u64_to_user_ptr(up->tags);  in __io_sqe_buffers_update()
   309  uvec = u64_to_user_ptr(user_data);  in __io_sqe_buffers_update()
   415  return io_sqe_files_register(ctx, u64_to_user_ptr(rr.data),  in io_register_rsrc()
   416  rr.nr, u64_to_user_ptr(rr.tags));  in io_register_rsrc()
   420  return io_sqe_buffers_register(ctx, u64_to_user_ptr(rr.data),  in io_register_rsrc()
   421  rr.nr, u64_to_user_ptr(rr.tags));  in io_register_rsrc()
   447  __s32 __user *fds = u64_to_user_ptr(up->arg);  in io_files_update_with_index_alloc()
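Note lines 227-228 and 447: the conversion is not limited to void pointers. The same u64 can become a typed __user pointer, after which per-element get_user() is ordinary pointer arithmetic. Hedged sketch (my_update stands in for the real io_uring_rsrc_update2):

```c
#include <linux/errno.h>
#include <linux/types.h>
#include <linux/uaccess.h>

struct my_update {
	__u64 tags;	/* u64-encoded pointer to an array of __u64 tags */
	__u64 data;	/* u64-encoded pointer to an array of __s32 fds  */
};

static int read_entry(const struct my_update *up, unsigned int idx,
		      u64 *tag, s32 *fd)
{
	u64 __user *tags = u64_to_user_ptr(up->tags);
	s32 __user *fds = u64_to_user_ptr(up->data);

	/* &tags[idx] scales by the pointee size, as with any C pointer. */
	if (get_user(*tag, &tags[idx]) || get_user(*fd, &fds[idx]))
		return -EFAULT;
	return 0;
}
```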
|
openclose.c
    68  fname = u64_to_user_ptr(READ_ONCE(sqe->addr));  in __io_openat_prep()
   104  how = u64_to_user_ptr(READ_ONCE(sqe->addr2));  in io_openat2_prep()
   323  p->fds = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_pipe_prep()
|
epoll.c
    43  ev = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_epoll_ctl_prep()
    75  iew->events = u64_to_user_ptr(READ_ONCE(sqe->addr));
|
rw.c
    54  struct compat_iovec __user *uiov = u64_to_user_ptr(rw->addr);  in io_iov_compat_buffer_select_prep()
    75  uiov = u64_to_user_ptr(rw->addr);  in io_iov_buffer_select_prep()
   115  void __user *buf = u64_to_user_ptr(rw->addr);  in __io_import_rw_buffer()
   232  if (copy_from_user(&pi_attr, u64_to_user_ptr(attr_ptr),  in io_prep_rw_pi()
   243  ret = import_ubuf(ddir, u64_to_user_ptr(pi_attr.addr),  in io_prep_rw_pi()
   414  uvec = u64_to_user_ptr(rw->addr);  in io_rw_prep_reg_vec()
   716  addr = u64_to_user_ptr(rw->addr);  in loop_rw_iter()
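Line 54 shows the compat side: the same u64 field can address either a native struct iovec or a 32-bit struct compat_iovec, and only the pointee type of the conversion changes. A sketch under that assumption (peek_iov_len and the is_compat flag are illustrative; the real code keys off the ring's compat state):

```c
#include <linux/compat.h>	/* struct compat_iovec */
#include <linux/errno.h>
#include <linux/uaccess.h>
#include <linux/uio.h>		/* struct iovec */

static int peek_iov_len(u64 addr, bool is_compat, size_t *len)
{
	if (is_compat) {
		struct compat_iovec __user *uiov = u64_to_user_ptr(addr);
		compat_ssize_t clen;

		if (get_user(clen, &uiov->iov_len))
			return -EFAULT;
		if (clen < 0)
			return -EINVAL;
		*len = clen;
	} else {
		struct iovec __user *uiov = u64_to_user_ptr(addr);
		size_t l;

		if (get_user(l, &uiov->iov_len))
			return -EFAULT;
		*len = l;
	}
	return 0;
}
```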
|
/linux/drivers/gpu/drm/xe/
xe_query.c
   135  query_ptr = u64_to_user_ptr(query->data);  in query_engine_cycles()
   190  u64_to_user_ptr(query->data);  in query_engines()
   252  u64_to_user_ptr(query->data);  in query_mem_regions()
   323  u64_to_user_ptr(query->data);  in query_config()
   369  u64_to_user_ptr(query->data);  in query_gt_list()
   441  void __user *query_ptr = u64_to_user_ptr(query->data);  in query_hwconfig()
   507  void __user *query_ptr = u64_to_user_ptr(query->data);  in query_gt_topology()
   566  struct drm_xe_query_uc_fw_version __user *query_ptr = u64_to_user_ptr(query->data);  in query_uc_fw_version()
   656  void __user *query_ptr = u64_to_user_ptr(query->data);  in query_oa_units()
   716  struct drm_xe_query_pxp_status __user *query_ptr = u64_to_user_ptr(query->data);  in query_pxp_status()
   [all …]
|
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_userq_fence.c
   431  syncobj_handles = memdup_user(u64_to_user_ptr(args->syncobj_handles),  in amdgpu_userq_signal_ioctl()
   452  bo_handles_read = memdup_user(u64_to_user_ptr(args->bo_read_handles),  in amdgpu_userq_signal_ioctl()
   475  bo_handles_write = memdup_user(u64_to_user_ptr(args->bo_write_handles),  in amdgpu_userq_signal_ioctl()
   613  bo_handles_read = memdup_user(u64_to_user_ptr(wait_info->bo_read_handles),  in amdgpu_userq_wait_ioctl()
   619  bo_handles_write = memdup_user(u64_to_user_ptr(wait_info->bo_write_handles),  in amdgpu_userq_wait_ioctl()
   627  syncobj_handles = memdup_user(u64_to_user_ptr(wait_info->syncobj_handles),  in amdgpu_userq_wait_ioctl()
   635  timeline_handles = memdup_user(u64_to_user_ptr(wait_info->syncobj_timeline_handles),  in amdgpu_userq_wait_ioctl()
   642  timeline_points = memdup_user(u64_to_user_ptr(wait_info->syncobj_timeline_points),  in amdgpu_userq_wait_ioctl()
   912  if (copy_to_user(u64_to_user_ptr(wait_info->out_fences),  in amdgpu_userq_wait_ioctl()
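These amdgpu ioctls take whole user arrays in one step: memdup_user() allocates kernel memory, copies from the u64-encoded address, and returns an ERR_PTR on failure. A minimal sketch (dup_handles and the parameters are illustrative):

```c
#include <linux/overflow.h>	/* array_size() */
#include <linux/string.h>	/* memdup_user() */
#include <linux/uaccess.h>

static u32 *dup_handles(u64 uptr, u32 count)
{
	/* array_size() saturates on multiply overflow instead of wrapping. */
	return memdup_user(u64_to_user_ptr(uptr),
			   array_size(count, sizeof(u32)));
}
```

Callers check the result with IS_ERR() and kfree() the copy when done.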
|
/linux/drivers/accel/amdxdna/
aie2_pci.c
   608  if (copy_from_user(&status, u64_to_user_ptr(args->buffer), sizeof(status))) {  in aie2_get_aie_status()
   619  ret = aie2_query_status(ndev, u64_to_user_ptr(status.buffer),  in aie2_get_aie_status()
   626  if (copy_to_user(u64_to_user_ptr(args->buffer), &status, sizeof(status))) {  in aie2_get_aie_status()
   672  if (copy_to_user(u64_to_user_ptr(args->buffer), meta, sizeof(*meta)))  in aie2_get_aie_metadata()
   690  if (copy_to_user(u64_to_user_ptr(args->buffer), &version, sizeof(version)))  in aie2_get_aie_version()
   707  if (copy_to_user(u64_to_user_ptr(args->buffer), &version, sizeof(version)))  in aie2_get_firmware_version()
   723  if (copy_to_user(u64_to_user_ptr(args->buffer), &mode, sizeof(mode)))  in aie2_get_power_mode()
   748  if (copy_to_user(u64_to_user_ptr(args->buffer), clock, sizeof(*clock)))  in aie2_get_clock_metadata()
   776  buf = u64_to_user_ptr(args->buffer);  in aie2_get_hwctx_status()
   864  if (copy_from_user(&power_state, u64_to_user_ptr(args->buffer),  in aie2_set_power_mode()
|
/linux/block/
blk-crypto.c
   490  if (copy_from_user(raw_key, u64_to_user_ptr(arg.raw_key_ptr),  in blk_crypto_ioctl_import_key()
   503  if (copy_to_user(u64_to_user_ptr(arg.lt_key_ptr), lt_key,  in blk_crypto_ioctl_import_key()
   538  if (copy_to_user(u64_to_user_ptr(arg.lt_key_ptr), lt_key,  in blk_crypto_ioctl_generate_key()
   568  if (copy_from_user(lt_key, u64_to_user_ptr(arg.lt_key_ptr),  in blk_crypto_ioctl_prepare_key()
   581  if (copy_to_user(u64_to_user_ptr(arg.eph_key_ptr), eph_key,  in blk_crypto_ioctl_prepare_key()
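The blk-crypto ioctls move key material through bounded local buffers addressed by u64 fields and wipe those buffers afterwards. A hedged sketch of the import step (MY_MAX_KEY_SIZE and import_raw_key are illustrative, not blk-crypto's names):

```c
#include <linux/errno.h>
#include <linux/string.h>	/* memzero_explicit() */
#include <linux/uaccess.h>

#define MY_MAX_KEY_SIZE	128	/* illustrative bound */

static int import_raw_key(u64 raw_key_ptr, size_t key_size)
{
	u8 raw_key[MY_MAX_KEY_SIZE];
	int err = 0;

	if (key_size > sizeof(raw_key))
		return -EINVAL;
	if (copy_from_user(raw_key, u64_to_user_ptr(raw_key_ptr), key_size))
		return -EFAULT;

	/* ... hand raw_key to the crypto layer ... */

	memzero_explicit(raw_key, sizeof(raw_key));
	return err;
}
```

memzero_explicit() is used instead of memset() so the compiler cannot optimize the wipe away.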
|
/linux/drivers/gpu/drm/qxl/
qxl_ioctl.c
   165  if (!access_ok(u64_to_user_ptr(cmd->command),  in qxl_process_single_command()
   187  u64_to_user_ptr(cmd->command), cmd->command_size);  in qxl_process_single_command()
   205  struct drm_qxl_reloc __user *u = u64_to_user_ptr(cmd->relocs);  in qxl_process_single_command()
   281  u64_to_user_ptr(execbuffer->commands);  in qxl_execbuffer_ioctl()
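qxl validates the whole u64-addressed command range once with access_ok() up front (line 165) before performing a series of copies over it. A minimal illustrative sketch:

```c
#include <linux/errno.h>
#include <linux/uaccess.h>

static int check_command_range(u64 command, u32 command_size)
{
	/* Range-check the user address once; later copies stay within it. */
	if (!access_ok(u64_to_user_ptr(command), command_size))
		return -EFAULT;
	/* ... chunked copies over the validated range follow ... */
	return 0;
}
```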
|
/linux/drivers/fwctl/
main.c
    66  if (clear_user(u64_to_user_ptr(cmd->out_device_data),  in fwctl_cmd_info()
    75  if (copy_to_user_zero_pad(u64_to_user_ptr(cmd->out_device_data),  in fwctl_cmd_info()
   120  if (copy_from_user(inbuf, u64_to_user_ptr(cmd->in), cmd->in_len))  in fwctl_cmd_rpc()
   133  if (copy_to_user(u64_to_user_ptr(cmd->out), outbuf,  in fwctl_cmd_rpc()
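Lines 66 and 75 implement forward-compatible output: copy as many bytes as the kernel has, then zero any remaining tail of the (possibly larger) user buffer, so newer userspace never sees stale garbage in fields this kernel does not know about. A sketch of that shape using plain clear_user() (copy_to_user_zero_pad() is fwctl's own helper; copy_out_padded below is illustrative):

```c
#include <linux/errno.h>
#include <linux/minmax.h>	/* min() */
#include <linux/uaccess.h>

static int copy_out_padded(u64 out_ptr, size_t user_len,
			   const void *data, size_t kern_len)
{
	size_t n = min(user_len, kern_len);

	if (copy_to_user(u64_to_user_ptr(out_ptr), data, n))
		return -EFAULT;
	/* Zero the tail the kernel has no data for. */
	if (user_len > n &&
	    clear_user(u64_to_user_ptr(out_ptr + n), user_len - n))
		return -EFAULT;
	return 0;
}
```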
|
/linux/drivers/infiniband/core/
uverbs_ioctl.c
   144  return ib_is_buffer_cleared(u64_to_user_ptr(uattr->data) + len,  in uverbs_is_attr_cleared()
   206  ret = copy_from_user(idr_vals, u64_to_user_ptr(uattr->data),  in uverbs_process_idrs_array()
   304  if (copy_from_user(p, u64_to_user_ptr(uattr->data),  in uverbs_process_attr()
   728  udata->inbuf = u64_to_user_ptr(in->ptr_attr.data);  in uverbs_fill_udata()
   735  udata->outbuf = u64_to_user_ptr(out->ptr_attr.data);  in uverbs_fill_udata()
   753  if (copy_to_user(u64_to_user_ptr(attr->ptr_attr.data), from, min_size))  in uverbs_copy_to()
   831  if (clear_user(u64_to_user_ptr(attr->ptr_attr.data) + size,  in uverbs_copy_to_struct_or_zero()
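uverbs applies the mirror-image rule on input (line 144): if userspace passes a struct larger than the kernel's, the unknown tail must be all zero or the request is rejected. A hedged sketch using check_zeroed_user() (copy_in_checked is illustrative; the real code goes through ib_is_buffer_cleared()):

```c
#include <linux/errno.h>
#include <linux/minmax.h>
#include <linux/uaccess.h>	/* check_zeroed_user() */

static int copy_in_checked(u64 uptr, size_t user_len,
			   void *dst, size_t kern_len)
{
	if (user_len > kern_len) {
		/* Returns 1 if the range is all zero, 0 if not, <0 on fault. */
		int ret = check_zeroed_user(u64_to_user_ptr(uptr) + kern_len,
					    user_len - kern_len);
		if (ret < 0)
			return ret;
		if (ret == 0)
			return -EOPNOTSUPP;	/* unknown trailing input */
	}
	return copy_from_user(dst, u64_to_user_ptr(uptr),
			      min(user_len, kern_len)) ? -EFAULT : 0;
}
```

This is essentially the policy that copy_struct_from_user() packages up for the common case.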
|
/linux/net/bpf/
bpf_dummy_struct_ops.c
    43  ctx_in = u64_to_user_ptr(kattr->test.ctx_in);  in dummy_ops_init_args()
    48  u_state = u64_to_user_ptr(args->args[0]);  in dummy_ops_init_args()
    63  u_state = u64_to_user_ptr(args->args[0]);  in dummy_ops_copy_args()
|
test_run.c
   453  void __user *data_out = u64_to_user_ptr(kattr->test.data_out);  in bpf_test_finish()
   665  void __user *data_in = u64_to_user_ptr(kattr->test.data_in);  in BTF_ID_FLAGS()
   760  void __user *ctx_in = u64_to_user_ptr(kattr->test.ctx_in);  in bpf_prog_test_run_raw_tp()
   816  void __user *data_in = u64_to_user_ptr(kattr->test.ctx_in);  in bpf_ctx_init()
   817  void __user *data_out = u64_to_user_ptr(kattr->test.ctx_out);  in bpf_ctx_init()
   849  void __user *data_out = u64_to_user_ptr(kattr->test.ctx_out);  in bpf_ctx_finish()
  1283  void __user *data_in = u64_to_user_ptr(kattr->test.data_in);  in bpf_prog_test_run_xdp()
  1540  void __user *ctx_in = u64_to_user_ptr(kattr->test.ctx_in);  in bpf_prog_test_run_syscall()
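bpf_ctx_init() (lines 816-817) turns an optional u64 pointer plus an explicit size into a kernel copy, rejecting mismatched combinations up front. A hedged sketch of the shape (ctx_init is illustrative; error handling simplified):

```c
#include <linux/err.h>
#include <linux/slab.h>
#include <linux/uaccess.h>

static void *ctx_init(u64 ctx_in, u32 size_in, u32 max_size)
{
	void __user *uctx = u64_to_user_ptr(ctx_in);
	void *ctx;

	if (!uctx != !size_in)		/* pointer and size must agree */
		return ERR_PTR(-EINVAL);
	if (size_in > max_size)
		return ERR_PTR(-E2BIG);

	ctx = kzalloc(max_size, GFP_KERNEL);
	if (!ctx)
		return ERR_PTR(-ENOMEM);
	if (size_in && copy_from_user(ctx, uctx, size_in)) {
		kfree(ctx);
		return ERR_PTR(-EFAULT);
	}
	return ctx;
}
```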
|
/linux/drivers/gpu/drm/virtio/
virtgpu_ioctl.c
   123  if (copy_to_user(u64_to_user_ptr(param->value), &value, sizeof(int)))  in virtio_gpu_getparam_ioctl()
   433  if (copy_to_user(u64_to_user_ptr(args->addr), ptr, size))  in virtio_gpu_get_caps_ioctl()
   520  buf = memdup_user(u64_to_user_ptr(rc_blob->cmd),  in virtio_gpu_resource_create_blob_ioctl()
   596  ctx_set_params = memdup_user(u64_to_user_ptr(args->ctx_set_params),  in virtio_gpu_context_init_ioctl()
   662  u64_to_user_ptr(value),  in virtio_gpu_context_init_ioctl()
|
virtgpu_submit.c
   118  u64_to_user_ptr(address),  in virtio_gpu_parse_deps()
   208  u64_to_user_ptr(address),  in virtio_gpu_parse_post_deps()
   311  if (copy_from_user(bo_handles, u64_to_user_ptr(exbuf->bo_handles),  in virtio_gpu_init_submit_buflist()
   416  submit->buf = vmemdup_user(u64_to_user_ptr(exbuf->command), exbuf->size);  in virtio_gpu_init_submit()
|
/linux/kernel/bpf/
mprog.c
   420  uprog_id = u64_to_user_ptr(attr->query.prog_ids);  in bpf_mprog_query()
   421  uprog_flags = u64_to_user_ptr(attr->query.prog_attach_flags);  in bpf_mprog_query()
   422  ulink_id = u64_to_user_ptr(attr->query.link_ids);  in bpf_mprog_query()
   423  ulink_flags = u64_to_user_ptr(attr->query.link_attach_flags);  in bpf_mprog_query()
|
syscall.c
  1662  void __user *ukey = u64_to_user_ptr(attr->key);  in map_lookup_elem()
  1663  void __user *uvalue = u64_to_user_ptr(attr->value);  in map_lookup_elem()
  1835  void __user *ukey = u64_to_user_ptr(attr->key);  in map_get_next_key()
  1836  void __user *unext_key = u64_to_user_ptr(attr->next_key);  in map_get_next_key()
  1893  void __user *keys = u64_to_user_ptr(attr->batch.keys);  in generic_map_delete_batch()
  1949  void __user *values = u64_to_user_ptr(attr->batch.values);  in generic_map_update_batch()
  1950  void __user *keys = u64_to_user_ptr(attr->batch.keys);  in generic_map_update_batch()
  2010  void __user *uobatch = u64_to_user_ptr(attr->batch.out_batch);  in generic_map_lookup_batch()
  2011  void __user *ubatch = u64_to_user_ptr(attr->batch.in_batch);  in generic_map_lookup_batch()
  2012  void __user *values = u64_to_user_ptr(attr->batch.values);  in generic_map_lookup_batch()
   [all …]
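In the bpf(2) syscall every userspace pointer in union bpf_attr is declared __aligned_u64, which is why each helper above starts by converting attr fields. A minimal lookup-shaped sketch (lookup_copy is illustrative; the real map_lookup_elem() adds flags, refcounting, and per-map-type paths):

```c
#include <linux/bpf.h>		/* union bpf_attr */
#include <linux/errno.h>
#include <linux/uaccess.h>

static int lookup_copy(const union bpf_attr *attr,
		       void *key, u32 key_size,
		       const void *value, u32 value_size)
{
	void __user *ukey = u64_to_user_ptr(attr->key);
	void __user *uvalue = u64_to_user_ptr(attr->value);

	if (copy_from_user(key, ukey, key_size))
		return -EFAULT;
	/* ... the map lookup fills 'value' here ... */
	if (copy_to_user(uvalue, value, value_size))
		return -EFAULT;
	return 0;
}
```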
|
/linux/arch/x86/kvm/svm/
sev.c
   520  if (copy_from_user(&data, u64_to_user_ptr(argp->data), sizeof(data)))  in sev_guest_init2()
   569  if (copy_from_user(&params, u64_to_user_ptr(argp->data), sizeof(params)))  in sev_launch_start()
   615  if (copy_to_user(u64_to_user_ptr(argp->data), &params, sizeof(params))) {  in sev_launch_start()
   774  if (copy_from_user(&params, u64_to_user_ptr(argp->data), sizeof(params)))  in sev_launch_update_data()
  1002  void __user *measure = u64_to_user_ptr(argp->data);  in sev_launch_measure()
  1021  p = u64_to_user_ptr(params.uaddr);  in sev_launch_measure()
  1092  if (copy_to_user(u64_to_user_ptr(argp->data), &params, sizeof(params)))  in sev_guest_status()
  1256  if (copy_from_user(&debug, u64_to_user_ptr(argp->data), sizeof(debug)))  in sev_dbg_crypt()
  1339  if (copy_from_user(&params, u64_to_user_ptr(argp->data), sizeof(params)))  in sev_launch_secret()
  1403  void __user *report = u64_to_user_ptr(argp->data);  in sev_get_attestation_report()
   [all …]
|
/linux/drivers/gpu/drm/
drm_property.c
   481  values_ptr = u64_to_user_ptr(out_resp->values_ptr);  in drm_mode_getproperty_ioctl()
   492  enum_ptr = u64_to_user_ptr(out_resp->enum_blob_ptr);  in drm_mode_getproperty_ioctl()
   828  if (copy_to_user(u64_to_user_ptr(out_resp->data),  in drm_mode_getblob_ioctl()
   857  u64_to_user_ptr(out_resp->data),  in drm_mode_createblob_ioctl()
|
/linux/include/linux/
util_macros.h
   145  #define u64_to_user_ptr(x) \   (macro definition)
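The listing ends at the definition site. In recent trees the macro expands to roughly the following (reconstructed from memory; verify against the tree you are browsing):

```c
#include <linux/typecheck.h>

#define u64_to_user_ptr(x)		\
({					\
	typecheck(u64, (x));		\
	(void __user *)(uintptr_t)(x);	\
})
```

typecheck() forces callers to hand over a genuine u64, catching accidental sign-extending conversions from narrower or signed types, and the intermediate uintptr_t cast keeps 32-bit builds from warning about a cast between pointer and integer of different size.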
|