Searched refs:kdata (Results 1 – 20 of 20) sorted by relevance

/linux/kernel/
capability.c
143 struct __user_cap_data_struct kdata[2]; in SYSCALL_DEFINE2()
164 kdata[0].effective = pE.val; kdata[1].effective = pE.val >> 32; in SYSCALL_DEFINE2()
165 kdata[0].permitted = pP.val; kdata[1].permitted = pP.val >> 32; in SYSCALL_DEFINE2()
166 kdata[0].inheritable = pI.val; kdata[1].inheritable = pI.val >> 32; in SYSCALL_DEFINE2()
187 if (copy_to_user(dataptr, kdata, tocopy * sizeof(kdata[0]))) in SYSCALL_DEFINE2()
218 struct __user_cap_data_struct kdata[
147 struct __user_cap_data_struct kdata[2]; SYSCALL_DEFINE2() local
222 struct __user_cap_data_struct kdata[2] = { { 0, }, }; SYSCALL_DEFINE2() local
[all...]
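The split on lines 164–166 is visible from user space: with _LINUX_CAPABILITY_VERSION_3 each 64-bit capability set is returned as two 32-bit words, one per kdata element. A minimal user-space sketch of the consuming side, assuming only the uapi capability header and the raw capget syscall (error handling trimmed):

#include <stdio.h>
#include <unistd.h>
#include <sys/syscall.h>
#include <linux/capability.h>

int main(void)
{
    struct __user_cap_header_struct hdr = {
        .version = _LINUX_CAPABILITY_VERSION_3, /* 64-bit sets, two data words */
        .pid = 0,                               /* current task */
    };
    struct __user_cap_data_struct data[2] = { { 0 } };

    if (syscall(SYS_capget, &hdr, data) != 0) {
        perror("capget");
        return 1;
    }

    /* Reassemble the 64-bit effective set from the two 32-bit halves,
     * mirroring the split done in kernel/capability.c. */
    unsigned long long eff =
        ((unsigned long long)data[1].effective << 32) | data[0].effective;
    printf("effective capabilities: 0x%llx\n", eff);
    return 0;
}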
/linux/drivers/xen/
privcmd.c
634 struct privcmd_dm_op kdata; in privcmd_ioctl_dm_op() local
643 if (copy_from_user(&kdata, udata, sizeof(kdata))) in privcmd_ioctl_dm_op()
647 if (data->domid != DOMID_INVALID && data->domid != kdata.dom) in privcmd_ioctl_dm_op()
650 if (kdata.num == 0) in privcmd_ioctl_dm_op()
653 if (kdata.num > privcmd_dm_op_max_num) in privcmd_ioctl_dm_op()
656 kbufs = kcalloc(kdata.num, sizeof(*kbufs), GFP_KERNEL); in privcmd_ioctl_dm_op()
660 if (copy_from_user(kbufs, kdata.ubufs, in privcmd_ioctl_dm_op()
661 sizeof(*kbufs) * kdata.num)) { in privcmd_ioctl_dm_op()
666 for (i = 0; i < kdata.num; i++) { in privcmd_ioctl_dm_op()
689 xbufs = kcalloc(kdata.num, sizeof(*xbufs), GFP_KERNEL); in privcmd_ioctl_dm_op()
[all …]
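The privcmd handler shows a common two-stage copy-in: the fixed-size header (kdata) is copied first, its element count is validated against a cap, and only then is a kcalloc'd array of descriptors copied from the user pointer the header carries. A hedged kernel-style sketch of that pattern, with hypothetical op_header/op_buf types and a made-up helper name standing in for the privcmd structures:

#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/uaccess.h>

struct op_buf {                                 /* hypothetical descriptor */
    void __user *uptr;
    size_t size;
};

struct op_header {                              /* hypothetical fixed header */
    unsigned int num;                           /* number of descriptors */
    const struct op_buf __user *ubufs;          /* user array of descriptors */
};

static int copy_op_in(void __user *udata, struct op_buf **out,
                      unsigned int max_num)
{
    struct op_header hdr;                       /* fixed size, safe on the stack */
    struct op_buf *bufs;

    if (copy_from_user(&hdr, udata, sizeof(hdr)))
        return -EFAULT;

    if (hdr.num == 0 || hdr.num > max_num)      /* validate before allocating */
        return -EINVAL;

    bufs = kcalloc(hdr.num, sizeof(*bufs), GFP_KERNEL);
    if (!bufs)
        return -ENOMEM;

    if (copy_from_user(bufs, hdr.ubufs, sizeof(*bufs) * hdr.num)) {
        kfree(bufs);
        return -EFAULT;
    }

    *out = bufs;
    return hdr.num;
}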
/linux/kernel/trace/
trace_hwlat.c
163 struct hwlat_kthread_data *kdata = get_cpu_data(); in trace_hwlat_callback() local
165 if (!kdata->kthread) in trace_hwlat_callback()
174 kdata->nmi_ts_start = time_get(); in trace_hwlat_callback()
176 kdata->nmi_total_ts += time_get() - kdata->nmi_ts_start; in trace_hwlat_callback()
180 kdata->nmi_count++; in trace_hwlat_callback()
201 struct hwlat_kthread_data *kdata = get_cpu_data(); in get_sample() local
214 kdata->nmi_total_ts = 0; in get_sample()
215 kdata->nmi_count = 0; in get_sample()
285 if (kdata->nmi_total_ts) in get_sample()
286 do_div(kdata->nmi_total_ts, NSEC_PER_USEC); in get_sample()
[all …]
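Line 286 is the usual way to turn an accumulated u64 nanosecond total into microseconds: do_div() divides the 64-bit value in place (needed because the kernel cannot rely on the compiler's 64-bit division helpers on 32-bit architectures) and returns the remainder. A minimal sketch of the semantics, assuming the standard helpers:

#include <linux/math64.h>   /* do_div() */
#include <linux/time64.h>   /* NSEC_PER_USEC */

static u64 ns_total_to_us(u64 total_ns)
{
    u32 rem;

    rem = do_div(total_ns, NSEC_PER_USEC);  /* total_ns is now microseconds */
    (void)rem;                              /* leftover nanoseconds, ignored here */
    return total_ns;
}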
/linux/net/ipv4/
bpf_tcp_ca.c
212 void *kdata, const void *udata) in bpf_tcp_ca_init_member() argument
219 tcp_ca = (struct tcp_congestion_ops *)kdata; in bpf_tcp_ca_init_member()
238 static int bpf_tcp_ca_reg(void *kdata, struct bpf_link *link) in bpf_tcp_ca_reg() argument
240 return tcp_register_congestion_control(kdata); in bpf_tcp_ca_reg()
243 static void bpf_tcp_ca_unreg(void *kdata, struct bpf_link *link) in bpf_tcp_ca_unreg() argument
245 tcp_unregister_congestion_control(kdata); in bpf_tcp_ca_unreg()
248 static int bpf_tcp_ca_update(void *kdata, void *old_kdata, struct bpf_link *link) in bpf_tcp_ca_update() argument
250 return tcp_update_congestion_control(kdata, old_kdata); in bpf_tcp_ca_update()
253 static int bpf_tcp_ca_validate(void *kdata) in bpf_tcp_ca_validate() argument
255 return tcp_validate_congestion_control(kdata); in bpf_tcp_ca_validate()
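Across the struct_ops users in these results (tcp_ca here, plus the hid-bpf, sched_ext, dummy and test-module entries below), kdata is the kernel-resident copy of the struct_ops map value, already laid out as the concrete ops structure, so reg/unreg simply cast it and hand it to the subsystem. A hedged sketch of that shape, with a hypothetical subsystem standing in for tcp_congestion_ops:

#include <linux/bpf.h>      /* struct bpf_link */
#include <linux/types.h>

struct my_subsys_ops {                          /* hypothetical subsystem ops */
    u32 flags;
    int (*do_thing)(int arg);
};

/* Assumed subsystem entry points, stand-ins for
 * tcp_register/unregister_congestion_control(). */
int my_subsys_register(struct my_subsys_ops *ops);
void my_subsys_unregister(struct my_subsys_ops *ops);

static int my_st_ops_reg(void *kdata, struct bpf_link *link)
{
    return my_subsys_register(kdata);   /* kdata already is the ops struct */
}

static void my_st_ops_unreg(void *kdata, struct bpf_link *link)
{
    my_subsys_unregister(kdata);
}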
/linux/drivers/dma-buf/
dma-heap.c
129 char *kdata = stack_kdata; in dma_heap_ioctl() local
154 kdata = kmalloc(ksize, GFP_KERNEL); in dma_heap_ioctl()
155 if (!kdata) in dma_heap_ioctl()
159 if (copy_from_user(kdata, (void __user *)arg, in_size) != 0) { in dma_heap_ioctl()
166 memset(kdata + in_size, 0, ksize - in_size); in dma_heap_ioctl()
170 ret = dma_heap_ioctl_allocate(file, kdata); in dma_heap_ioctl()
177 if (copy_to_user((void __user *)arg, kdata, out_size) != 0) in dma_heap_ioctl()
180 if (kdata != stack_kdata) in dma_heap_ioctl()
181 kfree(kdata); in dma_heap_ioctl()
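dma-heap (and, further down, the habanalabs and amdkfd ioctl entry points) uses the same kdata marshalling idiom: small argument blocks live in a stack buffer, larger ones fall back to kmalloc(), the tail beyond what user space supplied is zeroed, and only the heap copy is freed. A hedged sketch of that idiom; the helper name, the 128-byte threshold and the size parameters are assumptions, and callers are expected to keep in_size and out_size within ksize:

#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>

static long ioctl_copy_args(void __user *arg, unsigned int in_size,
                            unsigned int out_size, unsigned int ksize,
                            long (*handler)(void *kdata))
{
    char stack_kdata[128];
    char *kdata = stack_kdata;
    long ret;

    if (ksize > sizeof(stack_kdata)) {          /* large payloads go to the heap */
        kdata = kmalloc(ksize, GFP_KERNEL);
        if (!kdata)
            return -ENOMEM;
    }

    ret = -EFAULT;
    if (copy_from_user(kdata, arg, in_size))
        goto out;

    /* Zero anything user space did not supply so the handler never sees
     * stale stack or heap contents. */
    memset(kdata + in_size, 0, ksize - in_size);

    ret = handler(kdata);
    if (ret)
        goto out;

    if (copy_to_user(arg, kdata, out_size))
        ret = -EFAULT;
out:
    if (kdata != stack_kdata)                   /* only the heap copy is freed */
        kfree(kdata);
    return ret;
}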
/linux/tools/testing/selftests/bpf/test_kmods/
bpf_test_no_cfi.c
20 void *kdata, const void *udata) in dummy_init_member() argument
25 static int dummy_reg(void *kdata, struct bpf_link *link) in dummy_reg() argument
30 static void dummy_unreg(void *kdata, struct bpf_link *link) in dummy_unreg() argument
bpf_testmod.c
1113 void *kdata, const void *udata) in bpf_testmod_ops_init_member() argument
1121 ((struct bpf_testmod_ops *)kdata)->data = ((struct bpf_testmod_ops *)udata)->data; in bpf_testmod_ops_init_member()
1140 static int bpf_dummy_reg(void *kdata, struct bpf_link *link) in bpf_dummy_reg() argument
1142 struct bpf_testmod_ops *ops = kdata; in bpf_dummy_reg()
1155 static void bpf_dummy_unreg(void *kdata, struct bpf_link *link) in bpf_dummy_unreg() argument
1196 static int bpf_dummy_reg2(void *kdata, struct bpf_link *link) in bpf_dummy_reg2() argument
1198 struct bpf_testmod_ops2 *ops = kdata; in bpf_dummy_reg2()
1219 static int st_ops3_reg(void *kdata, struct bpf_link *link) in st_ops3_reg() argument
1229 st_ops3 = kdata; in st_ops3_reg()
1236 static void st_ops3_unreg(void *kdata, struct bpf_link *link) in st_ops3_unreg() argument
[all …]
/linux/drivers/hid/bpf/
hid_bpf_struct_ops.c
151 void *kdata, const void *udata) in hid_bpf_ops_init_member() argument
158 khid_bpf_ops = (struct hid_bpf_ops *)kdata; in hid_bpf_ops_init_member()
180 static int hid_bpf_reg(void *kdata, struct bpf_link *link) in hid_bpf_reg() argument
182 struct hid_bpf_ops *ops = kdata; in hid_bpf_reg()
239 static void hid_bpf_unreg(void *kdata, struct bpf_link *link) in hid_bpf_unreg() argument
241 struct hid_bpf_ops *ops = kdata; in hid_bpf_unreg()
/linux/drivers/gpu/drm/radeon/
radeon_cs.c
109 r = (struct drm_radeon_cs_reloc *)&chunk->kdata[i*4]; in radeon_cs_parser_relocs()
348 p->chunks[i].kdata = kvmalloc_array(size, sizeof(uint32_t), GFP_KERNEL); in radeon_cs_parser_init()
350 if (p->chunks[i].kdata == NULL) { in radeon_cs_parser_init()
353 if (copy_from_user(p->chunks[i].kdata, cdata, size)) { in radeon_cs_parser_init()
357 p->cs_flags = p->chunks[i].kdata[0]; in radeon_cs_parser_init()
359 ring = p->chunks[i].kdata[1]; in radeon_cs_parser_init()
361 priority = (s32)p->chunks[i].kdata[2]; in radeon_cs_parser_init()
459 kvfree(parser->chunks[i].kdata); in radeon_cs_parser_fini()
663 if (ib_chunk->kdata) in radeon_cs_ib_fill()
664 memcpy(parser->ib.ptr, ib_chunk->kdata, ib_chunk->length_dw * 4); in radeon_cs_ib_fill()
[all …]
radeon.h
1014 uint32_t *kdata; member
1054 if (ibc->kdata) in radeon_get_ib_value()
1055 return ibc->kdata[idx]; in radeon_get_ib_value()
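Here kdata is a kernel copy of a user-supplied command-stream chunk: radeon_cs_parser_init() duplicates the dword array with kvmalloc_array() (which may fall back to vmalloc for large chunks), radeon_get_ib_value() prefers that copy when it exists, and radeon_cs_parser_fini() releases it with kvfree(). A hedged sketch of the copy-in step; the type and helper names are illustrative, not the radeon ones:

#include <linux/errno.h>
#include <linux/slab.h>     /* kvmalloc_array(), kvfree() */
#include <linux/uaccess.h>

struct chunk {
    u32 *kdata;             /* kernel copy of the user dwords, or NULL */
    u32 length_dw;
};

static int chunk_copy_in(struct chunk *c, const void __user *udata, u32 ndw)
{
    c->kdata = kvmalloc_array(ndw, sizeof(u32), GFP_KERNEL);
    if (!c->kdata)
        return -ENOMEM;

    if (copy_from_user(c->kdata, udata, ndw * sizeof(u32))) {
        kvfree(c->kdata);
        c->kdata = NULL;
        return -EFAULT;
    }

    c->length_dw = ndw;
    return 0;
}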
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_cs.c
227 p->chunks[i].kdata = kvmalloc_array(size, sizeof(uint32_t), in amdgpu_cs_pass1()
229 if (p->chunks[i].kdata == NULL) { in amdgpu_cs_pass1()
235 if (copy_from_user(p->chunks[i].kdata, cdata, size)) { in amdgpu_cs_pass1()
247 ret = amdgpu_cs_p1_ib(p, p->chunks[i].kdata, num_ibs); in amdgpu_cs_pass1()
256 ret = amdgpu_cs_p1_user_fence(p, p->chunks[i].kdata, in amdgpu_cs_pass1()
270 ret = amdgpu_cs_p1_bo_handles(p, p->chunks[i].kdata); in amdgpu_cs_pass1()
321 kvfree(p->chunks[i].kdata); in amdgpu_cs_pass1()
336 struct drm_amdgpu_cs_chunk_ib *chunk_ib = chunk->kdata; in amdgpu_cs_p2_ib()
389 struct drm_amdgpu_cs_chunk_dep *deps = chunk->kdata; in amdgpu_cs_p2_dependencies()
461 struct drm_amdgpu_cs_chunk_sem *deps = chunk->kdata; in amdgpu_cs_p2_syncobj_in()
[all …]
amdgpu_cs.h
40 void *kdata; member
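In amdgpu the per-chunk kdata (the void * member above) is a kernel copy of user dwords that later passes reinterpret: the same blob is read as an IB, a dependency list or a syncobj list depending on the chunk id. A hedged sketch of that dispatch, with illustrative types in place of the drm_amdgpu_cs_chunk_* structures:

#include <linux/errno.h>
#include <linux/types.h>

enum chunk_id { CHUNK_IB, CHUNK_DEP };          /* illustrative ids */

struct chunk_ib  { u64 va_start; u32 ib_bytes; };
struct chunk_dep { u32 handle; };

struct chunk {
    enum chunk_id id;
    void *kdata;            /* kernel copy of this chunk's payload */
};

static int parse_chunk(const struct chunk *c)
{
    switch (c->id) {
    case CHUNK_IB: {
        const struct chunk_ib *ib = c->kdata;   /* view kdata as an IB chunk */

        return ib->ib_bytes ? 0 : -EINVAL;
    }
    case CHUNK_DEP: {
        const struct chunk_dep *dep = c->kdata; /* view kdata as a dependency */

        return dep->handle ? 0 : -EINVAL;
    }
    }
    return -EINVAL;
}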
/linux/kernel/bpf/
bpf_struct_ops.c
665 void *udata, *kdata; in bpf_struct_ops_map_update_elem() local
708 kdata = &kvalue->data; in bpf_struct_ops_map_update_elem()
727 *(void **)(kdata + moff) = BPF_MODULE_OWNER; in bpf_struct_ops_map_update_elem()
731 err = st_ops->init_member(t, member, kdata, udata); in bpf_struct_ops_map_update_elem()
814 *(void **)(kdata + moff) = image + trampoline_start + cfi_get_offset(); in bpf_struct_ops_map_update_elem()
827 err = st_ops->validate(kdata); in bpf_struct_ops_map_update_elem()
848 err = st_ops->reg(kdata, NULL); in bpf_struct_ops_map_update_elem()
1131 bool bpf_struct_ops_get(const void *kdata) in bpf_struct_ops_get() argument
1137 kvalue = container_of(kdata, struct bpf_struct_ops_value, data); in bpf_struct_ops_get()
1144 void bpf_struct_ops_put(const void *kdata) in bpf_struct_ops_put() argument
[all …]
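The get/put helpers at the bottom rely on kdata pointing at a member embedded inside a larger wrapper value, so container_of() can walk back to the wrapper and its refcount. A hedged sketch of that step; the wrapper layout here is illustrative, not the real bpf_struct_ops_value:

#include <linux/container_of.h>
#include <linux/refcount.h>
#include <linux/types.h>

struct ops_value {                      /* illustrative wrapper */
    refcount_t refcnt;
    char data[];                        /* the kdata handed out points here */
};

static bool ops_value_get(const void *kdata)
{
    struct ops_value *v;

    /* Subtract offsetof(struct ops_value, data) to recover the wrapper. */
    v = container_of(kdata, struct ops_value, data);
    return refcount_inc_not_zero(&v->refcnt);
}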
/linux/arch/arm64/kernel/
ptrace.c
2102 static int compat_ptrace_hbp_get_resource_info(u32 *kdata) argument
2120 *kdata = reg; in compat_ptrace_hbp_get_resource_info()
2127 u32 *kdata) in compat_ptrace_hbp_get() argument
2136 *kdata = (u32)addr; in compat_ptrace_hbp_get()
2139 *kdata = ctrl; in compat_ptrace_hbp_get()
2148 u32 *kdata) in compat_ptrace_hbp_set() argument
2156 addr = *kdata; in compat_ptrace_hbp_set()
2159 ctrl = *kdata; in compat_ptrace_hbp_set()
2170 u32 kdata; in compat_ptrace_gethbpregs() local
2174 ret = compat_ptrace_hbp_get(NT_ARM_HW_WATCH, tsk, num, &kdata); in compat_ptrace_gethbpregs()
[all …]
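The compat helpers here return a single register-sized word through a u32 *kdata out-parameter; the ptrace entry point (partly elided above) then typically forwards that word to the 32-bit user buffer with put_user(). A hedged sketch of that flow, with an illustrative reader in place of the real breakpoint accessors:

#include <linux/types.h>
#include <linux/uaccess.h>

/* Illustrative stand-in for the real breakpoint register read. */
static int hbp_read_word(unsigned int idx, u32 *kdata)
{
    *kdata = idx;                       /* pretend register contents */
    return 0;
}

static int hbp_get_to_user(unsigned int idx, u32 __user *uptr)
{
    u32 kdata;
    int ret;

    ret = hbp_read_word(idx, &kdata);
    if (!ret)
        ret = put_user(kdata, uptr);    /* copy the word to compat user space */
    return ret;
}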
/linux/net/bpf/
bpf_dummy_struct_ops.c
270 void *kdata, const void *udata) in bpf_dummy_init_member() argument
275 static int bpf_dummy_reg(void *kdata, struct bpf_link *link) in bpf_dummy_reg() argument
280 static void bpf_dummy_unreg(void *kdata, struct bpf_link *link) in bpf_dummy_unreg() argument
/linux/drivers/net/ethernet/netronome/nfp/flower/
conntrack.c
821 u8 *key, *msk, *kdata, *mdata; in nfp_fl_ct_add_offload() local
868 kdata = flow_pay->unmasked_data; in nfp_fl_ct_add_offload()
872 key = kdata + offset; in nfp_fl_ct_add_offload()
880 key = kdata + offset; in nfp_fl_ct_add_offload()
893 key = kdata + offset; in nfp_fl_ct_add_offload()
916 key = kdata + offset; in nfp_fl_ct_add_offload()
926 key = kdata + offset; in nfp_fl_ct_add_offload()
942 key = kdata + offset; in nfp_fl_ct_add_offload()
953 key = kdata + offset; in nfp_fl_ct_add_offload()
964 key = kdata + offset; in nfp_fl_ct_add_offload()
[all …]
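In the flower conntrack offload, kdata is the base of a flat unmasked-key buffer (with mdata as its mask twin): each match layer is written at kdata + offset and the offset then advances by that layer's size. A hedged sketch of the packing idiom with illustrative layer types:

#include <linux/string.h>
#include <linux/types.h>

struct meta_layer { __be32 meta; };             /* illustrative match layers */
struct port_layer { __be16 src; __be16 dst; };

static size_t pack_key(u8 *kdata, const struct meta_layer *meta,
                       const struct port_layer *port)
{
    size_t offset = 0;

    memcpy(kdata + offset, meta, sizeof(*meta));    /* layer 1 at offset 0 */
    offset += sizeof(*meta);

    memcpy(kdata + offset, port, sizeof(*port));    /* layer 2 follows */
    offset += sizeof(*port);

    return offset;                                  /* total key length */
}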
/linux/drivers/accel/habanalabs/common/
habanalabs_ioctl.c
1235 char *kdata = NULL; in _hl_ioctl() local
1259 kdata = stack_kdata; in _hl_ioctl()
1261 kdata = kzalloc(asize, GFP_KERNEL); in _hl_ioctl()
1262 if (!kdata) { in _hl_ioctl()
1270 if (copy_from_user(kdata, (void __user *)arg, usize)) { in _hl_ioctl()
1276 retcode = func(hpriv, kdata); in _hl_ioctl()
1278 if ((cmd & IOC_OUT) && copy_to_user((void __user *)arg, kdata, usize)) in _hl_ioctl()
1287 if (kdata != stack_kdata) in _hl_ioctl()
1288 kfree(kdata); in _hl_ioctl()
/linux/drivers/gpu/drm/amd/amdkfd/
kfd_chardev.c
3241 char *kdata = NULL; in kfd_ioctl() local
3308 kdata = stack_kdata; in kfd_ioctl()
3310 kdata = kmalloc(asize, GFP_KERNEL); in kfd_ioctl()
3311 if (!kdata) { in kfd_ioctl()
3317 memset(kdata + usize, 0, asize - usize); in kfd_ioctl()
3321 if (copy_from_user(kdata, (void __user *)arg, usize) != 0) { in kfd_ioctl()
3326 memset(kdata, 0, usize); in kfd_ioctl()
3329 retcode = func(filep, process, kdata); in kfd_ioctl()
3332 if (copy_to_user((void __user *)arg, kdata, usize) != 0) in kfd_ioctl()
3340 if (kdata != stack_kdata) in kfd_ioctl()
[all …]
/linux/include/linux/
bpf.h
1802 void *kdata, const void *udata);
1803 int (*reg)(void *kdata, struct bpf_link *link);
1804 void (*unreg)(void *kdata, struct bpf_link *link);
1805 int (*update)(void *kdata, void *old_kdata, struct bpf_link *link);
1806 int (*validate)(void *kdata);
1865 bool bpf_struct_ops_get(const void *kdata);
1866 void bpf_struct_ops_put(const void *kdata);
/linux/kernel/sched/
ext.c
5936 void *kdata, const void *udata) in bpf_scx_init_member() argument
5939 struct sched_ext_ops *ops = kdata; in bpf_scx_init_member()
6006 static int bpf_scx_reg(void *kdata, struct bpf_link *link) in bpf_scx_reg() argument
6008 return scx_ops_enable(kdata, link); in bpf_scx_reg()
6011 static void bpf_scx_unreg(void *kdata, struct bpf_link *link) in bpf_scx_unreg() argument
6024 static int bpf_scx_update(void *kdata, void *old_kdata, struct bpf_link *link) in bpf_scx_update() argument
6036 static int bpf_scx_validate(void *kdata) in bpf_scx_validate() argument