Home
last modified time | relevance | path

Searched refs: kdata (Results 1 – 17 of 17) sorted by relevance

/Linux-v5.15/drivers/xen/
Dprivcmd.c618 struct privcmd_dm_op kdata; in privcmd_ioctl_dm_op() local
627 if (copy_from_user(&kdata, udata, sizeof(kdata))) in privcmd_ioctl_dm_op()
631 if (data->domid != DOMID_INVALID && data->domid != kdata.dom) in privcmd_ioctl_dm_op()
634 if (kdata.num == 0) in privcmd_ioctl_dm_op()
637 if (kdata.num > privcmd_dm_op_max_num) in privcmd_ioctl_dm_op()
640 kbufs = kcalloc(kdata.num, sizeof(*kbufs), GFP_KERNEL); in privcmd_ioctl_dm_op()
644 if (copy_from_user(kbufs, kdata.ubufs, in privcmd_ioctl_dm_op()
645 sizeof(*kbufs) * kdata.num)) { in privcmd_ioctl_dm_op()
650 for (i = 0; i < kdata.num; i++) { in privcmd_ioctl_dm_op()
673 xbufs = kcalloc(kdata.num, sizeof(*xbufs), GFP_KERNEL); in privcmd_ioctl_dm_op()
[all …]
/Linux-v5.15/kernel/trace/
Dtrace_hwlat.c165 struct hwlat_kthread_data *kdata = get_cpu_data(); in trace_hwlat_callback() local
167 if (!kdata->kthread) in trace_hwlat_callback()
176 kdata->nmi_ts_start = time_get(); in trace_hwlat_callback()
178 kdata->nmi_total_ts += time_get() - kdata->nmi_ts_start; in trace_hwlat_callback()
182 kdata->nmi_count++; in trace_hwlat_callback()
203 struct hwlat_kthread_data *kdata = get_cpu_data(); in get_sample() local
216 kdata->nmi_total_ts = 0; in get_sample()
217 kdata->nmi_count = 0; in get_sample()
287 if (kdata->nmi_total_ts) in get_sample()
288 do_div(kdata->nmi_total_ts, NSEC_PER_USEC); in get_sample()
[all …]
/Linux-v5.15/kernel/
Dcapability.c167 struct __user_cap_data_struct kdata[_KERNEL_CAPABILITY_U32S]; in SYSCALL_DEFINE2() local
171 kdata[i].effective = pE.cap[i]; in SYSCALL_DEFINE2()
172 kdata[i].permitted = pP.cap[i]; in SYSCALL_DEFINE2()
173 kdata[i].inheritable = pI.cap[i]; in SYSCALL_DEFINE2()
195 if (copy_to_user(dataptr, kdata, tocopy in SYSCALL_DEFINE2()
224 struct __user_cap_data_struct kdata[_KERNEL_CAPABILITY_U32S]; in SYSCALL_DEFINE2() local
243 if (copybytes > sizeof(kdata)) in SYSCALL_DEFINE2()
246 if (copy_from_user(&kdata, data, copybytes)) in SYSCALL_DEFINE2()
250 effective.cap[i] = kdata[i].effective; in SYSCALL_DEFINE2()
251 permitted.cap[i] = kdata[i].permitted; in SYSCALL_DEFINE2()
[all …]
/Linux-v5.15/drivers/dma-buf/
Ddma-heap.c129 char *kdata = stack_kdata; in dma_heap_ioctl() local
153 kdata = kmalloc(ksize, GFP_KERNEL); in dma_heap_ioctl()
154 if (!kdata) in dma_heap_ioctl()
158 if (copy_from_user(kdata, (void __user *)arg, in_size) != 0) { in dma_heap_ioctl()
165 memset(kdata + in_size, 0, ksize - in_size); in dma_heap_ioctl()
169 ret = dma_heap_ioctl_allocate(file, kdata); in dma_heap_ioctl()
176 if (copy_to_user((void __user *)arg, kdata, out_size) != 0) in dma_heap_ioctl()
179 if (kdata != stack_kdata) in dma_heap_ioctl()
180 kfree(kdata); in dma_heap_ioctl()
/Linux-v5.15/drivers/gpu/drm/
Ddrm_ioctl.c778 long drm_ioctl_kernel(struct file *file, drm_ioctl_t *func, void *kdata, in drm_ioctl_kernel() argument
795 retcode = func(dev, kdata, file_priv); in drm_ioctl_kernel()
798 retcode = func(dev, kdata, file_priv); in drm_ioctl_kernel()
828 char *kdata = NULL; in drm_ioctl() local
881 kdata = stack_kdata; in drm_ioctl()
883 kdata = kmalloc(ksize, GFP_KERNEL); in drm_ioctl()
884 if (!kdata) { in drm_ioctl()
890 if (copy_from_user(kdata, (void __user *)arg, in_size) != 0) { in drm_ioctl()
896 memset(kdata + in_size, 0, ksize - in_size); in drm_ioctl()
898 retcode = drm_ioctl_kernel(filp, func, kdata, ioctl->flags); in drm_ioctl()
[all …]
/Linux-v5.15/kernel/bpf/
Dbpf_struct_ops.c324 void *udata, *kdata; in bpf_struct_ops_map_update_elem() local
364 kdata = &kvalue->data; in bpf_struct_ops_map_update_elem()
378 *(void **)(kdata + moff) = BPF_MODULE_OWNER; in bpf_struct_ops_map_update_elem()
382 err = st_ops->init_member(t, member, kdata, udata); in bpf_struct_ops_map_update_elem()
444 *(void **)(kdata + moff) = image; in bpf_struct_ops_map_update_elem()
456 err = st_ops->reg(kdata); in bpf_struct_ops_map_update_elem()
620 bool bpf_struct_ops_get(const void *kdata) in bpf_struct_ops_get() argument
624 kvalue = container_of(kdata, struct bpf_struct_ops_value, data); in bpf_struct_ops_get()
637 void bpf_struct_ops_put(const void *kdata) in bpf_struct_ops_put() argument
641 kvalue = container_of(kdata, struct bpf_struct_ops_value, data); in bpf_struct_ops_put()
/Linux-v5.15/net/ipv4/
Dbpf_tcp_ca.c272 void *kdata, const void *udata) in bpf_tcp_ca_init_member() argument
280 tcp_ca = (struct tcp_congestion_ops *)kdata; in bpf_tcp_ca_init_member()
317 static int bpf_tcp_ca_reg(void *kdata) in bpf_tcp_ca_reg() argument
319 return tcp_register_congestion_control(kdata); in bpf_tcp_ca_reg()
322 static void bpf_tcp_ca_unreg(void *kdata) in bpf_tcp_ca_unreg() argument
324 tcp_unregister_congestion_control(kdata); in bpf_tcp_ca_unreg()
/Linux-v5.15/drivers/gpu/drm/radeon/
Dradeon_cs.c109 r = (struct drm_radeon_cs_reloc *)&chunk->kdata[i*4]; in radeon_cs_parser_relocs()
350 p->chunks[i].kdata = kvmalloc_array(size, sizeof(uint32_t), GFP_KERNEL); in radeon_cs_parser_init()
352 if (p->chunks[i].kdata == NULL) { in radeon_cs_parser_init()
355 if (copy_from_user(p->chunks[i].kdata, cdata, size)) { in radeon_cs_parser_init()
359 p->cs_flags = p->chunks[i].kdata[0]; in radeon_cs_parser_init()
361 ring = p->chunks[i].kdata[1]; in radeon_cs_parser_init()
363 priority = (s32)p->chunks[i].kdata[2]; in radeon_cs_parser_init()
454 kvfree(parser->chunks[i].kdata); in radeon_cs_parser_fini()
654 if (ib_chunk->kdata) in radeon_cs_ib_fill()
655 memcpy(parser->ib.ptr, ib_chunk->kdata, ib_chunk->length_dw * 4); in radeon_cs_ib_fill()
[all …]
Dradeon.h1052 uint32_t *kdata; member
1092 if (ibc->kdata) in radeon_get_ib_value()
1093 return ibc->kdata[idx]; in radeon_get_ib_value()
/Linux-v5.15/arch/arm64/kernel/
Dptrace.c1573 static int compat_ptrace_hbp_get_resource_info(u32 *kdata) in compat_ptrace_hbp_get_resource_info() argument
1591 *kdata = reg; in compat_ptrace_hbp_get_resource_info()
1598 u32 *kdata) in compat_ptrace_hbp_get() argument
1607 *kdata = (u32)addr; in compat_ptrace_hbp_get()
1610 *kdata = ctrl; in compat_ptrace_hbp_get()
1619 u32 *kdata) in compat_ptrace_hbp_set() argument
1627 addr = *kdata; in compat_ptrace_hbp_set()
1630 ctrl = *kdata; in compat_ptrace_hbp_set()
1641 u32 kdata; in compat_ptrace_gethbpregs() local
1645 ret = compat_ptrace_hbp_get(NT_ARM_HW_WATCH, tsk, num, &kdata); in compat_ptrace_gethbpregs()
[all …]
/Linux-v5.15/drivers/misc/habanalabs/common/
Dhabanalabs_ioctl.c655 char *kdata = NULL; in _hl_ioctl() local
685 kdata = stack_kdata; in _hl_ioctl()
687 kdata = kzalloc(asize, GFP_KERNEL); in _hl_ioctl()
688 if (!kdata) { in _hl_ioctl()
696 if (copy_from_user(kdata, (void __user *)arg, usize)) { in _hl_ioctl()
701 memset(kdata, 0, usize); in _hl_ioctl()
704 retcode = func(hpriv, kdata); in _hl_ioctl()
706 if ((cmd & IOC_OUT) && copy_to_user((void __user *)arg, kdata, usize)) in _hl_ioctl()
714 if (kdata != stack_kdata) in _hl_ioctl()
715 kfree(kdata); in _hl_ioctl()
/Linux-v5.15/drivers/gpu/drm/amd/amdgpu/
Damdgpu_cs.c172 p->chunks[i].kdata = kvmalloc_array(size, sizeof(uint32_t), GFP_KERNEL); in amdgpu_cs_parser_init()
173 if (p->chunks[i].kdata == NULL) { in amdgpu_cs_parser_init()
179 if (copy_from_user(p->chunks[i].kdata, cdata, size)) { in amdgpu_cs_parser_init()
196 ret = amdgpu_cs_user_fence_chunk(p, p->chunks[i].kdata, in amdgpu_cs_parser_init()
210 ret = amdgpu_cs_bo_handles_chunk(p, p->chunks[i].kdata); in amdgpu_cs_parser_init()
252 kvfree(p->chunks[i].kdata); in amdgpu_cs_parser_init()
711 kvfree(parser->chunks[i].kdata); in amdgpu_cs_parser_fini()
748 chunk_ib = chunk->kdata; in amdgpu_cs_vm_handling()
888 chunk_ib = (struct drm_amdgpu_cs_chunk_ib *)chunk->kdata; in amdgpu_cs_ib_fill()
959 deps = (struct drm_amdgpu_cs_chunk_dep *)chunk->kdata; in amdgpu_cs_process_fence_dep()
[all …]
Damdgpu.h516 void *kdata; member
/Linux-v5.15/drivers/net/ethernet/netronome/nfp/flower/
Dconntrack.c552 u8 *key, *msk, *kdata, *mdata; in nfp_fl_ct_add_offload() local
591 kdata = flow_pay->unmasked_data; in nfp_fl_ct_add_offload()
595 key = kdata + offset; in nfp_fl_ct_add_offload()
603 key = kdata + offset; in nfp_fl_ct_add_offload()
616 key = kdata + offset; in nfp_fl_ct_add_offload()
639 key = kdata + offset; in nfp_fl_ct_add_offload()
649 key = kdata + offset; in nfp_fl_ct_add_offload()
665 key = kdata + offset; in nfp_fl_ct_add_offload()
676 key = kdata + offset; in nfp_fl_ct_add_offload()
687 key = kdata + offset; in nfp_fl_ct_add_offload()
[all …]
/Linux-v5.15/drivers/gpu/drm/amd/amdkfd/
Dkfd_chardev.c1940 char *kdata = NULL; in kfd_ioctl() local
1985 kdata = stack_kdata; in kfd_ioctl()
1987 kdata = kmalloc(asize, GFP_KERNEL); in kfd_ioctl()
1988 if (!kdata) { in kfd_ioctl()
1994 memset(kdata + usize, 0, asize - usize); in kfd_ioctl()
1998 if (copy_from_user(kdata, (void __user *)arg, usize) != 0) { in kfd_ioctl()
2003 memset(kdata, 0, usize); in kfd_ioctl()
2006 retcode = func(filep, process, kdata); in kfd_ioctl()
2009 if (copy_to_user((void __user *)arg, kdata, usize) != 0) in kfd_ioctl()
2017 if (kdata != stack_kdata) in kfd_ioctl()
[all …]
/Linux-v5.15/include/linux/
Dbpf.h982 void *kdata, const void *udata);
983 int (*reg)(void *kdata);
984 void (*unreg)(void *kdata);
997 bool bpf_struct_ops_get(const void *kdata);
998 void bpf_struct_ops_put(const void *kdata);
/Linux-v5.15/tools/power/pm-graph/
Dsleepgraph.py566 def defaultKprobe(self, name, kdata): argument
567 k = kdata