u64_to_user_ptr() references in Linux v5.15, grouped by directory and file; each hit shows the line number, the matching source line, and the enclosing function.

/Linux-v5.15/drivers/gpu/drm/i915/

i915_query.c
      24  if (copy_from_user(query_hdr, u64_to_user_ptr(query_item->data_ptr),   in copy_query_item()
      71  if (copy_to_user(u64_to_user_ptr(query_item->data_ptr),   in query_topology_info()
      75  if (copy_to_user(u64_to_user_ptr(query_item->data_ptr + sizeof(topo)),   in query_topology_info()
      79  if (copy_to_user(u64_to_user_ptr(query_item->data_ptr +   in query_topology_info()
      84  if (copy_to_user(u64_to_user_ptr(query_item->data_ptr +   in query_topology_info()
      98  u64_to_user_ptr(query_item->data_ptr);   in query_engine_info()
     164  u32 __user *p = u64_to_user_ptr(user_regs_ptr);   in copy_perf_config_registers_or_number()
     194  u64_to_user_ptr(query_item->data_ptr);   in query_perf_config_data()
     196  u64_to_user_ptr(query_item->data_ptr +   in query_perf_config_data()
     339  u64_to_user_ptr(query_item->data_ptr);   in query_perf_config_list()
    [all …]
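The i915_query.c hits show the most common shape of this API: the ioctl argument struct carries the user buffer as a plain __u64, and the kernel converts it (plus any byte offset) with u64_to_user_ptr() right at the copy_to_user()/copy_from_user() call. A minimal sketch of that shape, using hypothetical struct and function names rather than the real i915 ones:

#include <linux/types.h>
#include <linux/errno.h>
#include <linux/kernel.h>       /* u64_to_user_ptr() */
#include <linux/uaccess.h>      /* copy_to_user() */

/* Hypothetical ioctl item: user buffer passed as a raw u64 plus a length. */
struct sketch_query_item {
        __u64 data_ptr;
        __u32 length;
};

static int sketch_fill_query(const struct sketch_query_item *item,
                             const void *hdr, size_t hdr_len,
                             const void *payload, size_t payload_len)
{
        if (item->length < hdr_len + payload_len)
                return -EINVAL;

        /* Header goes at the start of the user buffer... */
        if (copy_to_user(u64_to_user_ptr(item->data_ptr), hdr, hdr_len))
                return -EFAULT;

        /* ...payload right after it; the offset is added to the u64 before
         * converting, so no arithmetic on a __user pointer is needed. */
        if (copy_to_user(u64_to_user_ptr(item->data_ptr + hdr_len),
                         payload, payload_len))
                return -EFAULT;

        return 0;
}

Adding the offset in the u64 domain before converting is the convention most of the entries below follow as well.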
i915_user_extensions.c
      57  ext = u64_to_user_ptr(next);   in i915_user_extensions()
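i915_user_extensions() follows a userspace-provided chain in which each extension header stores the address of the next one as a __u64. A rough sketch of that walk, with a hypothetical extension layout (the real struct lives in the i915 uapi headers):

#include <linux/types.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/uaccess.h>

/* Hypothetical chained-extension header; 0 in next_extension ends the chain. */
struct sketch_ext {
        __u64 next_extension;
        __u32 name;
        __u32 flags;
};

static int sketch_walk_extensions(u64 first, unsigned int max_hops)
{
        struct sketch_ext __user *ext = u64_to_user_ptr(first);

        while (ext) {
                struct sketch_ext hdr;

                if (!max_hops--)
                        return -E2BIG;  /* refuse unbounded chains */

                if (copy_from_user(&hdr, ext, sizeof(hdr)))
                        return -EFAULT;

                /* ... dispatch on hdr.name here ... */

                ext = u64_to_user_ptr(hdr.next_extension);
        }
        return 0;
}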
i915_gem.c
     237  user_data = u64_to_user_ptr(args->data_ptr);   in i915_gem_shmem_pread()
     385  user_data = u64_to_user_ptr(args->data_ptr);   in i915_gem_gtt_pread()
     451  if (!access_ok(u64_to_user_ptr(args->data_ptr),   in i915_gem_pread_ioctl()
     558  user_data = u64_to_user_ptr(args->data_ptr);   in i915_gem_gtt_pwrite_fast()
     669  user_data = u64_to_user_ptr(args->data_ptr);   in i915_gem_shmem_pwrite()
     725  if (!access_ok(u64_to_user_ptr(args->data_ptr), args->size))   in i915_gem_pwrite_ioctl()
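The pread/pwrite ioctls above validate the whole user range with access_ok() up front, before committing to a fast path; the copies themselves still go through copy_to_user(), which performs the real fault handling. A simplified sketch with a hypothetical args struct:

#include <linux/types.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/uaccess.h>

/* Hypothetical pread-style arguments. */
struct sketch_pread {
        __u64 data_ptr;
        __u64 size;
        __u64 offset;
};

static int sketch_pread(const struct sketch_pread *args,
                        const void *obj, size_t obj_size)
{
        void __user *out = u64_to_user_ptr(args->data_ptr);

        if (args->offset > obj_size || args->size > obj_size - args->offset)
                return -EINVAL;

        /* Reject obviously bad ranges early, before any setup work. */
        if (!access_ok(out, args->size))
                return -EFAULT;

        if (copy_to_user(out, (const char *)obj + args->offset, args->size))
                return -EFAULT;

        return 0;
}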
/Linux-v5.15/drivers/gpu/drm/

drm_syncobj.c
    1153  u64_to_user_ptr(timeline_wait->points),   in drm_syncobj_array_wait()
    1240  u64_to_user_ptr(args->handles),   in drm_syncobj_wait_ioctl()
    1274  u64_to_user_ptr(args->handles),   in drm_syncobj_timeline_wait_ioctl()
    1308  u64_to_user_ptr(args->handles),   in drm_syncobj_reset_ioctl()
    1341  u64_to_user_ptr(args->handles),   in drm_syncobj_signal_ioctl()
    1379  u64_to_user_ptr(args->handles),   in drm_syncobj_timeline_signal_ioctl()
    1391  if (!u64_to_user_ptr(args->points)) {   in drm_syncobj_timeline_signal_ioctl()
    1393  } else if (copy_from_user(points, u64_to_user_ptr(args->points),   in drm_syncobj_timeline_signal_ioctl()
    1436  uint64_t __user *points = u64_to_user_ptr(args->points);   in drm_syncobj_query_ioctl()
    1450  u64_to_user_ptr(args->handles),   in drm_syncobj_query_ioctl()
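The syncobj ioctls copy in whole arrays of __u32 handles that sit behind a __u64 pointer. A sketch of that bounded copy-in, with hypothetical names (the real code goes on to resolve each handle to a syncobj object):

#include <linux/types.h>
#include <linux/err.h>
#include <linux/slab.h>
#include <linux/kernel.h>
#include <linux/uaccess.h>

static u32 *sketch_copy_handles(u64 user_handles, u32 count)
{
        u32 *handles;

        if (!count)
                return ERR_PTR(-EINVAL);

        /* kmalloc_array() rejects count * sizeof(u32) overflow for us. */
        handles = kmalloc_array(count, sizeof(*handles), GFP_KERNEL);
        if (!handles)
                return ERR_PTR(-ENOMEM);

        if (copy_from_user(handles, u64_to_user_ptr(user_handles),
                           sizeof(*handles) * count)) {
                kfree(handles);
                return ERR_PTR(-EFAULT);
        }
        return handles;
}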
drm_property.c
     483  values_ptr = u64_to_user_ptr(out_resp->values_ptr);   in drm_mode_getproperty_ioctl()
     494  enum_ptr = u64_to_user_ptr(out_resp->enum_blob_ptr);   in drm_mode_getproperty_ioctl()
     772  if (copy_to_user(u64_to_user_ptr(out_resp->data),   in drm_mode_getblob_ioctl()
     801  u64_to_user_ptr(out_resp->data),   in drm_mode_createblob_ioctl()
drm_mode_config.c
     110  fb_id = u64_to_user_ptr(card_res->fb_id_ptr);   in drm_mode_getresources()
     128  crtc_id = u64_to_user_ptr(card_res->crtc_id_ptr);   in drm_mode_getresources()
     140  encoder_id = u64_to_user_ptr(card_res->encoder_id_ptr);   in drm_mode_getresources()
     151  connector_id = u64_to_user_ptr(card_res->connector_id_ptr);   in drm_mode_getresources()
/Linux-v5.15/drivers/infiniband/core/

uverbs_ioctl.c
     141  return ib_is_buffer_cleared(u64_to_user_ptr(uattr->data) + len,   in uverbs_is_attr_cleared()
     201  ret = copy_from_user(idr_vals, u64_to_user_ptr(uattr->data),   in uverbs_process_idrs_array()
     297  if (copy_from_user(p, u64_to_user_ptr(uattr->data),   in uverbs_process_attr()
     708  udata->inbuf = u64_to_user_ptr(in->ptr_attr.data);   in uverbs_fill_udata()
     715  udata->outbuf = u64_to_user_ptr(out->ptr_attr.data);   in uverbs_fill_udata()
     733  if (copy_to_user(u64_to_user_ptr(attr->ptr_attr.data), from, min_size))   in uverbs_copy_to()
     811  if (clear_user(u64_to_user_ptr(attr->ptr_attr.data) + size,   in uverbs_copy_to_struct_or_zero()
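uverbs_copy_to_struct_or_zero() pairs the copy-out with clear_user() on the tail, so a user buffer larger than the kernel's response struct never keeps stale bytes. A simplified sketch with a hypothetical signature:

#include <linux/types.h>
#include <linux/errno.h>
#include <linux/minmax.h>
#include <linux/kernel.h>
#include <linux/uaccess.h>

static int sketch_copy_response(u64 user_ptr, size_t user_len,
                                const void *resp, size_t resp_len)
{
        size_t n = min(user_len, resp_len);

        if (copy_to_user(u64_to_user_ptr(user_ptr), resp, n))
                return -EFAULT;

        /* Zero whatever part of the user buffer we did not fill. */
        if (user_len > resp_len &&
            clear_user(u64_to_user_ptr(user_ptr) + resp_len,
                       user_len - resp_len))
                return -EFAULT;

        return 0;
}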
ucma.c
     399  if (copy_to_user(u64_to_user_ptr(cmd.response),   in ucma_get_event()
     469  if (copy_to_user(u64_to_user_ptr(cmd.response),   in ucma_create_id()
     615  if (copy_to_user(u64_to_user_ptr(cmd.response),   in ucma_destroy_id()
     859  if (copy_to_user(u64_to_user_ptr(cmd.response), &resp,   in ucma_query_route()
    1005  response = u64_to_user_ptr(cmd.response);   in ucma_query()
    1242  if (copy_to_user(u64_to_user_ptr(cmd.response),   in ucma_init_qp_attr()
    1394  optval = memdup_user(u64_to_user_ptr(cmd.optval),   in ucma_set_option()
    1486  if (copy_to_user(u64_to_user_ptr(cmd->response),   in ucma_process_join()
    1591  if (copy_to_user(u64_to_user_ptr(cmd.response),   in ucma_leave_multicast()
    1663  if (copy_to_user(u64_to_user_ptr(cmd.response),   in ucma_migrate_id()
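ucma_set_option() pulls its option payload in with memdup_user(), which allocates and copies in one step and returns an ERR_PTR on failure. A sketch with hypothetical field names and an arbitrary length cap:

#include <linux/types.h>
#include <linux/err.h>
#include <linux/slab.h>
#include <linux/string.h>       /* memdup_user() */
#include <linux/kernel.h>

#define SKETCH_MAX_OPTLEN 4096  /* hypothetical cap on user-controlled length */

static int sketch_set_option(u64 optval, u32 optlen)
{
        void *kbuf;
        int ret = 0;

        if (optlen > SKETCH_MAX_OPTLEN)
                return -EINVAL;

        kbuf = memdup_user(u64_to_user_ptr(optval), optlen);
        if (IS_ERR(kbuf))
                return PTR_ERR(kbuf);

        /* ... apply the option described by kbuf ... */

        kfree(kbuf);
        return ret;
}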
uverbs_main.c
     525  if (!access_ok(u64_to_user_ptr(ex_hdr->response),   in verify_hdr()
     642  u64_to_user_ptr(response +   in ib_uverbs_write()
     653  &bundle.ucore, buf, u64_to_user_ptr(response),   in ib_uverbs_write()
     659  u64_to_user_ptr(ex_hdr.response),   in ib_uverbs_write()
     664  u64_to_user_ptr(ex_hdr.response) + bundle.ucore.outlen,   in ib_uverbs_write()
/Linux-v5.15/drivers/gpu/drm/qxl/

qxl_ioctl.c
     168  if (!access_ok(u64_to_user_ptr(cmd->command),   in qxl_process_single_command()
     190  u64_to_user_ptr(cmd->command), cmd->command_size);   in qxl_process_single_command()
     209  struct drm_qxl_reloc __user *u = u64_to_user_ptr(cmd->relocs);   in qxl_process_single_command()
     287  u64_to_user_ptr(execbuffer->commands);   in qxl_execbuffer_ioctl()
/Linux-v5.15/kernel/bpf/

syscall.c
    1042  void __user *ukey = u64_to_user_ptr(attr->key);   in map_lookup_elem()
    1043  void __user *uvalue = u64_to_user_ptr(attr->value);   in map_lookup_elem()
    1168  void __user *ukey = u64_to_user_ptr(attr->key);   in map_delete_elem()
    1221  void __user *ukey = u64_to_user_ptr(attr->key);   in map_get_next_key()
    1222  void __user *unext_key = u64_to_user_ptr(attr->next_key);   in map_get_next_key()
    1287  void __user *keys = u64_to_user_ptr(attr->batch.keys);   in generic_map_delete_batch()
    1339  void __user *values = u64_to_user_ptr(attr->batch.values);   in generic_map_update_batch()
    1340  void __user *keys = u64_to_user_ptr(attr->batch.keys);   in generic_map_update_batch()
    1401  void __user *uobatch = u64_to_user_ptr(attr->batch.out_batch);   in generic_map_lookup_batch()
    1402  void __user *ubatch = u64_to_user_ptr(attr->batch.in_batch);   in generic_map_lookup_batch()
    [all …]
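In kernel/bpf/syscall.c the key, value, and batch pointers all arrive as __u64 fields of union bpf_attr, so each one gets its own u64_to_user_ptr() conversion. A heavily reduced sketch of the lookup flow (the real map_lookup_elem() also handles flags, per-CPU maps, and map refcounting):

#include <linux/types.h>
#include <linux/err.h>
#include <linux/slab.h>
#include <linux/string.h>       /* memdup_user() */
#include <linux/kernel.h>
#include <linux/uaccess.h>

static int sketch_map_lookup_elem(u64 ukey_addr, u64 uvalue_addr,
                                  u32 key_size, u32 value_size)
{
        void __user *ukey = u64_to_user_ptr(ukey_addr);
        void __user *uvalue = u64_to_user_ptr(uvalue_addr);
        void *key, *value;
        int err = 0;

        /* Pull the key into kernel memory first. */
        key = memdup_user(ukey, key_size);
        if (IS_ERR(key))
                return PTR_ERR(key);

        value = kzalloc(value_size, GFP_KERNEL);
        if (!value) {
                err = -ENOMEM;
                goto out_key;
        }

        /* ... the real code looks the key up in the map and fills *value ... */

        if (copy_to_user(uvalue, value, value_size))
                err = -EFAULT;

        kfree(value);
out_key:
        kfree(key);
        return err;
}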
/Linux-v5.15/drivers/gpu/drm/virtio/

virtgpu_ioctl.c
     137  user_bo_handles = u64_to_user_ptr(exbuf->bo_handles);   in virtio_gpu_execbuffer_ioctl()
     154  buf = vmemdup_user(u64_to_user_ptr(exbuf->command), exbuf->size);   in virtio_gpu_execbuffer_ioctl()
     232  if (copy_to_user(u64_to_user_ptr(param->value), &value, sizeof(int)))   in virtio_gpu_getparam_ioctl()
     530  if (copy_to_user(u64_to_user_ptr(args->addr), ptr, size))   in virtio_gpu_get_caps_ioctl()
     618  buf = memdup_user(u64_to_user_ptr(rc_blob->cmd),   in virtio_gpu_resource_create_blob_ioctl()
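virtio_gpu_execbuffer_ioctl() uses vmemdup_user() for the command stream, which falls back to vmalloc for large buffers, while the smaller blob command uses plain memdup_user(). A sketch of the large-buffer variant with hypothetical names; note the kvfree() pairing:

#include <linux/types.h>
#include <linux/err.h>
#include <linux/mm.h>           /* kvfree() */
#include <linux/string.h>       /* vmemdup_user() */
#include <linux/kernel.h>

static int sketch_submit_commands(u64 cmd_addr, size_t cmd_size)
{
        void *buf;

        /* Command streams can be large, so allow a vmalloc fallback. */
        buf = vmemdup_user(u64_to_user_ptr(cmd_addr), cmd_size);
        if (IS_ERR(buf))
                return PTR_ERR(buf);

        /* ... validate and queue the command buffer ... */

        kvfree(buf);    /* matches vmemdup_user(), not kfree() */
        return 0;
}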
/Linux-v5.15/net/bpf/

test_run.c
     134  void __user *data_out = u64_to_user_ptr(kattr->test.data_out);   in bpf_test_finish()
     252  void __user *data_in = u64_to_user_ptr(kattr->test.data_in);   in bpf_test_init()
     338  void __user *ctx_in = u64_to_user_ptr(kattr->test.ctx_in);   in bpf_prog_test_run_raw_tp()
     399  void __user *data_in = u64_to_user_ptr(kattr->test.ctx_in);   in bpf_ctx_init()
     400  void __user *data_out = u64_to_user_ptr(kattr->test.ctx_out);   in bpf_ctx_init()
     432  void __user *data_out = u64_to_user_ptr(kattr->test.ctx_out);   in bpf_ctx_finish()
    1027  void __user *ctx_in = u64_to_user_ptr(kattr->test.ctx_in);   in bpf_prog_test_run_syscall()
/Linux-v5.15/drivers/gpu/drm/i915/gem/

i915_gem_phys.c
     142  char __user *user_data = u64_to_user_ptr(args->data_ptr);   in i915_gem_object_pwrite_phys()
     172  char __user *user_data = u64_to_user_ptr(args->data_ptr);   in i915_gem_object_pread_phys()
i915_gem_create.c
     256  u64_to_user_ptr(args->regions);   in set_placements()
     382  ret = i915_user_extensions(u64_to_user_ptr(args->extensions),   in i915_gem_create_ext_ioctl()
i915_gem_context.c
     530  u64_to_user_ptr(args->value);   in set_proto_ctx_engines()
     588  err = i915_user_extensions(u64_to_user_ptr(extensions),   in set_proto_ctx_engines()
     618  if (copy_from_user(&user_sseu, u64_to_user_ptr(args->value),   in set_proto_ctx_sseu()
    1537  err = i915_user_extensions(u64_to_user_ptr(args->extensions),   in i915_gem_vm_create_ioctl()
    1726  if (copy_from_user(&user_sseu, u64_to_user_ptr(args->value),   in set_sseu()
    1993  ret = i915_user_extensions(u64_to_user_ptr(args->extensions),   in i915_gem_context_create_ioctl()
    2082  if (copy_from_user(&user_sseu, u64_to_user_ptr(args->value),   in get_sseu()
    2114  if (copy_to_user(u64_to_user_ptr(args->value), &user_sseu,   in get_sseu()
/Linux-v5.15/drivers/gpu/drm/etnaviv/

etnaviv_gem_submit.c
     485  ret = copy_from_user(bos, u64_to_user_ptr(args->bos),   in etnaviv_ioctl_gem_submit()
     492  ret = copy_from_user(relocs, u64_to_user_ptr(args->relocs),   in etnaviv_ioctl_gem_submit()
     499  ret = copy_from_user(pmrs, u64_to_user_ptr(args->pmrs),   in etnaviv_ioctl_gem_submit()
     506  ret = copy_from_user(stream, u64_to_user_ptr(args->stream),   in etnaviv_ioctl_gem_submit()
/Linux-v5.15/include/linux/

bpfptr.h
      30  return USER_BPFPTR(u64_to_user_ptr(addr));   in make_bpfptr()
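The single hit in bpfptr.h is the helper that wraps a raw u64 into a bpfptr_t, which can refer to either kernel or user memory. Roughly, the helper has this shape (a paraphrase, not a verbatim copy of the header; see include/linux/bpfptr.h in v5.15 for the exact definition):

#include <linux/kernel.h>
#include <linux/bpfptr.h>

/* Sketch of the make_bpfptr() idea: pick the kernel or user flavour
 * of the wrapper depending on where the u64 address came from. */
static inline bpfptr_t sketch_make_bpfptr(u64 addr, bool is_kernel)
{
        if (is_kernel)
                return KERNEL_BPFPTR((void *)(unsigned long)addr);
        else
                return USER_BPFPTR(u64_to_user_ptr(addr));
}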
/Linux-v5.15/drivers/gpu/drm/msm/

msm_gem_submit.c
     112  u64_to_user_ptr(args->bos + (i * sizeof(submit_bo)));   in submit_lookup_objects()
     181  u64_to_user_ptr(args->cmds + (i * sizeof(submit_cmd)));   in submit_lookup_cmds()
     213  userptr = u64_to_user_ptr(submit_cmd.relocs);   in submit_lookup_cmds()
     570  u64_to_user_ptr(address),   in msm_parse_deps()
     648  u64_to_user_ptr(address),   in msm_parse_post_deps()
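msm_gem_submit.c copies submit descriptors one element at a time, computing each element's offset in the u64 domain and only then converting. A sketch with a hypothetical element struct:

#include <linux/types.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/uaccess.h>

/* Hypothetical per-buffer descriptor in the user-supplied array. */
struct sketch_submit_bo {
        __u32 handle;
        __u32 flags;
        __u64 presumed;
};

static int sketch_lookup_objects(u64 bos_addr, u32 nr_bos)
{
        u32 i;

        for (i = 0; i < nr_bos; i++) {
                struct sketch_submit_bo bo;
                void __user *userptr =
                        u64_to_user_ptr(bos_addr + (u64)i * sizeof(bo));

                if (copy_from_user(&bo, userptr, sizeof(bo)))
                        return -EFAULT;

                /* ... look up and pin the object referred to by bo.handle ... */
        }
        return 0;
}

etnaviv (above) copies each whole array in one call instead; the per-element form costs more copy_from_user() calls but lets the loop stop at the first bad entry.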
/Linux-v5.15/fs/verity/

enable.c
     219  copy_from_user(desc->salt, u64_to_user_ptr(arg->salt_ptr),   in enable_verity()
     228  copy_from_user(desc->signature, u64_to_user_ptr(arg->sig_ptr),   in enable_verity()
/Linux-v5.15/drivers/gpu/drm/tegra/

submit.c
     190  if (copy_from_user(bo->gather_data, u64_to_user_ptr(args->gather_data_ptr), copy_len)) {   in submit_copy_gather_data()
     242  bufs = alloc_copy_user_array(u64_to_user_ptr(args->bufs_ptr), args->num_bufs,   in submit_process_bufs()
     381  cmds = alloc_copy_user_array(u64_to_user_ptr(args->cmds_ptr), args->num_cmds,   in submit_create_job()
/Linux-v5.15/lib/

test_hmm.c
     375  if (copy_to_user(u64_to_user_ptr(cmd->ptr), bounce.ptr,   in dmirror_read()
     428  if (copy_from_user(bounce.ptr, u64_to_user_ptr(cmd->ptr),   in dmirror_write()
     768  if (copy_to_user(u64_to_user_ptr(cmd->ptr), bounce.ptr,   in dmirror_exclusive()
     839  if (copy_to_user(u64_to_user_ptr(cmd->ptr), bounce.ptr,   in dmirror_migrate()
    1002  uptr = u64_to_user_ptr(cmd->ptr);   in dmirror_snapshot()
/Linux-v5.15/kernel/trace/

trace_event_perf.c
     259  func, u64_to_user_ptr(p_event->attr.kprobe_func),   in perf_kprobe_init()
     312  path = strndup_user(u64_to_user_ptr(p_event->attr.uprobe_path),   in perf_uprobe_init()
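perf_uprobe_init() treats attr.uprobe_path as a NUL-terminated user string rather than a sized buffer, so it goes through strndup_user(), which bounds the length and returns an ERR_PTR on failure. A sketch with a hypothetical caller:

#include <linux/err.h>
#include <linux/limits.h>       /* PATH_MAX */
#include <linux/slab.h>
#include <linux/string.h>       /* strndup_user() */
#include <linux/kernel.h>

static int sketch_copy_probe_path(u64 path_addr)
{
        char *path;

        path = strndup_user(u64_to_user_ptr(path_addr), PATH_MAX);
        if (IS_ERR(path))
                return PTR_ERR(path);

        /* ... resolve the path and create the probe ... */

        kfree(path);
        return 0;
}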
/Linux-v5.15/drivers/gpu/drm/vc4/

vc4_gem.c
     130  if (copy_to_user(u64_to_user_ptr(get_state->bo),   in vc4_get_hang_state_ioctl()
     761  if (copy_from_user(handles, u64_to_user_ptr(args->bo_handles),   in vc4_cl_lookup_bos()
     869  u64_to_user_ptr(args->bin_cl),   in vc4_get_bcl()
     876  u64_to_user_ptr(args->shader_rec),   in vc4_get_bcl()
     883  u64_to_user_ptr(args->uniforms),   in vc4_get_bcl()
/Linux-v5.15/fs/

io_uring.c
    3099  return u64_to_user_ptr(kbuf->addr);   in io_rw_buffer_select()
    3111  uiov = u64_to_user_ptr(req->rw.addr);   in io_compat_import()
    3132  struct iovec __user *uiov = u64_to_user_ptr(req->rw.addr);   in __io_iov_buffer_select()
    3157  iov[0].iov_base = u64_to_user_ptr(kbuf->addr);   in io_iov_buffer_select()
    3175  void __user *buf = u64_to_user_ptr(req->rw.addr);   in io_import_iovec()
    3246  iovec.iov_base = u64_to_user_ptr(req->rw.addr);   in loop_rw_iter()
    3691  oldf = u64_to_user_ptr(READ_ONCE(sqe->addr));   in io_renameat_prep()
    3692  newf = u64_to_user_ptr(READ_ONCE(sqe->addr2));   in io_renameat_prep()
    3748  fname = u64_to_user_ptr(READ_ONCE(sqe->addr));   in io_unlinkat_prep()
    3794  fname = u64_to_user_ptr(READ_ONCE(sqe->addr));   in io_mkdirat_prep()
    [all …]
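io_uring reads SQE fields out of a ring that userspace can still write to, so the prep helpers above snapshot the __u64 with READ_ONCE() before converting it to a __user pointer. A sketch with hypothetical request and SQE types:

#include <linux/types.h>
#include <linux/compiler.h>     /* READ_ONCE() */
#include <linux/kernel.h>

/* Hypothetical SQE fragment: shared with userspace, may change under us. */
struct sketch_sqe {
        __u64 addr;
        __u32 len;
};

/* Hypothetical in-kernel request state. */
struct sketch_req {
        void __user *buf;
        u32 len;
};

static int sketch_prep_rw(struct sketch_req *req, const struct sketch_sqe *sqe)
{
        /* Snapshot the u64 once so later checks and uses agree. */
        u64 addr = READ_ONCE(sqe->addr);

        req->buf = u64_to_user_ptr(addr);
        req->len = READ_ONCE(sqe->len);
        return 0;
}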