/linux-6.1.9/drivers/gpu/drm/i915/ |
D | i915_query.c |
     25  if (copy_from_user(query_hdr, u64_to_user_ptr(query_item->data_ptr),  in copy_query_item()
     68  if (copy_to_user(u64_to_user_ptr(query_item->data_ptr),  in fill_topology_info()
     72  if (copy_to_user(u64_to_user_ptr(query_item->data_ptr + sizeof(topo)),  in fill_topology_info()
     76  if (intel_sseu_copy_ssmask_to_user(u64_to_user_ptr(query_item->data_ptr +  in fill_topology_info()
     81  if (intel_sseu_copy_eumask_to_user(u64_to_user_ptr(query_item->data_ptr +  in fill_topology_info()
    132  u64_to_user_ptr(query_item->data_ptr);  in query_engine_info()
    200  u32 __user *p = u64_to_user_ptr(user_regs_ptr);  in copy_perf_config_registers_or_number()
    230  u64_to_user_ptr(query_item->data_ptr);  in query_perf_config_data()
    232  u64_to_user_ptr(query_item->data_ptr +  in query_perf_config_data()
    375  u64_to_user_ptr(query_item->data_ptr);  in query_perf_config_list()
    [all …]
|
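Every hit above follows the same ioctl convention: the uapi struct carries the user buffer as a __u64 (data_ptr), and the driver converts it to a tagged __user pointer only at the access site, sometimes after doing offset arithmetic on the u64 itself. A minimal sketch of the copy_query_item()-style header read, with hypothetical names (my_query_item stands in for the real struct drm_i915_query_item):

    #include <linux/errno.h>
    #include <linux/kernel.h>   /* u64_to_user_ptr() */
    #include <linux/types.h>
    #include <linux/uaccess.h>  /* copy_from_user() */

    struct my_query_item {
            __u64 data_ptr;     /* user pointer, shipped as a fixed-width u64 */
            __u32 length;
    };

    static int my_copy_query_hdr(void *hdr, u32 hdr_len,
                                 const struct my_query_item *item)
    {
            if (item->length < hdr_len)
                    return -EINVAL;

            /* convert at the point of use; never cache it as a plain void * */
            if (copy_from_user(hdr, u64_to_user_ptr(item->data_ptr), hdr_len))
                    return -EFAULT;

            return 0;
    }

Carrying the pointer as a u64 keeps the ioctl layout identical for 32-bit and 64-bit userspace, which is why this helper shows up in nearly every uapi-facing file in this list.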
D | i915_user_extensions.c | 57 ext = u64_to_user_ptr(next); in i915_user_extensions()
|
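The single hit in i915_user_extensions.c is the chained-extension walk: each extension ends with a u64 naming the next one, and the loop re-converts it on every iteration. A hedged sketch of that walk (hypothetical struct; the real one is struct i915_user_extension from the uapi headers, and the real walk adds validation this sketch omits):

    #include <linux/errno.h>
    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    struct my_user_extension {
            __u64 next_extension;   /* 0 terminates the chain */
            __u32 name;
            __u32 flags;
    };

    static int my_walk_extensions(u64 first_ext)
    {
            u64 next = first_ext;

            while (next) {
                    struct my_user_extension __user *ext =
                            u64_to_user_ptr(next);
                    struct my_user_extension local;

                    if (copy_from_user(&local, ext, sizeof(local)))
                            return -EFAULT;

                    /* ... dispatch on local.name here ... */

                    next = local.next_extension;
            }
            return 0;
    }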
D | i915_gem.c |
    253  user_data = u64_to_user_ptr(args->data_ptr);  in i915_gem_shmem_pread()
    401  user_data = u64_to_user_ptr(args->data_ptr);  in i915_gem_gtt_pread()
    467  if (!access_ok(u64_to_user_ptr(args->data_ptr),  in i915_gem_pread_ioctl()
    574  user_data = u64_to_user_ptr(args->data_ptr);  in i915_gem_gtt_pwrite_fast()
    685  user_data = u64_to_user_ptr(args->data_ptr);  in i915_gem_shmem_pwrite()
    741  if (!access_ok(u64_to_user_ptr(args->data_ptr), args->size))  in i915_gem_pwrite_ioctl()
|
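The two access_ok() hits are the pread/pwrite ioctl entry points: the whole user range is sanity-checked once before the fast paths start copying. access_ok() only confirms the range lies in the user half of the address space; actual faults are still caught by the copy routines. A minimal sketch (hypothetical wrapper; i915 does this on struct drm_i915_gem_pread/pwrite):

    #include <linux/errno.h>
    #include <linux/kernel.h>
    #include <linux/uaccess.h>   /* access_ok() */

    static int my_check_user_range(u64 data_ptr, u64 size)
    {
            if (!access_ok(u64_to_user_ptr(data_ptr), size))
                    return -EFAULT;
            return 0;
    }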
/linux-6.1.9/io_uring/ |
D | fs.c |
     61  oldf = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_renameat_prep()
     62  newf = u64_to_user_ptr(READ_ONCE(sqe->addr2));  in io_renameat_prep()
    120  fname = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_unlinkat_prep()
    167  fname = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_mkdirat_prep()
    209  oldpath = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_symlinkat_prep()
    210  newpath = u64_to_user_ptr(READ_ONCE(sqe->addr2));  in io_symlinkat_prep()
    253  oldf = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_linkat_prep()
    254  newf = u64_to_user_ptr(READ_ONCE(sqe->addr2));  in io_linkat_prep()
|
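All of the io_uring prep hits in this and the following files share one discipline: the SQE lives in a ring mapped into userspace, so every field is loaded exactly once with READ_ONCE() before its u64 payload becomes a user pointer. A sketch of the two-pointer prep (my_renameat_prep() is hypothetical; the real preps also reject unused or invalid SQE fields):

    #include <linux/compiler.h>        /* READ_ONCE() */
    #include <linux/kernel.h>
    #include <uapi/linux/io_uring.h>   /* struct io_uring_sqe */

    static int my_renameat_prep(const struct io_uring_sqe *sqe)
    {
            const char __user *oldf, *newf;

            /* one racy-safe load per field: userspace can rewrite the SQE */
            oldf = u64_to_user_ptr(READ_ONCE(sqe->addr));
            newf = u64_to_user_ptr(READ_ONCE(sqe->addr2));

            /* ... hand oldf/newf to getname() and stash the results ... */
            return 0;
    }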
D | xattr.c |
     56  name = u64_to_user_ptr(READ_ONCE(sqe->addr));  in __io_getxattr_prep()
     57  ix->ctx.cvalue = u64_to_user_ptr(READ_ONCE(sqe->addr2));  in __io_getxattr_prep()
     96  path = u64_to_user_ptr(READ_ONCE(sqe->addr3));  in io_getxattr_prep()
    162  name = u64_to_user_ptr(READ_ONCE(sqe->addr));  in __io_setxattr_prep()
    163  ix->ctx.cvalue = u64_to_user_ptr(READ_ONCE(sqe->addr2));  in __io_setxattr_prep()
    192  path = u64_to_user_ptr(READ_ONCE(sqe->addr3));  in io_setxattr_prep()
|
D | statx.c |
     35  path = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_statx_prep()
     36  sx->buffer = u64_to_user_ptr(READ_ONCE(sqe->addr2));  in io_statx_prep()
|
D | rsrc.c |
    104  dst->iov_base = u64_to_user_ptr((u64)ciov.iov_base);  in io_copy_iov()
    437  u64 __user *tags = u64_to_user_ptr(up->tags);  in __io_sqe_files_update()
    438  __s32 __user *fds = u64_to_user_ptr(up->data);  in __io_sqe_files_update()
    517  u64 __user *tags = u64_to_user_ptr(up->tags);  in __io_sqe_buffers_update()
    518  struct iovec iov, __user *iovs = u64_to_user_ptr(up->data);  in __io_sqe_buffers_update()
    645  return io_sqe_files_register(ctx, u64_to_user_ptr(rr.data),  in io_register_rsrc()
    646  rr.nr, u64_to_user_ptr(rr.tags));  in io_register_rsrc()
    650  return io_sqe_buffers_register(ctx, u64_to_user_ptr(rr.data),  in io_register_rsrc()
    651  rr.nr, u64_to_user_ptr(rr.tags));  in io_register_rsrc()
    677  __s32 __user *fds = u64_to_user_ptr(up->arg);  in io_files_update_with_index_alloc()
|
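Two details stand out in rsrc.c. First, the io_copy_iov() hit is the compat path: the 32-bit iov_base is widened to u64 before conversion, so buffers registered by 32-bit tasks land in an ordinary struct iovec. A sketch of that branch (assuming CONFIG_COMPAT for struct compat_iovec):

    #include <linux/compat.h>   /* struct compat_iovec */
    #include <linux/kernel.h>
    #include <linux/uio.h>      /* struct iovec */

    static void my_copy_compat_iov(struct iovec *dst,
                                   const struct compat_iovec *ciov)
    {
            /* zero-extend the 32-bit user address, then re-tag it */
            dst->iov_base = u64_to_user_ptr((u64)ciov->iov_base);
            dst->iov_len = ciov->iov_len;
    }

Second, the register/update hits convert the same u64 fields (rr.data, rr.tags, up->data) to differently typed __user pointers depending on whether files or buffers are being registered; the u64 in the uapi struct is deliberately untyped.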
D | epoll.c | 42 ev = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_epoll_ctl_prep()
|
D | openclose.c |
     50  fname = u64_to_user_ptr(READ_ONCE(sqe->addr));  in __io_openat_prep()
     84  how = u64_to_user_ptr(READ_ONCE(sqe->addr2));  in io_openat2_prep()
|
D | kbuf.c |
    123  return u64_to_user_ptr(kbuf->addr);  in io_provided_buffer_select()
    168  return u64_to_user_ptr(buf->addr);  in io_ring_buffer_select()
    342  if (!access_ok(u64_to_user_ptr(p->addr), size))  in io_provide_buffers_prep()
|
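kbuf.c shows the reverse direction: the kernel's bookkeeping for provided buffers stores each address as a plain u64 and only materializes a __user pointer when a request selects the buffer. A sketch under that assumption (hypothetical struct; the real one is struct io_buffer). The access_ok() hit is the registration-time range check, the same idea as the i915 pread/pwrite entry points above.

    #include <linux/kernel.h>
    #include <linux/types.h>

    struct my_buf {
            u64 addr;   /* stashed at registration time, from the user's u64 */
            u32 len;
    };

    static void __user *my_select_buffer(struct my_buf *kbuf, size_t *len)
    {
            if (*len > kbuf->len)
                    *len = kbuf->len;
            return u64_to_user_ptr(kbuf->addr);
    }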
D | net.c |
    246  sr->addr = u64_to_user_ptr(READ_ONCE(sqe->addr2));  in io_sendmsg_prep()
    252  sr->umsg = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_sendmsg_prep()
    551  sr->umsg = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_recvmsg_prep()
    978  zc->addr = u64_to_user_ptr(READ_ONCE(sqe->addr2));  in io_send_zc_prep()
    987  zc->buf = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_send_zc_prep()
   1253  accept->addr = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_accept_prep()
   1254  accept->addr_len = u64_to_user_ptr(READ_ONCE(sqe->addr2));  in io_accept_prep()
   1403  conn->addr = u64_to_user_ptr(READ_ONCE(sqe->addr));  in io_connect_prep()
|
/linux-6.1.9/net/bpf/ |
D | bpf_dummy_struct_ops.c |
     36  ctx_in = u64_to_user_ptr(kattr->test.ctx_in);  in dummy_ops_init_args()
     41  u_state = u64_to_user_ptr(args->args[0]);  in dummy_ops_init_args()
     56  u_state = u64_to_user_ptr(args->args[0]);  in dummy_ops_copy_args()
|
/linux-6.1.9/drivers/gpu/drm/ |
D | drm_syncobj.c |
   1172  u64_to_user_ptr(timeline_wait->points),  in drm_syncobj_array_wait()
   1259  u64_to_user_ptr(args->handles),  in drm_syncobj_wait_ioctl()
   1293  u64_to_user_ptr(args->handles),  in drm_syncobj_timeline_wait_ioctl()
   1327  u64_to_user_ptr(args->handles),  in drm_syncobj_reset_ioctl()
   1360  u64_to_user_ptr(args->handles),  in drm_syncobj_signal_ioctl()
   1398  u64_to_user_ptr(args->handles),  in drm_syncobj_timeline_signal_ioctl()
   1410  if (!u64_to_user_ptr(args->points)) {  in drm_syncobj_timeline_signal_ioctl()
   1412  } else if (copy_from_user(points, u64_to_user_ptr(args->points),  in drm_syncobj_timeline_signal_ioctl()
   1455  uint64_t __user *points = u64_to_user_ptr(args->points);  in drm_syncobj_query_ioctl()
   1469  u64_to_user_ptr(args->handles),  in drm_syncobj_query_ioctl()
|
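The syncobj ioctls all funnel through one helper that copies in a handle array described by a u64 pointer plus a count. Note the idiom in the hit at line 1410: !u64_to_user_ptr(args->points) is just a NULL test on the converted pointer, i.e. points == 0 means "not supplied". A sketch of the array copy-in (hypothetical helper name):

    #include <linux/err.h>
    #include <linux/kernel.h>
    #include <linux/slab.h>
    #include <linux/uaccess.h>

    static u32 *my_get_handles(u64 user_handles, u32 count)
    {
            u32 *handles;

            /* kmalloc_array() rejects count * size overflow for us */
            handles = kmalloc_array(count, sizeof(*handles), GFP_KERNEL);
            if (!handles)
                    return ERR_PTR(-ENOMEM);

            if (copy_from_user(handles, u64_to_user_ptr(user_handles),
                               count * sizeof(*handles))) {
                    kfree(handles);
                    return ERR_PTR(-EFAULT);
            }
            return handles;
    }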
D | drm_property.c |
    480  values_ptr = u64_to_user_ptr(out_resp->values_ptr);  in drm_mode_getproperty_ioctl()
    491  enum_ptr = u64_to_user_ptr(out_resp->enum_blob_ptr);  in drm_mode_getproperty_ioctl()
    769  if (copy_to_user(u64_to_user_ptr(out_resp->data),  in drm_mode_getblob_ioctl()
    798  u64_to_user_ptr(out_resp->data),  in drm_mode_createblob_ioctl()
|
D | drm_mode_config.c |
    111  fb_id = u64_to_user_ptr(card_res->fb_id_ptr);  in drm_mode_getresources()
    129  crtc_id = u64_to_user_ptr(card_res->crtc_id_ptr);  in drm_mode_getresources()
    141  encoder_id = u64_to_user_ptr(card_res->encoder_id_ptr);  in drm_mode_getresources()
    152  connector_id = u64_to_user_ptr(card_res->connector_id_ptr);  in drm_mode_getresources()
|
/linux-6.1.9/drivers/gpu/drm/qxl/ |
D | qxl_ioctl.c |
    166  if (!access_ok(u64_to_user_ptr(cmd->command),  in qxl_process_single_command()
    188  u64_to_user_ptr(cmd->command), cmd->command_size);  in qxl_process_single_command()
    207  struct drm_qxl_reloc __user *u = u64_to_user_ptr(cmd->relocs);  in qxl_process_single_command()
    284  u64_to_user_ptr(execbuffer->commands);  in qxl_execbuffer_ioctl()
|
/linux-6.1.9/drivers/infiniband/core/ |
D | uverbs_ioctl.c |
    141  return ib_is_buffer_cleared(u64_to_user_ptr(uattr->data) + len,  in uverbs_is_attr_cleared()
    201  ret = copy_from_user(idr_vals, u64_to_user_ptr(uattr->data),  in uverbs_process_idrs_array()
    297  if (copy_from_user(p, u64_to_user_ptr(uattr->data),  in uverbs_process_attr()
    716  udata->inbuf = u64_to_user_ptr(in->ptr_attr.data);  in uverbs_fill_udata()
    723  udata->outbuf = u64_to_user_ptr(out->ptr_attr.data);  in uverbs_fill_udata()
    741  if (copy_to_user(u64_to_user_ptr(attr->ptr_attr.data), from, min_size))  in uverbs_copy_to()
    819  if (clear_user(u64_to_user_ptr(attr->ptr_attr.data) + size,  in uverbs_copy_to_struct_or_zero()
|
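uverbs_ioctl.c adds pointer arithmetic after conversion (hits 141 and 819): u64_to_user_ptr() yields a void __user *, so the kernel's void-pointer arithmetic extension lets the code step past the bytes it has already handled. The copy-then-zero-the-tail reply pattern, sketched with hypothetical names:

    #include <linux/errno.h>
    #include <linux/kernel.h>
    #include <linux/minmax.h>    /* min() */
    #include <linux/uaccess.h>   /* copy_to_user(), clear_user() */

    static int my_copy_reply(u64 udata, size_t ulen,
                             const void *resp, size_t rlen)
    {
            /* copy what fits ... */
            if (copy_to_user(u64_to_user_ptr(udata), resp, min(ulen, rlen)))
                    return -EFAULT;

            /* ... and zero any user tail beyond the kernel reply, so a
             * larger user buffer never reads back stale bytes */
            if (ulen > rlen &&
                clear_user(u64_to_user_ptr(udata) + rlen, ulen - rlen))
                    return -EFAULT;

            return 0;
    }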
D | ucma.c |
    402  if (copy_to_user(u64_to_user_ptr(cmd.response),  in ucma_get_event()
    472  if (copy_to_user(u64_to_user_ptr(cmd.response),  in ucma_create_id()
    618  if (copy_to_user(u64_to_user_ptr(cmd.response),  in ucma_destroy_id()
    862  if (copy_to_user(u64_to_user_ptr(cmd.response), &resp,  in ucma_query_route()
   1008  response = u64_to_user_ptr(cmd.response);  in ucma_query()
   1245  if (copy_to_user(u64_to_user_ptr(cmd.response),  in ucma_init_qp_attr()
   1397  optval = memdup_user(u64_to_user_ptr(cmd.optval),  in ucma_set_option()
   1493  if (copy_to_user(u64_to_user_ptr(cmd->response),  in ucma_process_join()
   1603  if (copy_to_user(u64_to_user_ptr(cmd.response),  in ucma_leave_multicast()
   1675  if (copy_to_user(u64_to_user_ptr(cmd.response),  in ucma_migrate_id()
|
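ucma.c is almost entirely the write()-based reply pattern: the command block that userspace hands in embeds a response u64 saying where the reply should be copied. A sketch with hypothetical struct names:

    #include <linux/errno.h>
    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    struct my_cmd {
            __u64 response;   /* user address for the reply, as a u64 */
            __u32 id;
    };

    static int my_send_reply(const struct my_cmd *cmd,
                             const void *resp, size_t resp_len)
    {
            if (copy_to_user(u64_to_user_ptr(cmd->response), resp, resp_len))
                    return -EFAULT;
            return 0;
    }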
/linux-6.1.9/drivers/gpu/drm/virtio/ |
D | virtgpu_ioctl.c |
    193  user_bo_handles = u64_to_user_ptr(exbuf->bo_handles);  in virtio_gpu_execbuffer_ioctl()
    210  buf = vmemdup_user(u64_to_user_ptr(exbuf->command), exbuf->size);  in virtio_gpu_execbuffer_ioctl()
    298  if (copy_to_user(u64_to_user_ptr(param->value), &value, sizeof(int)))  in virtio_gpu_getparam_ioctl()
    608  if (copy_to_user(u64_to_user_ptr(args->addr), ptr, size))  in virtio_gpu_get_caps_ioctl()
    695  buf = memdup_user(u64_to_user_ptr(rc_blob->cmd),  in virtio_gpu_resource_create_blob_ioctl()
    771  ctx_set_params = memdup_user(u64_to_user_ptr(args->ctx_set_params),  in virtio_gpu_context_init_ioctl()
|
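virtgpu shows memdup_user()/vmemdup_user() fused with the conversion: one call allocates, copies, and error-checks a variable-sized user buffer. A sketch with a hypothetical size cap (MY_MAX_CMD_SIZE is illustrative, not a virtio-gpu limit):

    #include <linux/err.h>
    #include <linux/kernel.h>
    #include <linux/sizes.h>
    #include <linux/string.h>   /* memdup_user(), vmemdup_user() */

    #define MY_MAX_CMD_SIZE SZ_64K

    static void *my_dup_cmds(u64 user_cmds, size_t size)
    {
            if (!size || size > MY_MAX_CMD_SIZE)
                    return ERR_PTR(-EINVAL);

            /* vmemdup_user() may fall back to vmalloc; free with kvfree() */
            return vmemdup_user(u64_to_user_ptr(user_cmds), size);
    }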
/linux-6.1.9/kernel/bpf/ |
D | syscall.c |
   1310  void __user *ukey = u64_to_user_ptr(attr->key);  in map_lookup_elem()
   1311  void __user *uvalue = u64_to_user_ptr(attr->value);  in map_lookup_elem()
   1496  void __user *ukey = u64_to_user_ptr(attr->key);  in map_get_next_key()
   1497  void __user *unext_key = u64_to_user_ptr(attr->next_key);  in map_get_next_key()
   1562  void __user *keys = u64_to_user_ptr(attr->batch.keys);  in generic_map_delete_batch()
   1616  void __user *values = u64_to_user_ptr(attr->batch.values);  in generic_map_update_batch()
   1617  void __user *keys = u64_to_user_ptr(attr->batch.keys);  in generic_map_update_batch()
   1679  void __user *uobatch = u64_to_user_ptr(attr->batch.out_batch);  in generic_map_lookup_batch()
   1680  void __user *ubatch = u64_to_user_ptr(attr->batch.in_batch);  in generic_map_lookup_batch()
   1681  void __user *values = u64_to_user_ptr(attr->batch.values);  in generic_map_lookup_batch()
    [all …]
|
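The BPF map syscalls batch their keys and values through user arrays named by u64s; each element address is computed by byte offset from the converted base. A sketch of a delete-batch style loop (hypothetical; the real code also handles rescheduling, batch cursors, and per-map locking):

    #include <linux/errno.h>
    #include <linux/kernel.h>
    #include <linux/slab.h>
    #include <linux/uaccess.h>

    static int my_delete_batch(u64 ukeys, u32 count, u32 key_size)
    {
            void __user *keys = u64_to_user_ptr(ukeys);
            void *key;
            u32 i;
            int err = 0;

            key = kmalloc(key_size, GFP_KERNEL);
            if (!key)
                    return -ENOMEM;

            for (i = 0; i < count; i++) {
                    /* size_t cast avoids 32-bit overflow in the offset */
                    if (copy_from_user(key, keys + (size_t)i * key_size,
                                       key_size)) {
                            err = -EFAULT;
                            break;
                    }
                    /* ... look up and delete the element for *key ... */
            }

            kfree(key);
            return err;
    }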
/linux-6.1.9/drivers/gpu/drm/i915/gem/ |
D | i915_gem_phys.c |
    144  char __user *user_data = u64_to_user_ptr(args->data_ptr);  in i915_gem_object_pwrite_phys()
    175  char __user *user_data = u64_to_user_ptr(args->data_ptr);  in i915_gem_object_pread_phys()
|
/linux-6.1.9/drivers/gpu/drm/etnaviv/ |
D | etnaviv_gem_submit.c |
    478  ret = copy_from_user(bos, u64_to_user_ptr(args->bos),  in etnaviv_ioctl_gem_submit()
    485  ret = copy_from_user(relocs, u64_to_user_ptr(args->relocs),  in etnaviv_ioctl_gem_submit()
    492  ret = copy_from_user(pmrs, u64_to_user_ptr(args->pmrs),  in etnaviv_ioctl_gem_submit()
    499  ret = copy_from_user(stream, u64_to_user_ptr(args->stream),  in etnaviv_ioctl_gem_submit()
|
/linux-6.1.9/drivers/gpu/drm/msm/ |
D | msm_gem_submit.c |
    101  u64_to_user_ptr(args->bos + (i * sizeof(submit_bo)));  in submit_lookup_objects()
    170  u64_to_user_ptr(args->cmds + (i * sizeof(submit_cmd)));  in submit_lookup_cmds()
    202  userptr = u64_to_user_ptr(submit_cmd.relocs);  in submit_lookup_cmds()
    557  u64_to_user_ptr(address),  in msm_parse_deps()
    635  u64_to_user_ptr(address),  in msm_parse_post_deps()
|
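msm computes the per-element offset on the u64 itself (args->bos + i * sizeof(...)) and converts the result, which keeps all the indexing in plain integer math. A sketch of that indexed fetch (hypothetical struct name):

    #include <linux/errno.h>
    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    struct my_submit_bo {
            __u32 flags;
            __u32 handle;
            __u64 presumed;
    };

    static int my_fetch_bo(u64 bos, unsigned int i, struct my_submit_bo *out)
    {
            struct my_submit_bo __user *userptr =
                    u64_to_user_ptr(bos + (u64)i * sizeof(*out));

            if (copy_from_user(out, userptr, sizeof(*out)))
                    return -EFAULT;
            return 0;
    }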
/linux-6.1.9/include/linux/ |
D | bpfptr.h | 31 return USER_BPFPTR(u64_to_user_ptr(addr)); in make_bpfptr()
|
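bpfptr.h is the one header in this list: make_bpfptr() wraps the conversion so BPF syscall code can treat user- and kernel-originated attributes uniformly. For reference, the helper all of these hits rely on lives in include/linux/kernel.h in this series and expands to roughly the following (reproduced from memory, so treat as approximate):

    #define u64_to_user_ptr(x) (            \
    {                                       \
            typecheck(u64, (x));            \
            (void __user *)(uintptr_t)(x);  \
    }                                       \
    )

The typecheck() forces callers to pass a genuine u64, so a value accidentally truncated to 32 bits is rejected at compile time, and casting through uintptr_t before adding the __user annotation keeps sparse's address-space checking intact.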
/linux-6.1.9/fs/verity/ |
D | enable.c |
    218  copy_from_user(desc->salt, u64_to_user_ptr(arg->salt_ptr),  in enable_verity()
    227  copy_from_user(desc->signature, u64_to_user_ptr(arg->sig_ptr),  in enable_verity()
|
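fs-verity's enable path is the last variant: two independent user buffers (salt and signature), each described by a u64-plus-length pair in the ioctl argument. A sketch of the descriptor fill, with a hypothetical argument struct (the real one is struct fsverity_enable_arg):

    #include <linux/errno.h>
    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    struct my_enable_arg {
            __u64 salt_ptr;
            __u32 salt_size;
            __u64 sig_ptr;
            __u32 sig_size;
    };

    static int my_fill_desc(u8 *salt, u8 *sig, const struct my_enable_arg *arg)
    {
            if (copy_from_user(salt, u64_to_user_ptr(arg->salt_ptr),
                               arg->salt_size))
                    return -EFAULT;

            if (copy_from_user(sig, u64_to_user_ptr(arg->sig_ptr),
                               arg->sig_size))
                    return -EFAULT;

            return 0;
    }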