/OK3568_Linux_fs/kernel/drivers/gpu/drm/i915/

i915_query.c
    24 if (copy_from_user(query_hdr, u64_to_user_ptr(query_item->data_ptr), in copy_query_item()
    71 if (copy_to_user(u64_to_user_ptr(query_item->data_ptr), in query_topology_info()
    75 if (copy_to_user(u64_to_user_ptr(query_item->data_ptr + sizeof(topo)), in query_topology_info()
    79 if (copy_to_user(u64_to_user_ptr(query_item->data_ptr + in query_topology_info()
    84 if (copy_to_user(u64_to_user_ptr(query_item->data_ptr + in query_topology_info()
    98 u64_to_user_ptr(query_item->data_ptr); in query_engine_info()
    164 u32 __user *p = u64_to_user_ptr(user_regs_ptr); in copy_perf_config_registers_or_number()
    194 u64_to_user_ptr(query_item->data_ptr); in query_perf_config_data()
    196 u64_to_user_ptr(query_item->data_ptr + in query_perf_config_data()
    339 u64_to_user_ptr(query_item->data_ptr); in query_perf_config_list()
    [all …]
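The query entry points above all work against a single u64 data_ptr plus a length: the kernel first reads a small header from that address, then writes results back at fixed offsets from the same base (data_ptr + sizeof(...)). A minimal sketch of that probe-then-fill shape, with my_query_item and my_query_hdr as hypothetical stand-ins for the real drm_i915_query_item layout:

    /* Hedged sketch only; structures and sizes are illustrative. */
    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    struct my_query_hdr { __u32 version; __u32 flags; };
    struct my_query_item { __u64 data_ptr; __u32 length; };

    static int my_query(const struct my_query_item *item,
                        const void *payload, u32 payload_len)
    {
            struct my_query_hdr hdr;

            if (copy_from_user(&hdr, u64_to_user_ptr(item->data_ptr), sizeof(hdr)))
                    return -EFAULT;

            if (item->length < sizeof(hdr) + payload_len)
                    return -EINVAL;

            /* The result lands right behind the header the caller supplied. */
            if (copy_to_user(u64_to_user_ptr(item->data_ptr + sizeof(hdr)),
                             payload, payload_len))
                    return -EFAULT;
            return 0;
    }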
i915_gem.c
    332 user_data = u64_to_user_ptr(args->data_ptr); in i915_gem_shmem_pread()
    424 user_data = u64_to_user_ptr(args->data_ptr); in i915_gem_gtt_pread()
    490 if (!access_ok(u64_to_user_ptr(args->data_ptr), in i915_gem_pread_ioctl()
    631 user_data = u64_to_user_ptr(args->data_ptr); in i915_gem_gtt_pwrite_fast()
    752 user_data = u64_to_user_ptr(args->data_ptr); in i915_gem_shmem_pwrite()
    795 if (!access_ok(u64_to_user_ptr(args->data_ptr), args->size)) in i915_gem_pwrite_ioctl()
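Before any of the pread/pwrite paths touch the buffer, the ioctl handlers reject ranges that fail access_ok(); the later copy_to_user()/copy_from_user() calls still do the real fault handling. A hedged sketch of that pre-check, with my_pread_args standing in for the real drm_i915_gem_pread arguments:

    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    struct my_pread_args { __u64 data_ptr; __u64 size; };

    static int my_pread_precheck(const struct my_pread_args *args)
    {
            /* Cheap sanity check up front; it does not pin or fault the pages. */
            if (!access_ok(u64_to_user_ptr(args->data_ptr), args->size))
                    return -EFAULT;
            return 0;
    }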
i915_user_extensions.c
    57 ext = u64_to_user_ptr(next); in i915_user_extensions()
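i915_user_extensions() follows a chain in which every extension begins with a u64 link to the next one; the single match above is where that link is turned back into a user pointer for the next iteration. A sketch of the walk, with my_user_extension as a stand-in for the real i915_user_extension header and an arbitrary cycle guard:

    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    struct my_user_extension { __u64 next_extension; __u32 name; __u32 pad; };

    static int walk_extensions(u64 first_ext)
    {
            u64 next = first_ext;
            unsigned int count = 0;

            while (next) {
                    struct my_user_extension ext;

                    if (copy_from_user(&ext, u64_to_user_ptr(next), sizeof(ext)))
                            return -EFAULT;

                    /* ... dispatch on ext.name here ... */

                    if (++count > 512)      /* arbitrary guard against loops */
                            return -E2BIG;
                    next = ext.next_extension;
            }
            return 0;
    }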
/OK3568_Linux_fs/kernel/kernel/bpf/

syscall.c
    1018 void __user *ukey = u64_to_user_ptr(attr->key); in map_lookup_elem()
    1019 void __user *uvalue = u64_to_user_ptr(attr->value); in map_lookup_elem()
    1085 void __user *ukey = u64_to_user_ptr(attr->key); in map_update_elem()
    1086 void __user *uvalue = u64_to_user_ptr(attr->value); in map_update_elem()
    1152 void __user *ukey = u64_to_user_ptr(attr->key); in map_delete_elem()
    1207 void __user *ukey = u64_to_user_ptr(attr->key); in map_get_next_key()
    1208 void __user *unext_key = u64_to_user_ptr(attr->next_key); in map_get_next_key()
    1273 void __user *keys = u64_to_user_ptr(attr->batch.keys); in generic_map_delete_batch()
    1326 void __user *values = u64_to_user_ptr(attr->batch.values); in generic_map_update_batch()
    1327 void __user *keys = u64_to_user_ptr(attr->batch.keys); in generic_map_update_batch()
    [all …]
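The map syscalls treat attr->key and attr->value purely as u64 handles to user memory: the key is duplicated into the kernel, a value buffer is filled, and the result is copied back. A reduced sketch of that lookup shape, with my_attr and the fixed key/value sizes as stand-ins for the real bpf_attr and map metadata:

    #include <linux/err.h>
    #include <linux/kernel.h>
    #include <linux/slab.h>
    #include <linux/string.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    struct my_attr { __u64 key; __u64 value; };

    static int my_lookup_elem(const struct my_attr *attr, u32 key_size, u32 value_size)
    {
            void __user *ukey = u64_to_user_ptr(attr->key);
            void __user *uvalue = u64_to_user_ptr(attr->value);
            void *key, *value;
            int err = 0;

            key = memdup_user(ukey, key_size);
            if (IS_ERR(key))
                    return PTR_ERR(key);

            value = kzalloc(value_size, GFP_KERNEL);
            if (!value) {
                    err = -ENOMEM;
                    goto out_key;
            }

            /* ... the real handler looks the key up and fills 'value' here ... */

            if (copy_to_user(uvalue, value, value_size))
                    err = -EFAULT;

            kfree(value);
    out_key:
            kfree(key);
            return err;
    }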
/OK3568_Linux_fs/kernel/drivers/gpu/drm/

drm_syncobj.c
    1140 u64_to_user_ptr(timeline_wait->points), in drm_syncobj_array_wait()
    1227 u64_to_user_ptr(args->handles), in drm_syncobj_wait_ioctl()
    1261 u64_to_user_ptr(args->handles), in drm_syncobj_timeline_wait_ioctl()
    1295 u64_to_user_ptr(args->handles), in drm_syncobj_reset_ioctl()
    1328 u64_to_user_ptr(args->handles), in drm_syncobj_signal_ioctl()
    1363 u64_to_user_ptr(args->handles), in drm_syncobj_timeline_signal_ioctl()
    1375 if (!u64_to_user_ptr(args->points)) { in drm_syncobj_timeline_signal_ioctl()
    1377 } else if (copy_from_user(points, u64_to_user_ptr(args->points), in drm_syncobj_timeline_signal_ioctl()
    1420 uint64_t __user *points = u64_to_user_ptr(args->points); in drm_syncobj_query_ioctl()
    1434 u64_to_user_ptr(args->handles), in drm_syncobj_query_ioctl()
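Most of these ioctls pull in an array of u32 handles named by args->handles, and the timeline variants add an optional u64 array of points: a zero pointer simply means "no per-handle point". A sketch of that optional-array handling, with simplified field names:

    #include <linux/err.h>
    #include <linux/kernel.h>
    #include <linux/slab.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    static u64 *copy_optional_points(u64 points_ptr, u32 count)
    {
            u64 *points;

            points = kcalloc(count, sizeof(*points), GFP_KERNEL);
            if (!points)
                    return ERR_PTR(-ENOMEM);

            /* No array supplied: every handle uses point 0. */
            if (!u64_to_user_ptr(points_ptr))
                    return points;

            if (copy_from_user(points, u64_to_user_ptr(points_ptr),
                               count * sizeof(*points))) {
                    kfree(points);
                    return ERR_PTR(-EFAULT);
            }
            return points;
    }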
drm_mode_config.c
    110 fb_id = u64_to_user_ptr(card_res->fb_id_ptr); in drm_mode_getresources()
    128 crtc_id = u64_to_user_ptr(card_res->crtc_id_ptr); in drm_mode_getresources()
    140 encoder_id = u64_to_user_ptr(card_res->encoder_id_ptr); in drm_mode_getresources()
    151 connector_id = u64_to_user_ptr(card_res->connector_id_ptr); in drm_mode_getresources()
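drm_mode_getresources() uses a count/array handshake: userspace passes a capacity and a u64 array pointer for each resource type, the kernel fills at most that many IDs and always reports how many exist, so a second call can be sized correctly. A sketch of that handshake with simplified names (the real code walks the CRTC/encoder/connector lists rather than a plain array):

    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    static int fill_id_array(u64 id_ptr, u32 *count_inout,
                             const u32 *ids, u32 num_ids)
    {
            u32 __user *dst = u64_to_user_ptr(id_ptr);
            u32 copied = 0, i;

            for (i = 0; i < num_ids && copied < *count_inout; i++) {
                    if (put_user(ids[i], dst + copied))
                            return -EFAULT;
                    copied++;
            }

            /* Always report the total, even if the array was too small. */
            *count_inout = num_ids;
            return 0;
    }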
drm_property.c
    483 values_ptr = u64_to_user_ptr(out_resp->values_ptr); in drm_mode_getproperty_ioctl()
    494 enum_ptr = u64_to_user_ptr(out_resp->enum_blob_ptr); in drm_mode_getproperty_ioctl()
    772 if (copy_to_user(u64_to_user_ptr(out_resp->data), in drm_mode_getblob_ioctl()
    801 u64_to_user_ptr(out_resp->data), in drm_mode_createblob_ioctl()
/OK3568_Linux_fs/kernel/drivers/infiniband/core/

uverbs_ioctl.c
    141 return ib_is_buffer_cleared(u64_to_user_ptr(uattr->data) + len, in uverbs_is_attr_cleared()
    201 ret = copy_from_user(idr_vals, u64_to_user_ptr(uattr->data), in uverbs_process_idrs_array()
    297 if (copy_from_user(p, u64_to_user_ptr(uattr->data), in uverbs_process_attr()
    708 udata->inbuf = u64_to_user_ptr(in->ptr_attr.data); in uverbs_fill_udata()
    715 udata->outbuf = u64_to_user_ptr(out->ptr_attr.data); in uverbs_fill_udata()
    733 if (copy_to_user(u64_to_user_ptr(attr->ptr_attr.data), from, min_size)) in uverbs_copy_to()
    787 if (clear_user(u64_to_user_ptr(attr->ptr_attr.data) + size, in uverbs_copy_to_struct_or_zero()
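uverbs_copy_to() and uverbs_copy_to_struct_or_zero() show the usual response discipline for extensible structs: copy the part both sides know about, then clear_user() whatever tail the (possibly newer, larger) user buffer still has, so no stale kernel bytes leak. A hedged sketch of that shape with a hypothetical helper name:

    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    static int copy_response(u64 user_ptr, size_t user_len,
                             const void *resp, size_t resp_len)
    {
            size_t n = min(user_len, resp_len);

            if (copy_to_user(u64_to_user_ptr(user_ptr), resp, n))
                    return -EFAULT;

            /* Zero the tail of a larger user struct instead of leaving it stale. */
            if (user_len > resp_len &&
                clear_user(u64_to_user_ptr(user_ptr) + resp_len,
                           user_len - resp_len))
                    return -EFAULT;
            return 0;
    }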
ucma.c
    402 if (copy_to_user(u64_to_user_ptr(cmd.response), in ucma_get_event()
    472 if (copy_to_user(u64_to_user_ptr(cmd.response), in ucma_create_id()
    618 if (copy_to_user(u64_to_user_ptr(cmd.response), in ucma_destroy_id()
    862 if (copy_to_user(u64_to_user_ptr(cmd.response), &resp, in ucma_query_route()
    1008 response = u64_to_user_ptr(cmd.response); in ucma_query()
    1245 if (copy_to_user(u64_to_user_ptr(cmd.response), in ucma_init_qp_attr()
    1397 optval = memdup_user(u64_to_user_ptr(cmd.optval), in ucma_set_option()
    1493 if (copy_to_user(u64_to_user_ptr(cmd->response), in ucma_process_join()
    1603 if (copy_to_user(u64_to_user_ptr(cmd.response), in ucma_leave_multicast()
    1675 if (copy_to_user(u64_to_user_ptr(cmd.response), in ucma_migrate_id()
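Alongside the fixed-size copy_to_user() responses at cmd.response, ucma_set_option() pulls in a caller-sized option buffer with memdup_user(), which allocates and copies in one step and returns an ERR_PTR on failure. A sketch of that pattern; the 4096-byte cap is illustrative, not the ucma limit:

    #include <linux/err.h>
    #include <linux/kernel.h>
    #include <linux/slab.h>
    #include <linux/string.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    static int handle_option(u64 optval_ptr, u32 optlen)
    {
            void *optval;

            if (!optlen || optlen > 4096)   /* arbitrary sanity cap for the sketch */
                    return -EINVAL;

            optval = memdup_user(u64_to_user_ptr(optval_ptr), optlen);
            if (IS_ERR(optval))
                    return PTR_ERR(optval);

            /* ... act on the kernel-side copy ... */

            kfree(optval);
            return 0;
    }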
uverbs_main.c
    525 if (!access_ok(u64_to_user_ptr(ex_hdr->response), in verify_hdr()
    642 u64_to_user_ptr(response + in ib_uverbs_write()
    653 &bundle.ucore, buf, u64_to_user_ptr(response), in ib_uverbs_write()
    659 u64_to_user_ptr(ex_hdr.response), in ib_uverbs_write()
    664 u64_to_user_ptr(ex_hdr.response) + bundle.ucore.outlen, in ib_uverbs_write()
/OK3568_Linux_fs/kernel/fs/incfs/

pseudo_files.c
    495 file_name = strndup_user(u64_to_user_ptr(args.file_name), PATH_MAX); in ioctl_create_file()
    519 u64_to_user_ptr(args.directory_path), in ioctl_create_file()
    641 u64_to_user_ptr(args.file_attr), in ioctl_create_file()
    659 u64_to_user_ptr(args.signature_info), in ioctl_create_file()
    679 notify_create(file, u64_to_user_ptr(args.directory_path), file_name, in ioctl_create_file()
    772 file_name = strndup_user(u64_to_user_ptr(args.file_name), PATH_MAX); in ioctl_create_mapped_file()
    835 u64_to_user_ptr(args.directory_path), in ioctl_create_mapped_file()
    891 notify_create(file, u64_to_user_ptr(args.directory_path), file_name, in ioctl_create_mapped_file()
    936 if (copy_to_user(u64_to_user_ptr(args.timeouts_array), buffer, in ioctl_get_read_timeouts()
    970 if (copy_from_user(buffer, u64_to_user_ptr(args.timeouts_array), in ioctl_set_read_timeouts()
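For string-valued fields such as args.file_name, incfs does not copy a fixed-size buffer at all; strndup_user() duplicates a NUL-terminated user string up to PATH_MAX and returns an ERR_PTR on fault or overlength input. A minimal sketch of that idiom:

    #include <linux/err.h>
    #include <linux/kernel.h>
    #include <linux/limits.h>
    #include <linux/slab.h>
    #include <linux/string.h>
    #include <linux/uaccess.h>

    static int with_user_name(u64 name_ptr)
    {
            char *name = strndup_user(u64_to_user_ptr(name_ptr), PATH_MAX);

            if (IS_ERR(name))
                    return PTR_ERR(name);

            /* ... use the NUL-terminated kernel copy ... */

            kfree(name);
            return 0;
    }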
/OK3568_Linux_fs/kernel/drivers/gpu/drm/qxl/

qxl_ioctl.c
    169 if (!access_ok(u64_to_user_ptr(cmd->command), in qxl_process_single_command()
    191 u64_to_user_ptr(cmd->command), cmd->command_size); in qxl_process_single_command()
    210 struct drm_qxl_reloc __user *u = u64_to_user_ptr(cmd->relocs); in qxl_process_single_command()
    288 u64_to_user_ptr(execbuffer->commands); in qxl_execbuffer_ioctl()
/OK3568_Linux_fs/kernel/drivers/gpu/drm/virtio/

virtgpu_ioctl.c
    133 user_bo_handles = u64_to_user_ptr(exbuf->bo_handles); in virtio_gpu_execbuffer_ioctl()
    150 buf = vmemdup_user(u64_to_user_ptr(exbuf->command), exbuf->size); in virtio_gpu_execbuffer_ioctl()
    219 if (copy_to_user(u64_to_user_ptr(param->value), &value, sizeof(int))) in virtio_gpu_getparam_ioctl()
    490 if (copy_to_user(u64_to_user_ptr(args->addr), ptr, size)) in virtio_gpu_get_caps_ioctl()
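virtio_gpu_execbuffer_ioctl() snapshots the whole user command stream with vmemdup_user(), which transparently switches to vmalloc for large buffers; the copy is therefore released with kvfree(). A hedged sketch with an illustrative size cap:

    #include <linux/err.h>
    #include <linux/kernel.h>
    #include <linux/mm.h>
    #include <linux/sizes.h>
    #include <linux/string.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    static void *copy_command_stream(u64 cmd_ptr, u32 size)
    {
            void *buf;

            if (!size || size > SZ_4M)      /* arbitrary upper bound for the sketch */
                    return ERR_PTR(-EINVAL);

            buf = vmemdup_user(u64_to_user_ptr(cmd_ptr), size);
            /* On success the caller must free this with kvfree(), not kfree(). */
            return buf;
    }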
/OK3568_Linux_fs/kernel/net/bpf/

test_run.c
    133 void __user *data_out = u64_to_user_ptr(kattr->test.data_out); in bpf_test_finish()
    224 void __user *data_in = u64_to_user_ptr(kattr->test.data_in); in bpf_test_init()
    311 void __user *ctx_in = u64_to_user_ptr(kattr->test.ctx_in); in bpf_prog_test_run_raw_tp()
    372 void __user *data_in = u64_to_user_ptr(kattr->test.ctx_in); in bpf_ctx_init()
    373 void __user *data_out = u64_to_user_ptr(kattr->test.ctx_out); in bpf_ctx_init()
    405 void __user *data_out = u64_to_user_ptr(kattr->test.ctx_out); in bpf_ctx_finish()
/OK3568_Linux_fs/kernel/drivers/gpu/drm/msm/

msm_gem_submit.c
    82 u64_to_user_ptr(args->bos + (i * sizeof(submit_bo))); in submit_lookup_objects()
    332 u64_to_user_ptr(relocs + (i * sizeof(submit_reloc))); in submit_reloc()
    425 u64_to_user_ptr(address), in msm_wait_deps()
    506 u64_to_user_ptr(address), in msm_parse_post_deps()
    715 u64_to_user_ptr(args->cmds + (i * sizeof(submit_cmd))); in msm_ioctl_gem_submit()
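submit_lookup_objects() and msm_ioctl_gem_submit() read their user arrays one element at a time, doing the index arithmetic on the raw u64 (args->bos + i * sizeof(elem)) before converting it. A sketch of one such element read, with my_submit_bo as a hypothetical element type:

    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    struct my_submit_bo { __u32 flags; __u32 handle; __u64 presumed; };

    static int read_bo_entry(u64 bos_ptr, u32 i, struct my_submit_bo *out)
    {
            /* Offset in u64 space first, then convert; the (u64) cast keeps the
             * multiply from wrapping in 32 bits. */
            void __user *userptr =
                    u64_to_user_ptr(bos_ptr + (u64)i * sizeof(*out));

            if (copy_from_user(out, userptr, sizeof(*out)))
                    return -EFAULT;
            return 0;
    }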
/OK3568_Linux_fs/kernel/drivers/gpu/drm/etnaviv/

etnaviv_gem_submit.c
    491 ret = copy_from_user(bos, u64_to_user_ptr(args->bos), in etnaviv_ioctl_gem_submit()
    498 ret = copy_from_user(relocs, u64_to_user_ptr(args->relocs), in etnaviv_ioctl_gem_submit()
    505 ret = copy_from_user(pmrs, u64_to_user_ptr(args->pmrs), in etnaviv_ioctl_gem_submit()
    512 ret = copy_from_user(stream, u64_to_user_ptr(args->stream), in etnaviv_ioctl_gem_submit()
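etnaviv_ioctl_gem_submit() goes the other way: it sizes each array from the submit's counts, allocates it in one go, and copies the whole thing with a single copy_from_user(). A sketch of that bulk step; the element type and count are stand-ins:

    #include <linux/err.h>
    #include <linux/kernel.h>
    #include <linux/mm.h>
    #include <linux/slab.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    struct my_bo_entry { __u32 handle; __u32 flags; };

    static struct my_bo_entry *copy_bo_array(u64 bos_ptr, u32 nr_bos)
    {
            struct my_bo_entry *bos;

            /* kvmalloc_array() rejects nr_bos * size overflow for us. */
            bos = kvmalloc_array(nr_bos, sizeof(*bos), GFP_KERNEL);
            if (!bos)
                    return ERR_PTR(-ENOMEM);

            if (copy_from_user(bos, u64_to_user_ptr(bos_ptr),
                               (u64)nr_bos * sizeof(*bos))) {
                    kvfree(bos);
                    return ERR_PTR(-EFAULT);
            }
            return bos;
    }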
/OK3568_Linux_fs/kernel/drivers/gpu/drm/i915/gem/

i915_gem_phys.c
    142 char __user *user_data = u64_to_user_ptr(args->data_ptr); in phys_pwrite()
    173 char __user *user_data = u64_to_user_ptr(args->data_ptr); in phys_pread()
i915_gem_execbuffer.c
    1677 u64_to_user_ptr(entry->relocs_ptr); in eb_relocate_vma()
    1791 addr = u64_to_user_ptr(entry->relocs_ptr); in check_relocations()
    1825 urelocs = u64_to_user_ptr(eb->exec[i].relocs_ptr); in eb_copy_relocations()
    2701 user_fences = u64_to_user_ptr(timeline_fences->handles_ptr); in add_timeline_fence_array()
    2705 user_values = u64_to_user_ptr(timeline_fences->values_ptr); in add_timeline_fence_array()
    2831 user = u64_to_user_ptr(args->cliprects_ptr); in add_fence_array()
    3018 return i915_user_extensions(u64_to_user_ptr(args->cliprects_ptr), in parse_execbuf2_extensions()
    3318 u64_to_user_ptr(args->buffers_ptr), in i915_gem_execbuffer_ioctl()
    3343 u64_to_user_ptr(args->buffers_ptr); in i915_gem_execbuffer_ioctl()
    3393 u64_to_user_ptr(args->buffers_ptr), in i915_gem_execbuffer2_ioctl()
    [all …]
i915_gem_context.c
    1030 err = i915_user_extensions(u64_to_user_ptr(args->extensions), in i915_gem_vm_create_ioctl()
    1539 if (copy_from_user(&user_sseu, u64_to_user_ptr(args->value), in set_sseu()
    1778 u64_to_user_ptr(args->value); in set_engines()
    1853 err = i915_user_extensions(u64_to_user_ptr(extensions), in set_engines()
    1947 user = u64_to_user_ptr(args->value); in get_engines()
    2381 ret = i915_user_extensions(u64_to_user_ptr(args->extensions), in i915_gem_context_create_ioctl()
    2437 if (copy_from_user(&user_sseu, u64_to_user_ptr(args->value), in get_sseu()
    2469 if (copy_to_user(u64_to_user_ptr(args->value), &user_sseu, in get_sseu()
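get_sseu() is a read-modify-write parameter: the same args->value address first supplies the query (which engine) and then receives the filled-in result. A sketch of that round trip, with my_sseu as a cut-down stand-in for drm_i915_gem_context_param_sseu:

    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    struct my_sseu {
            __u16 engine_class;
            __u16 engine_instance;
            __u32 flags;
            __u64 slice_mask;
            __u64 subslice_mask;
    };

    static int my_get_sseu(u64 value_ptr)
    {
            struct my_sseu user_sseu;

            if (copy_from_user(&user_sseu, u64_to_user_ptr(value_ptr),
                               sizeof(user_sseu)))
                    return -EFAULT;

            /* ... look up the engine named by class/instance, fill the masks ... */

            if (copy_to_user(u64_to_user_ptr(value_ptr), &user_sseu,
                             sizeof(user_sseu)))
                    return -EFAULT;
            return 0;
    }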
/OK3568_Linux_fs/kernel/drivers/crypto/rockchip/cryptodev_linux/

rk_cryptodev.c
    671 if (unlikely(copy_from_user(key, u64_to_user_ptr(rop->key), rop->key_len))) { in crypto_rsa_run()
    682 if (unlikely(copy_from_user(in, u64_to_user_ptr(rop->in), rop->in_len))) { in crypto_rsa_run()
    729 if (unlikely(copy_to_user(u64_to_user_ptr(rop->out), out, req->dst_len))) { in crypto_rsa_run()
    883 ret = copy_from_user(tag_buf, u64_to_user_ptr((u64)caop->tag), caop->tag_len); in crypto_auth_fd_zc_rk()
    907 ret = copy_to_user(u64_to_user_ptr((u64)kcaop->caop.tag), tag_buf, caop->tag_len); in crypto_auth_fd_zc_rk()
    1060 ret = copy_from_user(kcaop->iv, u64_to_user_ptr((u64)caop->iv), kcaop->ivlen); in fill_kcaop_fd_from_caop()
    1083 ret = copy_to_user(u64_to_user_ptr((u64)kcaop->caop.iv), kcaop->iv, kcaop->ivlen); in fill_caop_fd_from_kcaop()
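Unlike the other hits, several of these convert through an explicit (u64) cast, which suggests the cryptodev uAPI stores those addresses in a field that is not already a __u64 (the exact type is not visible in this listing). A hedged sketch of that shape with a hypothetical struct; the __user pointer field is an assumption:

    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    struct my_auth_op {
            __u8 __user *iv;        /* assumed: a pointer-typed uAPI field */
            __u32 ivlen;
    };

    static int copy_iv_in(const struct my_auth_op *caop, u8 *iv_buf, u32 buf_len)
    {
            if (caop->ivlen > buf_len)
                    return -EINVAL;

            /* Widen to u64 first, then hand it to u64_to_user_ptr(). */
            if (copy_from_user(iv_buf, u64_to_user_ptr((u64)caop->iv),
                               caop->ivlen))
                    return -EFAULT;
            return 0;
    }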
/OK3568_Linux_fs/kernel/drivers/gpu/arm/bifrost/csf/

mali_kbase_csf_kcpu.c
    367 void __user *data = u64_to_user_ptr(alloc_info->info); in kbase_kcpu_jit_allocate_prepare()
    548 void __user *data = u64_to_user_ptr(free_info->ids); in kbase_kcpu_jit_free_prepare()
    870 if (copy_from_user(objs, u64_to_user_ptr(cqs_wait_info->objs), in kbase_kcpu_cqs_wait_prepare()
    979 if (copy_from_user(objs, u64_to_user_ptr(cqs_set_info->objs), in kbase_kcpu_cqs_set_prepare()
    1136 if (copy_from_user(objs, u64_to_user_ptr(cqs_wait_operation_info->objs), in kbase_kcpu_cqs_wait_operation_prepare()
    1303 if (copy_from_user(objs, u64_to_user_ptr(cqs_set_operation_info->objs), in kbase_kcpu_cqs_set_operation_prepare()
    1546 if (copy_from_user(&fence, u64_to_user_ptr(fence_info->fence), sizeof(fence))) in kbase_kcpu_fence_wait_prepare()
    1677 if (copy_from_user(&fence, u64_to_user_ptr(fence_info->fence), sizeof(fence))) in kbase_kcpu_fence_signal_prepare()
    1684 if (copy_to_user(u64_to_user_ptr(fence_info->fence), &fence, in kbase_kcpu_fence_signal_prepare()
    2267 void __user *user_cmds = u64_to_user_ptr(enq->addr); in kbase_csf_kcpu_queue_enqueue()
/OK3568_Linux_fs/kernel/fs/verity/

enable.c
    219 copy_from_user(desc->salt, u64_to_user_ptr(arg->salt_ptr), in enable_verity()
    228 copy_from_user(desc->signature, u64_to_user_ptr(arg->sig_ptr), in enable_verity()
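enable_verity() copies the caller's salt and signature into the descriptor it is building, after checking the lengths against what the descriptor can hold. A sketch of the bounded salt copy with simplified field names:

    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    struct my_desc { __u8 salt_size; __u8 salt[32]; };

    static int copy_salt(struct my_desc *desc, u64 salt_ptr, u32 salt_size)
    {
            if (salt_size > sizeof(desc->salt))
                    return -EINVAL;

            desc->salt_size = salt_size;
            if (copy_from_user(desc->salt, u64_to_user_ptr(salt_ptr), salt_size))
                    return -EFAULT;
            return 0;
    }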
/OK3568_Linux_fs/kernel/drivers/gpu/drm/vc4/

vc4_gem.c
    130 if (copy_to_user(u64_to_user_ptr(get_state->bo), in vc4_get_hang_state_ioctl()
    761 if (copy_from_user(handles, u64_to_user_ptr(args->bo_handles), in vc4_cl_lookup_bos()
    869 u64_to_user_ptr(args->bin_cl), in vc4_get_bcl()
    876 u64_to_user_ptr(args->shader_rec), in vc4_get_bcl()
    883 u64_to_user_ptr(args->uniforms), in vc4_get_bcl()
/OK3568_Linux_fs/kernel/io_uring/

io_uring.c
    3166 return u64_to_user_ptr(kbuf->addr); in io_rw_buffer_select()
    3178 uiov = u64_to_user_ptr(req->rw.addr); in io_compat_import()
    3199 struct iovec __user *uiov = u64_to_user_ptr(req->rw.addr); in __io_iov_buffer_select()
    3224 iov[0].iov_base = u64_to_user_ptr(kbuf->addr); in io_iov_buffer_select()
    3242 void __user *buf = u64_to_user_ptr(req->rw.addr); in io_import_iovec()
    3313 iovec.iov_base = u64_to_user_ptr(req->rw.addr); in loop_rw_iter()
    3766 oldf = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_renameat_prep()
    3767 newf = u64_to_user_ptr(READ_ONCE(sqe->addr2)); in io_renameat_prep()
    3823 fname = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_unlinkat_prep()
    4090 fname = u64_to_user_ptr(READ_ONCE(sqe->addr)); in __io_openat_prep()
    [all …]
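The prep helpers at the bottom of the list read SQE fields with READ_ONCE() before converting them, because the SQE lives in memory shared with userspace and must not be re-read after validation. A sketch of that prep-time idiom, with my_sqe as a cut-down stand-in for io_uring_sqe:

    #include <linux/compiler.h>
    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    struct my_sqe { __u64 addr; __u64 addr2; };

    static void prep_rename(const struct my_sqe *sqe,
                            const char __user **oldf, const char __user **newf)
    {
            /* One load per field; a second read could see a value userspace
             * changed after it was checked. */
            *oldf = u64_to_user_ptr(READ_ONCE(sqe->addr));
            *newf = u64_to_user_ptr(READ_ONCE(sqe->addr2));
    }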
/OK3568_Linux_fs/kernel/kernel/trace/

trace_event_perf.c
    259 func, u64_to_user_ptr(p_event->attr.kprobe_func), in perf_kprobe_init()
    312 path = strndup_user(u64_to_user_ptr(p_event->attr.uprobe_path), in perf_uprobe_init()