Lines Matching refs:vbuf

92 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_get_vbuf() local
94 vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_KERNEL | __GFP_NOFAIL); in virtio_gpu_get_vbuf()
98 vbuf->buf = (void *)vbuf + sizeof(*vbuf); in virtio_gpu_get_vbuf()
99 vbuf->size = size; in virtio_gpu_get_vbuf()
101 vbuf->resp_cb = resp_cb; in virtio_gpu_get_vbuf()
102 vbuf->resp_size = resp_size; in virtio_gpu_get_vbuf()
104 vbuf->resp_buf = (void *)vbuf->buf + size; in virtio_gpu_get_vbuf()
106 vbuf->resp_buf = resp_buf; in virtio_gpu_get_vbuf()
107 BUG_ON(!vbuf->resp_buf); in virtio_gpu_get_vbuf()
108 return vbuf; in virtio_gpu_get_vbuf()
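
The matched lines above (source lines 92-108) outline the allocator's memory layout: a single kmem_cache object carries the vbuffer header, the inline command buffer directly behind it and, for small responses, the inline response buffer behind that. A minimal sketch of virtio_gpu_get_vbuf() reconstructed from those lines; the parameter order and the inline-response branch condition (inferred from the MAX_INLINE_RESP_SIZE check in free_vbuf() further down) are assumptions, and the driver-internal types (struct virtio_gpu_device, struct virtio_gpu_vbuffer, virtio_gpu_resp_cb) are taken as given:

    /* Sketch only: parameter order and the inline-response check are assumptions;
     * the field assignments mirror matched source lines 94-108. */
    static struct virtio_gpu_vbuffer *
    virtio_gpu_get_vbuf(struct virtio_gpu_device *vgdev,
    			int size, int resp_size, void *resp_buf,
    			virtio_gpu_resp_cb resp_cb)
    {
    	struct virtio_gpu_vbuffer *vbuf;

    	/* One cache object carries the header plus both inline buffers;
    	 * __GFP_NOFAIL means the allocation never returns NULL. */
    	vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_KERNEL | __GFP_NOFAIL);

    	vbuf->buf = (void *)vbuf + sizeof(*vbuf);	/* inline command buffer */
    	vbuf->size = size;
    	vbuf->resp_cb = resp_cb;
    	vbuf->resp_size = resp_size;
    	if (resp_size <= MAX_INLINE_RESP_SIZE)		/* condition assumed */
    		vbuf->resp_buf = (void *)vbuf->buf + size;	/* inline response */
    	else
    		vbuf->resp_buf = resp_buf;			/* caller-supplied */
    	BUG_ON(!vbuf->resp_buf);
    	return vbuf;
    }

Packing command and small response into the same cache object avoids a second allocation on the hot path; only large responses need a separate buffer.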
112 virtio_gpu_vbuf_ctrl_hdr(struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_vbuf_ctrl_hdr() argument
118 return (struct virtio_gpu_ctrl_hdr *)vbuf->buf; in virtio_gpu_vbuf_ctrl_hdr()
125 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_alloc_cursor() local
127 vbuf = virtio_gpu_get_vbuf in virtio_gpu_alloc_cursor()
130 if (IS_ERR(vbuf)) { in virtio_gpu_alloc_cursor()
132 return ERR_CAST(vbuf); in virtio_gpu_alloc_cursor()
134 *vbuffer_p = vbuf; in virtio_gpu_alloc_cursor()
135 return (struct virtio_gpu_update_cursor *)vbuf->buf; in virtio_gpu_alloc_cursor()
144 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_alloc_cmd_resp() local
146 vbuf = virtio_gpu_get_vbuf(vgdev, cmd_size, in virtio_gpu_alloc_cmd_resp()
148 *vbuffer_p = vbuf; in virtio_gpu_alloc_cmd_resp()
149 return (struct virtio_gpu_command *)vbuf->buf; in virtio_gpu_alloc_cmd_resp()
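
Both allocation wrappers follow the same shape: call virtio_gpu_get_vbuf(), hand the vbuffer back through *vbuffer_p, and return vbuf->buf cast to the command structure the caller fills in. A hedged sketch of the cursor variant; the exact arguments passed to virtio_gpu_get_vbuf() are assumptions, since matched source line 127 only shows the call split across lines:

    /* Sketch of the wrapper pattern; the argument values are assumptions. */
    static struct virtio_gpu_update_cursor *
    virtio_gpu_alloc_cursor(struct virtio_gpu_device *vgdev,
    			    struct virtio_gpu_vbuffer **vbuffer_p)
    {
    	struct virtio_gpu_vbuffer *vbuf;

    	vbuf = virtio_gpu_get_vbuf(vgdev,
    				   sizeof(struct virtio_gpu_update_cursor),
    				   0, NULL, NULL);
    	if (IS_ERR(vbuf)) {
    		*vbuffer_p = NULL;		/* error handling assumed */
    		return ERR_CAST(vbuf);
    	}
    	*vbuffer_p = vbuf;
    	/* The caller writes the cursor command directly into the inline buffer. */
    	return (struct virtio_gpu_update_cursor *)vbuf->buf;
    }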
172 struct virtio_gpu_vbuffer *vbuf) in free_vbuf() argument
174 if (vbuf->resp_size > MAX_INLINE_RESP_SIZE) in free_vbuf()
175 kfree(vbuf->resp_buf); in free_vbuf()
176 kvfree(vbuf->data_buf); in free_vbuf()
177 kmem_cache_free(vgdev->vbufs, vbuf); in free_vbuf()
182 struct virtio_gpu_vbuffer *vbuf; in reclaim_vbufs() local
186 while ((vbuf = virtqueue_get_buf(vq, &len))) { in reclaim_vbufs()
187 list_add_tail(&vbuf->list, reclaim_list); in reclaim_vbufs()
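
Teardown mirrors allocation: reclaim_vbufs() drains completed buffers from the virtqueue onto a reclaim list, and free_vbuf() releases the out-of-line response buffer (only when it was not inline), the optional data payload, and finally the cache object. A sketch assembled almost verbatim from the matched lines; the return type of reclaim_vbufs() and the caller-side locking are assumptions:

    static void free_vbuf(struct virtio_gpu_device *vgdev,
    		      struct virtio_gpu_vbuffer *vbuf)
    {
    	if (vbuf->resp_size > MAX_INLINE_RESP_SIZE)
    		kfree(vbuf->resp_buf);		/* out-of-line response only */
    	kvfree(vbuf->data_buf);			/* kmalloc'ed or vmalloc'ed payload */
    	kmem_cache_free(vgdev->vbufs, vbuf);
    }

    static void reclaim_vbufs(struct virtqueue *vq, struct list_head *reclaim_list)
    {
    	struct virtio_gpu_vbuffer *vbuf;
    	unsigned int len;

    	/* Collect every completed buffer; the caller frees them via free_vbuf(). */
    	while ((vbuf = virtqueue_get_buf(vq, &len)))
    		list_add_tail(&vbuf->list, reclaim_list);
    }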
318 struct virtio_gpu_vbuffer *vbuf, in virtio_gpu_queue_ctrl_sgs() argument
329 if (fence && vbuf->objs) in virtio_gpu_queue_ctrl_sgs()
330 virtio_gpu_array_unlock_resv(vbuf->objs); in virtio_gpu_queue_ctrl_sgs()
331 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_ctrl_sgs()
352 virtio_gpu_fence_emit(vgdev, virtio_gpu_vbuf_ctrl_hdr(vbuf), in virtio_gpu_queue_ctrl_sgs()
354 if (vbuf->objs) { in virtio_gpu_queue_ctrl_sgs()
355 virtio_gpu_array_add_fence(vbuf->objs, &fence->f); in virtio_gpu_queue_ctrl_sgs()
356 virtio_gpu_array_unlock_resv(vbuf->objs); in virtio_gpu_queue_ctrl_sgs()
360 ret = virtqueue_add_sgs(vq, sgs, outcnt, incnt, vbuf, GFP_ATOMIC); in virtio_gpu_queue_ctrl_sgs()
363 trace_virtio_gpu_cmd_queue(vq, virtio_gpu_vbuf_ctrl_hdr(vbuf)); in virtio_gpu_queue_ctrl_sgs()
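
Source lines 318-363 show the core of the control-queue submission: on the early-exit path the pinned object array is unlocked and the vbuffer freed; on the normal path the fence is emitted into the command header, attached to the object array, the reservation unlocked, and the scatterlists are added to the virtqueue with the vbuffer as the completion cookie. A sketch of that core; the early-exit condition, queue-space handling, locking and notification are not among the matched lines, so the queue_unavailable flag below is a hypothetical placeholder:

    /* Sketch of the core of virtio_gpu_queue_ctrl_sgs(); space checks, locking
     * and notification are assumed/omitted. */
    static int virtio_gpu_queue_ctrl_sgs(struct virtio_gpu_device *vgdev,
    				     struct virtio_gpu_vbuffer *vbuf,
    				     struct virtio_gpu_fence *fence,
    				     int elemcnt, struct scatterlist **sgs,
    				     int outcnt, int incnt)
    {
    	struct virtqueue *vq = vgdev->ctrlq.vq;
    	bool queue_unavailable = false;	/* hypothetical placeholder check */
    	int ret;

    	if (queue_unavailable) {
    		/* Drop the reservation we hold on behalf of the fence path. */
    		if (fence && vbuf->objs)
    			virtio_gpu_array_unlock_resv(vbuf->objs);
    		free_vbuf(vgdev, vbuf);
    		return -ENODEV;
    	}

    	if (fence) {
    		/* Fence id goes into the command header just before queuing. */
    		virtio_gpu_fence_emit(vgdev, virtio_gpu_vbuf_ctrl_hdr(vbuf),
    				      fence);
    		if (vbuf->objs) {
    			virtio_gpu_array_add_fence(vbuf->objs, &fence->f);
    			virtio_gpu_array_unlock_resv(vbuf->objs);
    		}
    	}

    	ret = virtqueue_add_sgs(vq, sgs, outcnt, incnt, vbuf, GFP_ATOMIC);
    	trace_virtio_gpu_cmd_queue(vq, virtio_gpu_vbuf_ctrl_hdr(vbuf));
    	return ret;	/* kick/notify handled elsewhere (assumed) */
    }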
374 struct virtio_gpu_vbuffer *vbuf, in virtio_gpu_queue_fenced_ctrl_buffer() argument
382 sg_init_one(&vcmd, vbuf->buf, vbuf->size); in virtio_gpu_queue_fenced_ctrl_buffer()
388 if (vbuf->data_size) { in virtio_gpu_queue_fenced_ctrl_buffer()
389 if (is_vmalloc_addr(vbuf->data_buf)) { in virtio_gpu_queue_fenced_ctrl_buffer()
391 sgt = vmalloc_to_sgt(vbuf->data_buf, vbuf->data_size, in virtio_gpu_queue_fenced_ctrl_buffer()
394 if (fence && vbuf->objs) in virtio_gpu_queue_fenced_ctrl_buffer()
395 virtio_gpu_array_unlock_resv(vbuf->objs); in virtio_gpu_queue_fenced_ctrl_buffer()
402 sg_init_one(&vout, vbuf->data_buf, vbuf->data_size); in virtio_gpu_queue_fenced_ctrl_buffer()
410 if (vbuf->resp_size) { in virtio_gpu_queue_fenced_ctrl_buffer()
411 sg_init_one(&vresp, vbuf->resp_buf, vbuf->resp_size); in virtio_gpu_queue_fenced_ctrl_buffer()
417 ret = virtio_gpu_queue_ctrl_sgs(vgdev, vbuf, fence, elemcnt, sgs, outcnt, in virtio_gpu_queue_fenced_ctrl_buffer()
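
Source lines 374-417 show the three scatterlist slots a fenced control buffer can occupy: the command itself (always), an optional outgoing data payload (a vmalloc area converted through the driver-internal vmalloc_to_sgt() helper, or a single contiguous entry), and an optional incoming response. A sketch of that setup; the sgs[]/elemcnt bookkeeping between the matched lines and the sg_table cleanup are assumptions:

    /* Sketch of the sg setup in virtio_gpu_queue_fenced_ctrl_buffer(). */
    static int
    virtio_gpu_queue_fenced_ctrl_buffer(struct virtio_gpu_device *vgdev,
    				    struct virtio_gpu_vbuffer *vbuf,
    				    struct virtio_gpu_fence *fence)
    {
    	struct scatterlist *sgs[3], vcmd, vout, vresp;
    	struct sg_table *sgt = NULL;
    	int elemcnt = 0, outcnt = 0, incnt = 0, ret;

    	/* out: command header + body held in the inline buffer */
    	sg_init_one(&vcmd, vbuf->buf, vbuf->size);
    	elemcnt++;
    	sgs[outcnt++] = &vcmd;

    	/* out: optional payload (attach-backing entries, 3D command stream) */
    	if (vbuf->data_size) {
    		if (is_vmalloc_addr(vbuf->data_buf)) {
    			int sg_ents;

    			/* vmalloc_to_sgt() is a driver-internal helper. */
    			sgt = vmalloc_to_sgt(vbuf->data_buf, vbuf->data_size,
    					     &sg_ents);
    			if (!sgt) {
    				if (fence && vbuf->objs)
    					virtio_gpu_array_unlock_resv(vbuf->objs);
    				return -ENOMEM;
    			}
    			elemcnt += sg_ents;
    			sgs[outcnt++] = sgt->sgl;
    		} else {
    			sg_init_one(&vout, vbuf->data_buf, vbuf->data_size);
    			elemcnt++;
    			sgs[outcnt++] = &vout;
    		}
    	}

    	/* in: optional response written back by the host */
    	if (vbuf->resp_size) {
    		sg_init_one(&vresp, vbuf->resp_buf, vbuf->resp_size);
    		elemcnt++;
    		sgs[outcnt + incnt] = &vresp;
    		incnt++;
    	}

    	ret = virtio_gpu_queue_ctrl_sgs(vgdev, vbuf, fence, elemcnt, sgs,
    					outcnt, incnt);
    	if (sgt) {			/* temporary table, cleanup assumed */
    		sg_free_table(sgt);
    		kfree(sgt);
    	}
    	return ret;
    }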
444 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_queue_ctrl_buffer() argument
446 return virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, NULL); in virtio_gpu_queue_ctrl_buffer()
450 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_queue_cursor() argument
458 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_cursor()
462 sg_init_one(&ccmd, vbuf->buf, vbuf->size); in virtio_gpu_queue_cursor()
468 ret = virtqueue_add_sgs(vq, sgs, outcnt, 0, vbuf, GFP_ATOMIC); in virtio_gpu_queue_cursor()
476 virtio_gpu_vbuf_ctrl_hdr(vbuf)); in virtio_gpu_queue_cursor()
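
The cursor path is the simple case: one outgoing scatterlist entry holding the cursor command, added to the cursor virtqueue with the vbuffer as the cookie; if the queue cannot be used the buffer is freed immediately (line 458). Sketch; the early-exit condition, retry-on-full handling, locking and kick are assumptions, and device_gone is a hypothetical placeholder:

    /* Sketch of virtio_gpu_queue_cursor(). */
    static void virtio_gpu_queue_cursor(struct virtio_gpu_device *vgdev,
    				    struct virtio_gpu_vbuffer *vbuf)
    {
    	struct virtqueue *vq = vgdev->cursorq.vq;
    	struct scatterlist *sgs[1], ccmd;
    	bool device_gone = false;	/* hypothetical placeholder check */
    	int outcnt = 1, ret;

    	if (device_gone) {
    		free_vbuf(vgdev, vbuf);
    		return;
    	}

    	sg_init_one(&ccmd, vbuf->buf, vbuf->size);
    	sgs[0] = &ccmd;

    	ret = virtqueue_add_sgs(vq, sgs, outcnt, 0, vbuf, GFP_ATOMIC);
    	if (!ret)
    		trace_virtio_gpu_cmd_queue(vq, virtio_gpu_vbuf_ctrl_hdr(vbuf));
    }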
501 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_create_resource() local
503 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_create_resource()
505 vbuf->objs = objs; in virtio_gpu_cmd_create_resource()
513 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_create_resource()
518 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_unref_cb() argument
522 bo = vbuf->resp_cb_data; in virtio_gpu_cmd_unref_cb()
523 vbuf->resp_cb_data = NULL; in virtio_gpu_cmd_unref_cb()
532 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_unref_resource() local
535 cmd_p = virtio_gpu_alloc_cmd_cb(vgdev, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_unref_resource()
542 vbuf->resp_cb_data = bo; in virtio_gpu_cmd_unref_resource()
543 ret = virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_unref_resource()
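
The unref path shows a second use of the response machinery: the callback does not parse the response at all, it only needs to know which buffer object the host has finished with, so the BO is stashed in vbuf->resp_cb_data when the command is queued and picked back up (and cleared) in virtio_gpu_cmd_unref_cb(). A short sketch of that round trip; the header fields and the callback's actual cleanup action are assumptions:

    /* Sketch of the resp_cb_data round trip. */
    static void virtio_gpu_cmd_unref_cb(struct virtio_gpu_device *vgdev,
    				    struct virtio_gpu_vbuffer *vbuf)
    {
    	struct virtio_gpu_object *bo;

    	bo = vbuf->resp_cb_data;	/* BO stashed when the command was queued */
    	vbuf->resp_cb_data = NULL;
    	/* ...release bo now that the host no longer references it (assumed)... */
    }

    void virtio_gpu_cmd_unref_resource(struct virtio_gpu_device *vgdev,
    				   struct virtio_gpu_object *bo)
    {
    	struct virtio_gpu_resource_unref *cmd_p;
    	struct virtio_gpu_vbuffer *vbuf;
    	int ret;

    	cmd_p = virtio_gpu_alloc_cmd_cb(vgdev, &vbuf, sizeof(*cmd_p),
    					virtio_gpu_cmd_unref_cb);
    	/* ...fill cmd_p->hdr.type and cmd_p->resource_id (assumed)... */

    	vbuf->resp_cb_data = bo;
    	ret = virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
    	/* on queuing failure the caller must drop the BO itself (assumed) */
    }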
554 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_set_scanout() local
556 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_set_scanout()
567 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_set_scanout()
576 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_resource_flush() local
578 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_flush()
588 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_flush()
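
Source lines 554-588 show the simplest command pattern in the file: allocate an inline command, fill it, and queue it with neither fence nor response. A sketch of that pattern using resource_flush as the example; the field assignments between the matched lines are assumptions based on the virtio_gpu protocol headers:

    /* Sketch of the fire-and-forget command pattern. */
    void virtio_gpu_cmd_resource_flush(struct virtio_gpu_device *vgdev,
    				   uint32_t resource_id,
    				   uint32_t x, uint32_t y,
    				   uint32_t width, uint32_t height)
    {
    	struct virtio_gpu_resource_flush *cmd_p;
    	struct virtio_gpu_vbuffer *vbuf;

    	cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
    	memset(cmd_p, 0, sizeof(*cmd_p));

    	cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_RESOURCE_FLUSH);
    	cmd_p->resource_id = cpu_to_le32(resource_id);
    	cmd_p->r.x = cpu_to_le32(x);
    	cmd_p->r.y = cpu_to_le32(y);
    	cmd_p->r.width = cpu_to_le32(width);
    	cmd_p->r.height = cpu_to_le32(height);

    	virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
    }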
600 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_transfer_to_host_2d() local
608 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_2d()
610 vbuf->objs = objs; in virtio_gpu_cmd_transfer_to_host_2d()
620 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_2d()
631 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_resource_attach_backing() local
633 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_attach_backing()
640 vbuf->data_buf = ents; in virtio_gpu_cmd_resource_attach_backing()
641 vbuf->data_size = sizeof(*ents) * nents; in virtio_gpu_cmd_resource_attach_backing()
643 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_attach_backing()
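
attach_backing is the case where the payload travels out of line: the memory-entry array is handed to the vbuffer through data_buf/data_size, which is what later makes virtio_gpu_queue_fenced_ctrl_buffer() build the extra outgoing scatterlist and what makes free_vbuf() kvfree() the array on completion. Sketch; the header fields are assumptions and the entry array is assumed to be built by the caller:

    /* Sketch: ents is assumed to be a kvmalloc'ed array whose ownership
     * passes to the vbuffer. */
    static void
    virtio_gpu_cmd_resource_attach_backing(struct virtio_gpu_device *vgdev,
    					uint32_t resource_id,
    					struct virtio_gpu_mem_entry *ents,
    					uint32_t nents,
    					struct virtio_gpu_fence *fence)
    {
    	struct virtio_gpu_resource_attach_backing *cmd_p;
    	struct virtio_gpu_vbuffer *vbuf;

    	cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));

    	cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING);
    	cmd_p->resource_id = cpu_to_le32(resource_id);
    	cmd_p->nr_entries = cpu_to_le32(nents);

    	/* Ownership moves to the vbuffer; free_vbuf() kvfree()s the array
    	 * once the host has consumed the command. */
    	vbuf->data_buf = ents;
    	vbuf->data_size = sizeof(*ents) * nents;

    	virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
    }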
647 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_get_display_info_cb() argument
650 (struct virtio_gpu_resp_display_info *)vbuf->resp_buf; in virtio_gpu_cmd_get_display_info_cb()
676 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_get_capset_info_cb() argument
679 (struct virtio_gpu_get_capset_info *)vbuf->buf; in virtio_gpu_cmd_get_capset_info_cb()
681 (struct virtio_gpu_resp_capset_info *)vbuf->resp_buf; in virtio_gpu_cmd_get_capset_info_cb()
697 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_capset_cb() argument
700 (struct virtio_gpu_get_capset *)vbuf->buf; in virtio_gpu_cmd_capset_cb()
702 (struct virtio_gpu_resp_capset *)vbuf->resp_buf; in virtio_gpu_cmd_capset_cb()
734 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_get_edid_cb() argument
737 (struct virtio_gpu_cmd_get_edid *)vbuf->buf; in virtio_gpu_cmd_get_edid_cb()
739 (struct virtio_gpu_resp_edid *)vbuf->resp_buf; in virtio_gpu_cmd_get_edid_cb()
763 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_get_display_info() local
772 (vgdev, &virtio_gpu_cmd_get_display_info_cb, &vbuf, in virtio_gpu_cmd_get_display_info()
779 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_display_info()
786 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_get_capset_info() local
795 (vgdev, &virtio_gpu_cmd_get_capset_info_cb, &vbuf, in virtio_gpu_cmd_get_capset_info()
802 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset_info()
811 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_get_capset() local
870 (vgdev, &virtio_gpu_cmd_capset_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_get_capset()
877 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset()
885 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_get_edids() local
899 (vgdev, &virtio_gpu_cmd_get_edid_cb, &vbuf, in virtio_gpu_cmd_get_edids()
904 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_edids()
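
The query commands (display info, capset info, capsets, EDIDs, source lines 763-904) all use the third pattern: allocate through virtio_gpu_alloc_cmd_resp() with a completion callback and a separately allocated response buffer, queue without a fence, and let the callback cast vbuf->buf / vbuf->resp_buf back to the request and response structures (lines 647-739). A condensed sketch of that pattern for the display-info query; the response-buffer allocation, the copy into vgdev state and the locking are assumptions:

    /* Sketch of the query/response pattern. */
    static void virtio_gpu_cmd_get_display_info_cb(struct virtio_gpu_device *vgdev,
    					       struct virtio_gpu_vbuffer *vbuf)
    {
    	struct virtio_gpu_resp_display_info *resp =
    		(struct virtio_gpu_resp_display_info *)vbuf->resp_buf;

    	/* ...copy resp->pmodes[] into vgdev's output state, wake waiters
    	 * (assumed)... */
    }

    int virtio_gpu_cmd_get_display_info(struct virtio_gpu_device *vgdev)
    {
    	struct virtio_gpu_ctrl_hdr *cmd_p;
    	struct virtio_gpu_vbuffer *vbuf;
    	void *resp_buf;

    	/* Response is too large for the inline buffer, so allocate it
    	 * separately (assumed). */
    	resp_buf = kzalloc(sizeof(struct virtio_gpu_resp_display_info),
    			   GFP_KERNEL);
    	if (!resp_buf)
    		return -ENOMEM;

    	cmd_p = virtio_gpu_alloc_cmd_resp
    		(vgdev, &virtio_gpu_cmd_get_display_info_cb, &vbuf,
    		 sizeof(*cmd_p), sizeof(struct virtio_gpu_resp_display_info),
    		 resp_buf);
    	cmd_p->type = cpu_to_le32(VIRTIO_GPU_CMD_GET_DISPLAY_INFO);

    	virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
    	return 0;
    }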
914 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_context_create() local
916 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_create()
924 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_create()
931 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_context_destroy() local
933 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_destroy()
938 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_destroy()
947 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_context_attach_resource() local
949 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_attach_resource()
951 vbuf->objs = objs; in virtio_gpu_cmd_context_attach_resource()
956 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_attach_resource()
965 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_context_detach_resource() local
967 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_detach_resource()
969 vbuf->objs = objs; in virtio_gpu_cmd_context_detach_resource()
974 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_detach_resource()
985 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_resource_create_3d() local
987 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_create_3d()
989 vbuf->objs = objs; in virtio_gpu_cmd_resource_create_3d()
1005 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_create_3d()
1019 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_transfer_to_host_3d() local
1027 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_3d()
1030 vbuf->objs = objs; in virtio_gpu_cmd_transfer_to_host_3d()
1039 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_3d()
1051 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_transfer_from_host_3d() local
1053 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_from_host_3d()
1056 vbuf->objs = objs; in virtio_gpu_cmd_transfer_from_host_3d()
1065 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_from_host_3d()
1075 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_submit() local
1077 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_submit()
1080 vbuf->data_buf = data; in virtio_gpu_cmd_submit()
1081 vbuf->data_size = data_size; in virtio_gpu_cmd_submit()
1082 vbuf->objs = objs; in virtio_gpu_cmd_submit()
1088 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_submit()
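
Command submission combines every part of the vbuffer: the fixed cmd_submit header in the inline buffer, the userspace command stream as the out-of-line data payload (kvfree()d on reclaim), the pinned object array for fencing, and a fence emitted when the buffer hits the queue. Sketch; the parameter order and header fields are assumptions, while the vbuf assignments mirror source lines 1080-1082:

    /* Sketch of virtio_gpu_cmd_submit(). */
    void virtio_gpu_cmd_submit(struct virtio_gpu_device *vgdev,
    			   void *data, uint32_t data_size,
    			   uint32_t ctx_id,
    			   struct virtio_gpu_object_array *objs,
    			   struct virtio_gpu_fence *fence)
    {
    	struct virtio_gpu_cmd_submit *cmd_p;
    	struct virtio_gpu_vbuffer *vbuf;

    	cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));

    	vbuf->data_buf = data;		/* command stream, kvfree()d on reclaim */
    	vbuf->data_size = data_size;
    	vbuf->objs = objs;		/* fenced and unlocked when queued */

    	cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_SUBMIT_3D);
    	cmd_p->hdr.ctx_id = cpu_to_le32(ctx_id);
    	cmd_p->size = cpu_to_le32(data_size);

    	virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
    }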
1103 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cursor_ping() local
1107 cur_p = virtio_gpu_alloc_cursor(vgdev, &vbuf); in virtio_gpu_cursor_ping()
1109 virtio_gpu_queue_cursor(vgdev, vbuf); in virtio_gpu_cursor_ping()
1113 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_resource_uuid_cb() argument
1116 gem_to_virtio_gpu_obj(vbuf->objs->objs[0]); in virtio_gpu_cmd_resource_uuid_cb()
1118 (struct virtio_gpu_resp_resource_uuid *)vbuf->resp_buf; in virtio_gpu_cmd_resource_uuid_cb()
1142 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_resource_assign_uuid() local
1155 (vgdev, virtio_gpu_cmd_resource_uuid_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_resource_assign_uuid()
1162 vbuf->objs = objs; in virtio_gpu_cmd_resource_assign_uuid()
1163 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_assign_uuid()