/*
 * Virtio GPU Device
 *
 * Copyright Red Hat, Inc. 2013-2014
 *
 * Authors:
 *     Dave Airlie <airlied@redhat.com>
 *     Gerd Hoffmann <kraxel@redhat.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2 or later.
 * See the COPYING file in the top-level directory.
 */

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "qemu/iov.h"
#include "trace.h"
#include "hw/virtio/virtio.h"
#include "hw/virtio/virtio-gpu.h"
#include "qapi/error.h"

#ifdef CONFIG_VIRGL

#include <virglrenderer.h>

static struct virgl_renderer_callbacks virtio_gpu_3d_cbs;

static void virgl_cmd_create_resource_2d(VirtIOGPU *g,
                                         struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_create_2d c2d;
    struct virgl_renderer_resource_create_args args;

    VIRTIO_GPU_FILL_CMD(c2d);
    trace_virtio_gpu_cmd_res_create_2d(c2d.resource_id, c2d.format,
                                       c2d.width, c2d.height);

    args.handle = c2d.resource_id;
    args.target = 2;       /* 2D texture target */
    args.format = c2d.format;
    args.bind = (1 << 1);  /* render-target bind flag */
    args.width = c2d.width;
    args.height = c2d.height;
    args.depth = 1;
    args.array_size = 1;
    args.last_level = 0;
    args.nr_samples = 0;
    args.flags = VIRTIO_GPU_RESOURCE_FLAG_Y_0_TOP;
    virgl_renderer_resource_create(&args, NULL, 0);
}

static void virgl_cmd_create_resource_3d(VirtIOGPU *g,
                                         struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_create_3d c3d;
    struct virgl_renderer_resource_create_args args;

    VIRTIO_GPU_FILL_CMD(c3d);
    trace_virtio_gpu_cmd_res_create_3d(c3d.resource_id, c3d.format,
                                       c3d.width, c3d.height, c3d.depth);

    args.handle = c3d.resource_id;
    args.target = c3d.target;
    args.format = c3d.format;
    args.bind = c3d.bind;
    args.width = c3d.width;
    args.height = c3d.height;
    args.depth = c3d.depth;
    args.array_size = c3d.array_size;
    args.last_level = c3d.last_level;
    args.nr_samples = c3d.nr_samples;
    args.flags = c3d.flags;
    virgl_renderer_resource_create(&args, NULL, 0);
}

static void virgl_cmd_resource_unref(VirtIOGPU *g,
                                     struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_unref unref;
    struct iovec *res_iovs = NULL;
    int num_iovs = 0;

    VIRTIO_GPU_FILL_CMD(unref);
    trace_virtio_gpu_cmd_res_unref(unref.resource_id);

    virgl_renderer_resource_detach_iov(unref.resource_id,
                                       &res_iovs,
                                       &num_iovs);
    if (res_iovs != NULL && num_iovs != 0) {
        virtio_gpu_cleanup_mapping_iov(res_iovs, num_iovs);
    }
    virgl_renderer_resource_unref(unref.resource_id);
}

static void virgl_cmd_context_create(VirtIOGPU *g,
                                     struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_ctx_create cc;

    VIRTIO_GPU_FILL_CMD(cc);
    trace_virtio_gpu_cmd_ctx_create(cc.hdr.ctx_id,
                                    cc.debug_name);

    virgl_renderer_context_create(cc.hdr.ctx_id, cc.nlen,
                                  cc.debug_name);
}

static void virgl_cmd_context_destroy(VirtIOGPU *g,
                                      struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_ctx_destroy cd;

    VIRTIO_GPU_FILL_CMD(cd);
    trace_virtio_gpu_cmd_ctx_destroy(cd.hdr.ctx_id);

    virgl_renderer_context_destroy(cd.hdr.ctx_id);
}

static void virtio_gpu_rect_update(VirtIOGPU *g, int idx, int x, int y,
                                   int width, int height)
{
    if (!g->scanout[idx].con) {
        return;
    }

    dpy_gl_update(g->scanout[idx].con, x, y, width, height);
}

static void virgl_cmd_resource_flush(VirtIOGPU *g,
                                     struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_flush rf;
    int i;

    VIRTIO_GPU_FILL_CMD(rf);
    trace_virtio_gpu_cmd_res_flush(rf.resource_id,
                                   rf.r.width, rf.r.height, rf.r.x, rf.r.y);

    for (i = 0; i < g->conf.max_outputs; i++) {
        if (g->scanout[i].resource_id != rf.resource_id) {
            continue;
        }
        virtio_gpu_rect_update(g, i, rf.r.x, rf.r.y, rf.r.width, rf.r.height);
    }
}

static void virgl_cmd_set_scanout(VirtIOGPU *g,
                                  struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_set_scanout ss;
    struct virgl_renderer_resource_info info;
    int ret;

    VIRTIO_GPU_FILL_CMD(ss);
    trace_virtio_gpu_cmd_set_scanout(ss.scanout_id, ss.resource_id,
                                     ss.r.width, ss.r.height, ss.r.x, ss.r.y);

    if (ss.scanout_id >= g->conf.max_outputs) {
        qemu_log_mask(LOG_GUEST_ERROR, "%s: illegal scanout id specified %d",
                      __func__, ss.scanout_id);
        cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID;
        return;
    }
    g->enable = 1;

    memset(&info, 0, sizeof(info));

    if (ss.resource_id && ss.r.width && ss.r.height) {
        ret = virgl_renderer_resource_get_info(ss.resource_id, &info);
        if (ret == -1) {
            qemu_log_mask(LOG_GUEST_ERROR,
                          "%s: illegal resource specified %d\n",
                          __func__, ss.resource_id);
            cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID;
            return;
        }
        qemu_console_resize(g->scanout[ss.scanout_id].con,
                            ss.r.width, ss.r.height);
        virgl_renderer_force_ctx_0();
        dpy_gl_scanout_texture(g->scanout[ss.scanout_id].con, info.tex_id,
                               info.flags & 1 /* FIXME: Y_0_TOP */,
                               info.width, info.height,
                               ss.r.x, ss.r.y, ss.r.width, ss.r.height);
    } else {
        if (ss.scanout_id != 0) {
            dpy_gfx_replace_surface(g->scanout[ss.scanout_id].con, NULL);
        }
        dpy_gl_scanout_disable(g->scanout[ss.scanout_id].con);
    }
    g->scanout[ss.scanout_id].resource_id = ss.resource_id;
}

static void virgl_cmd_submit_3d(VirtIOGPU *g,
                                struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_cmd_submit cs;
    void *buf;
    size_t s;

    VIRTIO_GPU_FILL_CMD(cs);
    trace_virtio_gpu_cmd_ctx_submit(cs.hdr.ctx_id, cs.size);

    buf = g_malloc(cs.size);
    s = iov_to_buf(cmd->elem.out_sg, cmd->elem.out_num,
                   sizeof(cs), buf, cs.size);
    if (s != cs.size) {
        qemu_log_mask(LOG_GUEST_ERROR, "%s: size mismatch (%zd/%d)",
                      __func__, s, cs.size);
        cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER;
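        /* short copy from the guest: skip the submission, but still free buf */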
        goto out;
    }

    if (virtio_gpu_stats_enabled(g->conf)) {
        g->stats.req_3d++;
        g->stats.bytes_3d += cs.size;
    }

    virgl_renderer_submit_cmd(buf, cs.hdr.ctx_id, cs.size / 4);

out:
    g_free(buf);
}

static void virgl_cmd_transfer_to_host_2d(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_transfer_to_host_2d t2d;
    struct virtio_gpu_box box;

    VIRTIO_GPU_FILL_CMD(t2d);
    trace_virtio_gpu_cmd_res_xfer_toh_2d(t2d.resource_id);

    box.x = t2d.r.x;
    box.y = t2d.r.y;
    box.z = 0;
    box.w = t2d.r.width;
    box.h = t2d.r.height;
    box.d = 1;

    virgl_renderer_transfer_write_iov(t2d.resource_id,
                                      0,
                                      0,
                                      0,
                                      0,
                                      (struct virgl_box *)&box,
                                      t2d.offset, NULL, 0);
}

static void virgl_cmd_transfer_to_host_3d(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_transfer_host_3d t3d;

    VIRTIO_GPU_FILL_CMD(t3d);
    trace_virtio_gpu_cmd_res_xfer_toh_3d(t3d.resource_id);

    virgl_renderer_transfer_write_iov(t3d.resource_id,
                                      t3d.hdr.ctx_id,
                                      t3d.level,
                                      t3d.stride,
                                      t3d.layer_stride,
                                      (struct virgl_box *)&t3d.box,
                                      t3d.offset, NULL, 0);
}

static void
virgl_cmd_transfer_from_host_3d(VirtIOGPU *g,
                                struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_transfer_host_3d tf3d;

    VIRTIO_GPU_FILL_CMD(tf3d);
    trace_virtio_gpu_cmd_res_xfer_fromh_3d(tf3d.resource_id);

    virgl_renderer_transfer_read_iov(tf3d.resource_id,
                                     tf3d.hdr.ctx_id,
                                     tf3d.level,
                                     tf3d.stride,
                                     tf3d.layer_stride,
                                     (struct virgl_box *)&tf3d.box,
                                     tf3d.offset, NULL, 0);
}


static void virgl_resource_attach_backing(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
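    /*
     * Map the guest-provided backing pages into host iovecs and hand
     * them to virglrenderer; on failure the mapping is torn down again.
     */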
    struct virtio_gpu_resource_attach_backing att_rb;
    struct iovec *res_iovs;
    int ret;

    VIRTIO_GPU_FILL_CMD(att_rb);
    trace_virtio_gpu_cmd_res_back_attach(att_rb.resource_id);

    ret = virtio_gpu_create_mapping_iov(&att_rb, cmd, NULL, &res_iovs);
    if (ret != 0) {
        cmd->error = VIRTIO_GPU_RESP_ERR_UNSPEC;
        return;
    }

    ret = virgl_renderer_resource_attach_iov(att_rb.resource_id,
                                             res_iovs, att_rb.nr_entries);

    if (ret != 0) {
        virtio_gpu_cleanup_mapping_iov(res_iovs, att_rb.nr_entries);
    }
}

static void virgl_resource_detach_backing(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_detach_backing detach_rb;
    struct iovec *res_iovs = NULL;
    int num_iovs = 0;

    VIRTIO_GPU_FILL_CMD(detach_rb);
    trace_virtio_gpu_cmd_res_back_detach(detach_rb.resource_id);

    virgl_renderer_resource_detach_iov(detach_rb.resource_id,
                                       &res_iovs,
                                       &num_iovs);
    if (res_iovs == NULL || num_iovs == 0) {
        return;
    }
    virtio_gpu_cleanup_mapping_iov(res_iovs, num_iovs);
}


static void virgl_cmd_ctx_attach_resource(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_ctx_resource att_res;

    VIRTIO_GPU_FILL_CMD(att_res);
    trace_virtio_gpu_cmd_ctx_res_attach(att_res.hdr.ctx_id,
                                        att_res.resource_id);

    virgl_renderer_ctx_attach_resource(att_res.hdr.ctx_id, att_res.resource_id);
}

static void virgl_cmd_ctx_detach_resource(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_ctx_resource det_res;

    VIRTIO_GPU_FILL_CMD(det_res);
    trace_virtio_gpu_cmd_ctx_res_detach(det_res.hdr.ctx_id,
                                        det_res.resource_id);

    virgl_renderer_ctx_detach_resource(det_res.hdr.ctx_id, det_res.resource_id);
}

static void virgl_cmd_get_capset_info(VirtIOGPU *g,
                                      struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_get_capset_info info;
    struct virtio_gpu_resp_capset_info resp;

    VIRTIO_GPU_FILL_CMD(info);

    memset(&resp, 0, sizeof(resp));
    if (info.capset_index == 0) {
        resp.capset_id = VIRTIO_GPU_CAPSET_VIRGL;
        virgl_renderer_get_cap_set(resp.capset_id,
                                   &resp.capset_max_version,
                                   &resp.capset_max_size);
    } else {
        resp.capset_max_version = 0;
        resp.capset_max_size = 0;
    }
    resp.hdr.type = VIRTIO_GPU_RESP_OK_CAPSET_INFO;
    virtio_gpu_ctrl_response(g, cmd, &resp.hdr, sizeof(resp));
}

static void virgl_cmd_get_capset(VirtIOGPU *g,
                                 struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_get_capset gc;
    struct virtio_gpu_resp_capset *resp;
    uint32_t max_ver, max_size;
    VIRTIO_GPU_FILL_CMD(gc);

    virgl_renderer_get_cap_set(gc.capset_id, &max_ver,
                               &max_size);
    if (!max_size) {
        cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER;
        return;
    }

    resp = g_malloc0(sizeof(*resp) + max_size);
    resp->hdr.type = VIRTIO_GPU_RESP_OK_CAPSET;
    virgl_renderer_fill_caps(gc.capset_id,
                             gc.capset_version,
                             (void *)resp->capset_data);
    virtio_gpu_ctrl_response(g, cmd, &resp->hdr, sizeof(*resp) + max_size);
    g_free(resp);
}

void virtio_gpu_virgl_process_cmd(VirtIOGPU *g,
                                  struct virtio_gpu_ctrl_command *cmd)
{
    VIRTIO_GPU_FILL_CMD(cmd->cmd_hdr);

    cmd->waiting = g->renderer_blocked;
    if (cmd->waiting) {
        return;
    }

    virgl_renderer_force_ctx_0();
    switch (cmd->cmd_hdr.type) {
    case VIRTIO_GPU_CMD_CTX_CREATE:
        virgl_cmd_context_create(g, cmd);
        break;
    case VIRTIO_GPU_CMD_CTX_DESTROY:
        virgl_cmd_context_destroy(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_CREATE_2D:
        virgl_cmd_create_resource_2d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_CREATE_3D:
        virgl_cmd_create_resource_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_SUBMIT_3D:
        virgl_cmd_submit_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D:
        virgl_cmd_transfer_to_host_2d(g, cmd);
        break;
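    /* 3D transfers carry explicit level, stride and box info for the renderer */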
    case VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D:
        virgl_cmd_transfer_to_host_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D:
        virgl_cmd_transfer_from_host_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING:
        virgl_resource_attach_backing(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING:
        virgl_resource_detach_backing(g, cmd);
        break;
    case VIRTIO_GPU_CMD_SET_SCANOUT:
        virgl_cmd_set_scanout(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_FLUSH:
        virgl_cmd_resource_flush(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_UNREF:
        virgl_cmd_resource_unref(g, cmd);
        break;
    case VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE:
        /* TODO add security */
        virgl_cmd_ctx_attach_resource(g, cmd);
        break;
    case VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE:
        /* TODO add security */
        virgl_cmd_ctx_detach_resource(g, cmd);
        break;
    case VIRTIO_GPU_CMD_GET_CAPSET_INFO:
        virgl_cmd_get_capset_info(g, cmd);
        break;
    case VIRTIO_GPU_CMD_GET_CAPSET:
        virgl_cmd_get_capset(g, cmd);
        break;

    case VIRTIO_GPU_CMD_GET_DISPLAY_INFO:
        virtio_gpu_get_display_info(g, cmd);
        break;
    default:
        cmd->error = VIRTIO_GPU_RESP_ERR_UNSPEC;
        break;
    }

    if (cmd->finished) {
        return;
    }
    if (cmd->error) {
        fprintf(stderr, "%s: ctrl 0x%x, error 0x%x\n", __func__,
                cmd->cmd_hdr.type, cmd->error);
        virtio_gpu_ctrl_response_nodata(g, cmd, cmd->error);
        return;
    }
    if (!(cmd->cmd_hdr.flags & VIRTIO_GPU_FLAG_FENCE)) {
        virtio_gpu_ctrl_response_nodata(g, cmd, VIRTIO_GPU_RESP_OK_NODATA);
        return;
    }

    trace_virtio_gpu_fence_ctrl(cmd->cmd_hdr.fence_id, cmd->cmd_hdr.type);
    virgl_renderer_create_fence(cmd->cmd_hdr.fence_id, cmd->cmd_hdr.type);
}

static void virgl_write_fence(void *opaque, uint32_t fence)
{
    VirtIOGPU *g = opaque;
    struct virtio_gpu_ctrl_command *cmd, *tmp;

    QTAILQ_FOREACH_SAFE(cmd, &g->fenceq, next, tmp) {
        /*
         * the guest can end up emitting fences out of order
         * so we should check all fenced cmds not just the first one.
         */
        if (cmd->cmd_hdr.fence_id > fence) {
            continue;
        }
        trace_virtio_gpu_fence_resp(cmd->cmd_hdr.fence_id);
        virtio_gpu_ctrl_response_nodata(g, cmd, VIRTIO_GPU_RESP_OK_NODATA);
        QTAILQ_REMOVE(&g->fenceq, cmd, next);
        g_free(cmd);
        g->inflight--;
        if (virtio_gpu_stats_enabled(g->conf)) {
            fprintf(stderr, "inflight: %3d (-)\r", g->inflight);
        }
    }
}

static virgl_renderer_gl_context
virgl_create_context(void *opaque, int scanout_idx,
                     struct virgl_renderer_gl_ctx_param *params)
{
    VirtIOGPU *g = opaque;
    QEMUGLContext ctx;
    QEMUGLParams qparams;

    qparams.major_ver = params->major_ver;
    qparams.minor_ver = params->minor_ver;

    ctx = dpy_gl_ctx_create(g->scanout[scanout_idx].con, &qparams);
    return (virgl_renderer_gl_context)ctx;
}

static void virgl_destroy_context(void *opaque, virgl_renderer_gl_context ctx)
{
    VirtIOGPU *g = opaque;
    QEMUGLContext qctx = (QEMUGLContext)ctx;

    dpy_gl_ctx_destroy(g->scanout[0].con, qctx);
}

static int virgl_make_context_current(void *opaque, int scanout_idx,
                                      virgl_renderer_gl_context ctx)
{
    VirtIOGPU *g = opaque;
    QEMUGLContext qctx = (QEMUGLContext)ctx;

    return dpy_gl_ctx_make_current(g->scanout[scanout_idx].con, qctx);
}

static struct virgl_renderer_callbacks virtio_gpu_3d_cbs = {
    .version = 1,
    .write_fence = virgl_write_fence,
    .create_gl_context = virgl_create_context,
    .destroy_gl_context = virgl_destroy_context,
    .make_current = virgl_make_context_current,
};

static void virtio_gpu_print_stats(void *opaque)
{
    VirtIOGPU *g = opaque;

    if (g->stats.requests) {
        fprintf(stderr, "stats: vq req %4d, %3d -- 3D %4d (%5d)\n",
                g->stats.requests,
                g->stats.max_inflight,
                g->stats.req_3d,
                g->stats.bytes_3d);
        g->stats.requests = 0;
        g->stats.max_inflight = 0;
        g->stats.req_3d = 0;
        g->stats.bytes_3d = 0;
    } else {
        fprintf(stderr, "stats: idle\r");
    }
    timer_mod(g->print_stats, qemu_clock_get_ms(QEMU_CLOCK_VIRTUAL) + 1000);
}

static void virtio_gpu_fence_poll(void *opaque)
{
    VirtIOGPU *g = opaque;

    virgl_renderer_poll();
    virtio_gpu_process_cmdq(g);
    if (!QTAILQ_EMPTY(&g->cmdq) || !QTAILQ_EMPTY(&g->fenceq)) {
        timer_mod(g->fence_poll, qemu_clock_get_ms(QEMU_CLOCK_VIRTUAL) + 10);
    }
}

void virtio_gpu_virgl_fence_poll(VirtIOGPU *g)
{
    virtio_gpu_fence_poll(g);
}

void virtio_gpu_virgl_reset(VirtIOGPU *g)
{
    int i;

    /* virgl_renderer_reset() ??? */
    for (i = 0; i < g->conf.max_outputs; i++) {
        if (i != 0) {
            dpy_gfx_replace_surface(g->scanout[i].con, NULL);
        }
        dpy_gl_scanout_disable(g->scanout[i].con);
    }
}

void virtio_gpu_gl_block(void *opaque, bool block)
{
    VirtIOGPU *g = opaque;

    if (block) {
        g->renderer_blocked++;
    } else {
        g->renderer_blocked--;
    }
    assert(g->renderer_blocked >= 0);

    if (g->renderer_blocked == 0) {
        virtio_gpu_process_cmdq(g);
    }
}

int virtio_gpu_virgl_init(VirtIOGPU *g)
{
    int ret;

    ret = virgl_renderer_init(g, 0, &virtio_gpu_3d_cbs);
    if (ret != 0) {
        return ret;
    }

    g->fence_poll = timer_new_ms(QEMU_CLOCK_VIRTUAL,
                                 virtio_gpu_fence_poll, g);

    if (virtio_gpu_stats_enabled(g->conf)) {
        g->print_stats = timer_new_ms(QEMU_CLOCK_VIRTUAL,
                                      virtio_gpu_print_stats, g);
        timer_mod(g->print_stats, qemu_clock_get_ms(QEMU_CLOCK_VIRTUAL) + 1000);
    }
    return 0;
}

#endif /* CONFIG_VIRGL */