/*
 * Virtio GPU Device
 *
 * Copyright Red Hat, Inc. 2013-2014
 *
 * Authors:
 *     Dave Airlie <airlied@redhat.com>
 *     Gerd Hoffmann <kraxel@redhat.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2 or later.
 * See the COPYING file in the top-level directory.
 */

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "qemu/iov.h"
#include "trace.h"
#include "hw/virtio/virtio.h"
#include "hw/virtio/virtio-gpu.h"
#include "qapi/error.h"

#ifdef CONFIG_VIRGL

#include <virglrenderer.h>

static struct virgl_renderer_callbacks virtio_gpu_3d_cbs;

static void virgl_cmd_create_resource_2d(VirtIOGPU *g,
                                         struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_create_2d c2d;
    struct virgl_renderer_resource_create_args args;

    VIRTIO_GPU_FILL_CMD(c2d);
    trace_virtio_gpu_cmd_res_create_2d(c2d.resource_id, c2d.format,
                                       c2d.width, c2d.height);

    args.handle = c2d.resource_id;
    args.target = 2;
    args.format = c2d.format;
    args.bind = (1 << 1);
    args.width = c2d.width;
    args.height = c2d.height;
    args.depth = 1;
    args.array_size = 1;
    args.last_level = 0;
    args.nr_samples = 0;
    args.flags = VIRTIO_GPU_RESOURCE_FLAG_Y_0_TOP;
    virgl_renderer_resource_create(&args, NULL, 0);
}

static void virgl_cmd_create_resource_3d(VirtIOGPU *g,
                                         struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_create_3d c3d;
    struct virgl_renderer_resource_create_args args;

    VIRTIO_GPU_FILL_CMD(c3d);
    trace_virtio_gpu_cmd_res_create_3d(c3d.resource_id, c3d.format,
                                       c3d.width, c3d.height, c3d.depth);

    args.handle = c3d.resource_id;
    args.target = c3d.target;
    args.format = c3d.format;
    args.bind = c3d.bind;
    args.width = c3d.width;
    args.height = c3d.height;
    args.depth = c3d.depth;
    args.array_size = c3d.array_size;
    args.last_level = c3d.last_level;
    args.nr_samples = c3d.nr_samples;
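    /* pass the guest-supplied flags through unchanged; the 2D path instead forces Y_0_TOP */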
    args.flags = c3d.flags;
    virgl_renderer_resource_create(&args, NULL, 0);
}

static void virgl_cmd_resource_unref(VirtIOGPU *g,
                                     struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_unref unref;

    VIRTIO_GPU_FILL_CMD(unref);
    trace_virtio_gpu_cmd_res_unref(unref.resource_id);

    virgl_renderer_resource_unref(unref.resource_id);
}

static void virgl_cmd_context_create(VirtIOGPU *g,
                                     struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_ctx_create cc;

    VIRTIO_GPU_FILL_CMD(cc);
    trace_virtio_gpu_cmd_ctx_create(cc.hdr.ctx_id,
                                    cc.debug_name);

    virgl_renderer_context_create(cc.hdr.ctx_id, cc.nlen,
                                  cc.debug_name);
}

static void virgl_cmd_context_destroy(VirtIOGPU *g,
                                      struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_ctx_destroy cd;

    VIRTIO_GPU_FILL_CMD(cd);
    trace_virtio_gpu_cmd_ctx_destroy(cd.hdr.ctx_id);

    virgl_renderer_context_destroy(cd.hdr.ctx_id);
}

static void virtio_gpu_rect_update(VirtIOGPU *g, int idx, int x, int y,
                                   int width, int height)
{
    if (!g->scanout[idx].con) {
        return;
    }

    dpy_gl_update(g->scanout[idx].con, x, y, width, height);
}

static void virgl_cmd_resource_flush(VirtIOGPU *g,
                                     struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_flush rf;
    int i;

    VIRTIO_GPU_FILL_CMD(rf);
    trace_virtio_gpu_cmd_res_flush(rf.resource_id,
                                   rf.r.width, rf.r.height, rf.r.x, rf.r.y);

    for (i = 0; i < g->conf.max_outputs; i++) {
        if (g->scanout[i].resource_id != rf.resource_id) {
            continue;
        }
        virtio_gpu_rect_update(g, i, rf.r.x, rf.r.y, rf.r.width, rf.r.height);
    }
}

static void virgl_cmd_set_scanout(VirtIOGPU *g,
                                  struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_set_scanout ss;
    struct virgl_renderer_resource_info info;
    int ret;
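
    /* look up the resource's host texture and hand it to the scanout's console */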
    VIRTIO_GPU_FILL_CMD(ss);
    trace_virtio_gpu_cmd_set_scanout(ss.scanout_id, ss.resource_id,
                                     ss.r.width, ss.r.height, ss.r.x, ss.r.y);

    if (ss.scanout_id >= g->conf.max_outputs) {
        qemu_log_mask(LOG_GUEST_ERROR, "%s: illegal scanout id specified %d",
                      __func__, ss.scanout_id);
        cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID;
        return;
    }
    g->enable = 1;

    memset(&info, 0, sizeof(info));

    if (ss.resource_id && ss.r.width && ss.r.height) {
        ret = virgl_renderer_resource_get_info(ss.resource_id, &info);
        if (ret == -1) {
            qemu_log_mask(LOG_GUEST_ERROR,
                          "%s: illegal resource specified %d\n",
                          __func__, ss.resource_id);
            cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID;
            return;
        }
        qemu_console_resize(g->scanout[ss.scanout_id].con,
                            ss.r.width, ss.r.height);
        virgl_renderer_force_ctx_0();
        dpy_gl_scanout(g->scanout[ss.scanout_id].con, info.tex_id,
                       info.flags & 1 /* FIXME: Y_0_TOP */,
                       info.width, info.height,
                       ss.r.x, ss.r.y, ss.r.width, ss.r.height);
    } else {
        if (ss.scanout_id != 0) {
            dpy_gfx_replace_surface(g->scanout[ss.scanout_id].con, NULL);
        }
        dpy_gl_scanout(g->scanout[ss.scanout_id].con, 0, false,
                       0, 0, 0, 0, 0, 0);
    }
    g->scanout[ss.scanout_id].resource_id = ss.resource_id;
}

static void virgl_cmd_submit_3d(VirtIOGPU *g,
                                struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_cmd_submit cs;
    void *buf;
    size_t s;

    VIRTIO_GPU_FILL_CMD(cs);
    trace_virtio_gpu_cmd_ctx_submit(cs.hdr.ctx_id, cs.size);

    buf = g_malloc(cs.size);
    s = iov_to_buf(cmd->elem.out_sg, cmd->elem.out_num,
                   sizeof(cs), buf, cs.size);
    if (s != cs.size) {
        qemu_log_mask(LOG_GUEST_ERROR, "%s: size mismatch (%zd/%d)",
                      __func__, s, cs.size);
        cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER;
        goto out;
    }

    if (virtio_gpu_stats_enabled(g->conf)) {
        g->stats.req_3d++;
        g->stats.bytes_3d += cs.size;
    }

    virgl_renderer_submit_cmd(buf, cs.hdr.ctx_id, cs.size / 4);
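    /* buf is no longer needed once submitted; both paths fall through to out: and free it */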

out:
    g_free(buf);
}

static void virgl_cmd_transfer_to_host_2d(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_transfer_to_host_2d t2d;
    struct virtio_gpu_box box;

    VIRTIO_GPU_FILL_CMD(t2d);
    trace_virtio_gpu_cmd_res_xfer_toh_2d(t2d.resource_id);

    box.x = t2d.r.x;
    box.y = t2d.r.y;
    box.z = 0;
    box.w = t2d.r.width;
    box.h = t2d.r.height;
    box.d = 1;

    virgl_renderer_transfer_write_iov(t2d.resource_id,
                                      0,
                                      0,
                                      0,
                                      0,
                                      (struct virgl_box *)&box,
                                      t2d.offset, NULL, 0);
}

static void virgl_cmd_transfer_to_host_3d(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_transfer_host_3d t3d;

    VIRTIO_GPU_FILL_CMD(t3d);
    trace_virtio_gpu_cmd_res_xfer_toh_3d(t3d.resource_id);

    virgl_renderer_transfer_write_iov(t3d.resource_id,
                                      t3d.hdr.ctx_id,
                                      t3d.level,
                                      t3d.stride,
                                      t3d.layer_stride,
                                      (struct virgl_box *)&t3d.box,
                                      t3d.offset, NULL, 0);
}

static void
virgl_cmd_transfer_from_host_3d(VirtIOGPU *g,
                                struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_transfer_host_3d tf3d;

    VIRTIO_GPU_FILL_CMD(tf3d);
    trace_virtio_gpu_cmd_res_xfer_fromh_3d(tf3d.resource_id);

    virgl_renderer_transfer_read_iov(tf3d.resource_id,
                                     tf3d.hdr.ctx_id,
                                     tf3d.level,
                                     tf3d.stride,
                                     tf3d.layer_stride,
                                     (struct virgl_box *)&tf3d.box,
                                     tf3d.offset, NULL, 0);
}


static void virgl_resource_attach_backing(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_attach_backing att_rb;
    struct iovec *res_iovs;
    int ret;

    VIRTIO_GPU_FILL_CMD(att_rb);
    trace_virtio_gpu_cmd_res_back_attach(att_rb.resource_id);

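    /* map the guest's backing entries into host iovecs before attaching them to the resource */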
    ret = virtio_gpu_create_mapping_iov(&att_rb, cmd, NULL, &res_iovs);
    if (ret != 0) {
        cmd->error = VIRTIO_GPU_RESP_ERR_UNSPEC;
        return;
    }

    virgl_renderer_resource_attach_iov(att_rb.resource_id,
                                       res_iovs, att_rb.nr_entries);
}

static void virgl_resource_detach_backing(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_detach_backing detach_rb;
    struct iovec *res_iovs = NULL;
    int num_iovs = 0;

    VIRTIO_GPU_FILL_CMD(detach_rb);
    trace_virtio_gpu_cmd_res_back_detach(detach_rb.resource_id);

    virgl_renderer_resource_detach_iov(detach_rb.resource_id,
                                       &res_iovs,
                                       &num_iovs);
    if (res_iovs == NULL || num_iovs == 0) {
        return;
    }
    virtio_gpu_cleanup_mapping_iov(res_iovs, num_iovs);
}


static void virgl_cmd_ctx_attach_resource(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_ctx_resource att_res;

    VIRTIO_GPU_FILL_CMD(att_res);
    trace_virtio_gpu_cmd_ctx_res_attach(att_res.hdr.ctx_id,
                                        att_res.resource_id);

    virgl_renderer_ctx_attach_resource(att_res.hdr.ctx_id, att_res.resource_id);
}

static void virgl_cmd_ctx_detach_resource(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_ctx_resource det_res;

    VIRTIO_GPU_FILL_CMD(det_res);
    trace_virtio_gpu_cmd_ctx_res_detach(det_res.hdr.ctx_id,
                                        det_res.resource_id);

    virgl_renderer_ctx_detach_resource(det_res.hdr.ctx_id, det_res.resource_id);
}

static void virgl_cmd_get_capset_info(VirtIOGPU *g,
                                      struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_get_capset_info info;
    struct virtio_gpu_resp_capset_info resp;

    VIRTIO_GPU_FILL_CMD(info);

    memset(&resp, 0, sizeof(resp));
    if (info.capset_index == 0) {
        resp.capset_id = VIRTIO_GPU_CAPSET_VIRGL;
        virgl_renderer_get_cap_set(resp.capset_id,
                                   &resp.capset_max_version,
                                   &resp.capset_max_size);
    } else {
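        /* only capset index 0 (virgl) is supported; anything else gets an empty reply */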
        resp.capset_max_version = 0;
        resp.capset_max_size = 0;
    }
    resp.hdr.type = VIRTIO_GPU_RESP_OK_CAPSET_INFO;
    virtio_gpu_ctrl_response(g, cmd, &resp.hdr, sizeof(resp));
}

static void virgl_cmd_get_capset(VirtIOGPU *g,
                                 struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_get_capset gc;
    struct virtio_gpu_resp_capset *resp;
    uint32_t max_ver, max_size;
    VIRTIO_GPU_FILL_CMD(gc);

    virgl_renderer_get_cap_set(gc.capset_id, &max_ver,
                               &max_size);
    resp = g_malloc(sizeof(*resp) + max_size);

    resp->hdr.type = VIRTIO_GPU_RESP_OK_CAPSET;
    virgl_renderer_fill_caps(gc.capset_id,
                             gc.capset_version,
                             (void *)resp->capset_data);
    virtio_gpu_ctrl_response(g, cmd, &resp->hdr, sizeof(*resp) + max_size);
    g_free(resp);
}

void virtio_gpu_virgl_process_cmd(VirtIOGPU *g,
                                  struct virtio_gpu_ctrl_command *cmd)
{
    VIRTIO_GPU_FILL_CMD(cmd->cmd_hdr);

    cmd->waiting = g->renderer_blocked;
    if (cmd->waiting) {
        return;
    }

    virgl_renderer_force_ctx_0();
    switch (cmd->cmd_hdr.type) {
    case VIRTIO_GPU_CMD_CTX_CREATE:
        virgl_cmd_context_create(g, cmd);
        break;
    case VIRTIO_GPU_CMD_CTX_DESTROY:
        virgl_cmd_context_destroy(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_CREATE_2D:
        virgl_cmd_create_resource_2d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_CREATE_3D:
        virgl_cmd_create_resource_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_SUBMIT_3D:
        virgl_cmd_submit_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D:
        virgl_cmd_transfer_to_host_2d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D:
        virgl_cmd_transfer_to_host_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D:
        virgl_cmd_transfer_from_host_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING:
        virgl_resource_attach_backing(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING:
        virgl_resource_detach_backing(g, cmd);
        break;
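    /* scanout and flush also update the QEMU console, not just the renderer */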
    case VIRTIO_GPU_CMD_SET_SCANOUT:
        virgl_cmd_set_scanout(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_FLUSH:
        virgl_cmd_resource_flush(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_UNREF:
        virgl_cmd_resource_unref(g, cmd);
        break;
    case VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE:
        /* TODO add security */
        virgl_cmd_ctx_attach_resource(g, cmd);
        break;
    case VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE:
        /* TODO add security */
        virgl_cmd_ctx_detach_resource(g, cmd);
        break;
    case VIRTIO_GPU_CMD_GET_CAPSET_INFO:
        virgl_cmd_get_capset_info(g, cmd);
        break;
    case VIRTIO_GPU_CMD_GET_CAPSET:
        virgl_cmd_get_capset(g, cmd);
        break;

    case VIRTIO_GPU_CMD_GET_DISPLAY_INFO:
        virtio_gpu_get_display_info(g, cmd);
        break;
    default:
        cmd->error = VIRTIO_GPU_RESP_ERR_UNSPEC;
        break;
    }

    if (cmd->finished) {
        return;
    }
    if (cmd->error) {
        fprintf(stderr, "%s: ctrl 0x%x, error 0x%x\n", __func__,
                cmd->cmd_hdr.type, cmd->error);
        virtio_gpu_ctrl_response_nodata(g, cmd, cmd->error);
        return;
    }
    if (!(cmd->cmd_hdr.flags & VIRTIO_GPU_FLAG_FENCE)) {
        virtio_gpu_ctrl_response_nodata(g, cmd, VIRTIO_GPU_RESP_OK_NODATA);
        return;
    }

    trace_virtio_gpu_fence_ctrl(cmd->cmd_hdr.fence_id, cmd->cmd_hdr.type);
    virgl_renderer_create_fence(cmd->cmd_hdr.fence_id, cmd->cmd_hdr.type);
}

static void virgl_write_fence(void *opaque, uint32_t fence)
{
    VirtIOGPU *g = opaque;
    struct virtio_gpu_ctrl_command *cmd, *tmp;

    QTAILQ_FOREACH_SAFE(cmd, &g->fenceq, next, tmp) {
        /*
         * the guest can end up emitting fences out of order
         * so we should check all fenced cmds not just the first one.
         */
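        /* this command's fence has not been signalled yet, keep it queued */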
        if (cmd->cmd_hdr.fence_id > fence) {
            continue;
        }
        trace_virtio_gpu_fence_resp(cmd->cmd_hdr.fence_id);
        virtio_gpu_ctrl_response_nodata(g, cmd, VIRTIO_GPU_RESP_OK_NODATA);
        QTAILQ_REMOVE(&g->fenceq, cmd, next);
        g_free(cmd);
        g->inflight--;
        if (virtio_gpu_stats_enabled(g->conf)) {
            fprintf(stderr, "inflight: %3d (-)\r", g->inflight);
        }
    }
}

static virgl_renderer_gl_context
virgl_create_context(void *opaque, int scanout_idx,
                     struct virgl_renderer_gl_ctx_param *params)
{
    VirtIOGPU *g = opaque;
    QEMUGLContext ctx;
    QEMUGLParams qparams;

    qparams.major_ver = params->major_ver;
    qparams.minor_ver = params->minor_ver;

    ctx = dpy_gl_ctx_create(g->scanout[scanout_idx].con, &qparams);
    return (virgl_renderer_gl_context)ctx;
}

static void virgl_destroy_context(void *opaque, virgl_renderer_gl_context ctx)
{
    VirtIOGPU *g = opaque;
    QEMUGLContext qctx = (QEMUGLContext)ctx;

    dpy_gl_ctx_destroy(g->scanout[0].con, qctx);
}

static int virgl_make_context_current(void *opaque, int scanout_idx,
                                      virgl_renderer_gl_context ctx)
{
    VirtIOGPU *g = opaque;
    QEMUGLContext qctx = (QEMUGLContext)ctx;

    return dpy_gl_ctx_make_current(g->scanout[scanout_idx].con, qctx);
}

static struct virgl_renderer_callbacks virtio_gpu_3d_cbs = {
    .version = 1,
    .write_fence = virgl_write_fence,
    .create_gl_context = virgl_create_context,
    .destroy_gl_context = virgl_destroy_context,
    .make_current = virgl_make_context_current,
};

static void virtio_gpu_print_stats(void *opaque)
{
    VirtIOGPU *g = opaque;

    if (g->stats.requests) {
        fprintf(stderr, "stats: vq req %4d, %3d -- 3D %4d (%5d)\n",
                g->stats.requests,
                g->stats.max_inflight,
                g->stats.req_3d,
                g->stats.bytes_3d);
        g->stats.requests = 0;
        g->stats.max_inflight = 0;
        g->stats.req_3d = 0;
        g->stats.bytes_3d = 0;
    } else {
        fprintf(stderr, "stats: idle\r");
    }
    timer_mod(g->print_stats, qemu_clock_get_ms(QEMU_CLOCK_VIRTUAL) + 1000);
}

static void virtio_gpu_fence_poll(void *opaque)
{
    VirtIOGPU *g = opaque;

    virgl_renderer_poll();
    virtio_gpu_process_cmdq(g);
    if (!QTAILQ_EMPTY(&g->cmdq) || !QTAILQ_EMPTY(&g->fenceq)) {
        timer_mod(g->fence_poll, qemu_clock_get_ms(QEMU_CLOCK_VIRTUAL) + 10);
    }
}

void virtio_gpu_virgl_fence_poll(VirtIOGPU *g)
{
    virtio_gpu_fence_poll(g);
}

void virtio_gpu_virgl_reset(VirtIOGPU *g)
{
    int i;

    /* virgl_renderer_reset() ??? */
    for (i = 0; i < g->conf.max_outputs; i++) {
        if (i != 0) {
            dpy_gfx_replace_surface(g->scanout[i].con, NULL);
        }
        dpy_gl_scanout(g->scanout[i].con, 0, false, 0, 0, 0, 0, 0, 0);
    }
}

int virtio_gpu_virgl_init(VirtIOGPU *g)
{
    int ret;

    ret = virgl_renderer_init(g, 0, &virtio_gpu_3d_cbs);
    if (ret != 0) {
        return ret;
    }

    g->fence_poll = timer_new_ms(QEMU_CLOCK_VIRTUAL,
                                 virtio_gpu_fence_poll, g);

    if (virtio_gpu_stats_enabled(g->conf)) {
        g->print_stats = timer_new_ms(QEMU_CLOCK_VIRTUAL,
                                      virtio_gpu_print_stats, g);
        timer_mod(g->print_stats, qemu_clock_get_ms(QEMU_CLOCK_VIRTUAL) + 1000);
    }
    return 0;
}

#endif /* CONFIG_VIRGL */