/*
 * Virtio GPU Device
 *
 * Copyright Red Hat, Inc. 2013-2014
 *
 * Authors:
 *     Dave Airlie <airlied@redhat.com>
 *     Gerd Hoffmann <kraxel@redhat.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2 or later.
 * See the COPYING file in the top-level directory.
 */

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "qemu/iov.h"
#include "trace.h"
#include "hw/virtio/virtio.h"
#include "hw/virtio/virtio-gpu.h"

#ifdef CONFIG_VIRGL

#include <virglrenderer.h>

static struct virgl_renderer_callbacks virtio_gpu_3d_cbs;

static void virgl_cmd_create_resource_2d(VirtIOGPU *g,
                                         struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_create_2d c2d;
    struct virgl_renderer_resource_create_args args;

    VIRTIO_GPU_FILL_CMD(c2d);
    trace_virtio_gpu_cmd_res_create_2d(c2d.resource_id, c2d.format,
                                       c2d.width, c2d.height);

    args.handle = c2d.resource_id;
    args.target = 2;
    args.format = c2d.format;
    args.bind = (1 << 1);
    args.width = c2d.width;
    args.height = c2d.height;
    args.depth = 1;
    args.array_size = 1;
    args.last_level = 0;
    args.nr_samples = 0;
    args.flags = VIRTIO_GPU_RESOURCE_FLAG_Y_0_TOP;
    virgl_renderer_resource_create(&args, NULL, 0);
}

static void virgl_cmd_create_resource_3d(VirtIOGPU *g,
                                         struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_create_3d c3d;
    struct virgl_renderer_resource_create_args args;

    VIRTIO_GPU_FILL_CMD(c3d);
    trace_virtio_gpu_cmd_res_create_3d(c3d.resource_id, c3d.format,
                                       c3d.width, c3d.height, c3d.depth);

    args.handle = c3d.resource_id;
    args.target = c3d.target;
    args.format = c3d.format;
    args.bind = c3d.bind;
    args.width = c3d.width;
    args.height = c3d.height;
    args.depth = c3d.depth;
    args.array_size = c3d.array_size;
    args.last_level = c3d.last_level;
    args.nr_samples = c3d.nr_samples;
    args.flags = c3d.flags;
    virgl_renderer_resource_create(&args, NULL, 0);
}
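
/*
 * Resource destruction: detach any guest backing pages from the renderer
 * first so the iovec array can be freed, then drop the renderer's
 * reference on the resource itself.
 */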
static void virgl_cmd_resource_unref(VirtIOGPU *g,
                                     struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_unref unref;
    struct iovec *res_iovs = NULL;
    int num_iovs = 0;

    VIRTIO_GPU_FILL_CMD(unref);
    trace_virtio_gpu_cmd_res_unref(unref.resource_id);

    virgl_renderer_resource_detach_iov(unref.resource_id,
                                       &res_iovs,
                                       &num_iovs);
    if (res_iovs != NULL && num_iovs != 0) {
        virtio_gpu_cleanup_mapping_iov(res_iovs, num_iovs);
    }
    virgl_renderer_resource_unref(unref.resource_id);
}

static void virgl_cmd_context_create(VirtIOGPU *g,
                                     struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_ctx_create cc;

    VIRTIO_GPU_FILL_CMD(cc);
    trace_virtio_gpu_cmd_ctx_create(cc.hdr.ctx_id,
                                    cc.debug_name);

    virgl_renderer_context_create(cc.hdr.ctx_id, cc.nlen,
                                  cc.debug_name);
}

static void virgl_cmd_context_destroy(VirtIOGPU *g,
                                      struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_ctx_destroy cd;

    VIRTIO_GPU_FILL_CMD(cd);
    trace_virtio_gpu_cmd_ctx_destroy(cd.hdr.ctx_id);

    virgl_renderer_context_destroy(cd.hdr.ctx_id);
}

static void virtio_gpu_rect_update(VirtIOGPU *g, int idx, int x, int y,
                                   int width, int height)
{
    if (!g->scanout[idx].con) {
        return;
    }

    dpy_gl_update(g->scanout[idx].con, x, y, width, height);
}

static void virgl_cmd_resource_flush(VirtIOGPU *g,
                                     struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_flush rf;
    int i;

    VIRTIO_GPU_FILL_CMD(rf);
    trace_virtio_gpu_cmd_res_flush(rf.resource_id,
                                   rf.r.width, rf.r.height, rf.r.x, rf.r.y);

    for (i = 0; i < g->conf.max_outputs; i++) {
        if (g->scanout[i].resource_id != rf.resource_id) {
            continue;
        }
        virtio_gpu_rect_update(g, i, rf.r.x, rf.r.y, rf.r.width, rf.r.height);
    }
}
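
/*
 * Bind (or unbind) a renderer resource to a scanout.  A non-zero resource
 * id attaches the resource's GL texture to the QEMU console; a zero
 * resource id (or an empty rectangle) disables the scanout again.
 */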
static void virgl_cmd_set_scanout(VirtIOGPU *g,
                                  struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_set_scanout ss;
    struct virgl_renderer_resource_info info;
    int ret;

    VIRTIO_GPU_FILL_CMD(ss);
    trace_virtio_gpu_cmd_set_scanout(ss.scanout_id, ss.resource_id,
                                     ss.r.width, ss.r.height, ss.r.x, ss.r.y);

    if (ss.scanout_id >= g->conf.max_outputs) {
        qemu_log_mask(LOG_GUEST_ERROR, "%s: illegal scanout id specified %d",
                      __func__, ss.scanout_id);
        cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID;
        return;
    }
    g->enable = 1;

    memset(&info, 0, sizeof(info));

    if (ss.resource_id && ss.r.width && ss.r.height) {
        ret = virgl_renderer_resource_get_info(ss.resource_id, &info);
        if (ret == -1) {
            qemu_log_mask(LOG_GUEST_ERROR,
                          "%s: illegal resource specified %d\n",
                          __func__, ss.resource_id);
            cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID;
            return;
        }
        qemu_console_resize(g->scanout[ss.scanout_id].con,
                            ss.r.width, ss.r.height);
        virgl_renderer_force_ctx_0();
        dpy_gl_scanout_texture(g->scanout[ss.scanout_id].con, info.tex_id,
                               info.flags & 1 /* FIXME: Y_0_TOP */,
                               info.width, info.height,
                               ss.r.x, ss.r.y, ss.r.width, ss.r.height);
    } else {
        if (ss.scanout_id != 0) {
            dpy_gfx_replace_surface(g->scanout[ss.scanout_id].con, NULL);
        }
        dpy_gl_scanout_disable(g->scanout[ss.scanout_id].con);
    }
    g->scanout[ss.scanout_id].resource_id = ss.resource_id;
}
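
/*
 * Copy the guest's command stream out of the request's scatter list and
 * hand it to virglrenderer for execution in the submitting context.
 */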
static void virgl_cmd_submit_3d(VirtIOGPU *g,
                                struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_cmd_submit cs;
    void *buf;
    size_t s;

    VIRTIO_GPU_FILL_CMD(cs);
    trace_virtio_gpu_cmd_ctx_submit(cs.hdr.ctx_id, cs.size);

    buf = g_malloc(cs.size);
    s = iov_to_buf(cmd->elem.out_sg, cmd->elem.out_num,
                   sizeof(cs), buf, cs.size);
    if (s != cs.size) {
        qemu_log_mask(LOG_GUEST_ERROR, "%s: size mismatch (%zd/%d)",
                      __func__, s, cs.size);
        cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER;
        goto out;
    }

    if (virtio_gpu_stats_enabled(g->conf)) {
        g->stats.req_3d++;
        g->stats.bytes_3d += cs.size;
    }

    virgl_renderer_submit_cmd(buf, cs.hdr.ctx_id, cs.size / 4);

out:
    g_free(buf);
}

static void virgl_cmd_transfer_to_host_2d(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_transfer_to_host_2d t2d;
    struct virtio_gpu_box box;

    VIRTIO_GPU_FILL_CMD(t2d);
    trace_virtio_gpu_cmd_res_xfer_toh_2d(t2d.resource_id);

    box.x = t2d.r.x;
    box.y = t2d.r.y;
    box.z = 0;
    box.w = t2d.r.width;
    box.h = t2d.r.height;
    box.d = 1;

    virgl_renderer_transfer_write_iov(t2d.resource_id,
                                      0,
                                      0,
                                      0,
                                      0,
                                      (struct virgl_box *)&box,
                                      t2d.offset, NULL, 0);
}

static void virgl_cmd_transfer_to_host_3d(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_transfer_host_3d t3d;

    VIRTIO_GPU_FILL_CMD(t3d);
    trace_virtio_gpu_cmd_res_xfer_toh_3d(t3d.resource_id);

    virgl_renderer_transfer_write_iov(t3d.resource_id,
                                      t3d.hdr.ctx_id,
                                      t3d.level,
                                      t3d.stride,
                                      t3d.layer_stride,
                                      (struct virgl_box *)&t3d.box,
                                      t3d.offset, NULL, 0);
}

static void
virgl_cmd_transfer_from_host_3d(VirtIOGPU *g,
                                struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_transfer_host_3d tf3d;

    VIRTIO_GPU_FILL_CMD(tf3d);
    trace_virtio_gpu_cmd_res_xfer_fromh_3d(tf3d.resource_id);

    virgl_renderer_transfer_read_iov(tf3d.resource_id,
                                     tf3d.hdr.ctx_id,
                                     tf3d.level,
                                     tf3d.stride,
                                     tf3d.layer_stride,
                                     (struct virgl_box *)&tf3d.box,
                                     tf3d.offset, NULL, 0);
}
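
/*
 * Map the guest pages listed in the request and attach them as the
 * resource's backing store; if virglrenderer rejects them, the mapping
 * is cleaned up again.
 */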
static void virgl_resource_attach_backing(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_attach_backing att_rb;
    struct iovec *res_iovs;
    int ret;

    VIRTIO_GPU_FILL_CMD(att_rb);
    trace_virtio_gpu_cmd_res_back_attach(att_rb.resource_id);

    ret = virtio_gpu_create_mapping_iov(&att_rb, cmd, NULL, &res_iovs);
    if (ret != 0) {
        cmd->error = VIRTIO_GPU_RESP_ERR_UNSPEC;
        return;
    }

    ret = virgl_renderer_resource_attach_iov(att_rb.resource_id,
                                             res_iovs, att_rb.nr_entries);

    if (ret != 0) {
        virtio_gpu_cleanup_mapping_iov(res_iovs, att_rb.nr_entries);
    }
}

static void virgl_resource_detach_backing(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_resource_detach_backing detach_rb;
    struct iovec *res_iovs = NULL;
    int num_iovs = 0;

    VIRTIO_GPU_FILL_CMD(detach_rb);
    trace_virtio_gpu_cmd_res_back_detach(detach_rb.resource_id);

    virgl_renderer_resource_detach_iov(detach_rb.resource_id,
                                       &res_iovs,
                                       &num_iovs);
    if (res_iovs == NULL || num_iovs == 0) {
        return;
    }
    virtio_gpu_cleanup_mapping_iov(res_iovs, num_iovs);
}

static void virgl_cmd_ctx_attach_resource(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_ctx_resource att_res;

    VIRTIO_GPU_FILL_CMD(att_res);
    trace_virtio_gpu_cmd_ctx_res_attach(att_res.hdr.ctx_id,
                                        att_res.resource_id);

    virgl_renderer_ctx_attach_resource(att_res.hdr.ctx_id, att_res.resource_id);
}

static void virgl_cmd_ctx_detach_resource(VirtIOGPU *g,
                                          struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_ctx_resource det_res;

    VIRTIO_GPU_FILL_CMD(det_res);
    trace_virtio_gpu_cmd_ctx_res_detach(det_res.hdr.ctx_id,
                                        det_res.resource_id);

    virgl_renderer_ctx_detach_resource(det_res.hdr.ctx_id, det_res.resource_id);
}
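
/*
 * Capset queries: index 0 reports the VIRGL capset, index 1 the VIRGL2
 * capset; any other index yields an empty (zero-sized) answer.
 */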
static void virgl_cmd_get_capset_info(VirtIOGPU *g,
                                      struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_get_capset_info info;
    struct virtio_gpu_resp_capset_info resp;

    VIRTIO_GPU_FILL_CMD(info);

    memset(&resp, 0, sizeof(resp));
    if (info.capset_index == 0) {
        resp.capset_id = VIRTIO_GPU_CAPSET_VIRGL;
        virgl_renderer_get_cap_set(resp.capset_id,
                                   &resp.capset_max_version,
                                   &resp.capset_max_size);
    } else if (info.capset_index == 1) {
        resp.capset_id = VIRTIO_GPU_CAPSET_VIRGL2;
        virgl_renderer_get_cap_set(resp.capset_id,
                                   &resp.capset_max_version,
                                   &resp.capset_max_size);
    } else {
        resp.capset_max_version = 0;
        resp.capset_max_size = 0;
    }
    resp.hdr.type = VIRTIO_GPU_RESP_OK_CAPSET_INFO;
    virtio_gpu_ctrl_response(g, cmd, &resp.hdr, sizeof(resp));
}

static void virgl_cmd_get_capset(VirtIOGPU *g,
                                 struct virtio_gpu_ctrl_command *cmd)
{
    struct virtio_gpu_get_capset gc;
    struct virtio_gpu_resp_capset *resp;
    uint32_t max_ver, max_size;
    VIRTIO_GPU_FILL_CMD(gc);

    virgl_renderer_get_cap_set(gc.capset_id, &max_ver,
                               &max_size);
    if (!max_size) {
        cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER;
        return;
    }

    resp = g_malloc0(sizeof(*resp) + max_size);
    resp->hdr.type = VIRTIO_GPU_RESP_OK_CAPSET;
    virgl_renderer_fill_caps(gc.capset_id,
                             gc.capset_version,
                             (void *)resp->capset_data);
    virtio_gpu_ctrl_response(g, cmd, &resp->hdr, sizeof(*resp) + max_size);
    g_free(resp);
}
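
/*
 * Main control-queue dispatcher for the virgl (3D) backend.  Commands are
 * deferred while the renderer is blocked; fenced commands are answered
 * later, from virgl_write_fence(), once the fence signals.
 */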
void virtio_gpu_virgl_process_cmd(VirtIOGPU *g,
                                  struct virtio_gpu_ctrl_command *cmd)
{
    VIRTIO_GPU_FILL_CMD(cmd->cmd_hdr);

    cmd->waiting = g->renderer_blocked;
    if (cmd->waiting) {
        return;
    }

    virgl_renderer_force_ctx_0();
    switch (cmd->cmd_hdr.type) {
    case VIRTIO_GPU_CMD_CTX_CREATE:
        virgl_cmd_context_create(g, cmd);
        break;
    case VIRTIO_GPU_CMD_CTX_DESTROY:
        virgl_cmd_context_destroy(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_CREATE_2D:
        virgl_cmd_create_resource_2d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_CREATE_3D:
        virgl_cmd_create_resource_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_SUBMIT_3D:
        virgl_cmd_submit_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D:
        virgl_cmd_transfer_to_host_2d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D:
        virgl_cmd_transfer_to_host_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D:
        virgl_cmd_transfer_from_host_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING:
        virgl_resource_attach_backing(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING:
        virgl_resource_detach_backing(g, cmd);
        break;
    case VIRTIO_GPU_CMD_SET_SCANOUT:
        virgl_cmd_set_scanout(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_FLUSH:
        virgl_cmd_resource_flush(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_UNREF:
        virgl_cmd_resource_unref(g, cmd);
        break;
    case VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE:
        /* TODO add security */
        virgl_cmd_ctx_attach_resource(g, cmd);
        break;
    case VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE:
        /* TODO add security */
        virgl_cmd_ctx_detach_resource(g, cmd);
        break;
    case VIRTIO_GPU_CMD_GET_CAPSET_INFO:
        virgl_cmd_get_capset_info(g, cmd);
        break;
    case VIRTIO_GPU_CMD_GET_CAPSET:
        virgl_cmd_get_capset(g, cmd);
        break;

    case VIRTIO_GPU_CMD_GET_DISPLAY_INFO:
        virtio_gpu_get_display_info(g, cmd);
        break;
    default:
        cmd->error = VIRTIO_GPU_RESP_ERR_UNSPEC;
        break;
    }

    if (cmd->finished) {
        return;
    }
    if (cmd->error) {
        fprintf(stderr, "%s: ctrl 0x%x, error 0x%x\n", __func__,
                cmd->cmd_hdr.type, cmd->error);
        virtio_gpu_ctrl_response_nodata(g, cmd, cmd->error);
        return;
    }
    if (!(cmd->cmd_hdr.flags & VIRTIO_GPU_FLAG_FENCE)) {
        virtio_gpu_ctrl_response_nodata(g, cmd, VIRTIO_GPU_RESP_OK_NODATA);
        return;
    }

    trace_virtio_gpu_fence_ctrl(cmd->cmd_hdr.fence_id, cmd->cmd_hdr.type);
    virgl_renderer_create_fence(cmd->cmd_hdr.fence_id, cmd->cmd_hdr.type);
}
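
/*
 * Fence callback from virglrenderer: complete every queued command whose
 * fence id is not newer than the fence that just signalled.
 */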
static void virgl_write_fence(void *opaque, uint32_t fence)
{
    VirtIOGPU *g = opaque;
    struct virtio_gpu_ctrl_command *cmd, *tmp;

    QTAILQ_FOREACH_SAFE(cmd, &g->fenceq, next, tmp) {
        /*
         * the guest can end up emitting fences out of order
         * so we should check all fenced cmds not just the first one.
         */
        if (cmd->cmd_hdr.fence_id > fence) {
            continue;
        }
        trace_virtio_gpu_fence_resp(cmd->cmd_hdr.fence_id);
        virtio_gpu_ctrl_response_nodata(g, cmd, VIRTIO_GPU_RESP_OK_NODATA);
        QTAILQ_REMOVE(&g->fenceq, cmd, next);
        g_free(cmd);
        g->inflight--;
        if (virtio_gpu_stats_enabled(g->conf)) {
            fprintf(stderr, "inflight: %3d (-)\r", g->inflight);
        }
    }
}

static virgl_renderer_gl_context
virgl_create_context(void *opaque, int scanout_idx,
                     struct virgl_renderer_gl_ctx_param *params)
{
    VirtIOGPU *g = opaque;
    QEMUGLContext ctx;
    QEMUGLParams qparams;

    qparams.major_ver = params->major_ver;
    qparams.minor_ver = params->minor_ver;

    ctx = dpy_gl_ctx_create(g->scanout[scanout_idx].con, &qparams);
    return (virgl_renderer_gl_context)ctx;
}

static void virgl_destroy_context(void *opaque, virgl_renderer_gl_context ctx)
{
    VirtIOGPU *g = opaque;
    QEMUGLContext qctx = (QEMUGLContext)ctx;

    dpy_gl_ctx_destroy(g->scanout[0].con, qctx);
}

static int virgl_make_context_current(void *opaque, int scanout_idx,
                                      virgl_renderer_gl_context ctx)
{
    VirtIOGPU *g = opaque;
    QEMUGLContext qctx = (QEMUGLContext)ctx;

    return dpy_gl_ctx_make_current(g->scanout[scanout_idx].con, qctx);
}
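
/* Callback table handed to virglrenderer at initialization time. */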
static struct virgl_renderer_callbacks virtio_gpu_3d_cbs = {
    .version             = 1,
    .write_fence         = virgl_write_fence,
    .create_gl_context   = virgl_create_context,
    .destroy_gl_context  = virgl_destroy_context,
    .make_current        = virgl_make_context_current,
};

static void virtio_gpu_print_stats(void *opaque)
{
    VirtIOGPU *g = opaque;

    if (g->stats.requests) {
        fprintf(stderr, "stats: vq req %4d, %3d -- 3D %4d (%5d)\n",
                g->stats.requests,
                g->stats.max_inflight,
                g->stats.req_3d,
                g->stats.bytes_3d);
        g->stats.requests = 0;
        g->stats.max_inflight = 0;
        g->stats.req_3d = 0;
        g->stats.bytes_3d = 0;
    } else {
        fprintf(stderr, "stats: idle\r");
    }
    timer_mod(g->print_stats, qemu_clock_get_ms(QEMU_CLOCK_VIRTUAL) + 1000);
}

static void virtio_gpu_fence_poll(void *opaque)
{
    VirtIOGPU *g = opaque;

    virgl_renderer_poll();
    virtio_gpu_process_cmdq(g);
    if (!QTAILQ_EMPTY(&g->cmdq) || !QTAILQ_EMPTY(&g->fenceq)) {
        timer_mod(g->fence_poll, qemu_clock_get_ms(QEMU_CLOCK_VIRTUAL) + 10);
    }
}

void virtio_gpu_virgl_fence_poll(VirtIOGPU *g)
{
    virtio_gpu_fence_poll(g);
}

void virtio_gpu_virgl_reset(VirtIOGPU *g)
{
    int i;

    /* virgl_renderer_reset() ??? */
    for (i = 0; i < g->conf.max_outputs; i++) {
        if (i != 0) {
            dpy_gfx_replace_surface(g->scanout[i].con, NULL);
        }
        dpy_gl_scanout_disable(g->scanout[i].con);
    }
}

void virtio_gpu_gl_block(void *opaque, bool block)
{
    VirtIOGPU *g = opaque;

    if (block) {
        g->renderer_blocked++;
    } else {
        g->renderer_blocked--;
    }
    assert(g->renderer_blocked >= 0);

    if (g->renderer_blocked == 0) {
        virtio_gpu_process_cmdq(g);
    }
}

int virtio_gpu_virgl_init(VirtIOGPU *g)
{
    int ret;

    ret = virgl_renderer_init(g, 0, &virtio_gpu_3d_cbs);
    if (ret != 0) {
        return ret;
    }

    g->fence_poll = timer_new_ms(QEMU_CLOCK_VIRTUAL,
                                 virtio_gpu_fence_poll, g);

    if (virtio_gpu_stats_enabled(g->conf)) {
        g->print_stats = timer_new_ms(QEMU_CLOCK_VIRTUAL,
                                      virtio_gpu_print_stats, g);
        timer_mod(g->print_stats, qemu_clock_get_ms(QEMU_CLOCK_VIRTUAL) + 1000);
    }
    return 0;
}
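
/*
 * Report how many capsets the guest may query: two when the renderer
 * exposes the VIRGL2 capset, otherwise just the base VIRGL capset.
 */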
int virtio_gpu_virgl_get_num_capsets(VirtIOGPU *g)
{
    uint32_t capset2_max_ver, capset2_max_size;
    virgl_renderer_get_cap_set(VIRTIO_GPU_CAPSET_VIRGL2,
                               &capset2_max_ver,
                               &capset2_max_size);

    return capset2_max_ver ? 2 : 1;
}

#endif /* CONFIG_VIRGL */