@ -96,6 +96,10 @@ static void virtio_gpu_virgl_resume_cmdq_bh(void *opaque)
{
VirtIOGPU * g = opaque ;
if ( ! virtio_gpu_virgl_update_render_state ( g ) ) {
return ;
}
virtio_gpu_process_cmdq ( g ) ;
}
@ -344,14 +348,46 @@ static void virgl_cmd_create_resource_3d(VirtIOGPU *g,
virgl_renderer_resource_create ( & args , NULL , 0 ) ;
}
/*
 * Tear down a virgl resource: detach any attached guest iovecs, drop the
 * renderer reference, unlink the resource from g->reslist and free it.
 *
 * With virglrenderer >= 1.0 a mapped blob resource is unmapped first; that
 * unmapping can be suspended, in which case *suspended is set, the resource
 * is left intact and destruction must be retried later.
 *
 * Returns 0 on success (or suspension), or the non-zero error code from
 * the blob unmapping.
 */
static int
virtio_gpu_virgl_resource_unref(VirtIOGPU *g,
                                struct virtio_gpu_virgl_resource *res,
                                bool *suspended)
{
    struct iovec *iov = NULL;
    int iov_cnt = 0;

#if VIRGL_VERSION_MAJOR >= 1
    int ret = virtio_gpu_virgl_unmap_resource_blob(g, res, suspended);
    if (ret) {
        return ret;
    }
    if (*suspended) {
        /* Unmapping deferred; keep the resource alive for a retry. */
        return 0;
    }
#endif

    virgl_renderer_resource_detach_iov(res->base.resource_id,
                                       &iov, &iov_cnt);
    if (iov && iov_cnt) {
        virtio_gpu_cleanup_mapping_iov(g, iov, iov_cnt);
    }
    virgl_renderer_resource_unref(res->base.resource_id);

    QTAILQ_REMOVE(&g->reslist, &res->base, next);
    g_free(res);

    return 0;
}
/*
 * VIRTIO_GPU_CMD_RESOURCE_UNREF handler: look up the resource named by the
 * guest command and destroy it via virtio_gpu_virgl_resource_unref().
 * *cmd_suspended is set when a blob unmap was deferred, telling the caller
 * to re-run this command later.
 */
static void virgl_cmd_resource_unref(VirtIOGPU *g,
                                     struct virtio_gpu_ctrl_command *cmd,
                                     bool *cmd_suspended)
{
    struct virtio_gpu_resource_unref unref;
    struct virtio_gpu_virgl_resource *res;

    VIRTIO_GPU_FILL_CMD(unref);
    trace_virtio_gpu_cmd_res_unref(unref.resource_id);

    /*
     * NOTE(review): the lookup/error branch below was cut by a hunk
     * boundary in the reviewed diff and has been reconstructed — verify
     * against the upstream file.
     */
    res = virtio_gpu_virgl_find_resource(g, unref.resource_id);
    if (!res) {
        cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID;
        return;
    }

    virtio_gpu_virgl_resource_unref(g, res, cmd_suspended);
}
void virtio_gpu_virgl_resource_destroy ( VirtIOGPU * g ,
struct virtio_gpu_simple_resource * base ,
Error * * errp )
{
struct virtio_gpu_virgl_resource * res ;
bool suspended = false ;
QTAILQ_REMOVE ( & g - > reslist , & res - > base , next ) ;
res = container_of ( base , struct virtio_gpu_virgl_resource , base ) ;
g_free ( res ) ;
if ( virtio_gpu_virgl_resource_unref ( g , res , & suspended ) ) {
error_setg ( errp , " failed to destroy virgl resource " ) ;
}
}
static void virgl_cmd_context_create ( VirtIOGPU * g ,
@ -1291,6 +1321,10 @@ static void virtio_gpu_fence_poll(void *opaque)
VirtIOGPU * g = opaque ;
VirtIOGPUGL * gl = VIRTIO_GPU_GL ( g ) ;
if ( ! virtio_gpu_virgl_update_render_state ( g ) ) {
return ;
}
virgl_renderer_poll ( ) ;
virtio_gpu_process_cmdq ( g ) ;
if ( ! QTAILQ_EMPTY ( & g - > cmdq ) | | ! QTAILQ_EMPTY ( & g - > fenceq ) ) {
@ -1313,14 +1347,30 @@ void virtio_gpu_virgl_reset_scanout(VirtIOGPU *g)
}
}
void virtio_gpu_virgl_reset ( VirtIOGPU * g )
static bool virtio_gpu_virgl_reset ( VirtIOGPU * g )
{
struct virtio_gpu_simple_resource * res , * tmp ;
/*
* Virgl blob resource unmapping can be suspended and
* deferred on unref , ensure that destruction is completed .
*/
QTAILQ_FOREACH_SAFE ( res , & g - > reslist , next , tmp ) {
virtio_gpu_virgl_resource_destroy ( g , res , NULL ) ;
}
if ( ! QTAILQ_EMPTY ( & g - > reslist ) ) {
return false ;
}
virgl_renderer_reset ( ) ;
virtio_gpu_virgl_reset_async_fences ( g ) ;
return true ;
}
int virtio_gpu_virgl_init ( VirtIOGPU * g )
static int virtio_gpu_virgl_init ( VirtIOGPU * g )
{
int ret ;
uint32_t flags = 0 ;
@ -1398,6 +1448,35 @@ int virtio_gpu_virgl_init(VirtIOGPU *g)
return 0 ;
}
/*
 * Drive the renderer state machine towards RS_INITED.
 *
 * Returns true when the renderer is initialized and command processing may
 * proceed; false when processing must stay suspended — either a pending
 * reset (deferred resource destruction) or a failed initialization.
 */
bool virtio_gpu_virgl_update_render_state(VirtIOGPU *g)
{
    VirtIOGPUGL *gl = VIRTIO_GPU_GL(g);

    switch (gl->renderer_state) {
    case RS_RESET:
        virgl_renderer_force_ctx_0();
        if (!virtio_gpu_virgl_reset(g)) {
            /* Reset deferred (suspended blob unmaps); try again later. */
            return false;
        }
        /* fallthrough */
    case RS_START:
        if (virtio_gpu_virgl_init(g)) {
            gl->renderer_state = RS_INIT_FAILED;
            return false;
        }
        gl->renderer_state = RS_INITED;
        break;
    case RS_INIT_FAILED:
        return false;
    case RS_INITED:
        break;
    }

    return true;
}
/* Append a supported virgl capset id to the list reported to the guest. */
static void virtio_gpu_virgl_add_capset ( GArray * capset_ids , uint32_t capset_id )
{
g_array_append_val ( capset_ids , capset_id ) ;