c/util: Use cmd pool helpers in swapchain code

Author: Jakob Bornecrantz
Date:   2023-04-01 21:34:42 +01:00
parent 197461b4cd
commit d1906d1cf8

8 changed files with 132 additions and 47 deletions
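
The hunks below move the swapchain image-barrier setup off the old vk_cmd_buffer_* helpers and onto the locked vk_cmd_pool helpers, with the pool owned by the new comp_swapchain_shared struct. As a condensed sketch (not part of the commit) the pattern looks roughly like this; the helper names are taken from the diff, while the wrapper function and its error handling are illustrative only:

#include "util/comp_swapchain.h"
#include "vk/vk_cmd_pool.h"

// Condensed from do_post_create_vulkan_setup() in the diff; illustrative wrapper only.
static void
sketch_barrier_images(struct vk_bundle *vk, struct comp_swapchain *sc)
{
    // The command pool now lives on the shared struct instead of the vk_bundle.
    struct vk_cmd_pool *pool = &sc->cscs->pool;
    VkCommandBuffer cmd_buffer = VK_NULL_HANDLE;

    // Every pool operation happens with the pool lock held.
    vk_cmd_pool_lock(pool);

    VkResult ret = vk_cmd_pool_create_and_begin_cmd_buffer_locked(vk, pool, 0, &cmd_buffer);
    if (ret != VK_SUCCESS) {
        vk_cmd_pool_unlock(pool); // Always drop the lock on the error path.
        return;
    }

    // ... record vk_cmd_image_barrier_gpu_locked(...) for each swapchain image here ...

    // Ends the buffer, submits it, waits for completion and frees it, still under the lock.
    ret = vk_cmd_pool_end_submit_wait_and_free_cmd_buffer_locked(vk, pool, cmd_buffer);
    vk_cmd_pool_unlock(pool);
}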


@@ -155,7 +155,7 @@ compositor_end_session(struct xrt_compositor *xc)
     if (c->deferred_surface) {
         // Make sure we don't have anything to destroy.
-        comp_swapchain_garbage_collect(&c->base.cscgc);
+        comp_swapchain_shared_garbage_collect(&c->base.cscs);
         comp_renderer_destroy(&c->r);
 #ifdef XRT_FEATURE_WINDOW_PEEK
         comp_window_peek_destroy(&c->peek);
@@ -404,7 +404,7 @@ compositor_layer_commit(struct xrt_compositor *xc, xrt_graphics_sync_handle_t sy
     COMP_SPEW(c, "LAYER_COMMIT finished drawing at %8.3fms", ns_to_ms(c->last_frame_time_ns));
     // Now is a good point to garbage collect.
-    comp_swapchain_garbage_collect(&c->base.cscgc);
+    comp_swapchain_shared_garbage_collect(&c->base.cscs);
     return XRT_SUCCESS;
 }
@@ -454,7 +454,10 @@ compositor_destroy(struct xrt_compositor *xc)
     COMP_DEBUG(c, "COMP_DESTROY");
     // Make sure we don't have anything to destroy.
-    comp_swapchain_garbage_collect(&c->base.cscgc);
+    comp_swapchain_shared_garbage_collect(&c->base.cscs);
+    // Must be destroyed before Vulkan.
+    comp_swapchain_shared_destroy(&c->base.cscs, vk);
     comp_renderer_destroy(&c->r);
@@ -750,6 +753,12 @@ compositor_init_vulkan(struct comp_compositor *c)
     c->settings.client_gpu_deviceLUID = vk_res.client_gpu_deviceLUID;
     c->settings.client_gpu_deviceLUID_valid = vk_res.client_gpu_deviceLUID_valid;
+    // Tie the lifetimes of swapchains to Vulkan.
+    xrt_result_t xret = comp_swapchain_shared_init(&c->base.cscs, vk);
+    if (xret != XRT_SUCCESS) {
+        return false;
+    }
     return true;
 }


@@ -1,4 +1,4 @@
-// Copyright 2019-2022, Collabora, Ltd.
+// Copyright 2019-2023, Collabora, Ltd.
 // SPDX-License-Identifier: BSL-1.0
 /*!
  * @file
@@ -209,6 +209,12 @@ compositor_init_vulkan(struct null_compositor *c)
     c->sys_info.client_d3d_deviceLUID = vk_res.client_gpu_deviceLUID;
     c->sys_info.client_d3d_deviceLUID_valid = vk_res.client_gpu_deviceLUID_valid;
+    // Tie the lifetimes of swapchains to Vulkan.
+    xrt_result_t xret = comp_swapchain_shared_init(&c->base.cscs, vk);
+    if (xret != XRT_SUCCESS) {
+        return false;
+    }
     return true;
 }
@@ -441,7 +447,7 @@ null_compositor_layer_commit(struct xrt_compositor *xc, xrt_graphics_sync_handle
     }
     // Now is a good point to garbage collect.
-    comp_swapchain_garbage_collect(&c->base.cscgc);
+    comp_swapchain_shared_garbage_collect(&c->base.cscs);
     return XRT_SUCCESS;
 }
@@ -498,7 +504,10 @@ null_compositor_destroy(struct xrt_compositor *xc)
     NULL_DEBUG(c, "NULL_COMP_DESTROY");
     // Make sure we don't have anything to destroy.
-    comp_swapchain_garbage_collect(&c->base.cscgc);
+    comp_swapchain_shared_garbage_collect(&c->base.cscs);
+    // Must be destroyed before Vulkan.
+    comp_swapchain_shared_destroy(&c->base.cscs, vk);
     if (vk->cmd_pool != VK_NULL_HANDLE) {


@@ -1,4 +1,4 @@
-// Copyright 2019-2022, Collabora, Ltd.
+// Copyright 2019-2023, Collabora, Ltd.
 // SPDX-License-Identifier: BSL-1.0
 /*!
  * @file
@@ -70,7 +70,7 @@ base_create_swapchain(struct xrt_compositor *xc,
     struct xrt_swapchain_create_properties xsccp = {0};
     xrt_comp_get_swapchain_create_properties(xc, info, &xsccp);
-    return comp_swapchain_create(&cb->vk, &cb->cscgc, info, &xsccp, out_xsc);
+    return comp_swapchain_create(&cb->vk, &cb->cscs, info, &xsccp, out_xsc);
 }
 static xrt_result_t
@@ -82,7 +82,7 @@ base_import_swapchain(struct xrt_compositor *xc,
 {
     struct comp_base *cb = comp_base(xc);
-    return comp_swapchain_import(&cb->vk, &cb->cscgc, info, native_images, image_count, out_xsc);
+    return comp_swapchain_import(&cb->vk, &cb->cscs, info, native_images, image_count, out_xsc);
 }
 static xrt_result_t
@@ -265,7 +265,7 @@ comp_base_init(struct comp_base *cb)
     cb->base.base.layer_equirect2 = base_layer_equirect2;
     cb->base.base.wait_frame = base_wait_frame;
-    u_threading_stack_init(&cb->cscgc.destroy_swapchains);
+    u_threading_stack_init(&cb->cscs.destroy_swapchains);
     os_precise_sleeper_init(&cb->sleeper);
 }
@@ -275,5 +275,5 @@ comp_base_fini(struct comp_base *cb)
 {
     os_precise_sleeper_deinit(&cb->sleeper);
-    u_threading_stack_fini(&cb->cscgc.destroy_swapchains);
+    u_threading_stack_fini(&cb->cscs.destroy_swapchains);
 }


@@ -1,4 +1,4 @@
-// Copyright 2019-2021, Collabora, Ltd.
+// Copyright 2019-2023, Collabora, Ltd.
 // SPDX-License-Identifier: BSL-1.0
 /*!
  * @file
@@ -103,7 +103,7 @@ struct comp_base
     struct os_precise_sleeper sleeper;
     //! Swapchain garbage collector, used by swapchain, child class needs to call.
-    struct comp_swapchain_gc cscgc;
+    struct comp_swapchain_shared cscs;
     //! We only need to track a single slot.
     struct comp_layer_slot slot;


@@ -1,4 +1,4 @@
-// Copyright 2019-2022, Collabora, Ltd.
+// Copyright 2019-2023, Collabora, Ltd.
 // SPDX-License-Identifier: BSL-1.0
 /*!
  * @file
@@ -14,6 +14,7 @@
 #include "util/u_handles.h"
 #include "util/comp_swapchain.h"
+#include "vk/vk_cmd_pool.h"
 #include <stdio.h>
 #include <stdlib.h>
@@ -33,7 +34,7 @@ swapchain_destroy(struct xrt_swapchain *xsc)
     VK_TRACE(sc->vk, "DESTROY");
-    u_threading_stack_push(&sc->gc->destroy_swapchains, sc);
+    u_threading_stack_push(&sc->cscs->destroy_swapchains, sc);
 }
 static xrt_result_t
@@ -93,7 +94,7 @@ static struct comp_swapchain *
 set_common_fields(struct comp_swapchain *sc,
     comp_swapchain_destroy_func_t destroy_func,
     struct vk_bundle *vk,
-    struct comp_swapchain_gc *cscgc,
+    struct comp_swapchain_shared *cscs,
     uint32_t image_count)
 {
     sc->base.base.destroy = swapchain_destroy;
@@ -103,7 +104,7 @@ set_common_fields(struct comp_swapchain *sc,
     sc->base.base.image_count = image_count;
     sc->real_destroy = destroy_func;
     sc->vk = vk;
-    sc->gc = cscgc;
+    sc->cscs = cscs;
     // Make sure the handles are invalid.
     for (uint32_t i = 0; i < ARRAY_SIZE(sc->base.images); i++) {
@@ -185,7 +186,19 @@ do_post_create_vulkan_setup(struct vk_bundle *vk,
     *
     */
-    vk_cmd_buffer_create_and_begin(vk, &cmd_buffer);
+    // To reduce the pointer chasing.
+    struct vk_cmd_pool *pool = &sc->cscs->pool;
+    // First lock.
+    vk_cmd_pool_lock(pool);
+    // Now lets create the command buffer.
+    ret = vk_cmd_pool_create_and_begin_cmd_buffer_locked(vk, pool, 0, &cmd_buffer);
+    if (ret != VK_SUCCESS) {
+        vk_cmd_pool_unlock(pool);
+        VK_ERROR(vk, "Failed to barrier images");
+        return;
+    }
     VkImageAspectFlagBits image_barrier_aspect = vk_csci_get_barrier_aspect_mask(image_view_format);
@@ -198,7 +211,7 @@ do_post_create_vulkan_setup(struct vk_bundle *vk,
     };
     for (uint32_t i = 0; i < image_count; i++) {
-        vk_cmd_image_barrier_gpu( //
+        vk_cmd_image_barrier_gpu_locked( //
            vk, //
            cmd_buffer, //
            sc->vkic.images[i].handle, //
@@ -209,7 +222,13 @@ do_post_create_vulkan_setup(struct vk_bundle *vk,
            subresource_range); //
     }
-    ret = vk_cmd_buffer_submit(vk, cmd_buffer);
+    // Done writing commands, submit to queue, waits for command to finish.
+    ret = vk_cmd_pool_end_submit_wait_and_free_cmd_buffer_locked(vk, pool, cmd_buffer);
+    // Done submitting commands.
+    vk_cmd_pool_unlock(pool);
+    // Check results from submit.
     if (ret != VK_SUCCESS) {
         //! @todo Propegate error
         VK_ERROR(vk, "Failed to barrier images");
@@ -289,7 +308,7 @@ xrt_result_t
 comp_swapchain_create_init(struct comp_swapchain *sc,
     comp_swapchain_destroy_func_t destroy_func,
     struct vk_bundle *vk,
-    struct comp_swapchain_gc *cscgc,
+    struct comp_swapchain_shared *cscs,
     const struct xrt_swapchain_create_info *info,
     const struct xrt_swapchain_create_properties *xsccp)
 {
@@ -307,7 +326,7 @@ comp_swapchain_create_init(struct comp_swapchain *sc,
         return XRT_ERROR_SWAPCHAIN_FLAG_VALID_BUT_UNSUPPORTED;
     }
-    set_common_fields(sc, destroy_func, vk, cscgc, xsccp->image_count);
+    set_common_fields(sc, destroy_func, vk, cscs, xsccp->image_count);
     // Use the image helper to allocate the images.
     ret = vk_ic_allocate(vk, info, xsccp->image_count, &sc->vkic);
@@ -337,7 +356,7 @@ xrt_result_t
 comp_swapchain_import_init(struct comp_swapchain *sc,
     comp_swapchain_destroy_func_t destroy_func,
     struct vk_bundle *vk,
-    struct comp_swapchain_gc *cscgc,
+    struct comp_swapchain_shared *cscs,
     const struct xrt_swapchain_create_info *info,
     struct xrt_image_native *native_images,
     uint32_t native_image_count)
@@ -349,7 +368,7 @@ comp_swapchain_import_init(struct comp_swapchain *sc,
        info->width, info->height, //
        vk_format_string(info->format), info->format);
-    set_common_fields(sc, destroy_func, vk, cscgc, native_image_count);
+    set_common_fields(sc, destroy_func, vk, cscs, native_image_count);
     // Use the image helper to get the images.
     ret = vk_ic_from_natives(vk, info, native_images, native_image_count, &sc->vkic);
@@ -383,16 +402,34 @@ comp_swapchain_teardown(struct comp_swapchain *sc)
 /*
  *
- * 'Exported' garbage collection functions.
+ * 'Exported' shared functions.
  *
  */
+XRT_CHECK_RESULT xrt_result_t
+comp_swapchain_shared_init(struct comp_swapchain_shared *cscs, struct vk_bundle *vk)
+{
+    VkResult ret = vk_cmd_pool_init(vk, &cscs->pool, 0);
+    if (ret != VK_SUCCESS) {
+        VK_ERROR(vk, "vk_cmd_pool_init: %s", vk_result_string(ret));
+        return XRT_ERROR_VULKAN;
+    }
+    return XRT_SUCCESS;
+}
 void
-comp_swapchain_garbage_collect(struct comp_swapchain_gc *cscgc)
+comp_swapchain_shared_destroy(struct comp_swapchain_shared *cscs, struct vk_bundle *vk)
+{
+    vk_cmd_pool_destroy(vk, &cscs->pool);
+}
+void
+comp_swapchain_shared_garbage_collect(struct comp_swapchain_shared *cscs)
 {
     struct comp_swapchain *sc;
-    while ((sc = u_threading_stack_pop(&cscgc->destroy_swapchains))) {
+    while ((sc = u_threading_stack_pop(&cscs->destroy_swapchains))) {
         sc->real_destroy(sc);
     }
 }
@@ -423,7 +460,7 @@ comp_swapchain_get_create_properties(const struct xrt_swapchain_create_info *inf
 xrt_result_t
 comp_swapchain_create(struct vk_bundle *vk,
-    struct comp_swapchain_gc *cscgc,
+    struct comp_swapchain_shared *cscs,
     const struct xrt_swapchain_create_info *info,
     const struct xrt_swapchain_create_properties *xsccp,
     struct xrt_swapchain **out_xsc)
@@ -435,7 +472,7 @@ comp_swapchain_create(struct vk_bundle *vk,
        sc, //
        really_destroy, //
        vk, //
-       cscgc, //
+       cscs, //
        info, //
        xsccp); //
     if (xret != XRT_SUCCESS) {
@@ -451,7 +488,7 @@
 xrt_result_t
 comp_swapchain_import(struct vk_bundle *vk,
-    struct comp_swapchain_gc *cscgc,
+    struct comp_swapchain_shared *cscs,
     const struct xrt_swapchain_create_info *info,
     struct xrt_image_native *native_images,
     uint32_t native_image_count,
@@ -464,7 +501,7 @@ comp_swapchain_import(struct vk_bundle *vk,
        sc, //
        really_destroy, //
        vk, //
-       cscgc, //
+       cscs, //
        info, //
        native_images, //
        native_image_count); //


@@ -1,4 +1,4 @@
-// Copyright 2019-2021, Collabora, Ltd.
+// Copyright 2019-2023, Collabora, Ltd.
 // SPDX-License-Identifier: BSL-1.0
 /*!
  * @file
@@ -11,6 +11,7 @@
 #pragma once
 #include "vk/vk_image_allocator.h"
+#include "vk/vk_cmd_pool.h"
 #include "util/u_threading.h"
 #include "util/u_index_fifo.h"
@@ -32,14 +33,19 @@ struct comp_swapchain;
 typedef void (*comp_swapchain_destroy_func_t)(struct comp_swapchain *sc);
 /*!
- * A garbage collector that collects swapchains to be safely destroyed.
+ * Shared resource(s) and garbage collector for swapchains. The garbage
+ * collector allows to delay the destruction until it's safe to destroy them.
+ * The lifetime of @p pool is handled by the compositor that implements this
+ * struct.
  *
  * @ingroup comp_util
 */
-struct comp_swapchain_gc
+struct comp_swapchain_shared
 {
     //! Thread object for safely destroying swapchain.
     struct u_threading_stack destroy_swapchains;
+    struct vk_cmd_pool pool;
 };
 /*!
@@ -81,7 +87,7 @@ struct comp_swapchain
     struct xrt_swapchain_native base;
     struct vk_bundle *vk;
-    struct comp_swapchain_gc *gc;
+    struct comp_swapchain_shared *cscs;
     struct vk_image_collection vkic;
     struct comp_swapchain_image images[XRT_MAX_SWAPCHAIN_IMAGES];
@@ -133,7 +139,7 @@ xrt_result_t
 comp_swapchain_create_init(struct comp_swapchain *sc,
     comp_swapchain_destroy_func_t destroy_func,
     struct vk_bundle *vk,
-    struct comp_swapchain_gc *cscgc,
+    struct comp_swapchain_shared *cscs,
     const struct xrt_swapchain_create_info *info,
     const struct xrt_swapchain_create_properties *xsccp);
@@ -148,7 +154,7 @@ xrt_result_t
 comp_swapchain_import_init(struct comp_swapchain *sc,
     comp_swapchain_destroy_func_t destroy_func,
     struct vk_bundle *vk,
-    struct comp_swapchain_gc *cscgc,
+    struct comp_swapchain_shared *cscs,
     const struct xrt_swapchain_create_info *info,
     struct xrt_image_native *native_images,
     uint32_t native_image_count);
@@ -164,10 +170,26 @@ comp_swapchain_teardown(struct comp_swapchain *sc);
 /*
  *
- * 'Exported' garbage collection functions.
+ * 'Exported' shared struct functions.
  *
  */
+/*!
+ * Create the shared struct.
+ *
+ * @ingroup comp_util
+ */
+XRT_CHECK_RESULT xrt_result_t
+comp_swapchain_shared_init(struct comp_swapchain_shared *cscs, struct vk_bundle *vk);
+/*!
+ * Destroy the shared struct.
+ *
+ * @ingroup comp_util
+ */
+void
+comp_swapchain_shared_destroy(struct comp_swapchain_shared *cscs, struct vk_bundle *vk);
 /*!
  * Do garbage collection, destroying any resources that has been scheduled for
  * destruction from other threads.
@@ -175,7 +197,7 @@ comp_swapchain_teardown(struct comp_swapchain *sc);
  * @ingroup comp_util
  */
 void
-comp_swapchain_garbage_collect(struct comp_swapchain_gc *cscgc);
+comp_swapchain_shared_garbage_collect(struct comp_swapchain_shared *cscs);
 /*
@@ -200,7 +222,7 @@ comp_swapchain_get_create_properties(const struct xrt_swapchain_create_info *inf
 */
 xrt_result_t
 comp_swapchain_create(struct vk_bundle *vk,
-    struct comp_swapchain_gc *cscgc,
+    struct comp_swapchain_shared *cscs,
     const struct xrt_swapchain_create_info *info,
     const struct xrt_swapchain_create_properties *xsccp,
     struct xrt_swapchain **out_xsc);
@@ -212,7 +234,7 @@ comp_swapchain_create(struct vk_bundle *vk,
 */
 xrt_result_t
 comp_swapchain_import(struct vk_bundle *vk,
-    struct comp_swapchain_gc *cscgc,
+    struct comp_swapchain_shared *cscs,
     const struct xrt_swapchain_create_info *info,
     struct xrt_image_native *native_images,
     uint32_t image_count,


@@ -1,4 +1,4 @@
-// Copyright 2019-2022, Collabora, Ltd.
+// Copyright 2019-2023, Collabora, Ltd.
 // SPDX-License-Identifier: BSL-1.0
 /*!
  * @file
@@ -205,6 +205,12 @@ compositor_init_vulkan(struct sdl_compositor *c, enum u_logging_level log_level)
     c->sys_info.client_d3d_deviceLUID = vk_res.client_gpu_deviceLUID;
     c->sys_info.client_d3d_deviceLUID_valid = vk_res.client_gpu_deviceLUID_valid;
+    // Tie the lifetimes of swapchains to Vulkan.
+    xrt_result_t xret = comp_swapchain_shared_init(&c->base.cscs, vk);
+    if (xret != XRT_SUCCESS) {
+        return false;
+    }
     return true;
 }
@@ -456,7 +462,7 @@ sdl_compositor_layer_commit(struct xrt_compositor *xc, xrt_graphics_sync_handle_
     }
     // Now is a good point to garbage collect.
-    comp_swapchain_garbage_collect(&c->base.cscgc);
+    comp_swapchain_shared_garbage_collect(&c->base.cscs);
     return XRT_SUCCESS;
 }
@@ -514,8 +520,10 @@ sdl_compositor_destroy(struct xrt_compositor *xc)
     SC_DEBUG(c, "DESTROY");
     // Make sure we don't have anything to destroy.
-    comp_swapchain_garbage_collect(&c->base.cscgc);
+    comp_swapchain_shared_garbage_collect(&c->base.cscs);
+    // Must be destroyed before Vulkan.
+    comp_swapchain_shared_destroy(&c->base.cscs, vk);
     if (vk->cmd_pool != VK_NULL_HANDLE) {
         vk->vkDestroyCommandPool(vk->device, vk->cmd_pool, NULL);


@@ -1,4 +1,4 @@
-// Copyright 2022, Collabora, Ltd.
+// Copyright 2022-2023, Collabora, Ltd.
 // SPDX-License-Identifier: BSL-1.0
 /*!
  * @file
@@ -164,7 +164,7 @@ sdl_swapchain_create(struct xrt_compositor *xc,
        &ssc->base, //
        really_destroy, //
        &sp->c.base.vk, //
-       &sp->c.base.cscgc, //
+       &sp->c.base.cscs, //
        info, //
        &xsccp); //
     if (xret != XRT_SUCCESS) {
@@ -197,7 +197,7 @@ sdl_swapchain_import(struct xrt_compositor *xc,
        &ssc->base, //
        really_destroy, //
        &sp->c.base.vk, //
-       &sp->c.base.cscgc, //
+       &sp->c.base.cscs, //
        info, //
        native_images, //
        native_image_count); //
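
Taken together, the compositor-side hunks above tie the new shared struct to a fixed lifecycle: comp_swapchain_shared_init() once Vulkan is up, comp_swapchain_shared_garbage_collect() at layer commit and again at teardown, and comp_swapchain_shared_destroy() before Vulkan itself is torn down. A minimal sketch of that call order follows; the sketch_* call sites are hypothetical, only the comp_swapchain_shared_*() calls come from the commit:

#include "util/comp_swapchain.h"

// Hypothetical call sites; the real ones are the main, null and sdl compositors above.
static bool
sketch_init_vulkan(struct comp_swapchain_shared *cscs, struct vk_bundle *vk)
{
    // Tie the lifetimes of swapchains to Vulkan.
    return comp_swapchain_shared_init(cscs, vk) == XRT_SUCCESS;
}

static void
sketch_layer_commit(struct comp_swapchain_shared *cscs)
{
    // A frame boundary is a good point to destroy swapchains queued from other threads.
    comp_swapchain_shared_garbage_collect(cscs);
}

static void
sketch_destroy(struct comp_swapchain_shared *cscs, struct vk_bundle *vk)
{
    // Drain anything still pending destruction ...
    comp_swapchain_shared_garbage_collect(cscs);

    // ... then destroy the shared command pool; must happen before Vulkan goes away.
    comp_swapchain_shared_destroy(cscs, vk);
}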