// Copyright 2019-2020, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
 * @file
 * @brief Swapchain code for the main compositor.
 * @author Jakob Bornecrantz <jakob@collabora.com>
 * @ingroup comp_main
 */
#include "util/u_misc.h"

#include "main/comp_compositor.h"

#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>

/*
 *
 * Swapchain member functions.
 *
 */

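/*!
 * xrt_swapchain::destroy implementation.
 *
 * Destruction is deferred: the swapchain is only pushed onto the compositor's
 * destroy_swapchains stack here, and the actual Vulkan teardown happens later
 * in comp_swapchain_really_destroy(). The draining of that stack lives outside
 * this file, presumably on the compositor's main thread.
 */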
static void
swapchain_destroy(struct xrt_swapchain *xsc)
{
        struct comp_swapchain *sc = comp_swapchain(xsc);

        COMP_SPEW(sc->c, "DESTROY");

        u_threading_stack_push(&sc->c->threading.destroy_swapchains, sc);
}

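/*!
 * xrt_swapchain::acquire_image implementation.
 *
 * Available image indices live in sc->fifo: acquire pops an index and release
 * pushes it back, so an index can only be handed out once until it has been
 * released again.
 *
 * A minimal, illustrative caller-side sketch going straight through the
 * function pointers set up in alloc_and_set_funcs() (names like timeout_ns
 * are placeholders):
 *
 * @code
 * uint32_t index = 0;
 * uint64_t timeout_ns = 0; // Placeholder, currently ignored by wait_image.
 * if (xsc->acquire_image(xsc, &index) != XRT_SUCCESS) {
 *         // No image free right now.
 * }
 * xsc->wait_image(xsc, timeout_ns, index);
 * // ... render into image `index` ...
 * xsc->release_image(xsc, index);
 * @endcode
 */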
static xrt_result_t
swapchain_acquire_image(struct xrt_swapchain *xsc, uint32_t *out_index)
{
        struct comp_swapchain *sc = comp_swapchain(xsc);

        COMP_SPEW(sc->c, "ACQUIRE_IMAGE");

        // Returns negative on empty fifo.
        int res = u_index_fifo_pop(&sc->fifo, out_index);
        if (res >= 0) {
                return XRT_SUCCESS;
        } else {
                return XRT_ERROR_NO_IMAGE_AVAILABLE;
        }
}

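/*!
 * xrt_swapchain::wait_image implementation.
 *
 * Currently a no-op: both the timeout and the index are ignored and the image
 * is treated as ready as soon as it has been acquired.
 */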
static xrt_result_t
swapchain_wait_image(struct xrt_swapchain *xsc,
                     uint64_t timeout,
                     uint32_t index)
{
        struct comp_swapchain *sc = comp_swapchain(xsc);

        COMP_SPEW(sc->c, "WAIT_IMAGE");
        return XRT_SUCCESS;
}

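/*!
 * xrt_swapchain::release_image implementation.
 *
 * Pushes the index back onto the fifo so it can be acquired again. The push
 * only fails when the fifo is full, which should not happen as long as callers
 * only release indices they have previously acquired.
 */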
static xrt_result_t
swapchain_release_image(struct xrt_swapchain *xsc, uint32_t index)
{
        struct comp_swapchain *sc = comp_swapchain(xsc);

        COMP_SPEW(sc->c, "RELEASE_IMAGE");

        int res = u_index_fifo_push(&sc->fifo, index);

        if (res >= 0) {
                return XRT_SUCCESS;
        } else {
                // FIFO full.
                return XRT_ERROR_NO_IMAGE_AVAILABLE;
        }
}


/*
 *
 * Helper functions.
 *
 */

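/*!
 * Allocate the comp_swapchain struct, wire up the xrt_swapchain function
 * pointers and mark all native image fds as invalid (-1). The caller still has
 * to allocate or import the actual Vulkan images.
 */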
static struct comp_swapchain *
alloc_and_set_funcs(struct comp_compositor *c, uint32_t num_images)
{
        struct comp_swapchain *sc = U_TYPED_CALLOC(struct comp_swapchain);
        sc->base.base.destroy = swapchain_destroy;
        sc->base.base.acquire_image = swapchain_acquire_image;
        sc->base.base.wait_image = swapchain_wait_image;
        sc->base.base.release_image = swapchain_release_image;
        sc->base.base.num_images = num_images;
        sc->c = c;

        // Make sure the fds are invalid.
        for (uint32_t i = 0; i < ARRAY_SIZE(sc->base.images); i++) {
                sc->base.images[i].fd = -1;
        }

        return sc;
}

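/*!
 * Shared setup run once the Vulkan images exist (allocated or imported): for
 * every image and array layer this creates a sampler plus two image views -
 * one regular and one with the alpha channel swizzled to
 * VK_COMPONENT_SWIZZLE_ONE (presumably used when a layer's alpha should be
 * ignored) - primes the acquire fifo with all indices, and transitions every
 * image to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL.
 */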
static void
do_post_create_vulkan_setup(struct comp_compositor *c,
                            struct comp_swapchain *sc)
{
        struct xrt_swapchain_create_info *info = &sc->vkic.info;
        uint32_t num_images = sc->vkic.num_images;
        VkCommandBuffer cmd_buffer;

        VkComponentMapping components = {
            .r = VK_COMPONENT_SWIZZLE_R,
            .g = VK_COMPONENT_SWIZZLE_G,
            .b = VK_COMPONENT_SWIZZLE_B,
            .a = VK_COMPONENT_SWIZZLE_ONE,
        };

        for (uint32_t i = 0; i < num_images; i++) {
                sc->images[i].views.alpha =
                    U_TYPED_ARRAY_CALLOC(VkImageView, info->array_size);
                sc->images[i].views.no_alpha =
                    U_TYPED_ARRAY_CALLOC(VkImageView, info->array_size);
                sc->images[i].array_size = info->array_size;

                vk_create_sampler(&c->vk, &sc->images[i].sampler);

                for (uint32_t layer = 0; layer < info->array_size; ++layer) {
                        VkImageSubresourceRange subresource_range = {
                            .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                            .baseMipLevel = 0,
                            .levelCount = 1,
                            .baseArrayLayer = layer,
                            .layerCount = 1,
                        };

                        vk_create_view(&c->vk, sc->vkic.images[i].handle,
                                       (VkFormat)info->format,
                                       subresource_range,
                                       &sc->images[i].views.alpha[layer]);
                        vk_create_view_swizzle(
                            &c->vk, sc->vkic.images[i].handle,
                            (VkFormat)info->format, subresource_range,
                            components, &sc->images[i].views.no_alpha[layer]);
                }
        }

        // Prime the fifo so every image starts out as acquirable.
        for (uint32_t i = 0; i < num_images; i++) {
                u_index_fifo_push(&sc->fifo, i);
        }


        /*
         *
         * Transition images.
         *
         */

        vk_init_cmd_buffer(&c->vk, &cmd_buffer);

        VkImageSubresourceRange subresource_range = {
            .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
            .baseMipLevel = 0,
            .levelCount = 1,
            .baseArrayLayer = 0,
            .layerCount = info->array_size,
        };

        for (uint32_t i = 0; i < num_images; i++) {
                vk_set_image_layout(
                    &c->vk, cmd_buffer, sc->vkic.images[i].handle, 0,
                    VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
                    VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
                    subresource_range);
        }

        vk_submit_cmd_buffer(&c->vk, cmd_buffer);
}

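/*!
 * Destroy every non-null view in the given array and free the array itself,
 * then null out the caller's pointer. Safe to call with a NULL view array.
 */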
static void
clean_image_views(struct vk_bundle *vk,
                  size_t array_size,
                  VkImageView **views_ptr)
{
        VkImageView *views = *views_ptr;
        if (views == NULL) {
                return;
        }

        for (uint32_t i = 0; i < array_size; ++i) {
                if (views[i] == VK_NULL_HANDLE) {
                        continue;
                }

                vk->vkDestroyImageView(vk->device, views[i], NULL);
                views[i] = VK_NULL_HANDLE;
        }

        free(views);

        *views_ptr = NULL;
}

/*!
 * Free and destroy any initialized fields on the given image, safe to pass in
 * an image that has some or all fields set to NULL.
 */
static void
image_cleanup(struct vk_bundle *vk, struct comp_swapchain_image *image)
{
        /*
         * Make sure that any pending command buffer has completed and that all
         * resources referred to by it can now be manipulated. This keeps the
         * validation layers from complaining. It is done during image
         * destruction so it isn't time critical.
         */
        vk->vkDeviceWaitIdle(vk->device);

        clean_image_views(vk, image->array_size, &image->views.alpha);
        clean_image_views(vk, image->array_size, &image->views.no_alpha);

        if (image->sampler != VK_NULL_HANDLE) {
                vk->vkDestroySampler(vk->device, image->sampler, NULL);
                image->sampler = VK_NULL_HANDLE;
        }
}


/*
 *
 * Exported functions.
 *
 */

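/*!
 * Create and allocate a swapchain owned by the compositor.
 *
 * Three images are allocated by default, only one for static swapchains
 * (XRT_SWAPCHAIN_CREATE_STATIC_IMAGE). On Linux the native fds of the
 * allocated images are exported into sc->base.images, presumably so they can
 * be shared with the client; a Vulkan allocation failure is mapped to
 * XRT_ERROR_VULKAN.
 *
 * A rough usage sketch (illustrative only: xc is a placeholder compositor
 * pointer and most xrt_swapchain_create_info fields are left out):
 *
 * @code
 * struct xrt_swapchain_create_info info = {
 *         .format = VK_FORMAT_R8G8B8A8_SRGB,
 *         .width = 1024,
 *         .height = 1024,
 *         .array_size = 1,
 * };
 * struct xrt_swapchain *xsc = NULL;
 * xrt_result_t xret = comp_swapchain_create(xc, &info, &xsc);
 * @endcode
 */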
xrt_result_t
comp_swapchain_create(struct xrt_compositor *xc,
                      const struct xrt_swapchain_create_info *info,
                      struct xrt_swapchain **out_xsc)
{
        struct comp_compositor *c = comp_compositor(xc);
        uint32_t num_images = 3;
        VkResult ret;

        if ((info->create & XRT_SWAPCHAIN_CREATE_STATIC_IMAGE) != 0) {
                num_images = 1;
        }

        struct comp_swapchain *sc = alloc_and_set_funcs(c, num_images);

        COMP_DEBUG(c, "CREATE %p %dx%d", (void *)sc, info->width, info->height);

        // Use the image helper to allocate the images.
        ret = vk_ic_allocate(&c->vk, info, num_images, &sc->vkic);
        if (ret != VK_SUCCESS) {
                free(sc);
                return XRT_ERROR_VULKAN;
        }

#ifdef XRT_OS_LINUX
        int fds[ARRAY_SIZE(sc->vkic.images)];

        vk_ic_get_fds(&c->vk, &sc->vkic, ARRAY_SIZE(fds), fds);
        for (uint32_t i = 0; i < sc->vkic.num_images; i++) {
                sc->base.images[i].fd = fds[i];
                sc->base.images[i].size = sc->vkic.images[i].size;
        }
#else
#error "OS not supported"
#endif

        do_post_create_vulkan_setup(c, sc);

        *out_xsc = &sc->base.base;

        return XRT_SUCCESS;
}

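/*!
 * Import an already allocated set of native images (for example fd-backed
 * images handed over by a client) instead of allocating new ones, then run the
 * same post-create Vulkan setup as comp_swapchain_create().
 */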
xrt_result_t
comp_swapchain_import(struct xrt_compositor *xc,
                      const struct xrt_swapchain_create_info *info,
                      struct xrt_image_native *native_images,
                      uint32_t num_images,
                      struct xrt_swapchain **out_xsc)
{
        struct comp_compositor *c = comp_compositor(xc);
        VkResult ret;

        struct comp_swapchain *sc = alloc_and_set_funcs(c, num_images);

        COMP_DEBUG(c, "CREATE FROM NATIVE %p %dx%d", (void *)sc, info->width,
                   info->height);

        // Use the image helper to get the images.
        ret = vk_ic_from_natives(&c->vk, info, native_images, num_images,
                                 &sc->vkic);
        if (ret != VK_SUCCESS) {
                // Mirror the failure path in comp_swapchain_create().
                free(sc);
                return XRT_ERROR_VULKAN;
        }

        do_post_create_vulkan_setup(c, sc);

        *out_xsc = &sc->base.base;

        return XRT_SUCCESS;
}

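/*!
 * The deferred counterpart to swapchain_destroy(): tears down the per-image
 * views and samplers, closes any exported fds, destroys the Vulkan image
 * collection and finally frees the struct. Expected to be called once the
 * compositor knows the swapchain is no longer in use.
 */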
void
comp_swapchain_really_destroy(struct comp_swapchain *sc)
{
        struct vk_bundle *vk = &sc->c->vk;

        COMP_SPEW(sc->c, "REALLY DESTROY");

        for (uint32_t i = 0; i < sc->base.base.num_images; i++) {
                image_cleanup(vk, &sc->images[i]);
        }

        for (uint32_t i = 0; i < sc->base.base.num_images; i++) {
                if (sc->base.images[i].fd < 0) {
                        continue;
                }

                close(sc->base.images[i].fd);
                sc->base.images[i].fd = -1;
        }

        vk_ic_destroy(vk, &sc->vkic);

        free(sc);
}