// Copyright 2020, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
* @file
* @brief Vulkan image allocator helper.
* @author Jakob Bornecrantz <jakob@collabora.com>
* @ingroup aux_vk
*/
#include "util/u_misc.h"
#include "util/u_logging.h"
#include "util/u_handles.h"

#include "vk/vk_image_allocator.h"

#include <xrt/xrt_handles.h>

#ifdef XRT_OS_LINUX
#include <unistd.h>
#endif

#ifdef XRT_GRAPHICS_BUFFER_HANDLE_IS_AHARDWAREBUFFER
#include "android/android_ahardwarebuffer_allocator.h"
#endif

/*
 *
 * Helper functions.
 *
 */

VkExternalMemoryHandleTypeFlags
get_image_memory_handle_type(void)
{
#if defined(XRT_GRAPHICS_BUFFER_HANDLE_IS_AHARDWAREBUFFER)
return VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
#elif defined(XRT_GRAPHICS_BUFFER_HANDLE_IS_WIN32_HANDLE)
return VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR;
#elif defined(XRT_GRAPHICS_BUFFER_HANDLE_IS_FD)
return VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
#else
#error "need port"
#endif
}
static VkResult
2021-01-14 14:13:48 +00:00
create_image(struct vk_bundle *vk, const struct xrt_swapchain_create_info *info, struct vk_image *out_image)
2020-07-08 14:53:52 +00:00
{
// This is the format we allocate the image in, can be changed further down.
VkFormat image_format = (VkFormat)info->format;
VkImageUsageFlags image_usage = vk_csci_get_image_usage_flags( //
vk, //
image_format, //
info->bits); //
if (image_usage == 0) {
U_LOG_E("create_image: Unsupported swapchain usage flags");
return VK_ERROR_FEATURE_NOT_PRESENT;
}
2020-07-08 14:53:52 +00:00
VkDeviceMemory device_memory = VK_NULL_HANDLE;
VkImage image = VK_NULL_HANDLE;
VkResult ret = VK_SUCCESS;
VkDeviceSize size;
#if defined(XRT_GRAPHICS_BUFFER_HANDLE_IS_AHARDWAREBUFFER)
/*
* Get AHardwareBuffer props
*/
AHardwareBuffer *a_buffer = NULL;
xrt_result_t xrt_res = ahardwarebuffer_image_allocate(info, &a_buffer);
if (xrt_res != XRT_SUCCESS) {
U_LOG_E("Failed to ahardwarebuffer_image_allocate.");
return ret;
}
2022-02-19 17:47:59 +00:00
// Out->pNext
VkAndroidHardwareBufferFormatPropertiesANDROID a_buffer_format_props = {
2021-01-14 14:13:48 +00:00
.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID,
};
2022-02-19 17:47:59 +00:00
// Out
VkAndroidHardwareBufferPropertiesANDROID a_buffer_props = {
2021-01-14 14:13:48 +00:00
.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
.pNext = &a_buffer_format_props,
};
2021-01-14 14:13:48 +00:00
ret = vk->vkGetAndroidHardwareBufferPropertiesANDROID(vk->device, a_buffer, &a_buffer_props);
if (ret != VK_SUCCESS) {
2021-01-14 14:13:48 +00:00
U_LOG_E("vkGetAndroidHardwareBufferPropertiesANDROID: %s", vk_result_string(ret));
return ret;
}
#endif
2020-07-08 14:53:52 +00:00
2022-02-19 17:47:59 +00:00
/*
*
* Start of create image call.
*
*/
void *next_chain = NULL;
#define CHAIN(STRUCT) \
do { \
(STRUCT).pNext = next_chain; \
next_chain = (void *)&(STRUCT); \
} while (false)
2020-07-08 14:53:52 +00:00
/*
* Create the image.
*/
2022-02-19 17:47:59 +00:00
VkExternalMemoryHandleTypeFlags memory_handle_type = get_image_memory_handle_type();
2021-01-14 14:13:48 +00:00
VkExternalMemoryImageCreateInfoKHR external_memory_image_create_info = {
2022-02-19 17:47:59 +00:00
.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR,
.handleTypes = memory_handle_type,
.pNext = next_chain,
2020-07-08 14:53:52 +00:00
};
2022-02-19 17:47:59 +00:00
CHAIN(external_memory_image_create_info);
2020-07-08 14:53:52 +00:00
#if defined(XRT_GRAPHICS_BUFFER_HANDLE_IS_AHARDWAREBUFFER)
VkExternalFormatANDROID format_android = {
.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
.externalFormat = a_buffer_format_props.externalFormat,
};
2022-02-19 17:47:59 +00:00
CHAIN(format_android);
// Android can't allocate native sRGB.
// Use UNORM and correct gamma later.
if (image_format == VK_FORMAT_R8G8B8A8_SRGB) {
image_format = VK_FORMAT_R8G8B8A8_UNORM;
}
#endif
2021-01-14 14:13:48 +00:00
VkImageCreateInfo create_info = {
2022-02-19 17:47:59 +00:00
.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
.pNext = next_chain,
.imageType = VK_IMAGE_TYPE_2D,
.format = image_format,
.extent = {.width = info->width, .height = info->height, .depth = 1},
.mipLevels = info->mip_count,
.arrayLayers = info->array_size,
.samples = VK_SAMPLE_COUNT_1_BIT,
.tiling = VK_IMAGE_TILING_OPTIMAL,
.usage = image_usage,
.sharingMode = VK_SHARING_MODE_EXCLUSIVE,
.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
2020-07-08 14:53:52 +00:00
};
ret = vk->vkCreateImage(vk->device, &create_info, NULL, &image);
if (ret != VK_SUCCESS) {
U_LOG_E("vkCreateImage: %s", vk_result_string(ret));
return ret;
}
2022-02-19 17:47:59 +00:00
// In
VkImageMemoryRequirementsInfo2 memory_requirements_info = {
.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
.image = image,
};
2022-02-19 17:47:59 +00:00
// Out->pNext
VkMemoryDedicatedRequirements memory_dedicated_requirements = {
.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
};
2022-02-19 17:47:59 +00:00
// Out
VkMemoryRequirements2 memory_requirements = {
.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
.pNext = &memory_dedicated_requirements,
};
2022-02-19 17:47:59 +00:00
vk->vkGetImageMemoryRequirements2(vk->device, &memory_requirements_info, &memory_requirements);
/*
* On tegra we must not use dedicated allocation when it is only preferred to avoid black textures and driver
* errors when blitting from opengl interop textures.
*
* On desktop nvidia and everywhere else we must always use dedicated allocation even when it is neither
* preferred nor required to avoid fences timing out and driver errors like "Graphics Exception on GPC 0: 3D-C
* MEMLAYOUT Violation."
*/
VkBool32 use_dedicated_allocation;
if (vk->is_tegra) {
use_dedicated_allocation = memory_dedicated_requirements.requiresDedicatedAllocation != VK_FALSE;
} else {
use_dedicated_allocation = VK_TRUE;
}
U_LOG_D("create_image: Use dedicated allocation: %d (preferred: %d, required: %d)", use_dedicated_allocation,
memory_dedicated_requirements.prefersDedicatedAllocation,
memory_dedicated_requirements.requiresDedicatedAllocation);
2020-07-08 14:53:52 +00:00
/*
* Create and bind the memory.
*/
2022-02-19 17:47:59 +00:00
// In->pNext->pNext
2020-07-08 14:53:52 +00:00
VkMemoryDedicatedAllocateInfoKHR dedicated_memory_info = {
.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
.image = image,
.buffer = VK_NULL_HANDLE,
};
2022-02-19 17:47:59 +00:00
// In->pNext
2020-07-08 14:53:52 +00:00
VkExportMemoryAllocateInfo export_alloc_info = {
.sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR,
.pNext = use_dedicated_allocation ? &dedicated_memory_info : NULL,
2022-02-19 17:47:59 +00:00
.handleTypes = memory_handle_type,
2020-07-08 14:53:52 +00:00
};
2021-01-14 14:13:48 +00:00
ret = vk_alloc_and_bind_image_memory(vk, image, SIZE_MAX, &export_alloc_info, &device_memory, &size);
2020-07-08 14:53:52 +00:00
if (ret != VK_SUCCESS) {
U_LOG_E("vkAllocateMemory: %s", vk_result_string(ret));
vk->vkDestroyImage(vk->device, image, NULL);
return ret;
}
out_image->handle = image;
out_image->memory = device_memory;
out_image->size = size;
out_image->use_dedicated_allocation = use_dedicated_allocation;
2020-07-08 14:53:52 +00:00
return ret;
}
static void
destroy_image(struct vk_bundle *vk, struct vk_image *image)
{
if (image->handle != VK_NULL_HANDLE) {
vk->vkDestroyImage(vk->device, image->handle, NULL);
image->handle = VK_NULL_HANDLE;
}
if (image->memory != VK_NULL_HANDLE) {
vk->vkFreeMemory(vk->device, image->memory, NULL);
image->memory = VK_NULL_HANDLE;
}
}
/*
*
* 'Exported' functions.
*
*/
VkResult
vk_ic_allocate(struct vk_bundle *vk,
const struct xrt_swapchain_create_info *xscci,
2021-11-08 22:53:52 +00:00
uint32_t image_count,
2020-07-08 14:53:52 +00:00
struct vk_image_collection *out_vkic)
{
VkResult ret = VK_SUCCESS;
2021-11-08 22:53:52 +00:00
if (image_count > ARRAY_SIZE(out_vkic->images)) {
2020-07-08 14:53:52 +00:00
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
size_t i = 0;
2021-11-08 22:53:52 +00:00
for (; i < image_count; i++) {
2020-07-08 14:53:52 +00:00
ret = create_image(vk, xscci, &out_vkic->images[i]);
if (ret != VK_SUCCESS) {
break;
}
}
// Set the fields.
2021-11-08 22:53:52 +00:00
out_vkic->image_count = image_count;
2020-07-08 14:53:52 +00:00
out_vkic->info = *xscci;
if (ret == VK_SUCCESS) {
return ret;
}
// i is the index of the failed image, everything before that index
// succeeded and needs to be destroyed. If i is zero no call succeeded.
while (i > 0) {
i--;
destroy_image(vk, &out_vkic->images[i]);
}
U_ZERO(out_vkic);
return ret;
}
/*!
* Imports and set images from the given FDs.
*/
VkResult
vk_ic_from_natives(struct vk_bundle *vk,
const struct xrt_swapchain_create_info *xscci,
2020-07-08 14:53:52 +00:00
struct xrt_image_native *native_images,
2021-11-08 22:53:52 +00:00
uint32_t image_count,
2020-07-08 14:53:52 +00:00
struct vk_image_collection *out_vkic)
{
VkResult ret = VK_ERROR_INITIALIZATION_FAILED;
2021-11-08 22:53:52 +00:00
if (image_count > ARRAY_SIZE(out_vkic->images)) {
2020-07-08 14:53:52 +00:00
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
size_t i = 0;
2021-11-08 22:53:52 +00:00
for (; i < image_count; i++) {
// Ensure that all handles are consumed or none are.
2021-01-14 14:13:48 +00:00
xrt_graphics_buffer_handle_t buf = u_graphics_buffer_ref(native_images[i].handle);
2020-07-08 14:53:52 +00:00
2022-02-19 17:47:59 +00:00
ret = vk_create_image_from_native( //
vk, // vk_bundle
xscci, // info
&native_images[i], // image_native
&out_vkic->images[i].handle, // out_image
&out_vkic->images[i].memory); // out_mem
2020-07-08 14:53:52 +00:00
if (ret != VK_SUCCESS) {
2020-08-13 20:00:55 +00:00
u_graphics_buffer_unref(&buf);
2020-07-08 14:53:52 +00:00
break;
}
native_images[i].handle = buf;
2020-07-08 14:53:52 +00:00
}
// Set the fields.
2021-11-08 22:53:52 +00:00
out_vkic->image_count = image_count;
2020-07-08 14:53:52 +00:00
out_vkic->info = *xscci;
if (ret == VK_SUCCESS) {
// We have consumed all handles now, close all of the copies we
2020-07-08 14:53:52 +00:00
// made, all this to make sure we do all or nothing.
2021-11-08 22:53:52 +00:00
for (size_t k = 0; k < image_count; k++) {
2020-08-13 20:00:55 +00:00
u_graphics_buffer_unref(&native_images[k].handle);
2020-07-08 14:53:52 +00:00
native_images[k].size = 0;
}
return ret;
}
// i is the index of the failed image, everything before that index
// succeeded and needs to be destroyed. If i is zero no call succeeded.
while (i > 0) {
i--;
destroy_image(vk, &out_vkic->images[i]);
}
U_ZERO(out_vkic);
return ret;
}
void
vk_ic_destroy(struct vk_bundle *vk, struct vk_image_collection *vkic)
{
2021-11-08 22:53:52 +00:00
for (size_t i = 0; i < vkic->image_count; i++) {
2020-07-08 14:53:52 +00:00
destroy_image(vk, &vkic->images[i]);
}
2021-11-08 22:53:52 +00:00
vkic->image_count = 0;
2020-07-08 14:53:52 +00:00
U_ZERO(&vkic->info);
}
VkResult
vk_ic_get_handles(struct vk_bundle *vk,
struct vk_image_collection *vkic,
uint32_t max_handles,
xrt_graphics_buffer_handle_t *out_handles)
2020-07-08 14:53:52 +00:00
{
VkResult ret = VK_SUCCESS;
size_t i = 0;
2021-11-08 22:53:52 +00:00
for (; i < vkic->image_count && i < max_handles; i++) {
ret = vk_get_native_handle_from_device_memory(vk, vkic->images[i].memory, &out_handles[i]);
2020-07-08 14:53:52 +00:00
if (ret != VK_SUCCESS) {
break;
}
}
if (ret == VK_SUCCESS) {
return ret;
}
// i is the index of the failed fd, everything before that index
// succeeded and needs to be closed. If i is zero no call succeeded.
while (i > 0) {
i--;
2020-08-13 20:00:55 +00:00
u_graphics_buffer_unref(&out_handles[i]);
2020-07-08 14:53:52 +00:00
}
return ret;
}