// Copyright 2019-2022, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
 * @file
 * @brief Common Vulkan code header.
 *
 * Note that some sections of this are generated
 * by `scripts/generate_vk_helpers.py` - lists of functions
 * and of optional extensions to check for. In those,
 * please update the script and run it, instead of editing
 * directly in this file. The generated parts are delimited
 * by special comments.
 *
 * @author Jakob Bornecrantz <jakob@collabora.com>
 * @author Lubosz Sarnecki <lubosz.sarnecki@collabora.com>
 * @author Moses Turner <moses@collabora.com>
 * @ingroup aux_vk
 */

#pragma once

#include "xrt/xrt_compiler.h"
#include "xrt/xrt_compositor.h"
#include "xrt/xrt_vulkan_includes.h"
#include "xrt/xrt_handles.h"
#include "util/u_logging.h"
#include "util/u_string_list.h"
#include "os/os_threading.h"


#ifdef __cplusplus
extern "C" {
#endif


/*
 *
 * Structs
 *
 */

/*!
 * A bundle of Vulkan functions and objects, used by both @ref comp and @ref
 * comp_client. Note that they both have different instances of the object, and
 * thus different VkInstance, etc.
 *
 * @ingroup aux_vk
 */
struct vk_bundle
{
	enum u_logging_level log_level;

	VkInstance instance;
	uint32_t version;
	VkPhysicalDevice physical_device;
	int physical_device_index;
	VkDevice device;
	uint32_t queue_family_index;
	uint32_t queue_index;
	VkQueue queue;

	struct os_mutex queue_mutex;

	struct
	{
#if defined(XRT_GRAPHICS_BUFFER_HANDLE_IS_WIN32_HANDLE)
		bool color_image_import_opaque_win32;
		bool color_image_export_opaque_win32;
		bool depth_image_import_opaque_win32;
		bool depth_image_export_opaque_win32;

		bool color_image_import_d3d11;
		bool color_image_export_d3d11;
		bool depth_image_import_d3d11;
		bool depth_image_export_d3d11;

#elif defined(XRT_GRAPHICS_BUFFER_HANDLE_IS_FD)
		bool color_image_import_opaque_fd;
		bool color_image_export_opaque_fd;
		bool depth_image_import_opaque_fd;
		bool depth_image_export_opaque_fd;

#elif defined(XRT_GRAPHICS_BUFFER_HANDLE_IS_AHARDWAREBUFFER)
		bool color_image_import_opaque_fd;
		bool color_image_export_opaque_fd;
		bool depth_image_import_opaque_fd;
		bool depth_image_export_opaque_fd;

		bool color_image_import_ahardwarebuffer;
		bool color_image_export_ahardwarebuffer;
		bool depth_image_import_ahardwarebuffer;
		bool depth_image_export_ahardwarebuffer;
#endif

#if defined(XRT_GRAPHICS_SYNC_HANDLE_IS_FD)
		bool fence_sync_fd;
		bool fence_opaque_fd;

		bool binary_semaphore_sync_fd;
		bool binary_semaphore_opaque_fd;

		bool timeline_semaphore_sync_fd;
		bool timeline_semaphore_opaque_fd;
#elif defined(XRT_GRAPHICS_SYNC_HANDLE_IS_WIN32_HANDLE)
		bool fence_win32_handle;

		bool binary_semaphore_d3d12_fence;
		bool binary_semaphore_win32_handle;

		bool timeline_semaphore_d3d12_fence;
		bool timeline_semaphore_win32_handle;
#else
#error "Need port for fence sync handles checkers"
#endif
	} external;

	// beginning of GENERATED instance extension code - do not modify - used by scripts
	bool has_EXT_display_surface_counter;
	// end of GENERATED instance extension code - do not modify - used by scripts

	// beginning of GENERATED device extension code - do not modify - used by scripts
	bool has_KHR_external_fence_fd;
	bool has_KHR_external_semaphore_fd;
	bool has_KHR_global_priority;
	bool has_KHR_image_format_list;
	bool has_KHR_maintenance1;
	bool has_KHR_maintenance2;
	bool has_KHR_maintenance3;
	bool has_KHR_maintenance4;
	bool has_KHR_timeline_semaphore;
	bool has_EXT_calibrated_timestamps;
	bool has_EXT_display_control;
	bool has_EXT_external_memory_dma_buf;
	bool has_EXT_global_priority;
	bool has_EXT_image_drm_format_modifier;
	bool has_EXT_robustness2;
	bool has_GOOGLE_display_timing;
	// end of GENERATED device extension code - do not modify - used by scripts

	struct
	{
		//! Are timestamps available for compute and graphics queues?
		bool timestamp_compute_and_graphics;

		//! Nanoseconds per gpu tick.
		float timestamp_period;

		//! Valid bits in the queue selected.
		uint32_t timestamp_valid_bits;

		//! Were timeline semaphore requested, available, and enabled?
		bool timeline_semaphore;

		//! Per stage limit on sampled images (includes combined).
		uint32_t max_per_stage_descriptor_sampled_images;

		//! Per stage limit on storage images.
		uint32_t max_per_stage_descriptor_storage_images;
	} features;

	//! Is the GPU a tegra device.
	bool is_tegra;

	VkDebugReportCallbackEXT debug_report_cb;

	VkPhysicalDeviceMemoryProperties device_memory_props;

	VkCommandPool cmd_pool;

	struct os_mutex cmd_pool_mutex;

	// Loader functions
	PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
	PFN_vkCreateInstance vkCreateInstance;
	PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties;

	// beginning of GENERATED instance loader code - do not modify - used by scripts
	PFN_vkDestroyInstance vkDestroyInstance;
	PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
	PFN_vkCreateDevice vkCreateDevice;
	PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;

	PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT;
	PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT;

	PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices;
	PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
	PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2;
	PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2;
	PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
	PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties;
	PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR;
	PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR;
	PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR;
	PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR;
	PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties;
	PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2;
	PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2;
	PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR;
	PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR;
	PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR;
	PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties;
	PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties;

#if defined(VK_EXT_calibrated_timestamps)
	PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT;
#endif // defined(VK_EXT_calibrated_timestamps)

#if defined(VK_USE_PLATFORM_DISPLAY_KHR)
	PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR;
	PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR;
	PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR;
	PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR;
	PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR;
	PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT;
#endif // defined(VK_USE_PLATFORM_DISPLAY_KHR)

#if defined(VK_USE_PLATFORM_XCB_KHR)
	PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR;
#endif // defined(VK_USE_PLATFORM_XCB_KHR)

#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
	PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR;
#endif // defined(VK_USE_PLATFORM_WAYLAND_KHR)

#if defined(VK_USE_PLATFORM_WAYLAND_KHR) && defined(VK_EXT_acquire_drm_display)
	PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT;
	PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT;
#endif // defined(VK_USE_PLATFORM_WAYLAND_KHR) && defined(VK_EXT_acquire_drm_display)

#if defined(VK_USE_PLATFORM_XLIB_XRANDR_EXT)
	PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT;
	PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT;
#endif // defined(VK_USE_PLATFORM_XLIB_XRANDR_EXT)

#if defined(VK_USE_PLATFORM_ANDROID_KHR)
	PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR;
#endif // defined(VK_USE_PLATFORM_ANDROID_KHR)

#if defined(VK_USE_PLATFORM_WIN32_KHR)
	PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR;
#endif // defined(VK_USE_PLATFORM_WIN32_KHR)

#if defined(VK_EXT_display_surface_counter)
	PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT;
#endif // defined(VK_EXT_display_surface_counter)

	// end of GENERATED instance loader code - do not modify - used by scripts

	// beginning of GENERATED device loader code - do not modify - used by scripts
	PFN_vkDestroyDevice vkDestroyDevice;
	PFN_vkDeviceWaitIdle vkDeviceWaitIdle;
	PFN_vkAllocateMemory vkAllocateMemory;
	PFN_vkFreeMemory vkFreeMemory;
	PFN_vkMapMemory vkMapMemory;
	PFN_vkUnmapMemory vkUnmapMemory;

	PFN_vkCreateBuffer vkCreateBuffer;
	PFN_vkDestroyBuffer vkDestroyBuffer;
	PFN_vkBindBufferMemory vkBindBufferMemory;

	PFN_vkCreateImage vkCreateImage;
	PFN_vkDestroyImage vkDestroyImage;
	PFN_vkBindImageMemory vkBindImageMemory;

	PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
	PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
	PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
	PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2;
	PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout;

	PFN_vkCreateImageView vkCreateImageView;
	PFN_vkDestroyImageView vkDestroyImageView;

	PFN_vkCreateSampler vkCreateSampler;
	PFN_vkDestroySampler vkDestroySampler;

	PFN_vkCreateShaderModule vkCreateShaderModule;
	PFN_vkDestroyShaderModule vkDestroyShaderModule;

	PFN_vkCreateQueryPool vkCreateQueryPool;
	PFN_vkDestroyQueryPool vkDestroyQueryPool;
	PFN_vkGetQueryPoolResults vkGetQueryPoolResults;

	PFN_vkCreateCommandPool vkCreateCommandPool;
	PFN_vkDestroyCommandPool vkDestroyCommandPool;
	PFN_vkResetCommandPool vkResetCommandPool;

	PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers;
	PFN_vkBeginCommandBuffer vkBeginCommandBuffer;
	PFN_vkCmdBeginQuery vkCmdBeginQuery;
	PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults;
	PFN_vkCmdEndQuery vkCmdEndQuery;
	PFN_vkCmdResetQueryPool vkCmdResetQueryPool;
	PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp;
	PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier;
	PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass;
	PFN_vkCmdSetScissor vkCmdSetScissor;
	PFN_vkCmdSetViewport vkCmdSetViewport;
	PFN_vkCmdClearColorImage vkCmdClearColorImage;
	PFN_vkCmdEndRenderPass vkCmdEndRenderPass;
	PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets;
	PFN_vkCmdBindPipeline vkCmdBindPipeline;
	PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers;
	PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer;
	PFN_vkCmdDraw vkCmdDraw;
	PFN_vkCmdDrawIndexed vkCmdDrawIndexed;
	PFN_vkCmdDispatch vkCmdDispatch;
	PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
	PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage;
	PFN_vkCmdCopyImage vkCmdCopyImage;
	PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer;
	PFN_vkCmdBlitImage vkCmdBlitImage;
	PFN_vkEndCommandBuffer vkEndCommandBuffer;
	PFN_vkFreeCommandBuffers vkFreeCommandBuffers;

	PFN_vkCreateRenderPass vkCreateRenderPass;
	PFN_vkDestroyRenderPass vkDestroyRenderPass;

	PFN_vkCreateFramebuffer vkCreateFramebuffer;
	PFN_vkDestroyFramebuffer vkDestroyFramebuffer;

	PFN_vkCreatePipelineCache vkCreatePipelineCache;
	PFN_vkDestroyPipelineCache vkDestroyPipelineCache;

	PFN_vkResetDescriptorPool vkResetDescriptorPool;
	PFN_vkCreateDescriptorPool vkCreateDescriptorPool;
	PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool;

	PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets;
	PFN_vkFreeDescriptorSets vkFreeDescriptorSets;

	PFN_vkCreateComputePipelines vkCreateComputePipelines;
	PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines;
	PFN_vkDestroyPipeline vkDestroyPipeline;

	PFN_vkCreatePipelineLayout vkCreatePipelineLayout;
	PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout;

	PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout;
	PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets;
	PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout;

	PFN_vkGetDeviceQueue vkGetDeviceQueue;
	PFN_vkQueueSubmit vkQueueSubmit;
	PFN_vkQueueWaitIdle vkQueueWaitIdle;

	PFN_vkCreateSemaphore vkCreateSemaphore;
#if defined(VK_KHR_timeline_semaphore)
	PFN_vkSignalSemaphoreKHR vkSignalSemaphore;
	PFN_vkWaitSemaphoresKHR vkWaitSemaphores;
	PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValue;
#endif // defined(VK_KHR_timeline_semaphore)
	PFN_vkDestroySemaphore vkDestroySemaphore;

	PFN_vkCreateFence vkCreateFence;
	PFN_vkWaitForFences vkWaitForFences;
	PFN_vkGetFenceStatus vkGetFenceStatus;
	PFN_vkDestroyFence vkDestroyFence;
	PFN_vkResetFences vkResetFences;

	PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR;
	PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR;
	PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR;
	PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR;
	PFN_vkQueuePresentKHR vkQueuePresentKHR;

#if defined(VK_USE_PLATFORM_WIN32_KHR)
	PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR;
	PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR;
	PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR;
	PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR;
	PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR;
#endif // defined(VK_USE_PLATFORM_WIN32_KHR)

#if !defined(VK_USE_PLATFORM_WIN32_KHR)
	PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR;
	PFN_vkGetFenceFdKHR vkGetFenceFdKHR;
	PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR;
	PFN_vkImportFenceFdKHR vkImportFenceFdKHR;
	PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR;
#endif // !defined(VK_USE_PLATFORM_WIN32_KHR)

#if defined(VK_USE_PLATFORM_ANDROID_KHR)
	PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID;
	PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID;
#endif // defined(VK_USE_PLATFORM_ANDROID_KHR)

#if defined(VK_EXT_calibrated_timestamps)
	PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT;
#endif // defined(VK_EXT_calibrated_timestamps)

	PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE;

#if defined(VK_EXT_display_control)
	PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT;
	PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT;
	PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT;
#endif // defined(VK_EXT_display_control)

#if defined(VK_EXT_image_drm_format_modifier)
	PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT;
#endif // defined(VK_EXT_image_drm_format_modifier)

	// end of GENERATED device loader code - do not modify - used by scripts
};

struct vk_buffer
{
	VkBuffer handle;
	VkDeviceMemory memory;
	uint32_t size;
	void *data;
};


/*
 *
 * String helper functions.
 *
 */

XRT_CHECK_RESULT const char *
vk_result_string(VkResult code);

XRT_CHECK_RESULT const char *
vk_format_string(VkFormat code);

XRT_CHECK_RESULT const char *
vk_present_mode_string(VkPresentModeKHR code);

XRT_CHECK_RESULT const char *
vk_power_state_string(VkDisplayPowerStateEXT code);

XRT_CHECK_RESULT const char *
vk_color_space_string(VkColorSpaceKHR code);

XRT_CHECK_RESULT const char *
vk_format_feature_string(VkFormatFeatureFlagBits code);

XRT_CHECK_RESULT const char *
xrt_swapchain_usage_string(enum xrt_swapchain_usage_bits code);


/*
 *
 * Function and helpers.
 *
 */

#define VK_TRACE(d, ...) U_LOG_IFL_T(d->log_level, __VA_ARGS__)
#define VK_DEBUG(d, ...) U_LOG_IFL_D(d->log_level, __VA_ARGS__)
#define VK_INFO(d, ...) U_LOG_IFL_I(d->log_level, __VA_ARGS__)
#define VK_WARN(d, ...) U_LOG_IFL_W(d->log_level, __VA_ARGS__)
#define VK_ERROR(d, ...) U_LOG_IFL_E(d->log_level, __VA_ARGS__)
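
/*
 * Illustrative use of the logging macros (not part of the API); `vk` is
 * assumed to be a `struct vk_bundle *` and `ret` a VkResult:
 *
 *     VK_DEBUG(vk, "selected queue family %u", vk->queue_family_index);
 *     VK_ERROR(vk, "vkCreateImage: %s", vk_result_string(ret));
 */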

/*!
 * @brief Check a Vulkan VkResult, writing an error to the log and returning true if not VK_SUCCESS
 *
 * @param fun a string literal with the name of the Vulkan function, for logging purposes.
 * @param res a VkResult from that function.
 * @param file a string literal with the source code filename, such as from __FILE__
 * @param line a source code line number, such as from __LINE__
 *
 * @see vk_check_error, vk_check_error_with_free which wrap this for easier usage.
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT bool
vk_has_error(VkResult res, const char *fun, const char *file, int line);

/*!
 * @def vk_check_error
 * @brief Perform checking of a Vulkan result, returning in case it is not VK_SUCCESS.
 *
 * @param fun A string literal with the name of the Vulkan function, for logging purposes.
 * @param res a VkResult from that function.
 * @param ret value to return, if any, upon error
 *
 * @see vk_has_error which is wrapped by this macro
 *
 * @ingroup aux_vk
 */
#define vk_check_error(fun, res, ret)                                                                                  \
	do {                                                                                                           \
		if (vk_has_error(res, fun, __FILE__, __LINE__))                                                        \
			return ret;                                                                                    \
	} while (0)

/*!
 * @def vk_check_error_with_free
 * @brief Perform checking of a Vulkan result, freeing an allocation and returning in case it is not VK_SUCCESS.
 *
 * @param fun A string literal with the name of the Vulkan function, for logging purposes.
 * @param res a VkResult from that function.
 * @param ret value to return, if any, upon error
 * @param to_free expression to pass to `free()` upon error
 *
 * @see vk_has_error which is wrapped by this macro
 *
 * @ingroup aux_vk
 */
#define vk_check_error_with_free(fun, res, ret, to_free)                                                               \
	do {                                                                                                           \
		if (vk_has_error(res, fun, __FILE__, __LINE__)) {                                                      \
			free(to_free);                                                                                 \
			return ret;                                                                                    \
		}                                                                                                      \
	} while (0)
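
/*
 * A minimal usage sketch (illustrative only; `example_create_fence` is a
 * hypothetical helper, not part of this header): create a fence through the
 * bundle's function pointers and let vk_check_error log and early-return on
 * failure.
 *
 *     static VkResult
 *     example_create_fence(struct vk_bundle *vk, VkFence *out_fence)
 *     {
 *             VkFenceCreateInfo info = {
 *                 .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
 *             };
 *
 *             VkResult ret = vk->vkCreateFence(vk->device, &info, NULL, out_fence);
 *             vk_check_error("vkCreateFence", ret, ret); // logs file/line and returns ret on error
 *
 *             return VK_SUCCESS;
 *     }
 */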


/*
 *
 * Printing helpers, in the vk_print.c file.
 *
 */

/*!
 * Print device information to the logger at the given logging level,
 * if the vk_bundle has that level enabled.
 *
 * @ingroup aux_vk
 */
void
vk_print_device_info(struct vk_bundle *vk,
                     enum u_logging_level log_level,
                     VkPhysicalDeviceProperties *pdp,
                     uint32_t gpu_index,
                     const char *title);

/*!
 * Print device information about the device that bundle manages at the given
 * logging level if the vk_bundle has that level enabled.
 *
 * @ingroup aux_vk
 */
void
vk_print_opened_device_info(struct vk_bundle *vk, enum u_logging_level log_level);

/*!
 * Print device features to the logger at the given logging level, if the
 * vk_bundle has that level enabled.
 */
void
vk_print_features_info(struct vk_bundle *vk, enum u_logging_level log_level);

/*!
 * Print external handle features to the logger at the given logging level,
 * if the vk_bundle has that level enabled.
 */
void
vk_print_external_handles_info(struct vk_bundle *vk, enum u_logging_level log_level);


/*
 *
 * Struct init functions, in the vk_function_loaders.c file.
 *
 */

/*!
 * Can be done on a completely bare bundle.
 *
 * @ingroup aux_vk
 */
VkResult
vk_get_loader_functions(struct vk_bundle *vk, PFN_vkGetInstanceProcAddr g);

/*!
 * Requires an instance to have been created and set on the bundle.
 *
 * @ingroup aux_vk
 */
VkResult
vk_get_instance_functions(struct vk_bundle *vk);

/*!
 * Requires a device to have been created and set on the bundle.
 *
 * @ingroup aux_vk
 */
VkResult
vk_get_device_functions(struct vk_bundle *vk);


/*
 *
 * Bundle init functions, in the vk_bundle_init.c file.
 *
 */

/*!
 * Only requires @ref vk_get_loader_functions to have been called.
 *
 * @ingroup aux_vk
 */
struct u_string_list *
vk_build_instance_extensions(struct vk_bundle *vk,
                             struct u_string_list *required_instance_ext_list,
                             struct u_string_list *optional_instance_ext_list);

/*!
 * Fills in the has_* fields of the vk_bundle given a prefiltered list of
 * instance extensions.
 */
void
vk_fill_in_has_instance_extensions(struct vk_bundle *vk, struct u_string_list *ext_list);

/*!
 * Set up the physical device; this is called by vk_create_device but also has
 * uses outside of that.
 *
 * @ingroup aux_vk
 */
VkResult
vk_select_physical_device(struct vk_bundle *vk, int forced_index);

/*!
 * Used to enable device features as an argument to @ref vk_create_device.
 *
 * @ingroup aux_vk
 */
struct vk_device_features
{
	bool shader_storage_image_write_without_format;
	bool null_descriptor;
	bool timeline_semaphore;
};

/*!
 * Creates a VkDevice and initialises the VkQueue.
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT VkResult
vk_create_device(struct vk_bundle *vk,
                 int forced_index,
                 bool only_compute,
                 VkQueueGlobalPriorityEXT global_priority,
                 struct u_string_list *required_device_ext_list,
                 struct u_string_list *optional_device_ext_list,
                 const struct vk_device_features *optional_device_features);
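
/*
 * Rough sketch of how these pieces usually fit together for a bundle that owns
 * its own instance and device (illustrative only; error handling and instance
 * creation details omitted, and the exact sequence used by the compositor
 * lives in the .c files):
 *
 *     vk_get_loader_functions(vk, vkGetInstanceProcAddr);
 *     // ... caller creates the VkInstance via vk->vkCreateInstance and
 *     //     stores it in vk->instance ...
 *     vk_get_instance_functions(vk);
 *
 *     struct vk_device_features features = {0};
 *     vk_create_device(vk, -1, false, VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT,
 *                      required_exts, optional_exts, &features);
 *
 *     vk_init_mutex(vk);    // declared below
 *     vk_init_cmd_pool(vk); // declared below
 */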

/*!
 * @brief Initialize mutexes in the @ref vk_bundle.
 *
 * Not required for all uses, but a precondition for some.
 *
 * @ingroup aux_vk
 */
VkResult
vk_init_mutex(struct vk_bundle *vk);

/*!
 * @brief De-initialize mutexes in the @ref vk_bundle.
 *
 * @ingroup aux_vk
 */
VkResult
vk_deinit_mutex(struct vk_bundle *vk);

/*!
 * Requires device and queue to have been set up.
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT VkResult
vk_init_cmd_pool(struct vk_bundle *vk);

/*!
 * Initialize a bundle with objects given to us by client code,
 * used by @ref client_vk_compositor in @ref comp_client.
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT VkResult
vk_init_from_given(struct vk_bundle *vk,
                   PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr,
                   VkInstance instance,
                   VkPhysicalDevice physical_device,
                   VkDevice device,
                   uint32_t queue_family_index,
                   uint32_t queue_index,
                   bool external_fence_fd_enabled,
                   bool external_semaphore_fd_enabled,
                   bool timeline_semaphore_enabled,
                   enum u_logging_level log_level);


/*
 *
 * Other functions.
 *
 */

/*!
 * @ingroup aux_vk
 */
bool
vk_get_memory_type(struct vk_bundle *vk, uint32_t type_bits, VkMemoryPropertyFlags memory_props, uint32_t *out_type_id);

/*!
 * Allocate memory for an image and bind it to that image.
 *
 * Handles the following steps:
 *
 * - calling vkGetImageMemoryRequirements
 * - comparing against the max_size
 * - getting the memory type (as dictated by the VkMemoryRequirements and
 *   VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)
 * - calling vkAllocateMemory
 * - calling vkBindImageMemory
 * - calling vkDestroyMemory in case of an error.
 *
 * If this fails, it cleans up the VkDeviceMemory.
 *
 * @param vk Vulkan bundle
 * @param image The VkImage to allocate for and bind.
 * @param max_size The maximum value you'll allow for
 *        VkMemoryRequirements::size. Pass SIZE_MAX if you will accept any size
 *        that works.
 * @param pNext_for_allocate (Optional) a pointer to use in the pNext chain of
 *        VkMemoryAllocateInfo.
 * @param caller_name Used for error printing, this function is called from
 *        various sources and takes next chains that could influence the result
 *        of various calls inside of it. Since it's up to this function to print
 *        any errors it will add the caller name to error messages.
 * @param out_mem Output parameter: will be set to the allocated memory if
 *        everything succeeds. Not modified if there is an error.
 * @param out_size (Optional) pointer to receive the value of
 *        VkMemoryRequirements::size.
 *
 * If this fails, you may want to destroy your VkImage as well, since this
 * routine is usually used in combination with vkCreateImage.
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT VkResult
vk_alloc_and_bind_image_memory(struct vk_bundle *vk,
                               VkImage image,
                               size_t max_size,
                               const void *pNext_for_allocate,
                               const char *caller_name,
                               VkDeviceMemory *out_mem,
                               VkDeviceSize *out_size);
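
/*
 * Sketch of the usual pairing with vkCreateImage (illustrative only;
 * `create_info` is assumed to have been filled in by the caller):
 *
 *     VkImage image = VK_NULL_HANDLE;
 *     VkDeviceMemory mem = VK_NULL_HANDLE;
 *
 *     VkResult ret = vk->vkCreateImage(vk->device, &create_info, NULL, &image);
 *     vk_check_error("vkCreateImage", ret, ret);
 *
 *     ret = vk_alloc_and_bind_image_memory(vk, image, SIZE_MAX, NULL, __func__, &mem, NULL);
 *     if (ret != VK_SUCCESS) {
 *             // Per the note above, also clean up the image on failure.
 *             vk->vkDestroyImage(vk->device, image, NULL);
 *             return ret;
 *     }
 */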

/*!
 *
 * @brief Creates a Vulkan device memory and image from a native graphics buffer handle.
 *
 * In case of error, ownership is never transferred and the caller should close the handle themselves.
 *
 * In case of success, the underlying Vulkan functionality's ownership semantics apply: ownership of the @p image_native
 * handle may have transferred, a reference may have been added, or the Vulkan objects may rely on the caller to keep
 * the native handle alive until the Vulkan objects are destroyed. Which option applies depends on the particular native
 * handle type used.
 *
 * See the corresponding specification texts:
 *
 * - Windows:
 *   https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkImportMemoryWin32HandleInfoKHR
 * - Linux: https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkImportMemoryFdInfoKHR
 * - Android:
 *   https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkImportAndroidHardwareBufferInfoANDROID
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT VkResult
vk_create_image_from_native(struct vk_bundle *vk,
                            const struct xrt_swapchain_create_info *info,
                            struct xrt_image_native *image_native,
                            VkImage *out_image,
                            VkDeviceMemory *out_mem);

/*!
 * Given a DeviceMemory handle created to be exportable, outputs the native buffer type (FD on desktop Linux)
 * equivalent.
 *
 * Caller assumes ownership of handle which should be unreferenced with @ref u_graphics_buffer_unref when no longer
 * needed.
 *
 * @param vk Vulkan bundle
 * @param device_memory The memory to get the handle of
 * @param[out] out_handle A pointer to the handle to populate
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT VkResult
vk_get_native_handle_from_device_memory(struct vk_bundle *vk,
                                        VkDeviceMemory device_memory,
                                        xrt_graphics_buffer_handle_t *out_handle);

/*!
 * @ingroup aux_vk
 * Helper to create a VkImage.
 */
VkResult
vk_create_image_simple(struct vk_bundle *vk,
                       VkExtent2D extent,
                       VkFormat format,
                       VkImageUsageFlags usage,
                       VkDeviceMemory *out_mem,
                       VkImage *out_image);

/*!
 * Helper to create a mutable R8G8B8A8 VkImage that specializes in the two
 * UNORM and SRGB variants of that format.
 *
 * @ingroup aux_vk
 */
VkResult
vk_create_image_mutable_rgba(
    struct vk_bundle *vk, VkExtent2D extent, VkImageUsageFlags usage, VkDeviceMemory *out_mem, VkImage *out_image);

/*!
 * @ingroup aux_vk
 * Helper to create a VkImage, with more options for tiling and memory storage.
 */
VkResult
vk_create_image_advanced(struct vk_bundle *vk,
                         VkExtent3D extent,
                         VkFormat format,
                         VkImageTiling image_tiling,
                         VkImageUsageFlags image_usage_flags,
                         VkMemoryPropertyFlags memory_property_flags,
                         VkDeviceMemory *out_mem,
                         VkImage *out_image);

/*!
 * @ingroup aux_vk
 */
VkResult
vk_create_sampler(struct vk_bundle *vk, VkSamplerAddressMode clamp_mode, VkSampler *out_sampler);


/*
 *
 * Helpers for creating image views.
 *
 */

/*!
 * @ingroup aux_vk
 */
VkResult
vk_create_view(struct vk_bundle *vk,
               VkImage image,
               VkImageViewType type,
               VkFormat format,
               VkImageSubresourceRange subresource_range,
               VkImageView *out_view);

/*!
 * @ingroup aux_vk
 */
VkResult
vk_create_view_swizzle(struct vk_bundle *vk,
                       VkImage image,
                       VkImageViewType type,
                       VkFormat format,
                       VkImageSubresourceRange subresource_range,
                       VkComponentMapping components,
                       VkImageView *out_view);

/*!
 * Creates an image view with a specific subset of usage, useful for mutable
 * images where one format might not support all usages defined by the image.
 *
 * @ingroup aux_vk
 */
VkResult
vk_create_view_usage(struct vk_bundle *vk,
                     VkImage image,
                     VkImageViewType type,
                     VkFormat format,
                     VkImageUsageFlags image_usage,
                     VkImageSubresourceRange subresource_range,
                     VkImageView *out_view);
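
/*
 * Sketch: viewing a mutable-format image through an SRGB "alias" format with
 * only the usage that format supports (illustrative values, not a recipe):
 *
 *     VkImageSubresourceRange range = {
 *         .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
 *         .levelCount = 1,
 *         .layerCount = 1,
 *     };
 *
 *     VkImageView srgb_view = VK_NULL_HANDLE;
 *     VkResult ret = vk_create_view_usage( //
 *         vk, image,                       // mutable image created elsewhere
 *         VK_IMAGE_VIEW_TYPE_2D,           //
 *         VK_FORMAT_R8G8B8A8_SRGB,         // alias format for the view
 *         VK_IMAGE_USAGE_SAMPLED_BIT,      // restricted usage for this view
 *         range, &srgb_view);
 */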


/*
 *
 * Helpers for creating descriptor pools and sets.
 *
 */

bool
vk_init_descriptor_pool(struct vk_bundle *vk,
                        const VkDescriptorPoolSize *pool_sizes,
                        uint32_t pool_size_count,
                        uint32_t set_count,
                        VkDescriptorPool *out_descriptor_pool);

bool
vk_allocate_descriptor_sets(struct vk_bundle *vk,
                            VkDescriptorPool descriptor_pool,
                            uint32_t count,
                            const VkDescriptorSetLayout *set_layout,
                            VkDescriptorSet *sets);


/*
 *
 * Helpers for creating buffers.
 *
 */

bool
vk_buffer_init(struct vk_bundle *vk,
               VkDeviceSize size,
               VkBufferUsageFlags usage,
               VkMemoryPropertyFlags properties,
               VkBuffer *out_buffer,
               VkDeviceMemory *out_mem);

void
vk_buffer_destroy(struct vk_buffer *self, struct vk_bundle *vk);

bool
vk_update_buffer(struct vk_bundle *vk, float *buffer, size_t buffer_size, VkDeviceMemory memory);
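
/*
 * Sketch: create a small host-visible uniform buffer and upload float data to
 * it (illustrative only; `data` and `count` are assumed to come from the caller):
 *
 *     VkBuffer buffer = VK_NULL_HANDLE;
 *     VkDeviceMemory memory = VK_NULL_HANDLE;
 *
 *     if (!vk_buffer_init(vk, count * sizeof(float),
 *                         VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
 *                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
 *                         &buffer, &memory)) {
 *             return false;
 *     }
 *
 *     return vk_update_buffer(vk, data, count * sizeof(float), memory);
 */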


/*
 *
 * Helpers for writing command buffers using the global command pool.
 *
 */

/*!
 * Create a new command buffer, takes the pool lock.
 *
 * @pre Requires successful call to vk_init_mutex.
 *
 * @ingroup aux_vk
 */
VkResult
vk_cmd_buffer_create(struct vk_bundle *vk, VkCommandBuffer *out_cmd_buffer);

/*!
 * Creates and begins a new command buffer, takes the pool lock.
 *
 * @pre Requires successful call to vk_init_mutex.
 *
 * @ingroup aux_vk
 */
VkResult
vk_cmd_buffer_create_and_begin(struct vk_bundle *vk, VkCommandBuffer *out_cmd_buffer);

/*!
 * A do-everything command buffer submission function; during the operation
 * the pool lock will be taken and released.
 *
 * * Creates a new fence.
 * * Submits @p cmd_buffer to the queue, along with the fence.
 * * Waits for the fence to complete.
 * * Destroys the fence.
 * * Destroys @p cmd_buffer.
 *
 * @pre Requires successful call to vk_init_mutex.
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT VkResult
vk_cmd_buffer_submit(struct vk_bundle *vk, VkCommandBuffer cmd_buffer);

/*!
 * Submits to the given queue, with the given fence.
 *
 * @pre Requires successful call to vk_init_mutex.
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT VkResult
vk_locked_submit(struct vk_bundle *vk, VkQueue queue, uint32_t count, const VkSubmitInfo *infos, VkFence fence);

/*!
 * Set the image layout using a barrier command, takes the pool lock.
 *
 * @pre Requires successful call to vk_init_mutex.
 *
 * @ingroup aux_vk
 */
void
vk_cmd_image_barrier_gpu(struct vk_bundle *vk,
                         VkCommandBuffer cmd_buffer,
                         VkImage image,
                         VkAccessFlags src_access_mask,
                         VkAccessFlags dst_access_mask,
                         VkImageLayout old_layout,
                         VkImageLayout new_layout,
                         VkImageSubresourceRange subresource_range);

/*!
 * Inserts an image barrier command, doesn't take any locks.
 *
 * @ingroup aux_vk
 */
void
vk_cmd_image_barrier_locked(struct vk_bundle *vk,
                            VkCommandBuffer cmd_buffer,
                            VkImage image,
                            VkAccessFlags src_access_mask,
                            VkAccessFlags dst_access_mask,
                            VkImageLayout old_image_layout,
                            VkImageLayout new_image_layout,
                            VkPipelineStageFlags src_stage_mask,
                            VkPipelineStageFlags dst_stage_mask,
                            VkImageSubresourceRange subresource_range);

/*!
 * Inserts an image barrier command specifically for GPU commands,
 * doesn't take any locks.
 *
 * @ingroup aux_vk
 */
void
vk_cmd_image_barrier_gpu_locked(struct vk_bundle *vk,
                                VkCommandBuffer cmd_buffer,
                                VkImage image,
                                VkAccessFlags src_access_mask,
                                VkAccessFlags dst_access_mask,
                                VkImageLayout old_layout,
                                VkImageLayout new_layout,
                                VkImageSubresourceRange subresource_range);


/*
 *
 * State creation helpers, in the vk_state_creators.c file.
 *
 */

/*!
 * Arguments to @ref vk_create_descriptor_pool function.
 */
struct vk_descriptor_pool_info
{
	uint32_t uniform_per_descriptor_count;        //!< VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
	uint32_t sampler_per_descriptor_count;        //!< VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
	uint32_t storage_image_per_descriptor_count;  //!< VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
	uint32_t storage_buffer_per_descriptor_count; //!< VK_DESCRIPTOR_TYPE_STORAGE_BUFFER

	//! The max count of created descriptors.
	uint32_t descriptor_count;

	//! Are descriptors freeable, or must vkResetDescriptorPool be used.
	bool freeable;
};

/*!
 * Creates a descriptor pool, made for a single layout.
 *
 * Does error logging.
 */
VkResult
vk_create_descriptor_pool(struct vk_bundle *vk,
                          const struct vk_descriptor_pool_info *info,
                          VkDescriptorPool *out_descriptor_pool);
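
/*
 * Sketch: a pool sized for `set_count` descriptor sets where each set uses
 * one uniform buffer and one combined image sampler (illustrative values):
 *
 *     struct vk_descriptor_pool_info info = {
 *         .uniform_per_descriptor_count = 1,
 *         .sampler_per_descriptor_count = 1,
 *         .storage_image_per_descriptor_count = 0,
 *         .storage_buffer_per_descriptor_count = 0,
 *         .descriptor_count = set_count,
 *         .freeable = false, // use vkResetDescriptorPool instead of freeing sets
 *     };
 *
 *     VkDescriptorPool pool = VK_NULL_HANDLE;
 *     VkResult ret = vk_create_descriptor_pool(vk, &info, &pool);
 */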

/*!
 * Creates a descriptor set.
 *
 * Does error logging.
 */
VkResult
vk_create_descriptor_set(struct vk_bundle *vk,
                         VkDescriptorPool descriptor_pool,
                         VkDescriptorSetLayout descriptor_layout,
                         VkDescriptorSet *out_descriptor_set);

/*!
 * Creates a pipeline layout from a single descriptor set layout.
 *
 * Does error logging.
 */
VkResult
vk_create_pipeline_layout(struct vk_bundle *vk,
                          VkDescriptorSetLayout descriptor_set_layout,
                          VkPipelineLayout *out_pipeline_layout);

/*!
 * Creates a pipeline cache.
 *
 * Does error logging.
 */
VkResult
vk_create_pipeline_cache(struct vk_bundle *vk, VkPipelineCache *out_pipeline_cache);

/*!
 * Creates a compute pipeline, assumes the entry function is called 'main'.
 *
 * Does error logging.
 */
VkResult
vk_create_compute_pipeline(struct vk_bundle *vk,
                           VkPipelineCache pipeline_cache,
                           VkShaderModule shader,
                           VkPipelineLayout pipeline_layout,
                           const VkSpecializationInfo *specialization_info,
                           VkPipeline *out_compute_pipeline);


/*
 *
 * Compositor buffer and swapchain image flags helpers, in the vk_compositor_flags.c file.
 *
 */

/*!
 * Return the external handle type that a buffer should be created with.
 *
 * cb = Compositor Buffer.
 */
VkExternalMemoryHandleTypeFlags
vk_cb_get_buffer_external_handle_type(struct vk_bundle *vk);

/*!
 * Helper listing all of the formats to check support for.
 *
 * These are the available formats we will expose to our clients.
 *
 * In order of what we prefer. Start with a SRGB format that works on
 * both OpenGL and Vulkan. The two linear formats that work on both
 * OpenGL and Vulkan. A SRGB format that only works on Vulkan. The last
 * two formats should not be used as they are linear but don't have
 * enough bits to express it without resulting in banding.
 *
 * The format VK_FORMAT_A2B10G10R10_UNORM_PACK32 is not listed since
 * 10 bits are not considered enough to do linear colors without
 * banding. If there was a sRGB variant of it then we would have used it
 * instead but there isn't. Since it's not a popular format it's best
 * not to list it rather than listing it and people falling into the
 * trap. The absolute minimum is R11G11B10, but it is a really weird format
 * so we are not exposing it.
 *
 * CSCI = Compositor SwapChain Images.
 *
 * @ingroup aux_vk
 */
#define VK_CSCI_FORMATS(THING_COLOR, THING_DS, THING_D, THING_S)                                                       \
	/* color formats */                                                                                            \
	THING_COLOR(R16G16B16A16_UNORM)  /* OGL VK */                                                                  \
	THING_COLOR(R16G16B16A16_SFLOAT) /* OGL VK */                                                                  \
	THING_COLOR(R16G16B16_UNORM)     /* OGL VK - Uncommon. */                                                      \
	THING_COLOR(R16G16B16_SFLOAT)    /* OGL VK - Uncommon. */                                                      \
	THING_COLOR(R8G8B8A8_SRGB)       /* OGL VK */                                                                  \
	THING_COLOR(B8G8R8A8_SRGB)       /* VK */                                                                      \
	THING_COLOR(R8G8B8_SRGB)         /* OGL VK - Uncommon. */                                                      \
	THING_COLOR(R8G8B8A8_UNORM)      /* OGL VK - Bad color precision. */                                           \
	THING_COLOR(B8G8R8A8_UNORM)      /* VK - Bad color precision. */                                               \
	THING_COLOR(R8G8B8_UNORM)        /* OGL VK - Uncommon. Bad color precision. */                                 \
	THING_COLOR(B8G8R8_UNORM)        /* VK - Uncommon. Bad color precision. */                                     \
	THING_COLOR(R5G6B5_UNORM_PACK16) /* OGL VK - Bad color precision. */                                           \
	THING_COLOR(R32_SFLOAT)          /* OGL VK */                                                                  \
	/* depth formats */                                                                                            \
	THING_D(D32_SFLOAT)              /* OGL VK */                                                                  \
	THING_D(D16_UNORM)               /* OGL VK */                                                                  \
	THING_D(X8_D24_UNORM_PACK32)     /* OGL VK */                                                                  \
	/* depth stencil formats */                                                                                    \
	THING_DS(D24_UNORM_S8_UINT)      /* OGL VK */                                                                  \
	THING_DS(D32_SFLOAT_S8_UINT)     /* OGL VK */                                                                  \
	/* stencil format */                                                                                           \
	THING_S(S8_UINT)
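
/*
 * VK_CSCI_FORMATS is an X-macro: each THING_* argument is itself a macro that
 * gets expanded once per listed format. A hypothetical use (not part of this
 * header) that builds a depth-stencil check could look like this:
 *
 *     #define EXAMPLE_IGNORE(FMT)
 *     #define EXAMPLE_CASE_DS(FMT) case VK_FORMAT_##FMT:
 *
 *     static bool
 *     example_is_depth_stencil_format(VkFormat format)
 *     {
 *             switch (format) {
 *             VK_CSCI_FORMATS(EXAMPLE_IGNORE, EXAMPLE_CASE_DS, EXAMPLE_IGNORE, EXAMPLE_IGNORE)
 *                     return true;
 *             default: return false;
 *             }
 *     }
 *
 *     #undef EXAMPLE_IGNORE
 *     #undef EXAMPLE_CASE_DS
 */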
|
|
|
|
|
2022-02-18 20:19:14 +00:00
|
|
|
/*!
|
|
|
|
* Returns the access flags for the compositor to app barriers.
|
2022-02-18 23:31:44 +00:00
|
|
|
*
|
|
|
|
* CSCI = Compositor SwapChain Images.
|
2022-02-18 20:19:14 +00:00
|
|
|
*/
|
2022-02-18 13:16:30 +00:00
|
|
|
VkAccessFlags
|
2022-02-18 20:19:14 +00:00
|
|
|
vk_csci_get_barrier_access_mask(enum xrt_swapchain_usage_bits bits);
|
2022-02-18 13:16:30 +00:00
|
|
|
|
|
|
|
/*!
|
2022-02-18 20:19:14 +00:00
|
|
|
* Return the optimal layout for this format, this is the layout as given to the
|
|
|
|
* app so is bound to the OpenXR spec.
|
2022-02-18 23:31:44 +00:00
|
|
|
*
|
|
|
|
* CSCI = Compositor SwapChain Images.
|
2022-02-18 13:16:30 +00:00
|
|
|
*/
|
|
|
|
VkImageLayout
|
2022-02-18 20:19:14 +00:00
|
|
|
vk_csci_get_barrier_optimal_layout(VkFormat format);
|
2022-02-18 13:16:30 +00:00
|
|
|
|
|
|
|
/*!
|
2022-02-18 20:19:14 +00:00
|
|
|
* Return the barrier aspect mask for this format, this is intended for the
|
|
|
|
* barriers that flush the data out before and after transfers between the
|
|
|
|
* application and compositor.
|
2022-02-18 23:31:44 +00:00
|
|
|
*
|
|
|
|
* CSCI = Compositor SwapChain Images.
|
2022-02-18 13:16:30 +00:00
|
|
|
*/
|
|
|
|
VkImageAspectFlags
|
2022-02-18 20:19:14 +00:00
|
|
|
vk_csci_get_barrier_aspect_mask(VkFormat format);
|
2022-02-18 13:16:30 +00:00
|
|
|
|
|
|
|
/*!
 * Returns the usage bits for a given format and usage.
 *
 * For color formats always adds:
 * * `VK_IMAGE_USAGE_SAMPLED_BIT` for compositor reading in shaders.
 *
 * For depth & stencil formats always adds:
 * * `VK_IMAGE_USAGE_SAMPLED_BIT` for compositor reading in shaders.
 *
 * For depth formats always adds:
 * * `VK_IMAGE_USAGE_SAMPLED_BIT` for compositor reading in shaders.
 *
 * For stencil formats always adds:
 * * `VK_IMAGE_USAGE_SAMPLED_BIT` for compositor reading in shaders.
 *
 * CSCI = Compositor SwapChain Images.
 */
VkImageUsageFlags
vk_csci_get_image_usage_flags(struct vk_bundle *vk, VkFormat format, enum xrt_swapchain_usage_bits bits);

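/*
 * Example (illustrative only): a sketch of feeding the result of
 * vk_csci_get_image_usage_flags() into a VkImageCreateInfo. The function name
 * and the fixed 2D/one-mip/exclusive-sharing settings are assumptions made for
 * brevity, not how the compositor actually creates its images.
 */
static inline VkImageCreateInfo
vk_example_csci_image_create_info(struct vk_bundle *vk,
                                  VkFormat format,
                                  VkExtent2D extent,
                                  enum xrt_swapchain_usage_bits bits)
{
        // Translate the XRT usage bits into Vulkan usage flags for this format.
        VkImageUsageFlags usage = vk_csci_get_image_usage_flags(vk, format, bits);

        VkImageCreateInfo info = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
            .imageType = VK_IMAGE_TYPE_2D,
            .format = format,
            .extent = {.width = extent.width, .height = extent.height, .depth = 1},
            .mipLevels = 1,
            .arrayLayers = 1,
            .samples = VK_SAMPLE_COUNT_1_BIT,
            .tiling = VK_IMAGE_TILING_OPTIMAL,
            .usage = usage,
            .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
            .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
        };

        return info;
}
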
/*!
 * For image views created by the compositor to sample the images, what aspect
 * should be set. For color it's color, for depth and stencil it's only depth
 * (sampling both is disallowed by the Vulkan spec), for depth-only it's depth,
 * and for stencil-only it's stencil.
 *
 * CSCI = Compositor SwapChain Images.
 */
VkImageAspectFlags
vk_csci_get_image_view_aspect(VkFormat format, enum xrt_swapchain_usage_bits bits);

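/*
 * Example (illustrative only): a sketch of a subresource range for a sampling
 * image view, using vk_csci_get_image_view_aspect() to pick the aspect. The
 * function name and the full mip/layer range are assumptions for brevity.
 */
static inline VkImageSubresourceRange
vk_example_csci_view_subresource_range(VkFormat format, enum xrt_swapchain_usage_bits bits)
{
        VkImageSubresourceRange range = {
            .aspectMask = vk_csci_get_image_view_aspect(format, bits), // Color, depth or stencil only.
            .baseMipLevel = 0,
            .levelCount = VK_REMAINING_MIP_LEVELS,
            .baseArrayLayer = 0,
            .layerCount = VK_REMAINING_ARRAY_LAYERS,
        };

        return range;
}
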
/*!
 * Return the external handle type that an image should be created with.
 *
 * CSCI = Compositor SwapChain Images.
 */
VkExternalMemoryHandleTypeFlags
vk_csci_get_image_external_handle_type(struct vk_bundle *vk, struct xrt_image_native *xin);


/*!
 * Get whether a given image can be imported/exported for a handle type.
 *
 * CSCI = Compositor SwapChain Images.
 */
void
vk_csci_get_image_external_support(struct vk_bundle *vk,
                                   VkFormat image_format,
                                   enum xrt_swapchain_usage_bits bits,
                                   VkExternalMemoryHandleTypeFlags handle_type,
                                   bool *out_importable,
                                   bool *out_exportable);

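/*
 * Example (illustrative only): a sketch of combining the two helpers above to
 * answer "can this image be exported with the handle type it would be created
 * with?". The function name is hypothetical; which xrt_image_native to pass,
 * if any, depends on the caller.
 */
static inline bool
vk_example_csci_image_is_exportable(struct vk_bundle *vk,
                                    struct xrt_image_native *xin,
                                    VkFormat format,
                                    enum xrt_swapchain_usage_bits bits)
{
        // Which external memory handle type images of this kind would use.
        VkExternalMemoryHandleTypeFlags handle_type = vk_csci_get_image_external_handle_type(vk, xin);

        bool importable = false;
        bool exportable = false;
        vk_csci_get_image_external_support(vk, format, bits, handle_type, &importable, &exportable);

        return exportable;
}
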
/*
 *
 * Sync objects, in the vk_sync_objects.c file.
 *
 */

/*!
 * Is there a good likelihood that the import/export of a timeline semaphore
 * will succeed, in other words, will the functions below work?
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT bool
vk_can_import_and_export_timeline_semaphore(struct vk_bundle *vk);

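/*
 * Example (illustrative only): a sketch of gating a feature on timeline
 * semaphore import/export support. The function name and the idea of falling
 * back to fences or binary semaphores are assumptions, not a policy this
 * header defines.
 */
static inline bool
vk_example_should_use_timeline_semaphores(struct vk_bundle *vk)
{
        // If import/export is unlikely to work, a caller would typically fall
        // back to binary semaphores or fences for cross-API synchronization.
        return vk_can_import_and_export_timeline_semaphore(vk);
}
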
/*!
 * @brief Creates a Vulkan fence, submits it to the default VkQueue and returns
 * its native graphics sync handle.
 *
 * In case of error, out_native is not touched by the function.
 *
 * See @ref vk_create_fence_sync_from_native for ownership semantics on import.
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT VkResult
vk_create_and_submit_fence_native(struct vk_bundle *vk, xrt_graphics_sync_handle_t *out_native);

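/*
 * Example (illustrative only): a sketch of exporting a fence as a native sync
 * handle and then releasing that handle again. The function name is
 * hypothetical, and it assumes the XRT_GRAPHICS_SYNC_HANDLE_INVALID initializer
 * and u_graphics_sync_unref() from xrt/xrt_handles.h are the right way to
 * handle the native handle on this platform.
 */
static inline VkResult
vk_example_export_and_release_fence(struct vk_bundle *vk)
{
        xrt_graphics_sync_handle_t native = XRT_GRAPHICS_SYNC_HANDLE_INVALID;

        // On success we own the native handle and must release it eventually.
        VkResult ret = vk_create_and_submit_fence_native(vk, &native);
        if (ret != VK_SUCCESS) {
                return ret; // out_native was not touched.
        }

        // A real caller would hand the handle to the client API here instead.
        u_graphics_sync_unref(&native);

        return VK_SUCCESS;
}
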
/*!
 * @brief Creates a Vulkan fence from a native graphics sync handle.
 *
 * In case of error, ownership is never transferred and the caller should close the handle themselves.
 *
 * In case of success, the underlying Vulkan functionality's ownership semantics apply: ownership of the @p native
 * handle may have transferred, a reference may have been added, or the Vulkan object may rely on the caller to keep the
 * native handle alive until the Vulkan object is destroyed. Which option applies depends on the particular native
 * handle type used.
 *
 * See the corresponding Vulkan specification text:
 * https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#synchronization-fences-importing
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT VkResult
vk_create_fence_sync_from_native(struct vk_bundle *vk, xrt_graphics_sync_handle_t native, VkFence *out_fence);

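/*
 * Example (illustrative only): a sketch of importing a native sync handle as a
 * VkFence and waiting on it. The function name and the one second timeout are
 * assumptions, and it presumes the vk_bundle function pointer table carries
 * vkWaitForFences/vkDestroyFence, as the generated list normally does.
 */
static inline VkResult
vk_example_wait_on_native_sync(struct vk_bundle *vk, xrt_graphics_sync_handle_t native)
{
        VkFence fence = VK_NULL_HANDLE;

        VkResult ret = vk_create_fence_sync_from_native(vk, native, &fence);
        if (ret != VK_SUCCESS) {
                // Ownership was not transferred, the caller still owns native.
                return ret;
        }

        // Wait up to one second for the work guarded by the handle to finish.
        ret = vk->vkWaitForFences(vk->device, 1, &fence, VK_TRUE, 1000u * 1000u * 1000u);

        vk->vkDestroyFence(vk->device, fence, NULL);

        return ret;
}
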
/*!
 * Creates a Vulkan semaphore and a native graphics sync handle.
 *
 * In case of success, the underlying Vulkan functionality's ownership semantics
 * apply: ownership of the @p native handle may have transferred, a reference
 * may have been added, or the Vulkan object may rely on the caller to keep the
 * native handle alive until the Vulkan object is destroyed. Which option
 * applies depends on the particular native handle type used.
 *
 * In case of error, neither @p out_sem nor @p out_native is touched by the
 * function, so the caller only becomes responsible for the outputs on success.
 *
 * See the corresponding Vulkan specification text:
 * https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#synchronization-semaphores
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT VkResult
vk_create_semaphore_and_native(struct vk_bundle *vk, VkSemaphore *out_sem, xrt_graphics_sync_handle_t *out_native);

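/*
 * Example (illustrative only): a sketch of creating an exportable semaphore
 * and tearing it down again. The function name is hypothetical, and it assumes
 * vkDestroySemaphore is in the vk_bundle function pointer table and that
 * u_graphics_sync_unref() is the right way to release the native handle here.
 * The semaphore is destroyed before the handle is released, matching the
 * ownership note above.
 */
static inline VkResult
vk_example_create_and_destroy_exportable_semaphore(struct vk_bundle *vk)
{
        VkSemaphore sem = VK_NULL_HANDLE;
        xrt_graphics_sync_handle_t native = XRT_GRAPHICS_SYNC_HANDLE_INVALID;

        VkResult ret = vk_create_semaphore_and_native(vk, &sem, &native);
        if (ret != VK_SUCCESS) {
                return ret; // Neither output was touched.
        }

        // A real caller would share the native handle with the client here.
        vk->vkDestroySemaphore(vk->device, sem, NULL);
        u_graphics_sync_unref(&native);

        return VK_SUCCESS;
}
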
#if defined(VK_KHR_timeline_semaphore) || defined(XRT_DOXYGEN)
/*!
 * Creates a Vulkan timeline semaphore and a native graphics sync
 * handle, see @ref vk_create_semaphore_and_native for more details.
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT VkResult
vk_create_timeline_semaphore_and_native(struct vk_bundle *vk,
                                        VkSemaphore *out_sem,
                                        xrt_graphics_sync_handle_t *out_native);
#endif

/*!
 * @brief Creates a Vulkan semaphore from a native graphics sync handle.
 *
 * In case of error, ownership is never transferred and the caller should close the handle themselves.
 *
 * In case of success, the underlying Vulkan functionality's ownership semantics apply: ownership of the @p native
 * handle may have transferred, a reference may have been added, or the Vulkan object may rely on the caller to keep the
 * native handle alive until the Vulkan object is destroyed. Which option applies depends on the particular native
 * handle type used.
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT VkResult
vk_create_semaphore_from_native(struct vk_bundle *vk, xrt_graphics_sync_handle_t native, VkSemaphore *out_sem);

#if defined(VK_KHR_timeline_semaphore) || defined(XRT_DOXYGEN)
/*!
 * @brief Creates a Vulkan timeline semaphore from a native graphics sync
 * handle, see @ref vk_create_semaphore_from_native for more details.
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT VkResult
vk_create_timeline_semaphore_from_native(struct vk_bundle *vk, xrt_graphics_sync_handle_t native, VkSemaphore *out_sem);
#endif

/*
 *
 * Time function(s), in the vk_time.c file.
 *
 */

#if defined(VK_EXT_calibrated_timestamps) || defined(XRT_DOXYGEN)
/*!
 * Convert timestamps in GPU ticks (as returned by VkQueryPool timestamp
 * queries) into host CPU nanoseconds, the same time domain as
 * @ref os_monotonic_get_ns.
 *
 * Note that the timestamps need to be in the past and not too old; this is
 * because not all GPUs have full 64-bit timer resolution. For instance an
 * Intel GPU "only" has 36 bits of valid timestamp and a tick period of
 * 83.3333 nanoseconds, equating to an epoch of 5726 seconds before
 * overflowing. The function can handle overflows happening between the given
 * timestamps and when it is called, but only one such epoch overflow; any more
 * will still be treated as a single overflow. So timestamps need to be
 * converted reasonably soon after they have been captured.
 *
 * @param vk The Vulkan bundle.
 * @param count Number of timestamps to be converted.
 * @param[in,out] in_out_timestamps Array of timestamps to be converted, done in place.
 *
 * @ingroup aux_vk
 */
XRT_CHECK_RESULT VkResult
vk_convert_timestamps_to_host_ns(struct vk_bundle *vk, uint32_t count, uint64_t *in_out_timestamps);
#endif

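/*
 * Example (illustrative only): a sketch of turning a pair of GPU tick values,
 * e.g. read back from vkCmdWriteTimestamp queries, into a duration in host
 * nanoseconds. The function name is hypothetical and the timestamps are
 * assumed to have been captured recently, per the note above.
 */
#if defined(VK_EXT_calibrated_timestamps)
static inline VkResult
vk_example_gpu_ticks_to_duration_ns(struct vk_bundle *vk,
                                    uint64_t begin_gpu_ticks,
                                    uint64_t end_gpu_ticks,
                                    uint64_t *out_duration_ns)
{
        // Converted in place into the os_monotonic_get_ns() time domain.
        uint64_t timestamps[2] = {begin_gpu_ticks, end_gpu_ticks};

        VkResult ret = vk_convert_timestamps_to_host_ns(vk, 2, timestamps);
        if (ret != VK_SUCCESS) {
                return ret;
        }

        *out_duration_ns = timestamps[1] - timestamps[0];

        return VK_SUCCESS;
}
#endif
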
#ifdef __cplusplus
}
#endif