// Copyright 2019, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
 * @file
 * @brief Main compositor written using Vulkan implementation.
 * @author Jakob Bornecrantz <jakob@collabora.com>
 * @author Lubosz Sarnecki <lubosz.sarnecki@collabora.com>
 * @author Ryan Pavlik <ryan.pavlik@collabora.com>
 * @ingroup comp
 *
 *
 * begin_frame and end_frame delimit the application's work on graphics for a
 * single frame. end_frame updates our estimate of the app's graphics duration,
 * as well as the "swap interval" for scheduling the application.
 *
 * We have some known overhead work required to composite a frame: eventually
 * this may be measured as well. Overhead plus the estimated app render duration
 * is compared to the frame duration: if it's longer, then we go to a "swap
 * interval" of 2 (a worked numeric example follows this comment).
 *
 * wait_frame must be the one to produce the next predicted display time,
 * because we cannot distinguish two sequential wait_frame calls (an app
 * skipping a frame) from an OS scheduling blip causing the second wait_frame to
 * happen before the first begin_frame actually gets executed. It cannot use the
 * last display time in this computation for this reason. (Except perhaps to
 * align the period at a sub-frame level? e.g. it should be a multiple of the
 * frame duration after the last displayed time.)
 *
 * wait_frame should not actually produce the predicted display time until it's
 * done waiting: it should wake up once a frame and see what the current swap
 * interval suggests: this handles the case where end_frame changes the swap
 * interval from 2 to 1 during a wait_frame call. (That is, we should wait until
 * whichever is closer of the next vsync or the time we currently predict we
 * should release the app.)
 *
 * Sleeping can be a bit hairy: in general right now we'll use a combination of
 * operating system sleeps and busy-waits (for fine-grained waiting). Some
 * platforms provide vsync-related sync primitives that may get us closer to our
 * desired time. This is also convenient for the "wait until next frame"
 * behavior.
 */
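
/*
 * Worked example (illustrative numbers only, assuming a 90 Hz display):
 * the nominal frame interval is then roughly 11.1 ms. If the estimated
 * app render duration is 12 ms and the compositor overhead is 2 ms, the
 * 14 ms total exceeds one interval, so ceil(14 / 11.1) gives a swap
 * interval of 2 and the application is released every other vsync.
 */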

#include <stdio.h>
#include <stdlib.h>
#include <stdarg.h>
#include <string.h>
#include <assert.h>

#include "os/os_time.h"

#include "util/u_debug.h"
#include "util/u_misc.h"
#include "util/u_time.h"

#include "main/comp_compositor.h"

#include <unistd.h>
#include <math.h>
2019-03-18 05:52:32 +00:00
|
|
|
|
2019-10-22 18:41:55 +00:00
|
|
|
/*!
|
|
|
|
*/
|
static void
compositor_destroy(struct xrt_compositor *xc)
{
	struct comp_compositor *c = comp_compositor(xc);
	struct vk_bundle *vk = &c->vk;

	COMP_DEBUG(c, "DESTROY");

	if (c->r) {
		comp_renderer_destroy(c->r);
		c->r = NULL;
	}

	if (c->window != NULL) {
		vk_swapchain_cleanup(&c->window->swapchain);
		c->window->destroy(c->window);
		c->window = NULL;
	}

	if (vk->cmd_pool != VK_NULL_HANDLE) {
		vk->vkDestroyCommandPool(vk->device, vk->cmd_pool, NULL);
		vk->cmd_pool = VK_NULL_HANDLE;
	}

	if (vk->device != VK_NULL_HANDLE) {
		vk->vkDestroyDevice(vk->device, NULL);
		vk->device = VK_NULL_HANDLE;
	}

	vk_destroy_validation_callback(vk);

	if (vk->instance != VK_NULL_HANDLE) {
		vk->vkDestroyInstance(vk->instance, NULL);
		vk->instance = VK_NULL_HANDLE;
	}

	free(c);
}

static void
compositor_begin_session(struct xrt_compositor *xc, enum xrt_view_type type)
{
	struct comp_compositor *c = comp_compositor(xc);
	COMP_DEBUG(c, "BEGIN_SESSION");
}

static void
compositor_end_session(struct xrt_compositor *xc)
{
	struct comp_compositor *c = comp_compositor(xc);
	COMP_DEBUG(c, "END_SESSION");
}

/*!
 * @brief Utility for waiting (for rendering purposes) until the next vsync or a
 * specified time point, whichever comes first.
 *
 * Only for rendering - this will busy-wait if needed.
 *
 * @return true if we waited until the time indicated
 *
 * @todo In the future, this may differ between platforms since some have ways
 * to directly wait on a vsync.
 */
static bool
compositor_wait_vsync_or_time(struct comp_compositor *c, int64_t wake_up_time)
{
	int64_t now_ns = time_state_get_now(c->timekeeping);

	/*!
	 * @todo this is not accurate, but it serves the purpose of not
	 * letting us sleep longer than the next vsync usually.
	 */
	int64_t next_vsync = now_ns + c->settings.nominal_frame_interval_ns / 2;

	bool ret = true;
	// Sleep until the sooner of vsync or our deadline.
	if (next_vsync < wake_up_time) {
		ret = false;
		wake_up_time = next_vsync;
	}

	int64_t wait_duration = wake_up_time - now_ns;
	if (wait_duration <= 0) {
		// Don't wait at all.
		return ret;
	}
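
	/*
	 * Coarse wait: sleep through the whole-millisecond part of the
	 * remaining duration via the OS (1000000 ns == 1 ms), leaving the
	 * final sub-millisecond slice to the busy-wait below.
	 */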
	if (wait_duration > 1000000) {
		os_nanosleep(wait_duration - (wait_duration % 1000000));
	}

	// Busy-wait for fine-grained delays.
	while (now_ns < wake_up_time) {
		now_ns = time_state_get_now(c->timekeeping);
	}

	return ret;
}

static void
compositor_wait_frame(struct xrt_compositor *xc,
                      int64_t *predicted_display_time,
                      int64_t *predicted_display_period)
{
	struct comp_compositor *c = comp_compositor(xc);
	COMP_SPEW(c, "WAIT_FRAME");

	// A little bit easier to read.
	int64_t interval_ns = (int64_t)c->settings.nominal_frame_interval_ns;

	int64_t now_ns = time_state_get_now(c->timekeeping);
	if (c->last_next_display_time == 0) {
		// First frame, we'll just assume we will display immediately.
		*predicted_display_period = interval_ns;
		c->last_next_display_time = now_ns + interval_ns;
		*predicted_display_time = c->last_next_display_time;
		return;
	}
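
	/*
	 * Prediction loop: each pass re-reads the (possibly updated) app
	 * duration estimate, derives a swap interval from it, predicts the
	 * next display time, and waits until either the corresponding app
	 * release time (display time minus render time) or the next vsync,
	 * whichever comes first. If we only reached a vsync, loop again and
	 * re-evaluate with the latest estimate.
	 */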
	// First estimate of next display time.
	while (1) {

		int64_t render_time_ns =
		    c->expected_app_duration_ns + c->frame_overhead_ns;
		int64_t swap_interval =
		    ceil((float)render_time_ns / interval_ns);
		int64_t render_interval_ns = swap_interval * interval_ns;
		int64_t next_display_time =
		    c->last_next_display_time + render_interval_ns;
		/*!
		 * @todo adjust next_display_time to be a multiple of
		 * interval_ns from c->last_frame_time_ns
		 */

		while ((next_display_time - render_time_ns) < now_ns) {
			// We can't unblock in the past.
			next_display_time += render_interval_ns;
		}

		if (compositor_wait_vsync_or_time(
		        c, (next_display_time - render_time_ns))) {
			// True return val means we actually waited for the
			// deadline.
			*predicted_display_period =
			    next_display_time - c->last_next_display_time;
			*predicted_display_time = next_display_time;

			c->last_next_display_time = next_display_time;
			return;
		}
	}
}

static void
compositor_begin_frame(struct xrt_compositor *xc)
{
	struct comp_compositor *c = comp_compositor(xc);
	COMP_SPEW(c, "BEGIN_FRAME");
	c->app_profiling.last_begin = time_state_get_now(c->timekeeping);
}

static void
compositor_discard_frame(struct xrt_compositor *xc)
{
	struct comp_compositor *c = comp_compositor(xc);
	COMP_SPEW(c, "DISCARD_FRAME");
}

static void
compositor_end_frame(struct xrt_compositor *xc,
                     enum xrt_blend_mode blend_mode,
                     struct xrt_swapchain **xscs,
                     const uint32_t *image_index,
                     uint32_t *layers,
                     uint32_t num_swapchains)
{
	struct comp_compositor *c = comp_compositor(xc);
	COMP_SPEW(c, "END_FRAME");

	struct comp_swapchain_image *right;
	struct comp_swapchain_image *left;

	// Stereo!
	if (num_swapchains == 2) {
		left = &comp_swapchain(xscs[0])->images[image_index[0]];
		right = &comp_swapchain(xscs[1])->images[image_index[1]];
		comp_renderer_frame(c->r, left, layers[0], right, layers[1]);
	} else {
		COMP_ERROR(c, "non-stereo rendering not supported");
	}

	// Record the time of this frame.
	c->last_frame_time_ns = time_state_get_now(c->timekeeping);
	c->app_profiling.last_end = c->last_frame_time_ns;

	//! @todo do a time-weighted average or something.
	c->expected_app_duration_ns =
	    c->app_profiling.last_end - c->app_profiling.last_begin;
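
	/*
	 * Note: this is the raw duration of the most recent frame only. One
	 * sketch of the time-weighted idea from the todo above would be an
	 * exponential moving average, e.g. (illustrative only):
	 *   new_estimate = (7 * old_estimate + measured) / 8;
	 */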
}


/*
 *
 * Vulkan functions.
 *
 */

#define GET_DEV_PROC(c, name)                                                  \
	(PFN_##name) c->vk.vkGetDeviceProcAddr(c->vk.device, #name);
#define GET_INS_PROC(c, name)                                                  \
	(PFN_##name) c->vk.vkGetInstanceProcAddr(c->vk.instance, #name);
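
/*
 * Usage sketch (illustrative, hypothetical member/function names): each
 * macro expands to a cast through the respective loader entry point, e.g.
 *   vk->vkCreateFence = GET_DEV_PROC(c, vkCreateFence)
 */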

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vkGetInstanceProcAddr(VkInstance instance, const char *pName);

static VkResult
find_get_instance_proc_addr(struct comp_compositor *c)
{
	//! @todo Do any library loading here.
	return vk_get_loader_functions(&c->vk, vkGetInstanceProcAddr);
}

#ifdef XRT_ENABLE_VK_VALIDATION
#define COMPOSITOR_DEBUG_VULKAN_EXTENSIONS VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
#else
#define COMPOSITOR_DEBUG_VULKAN_EXTENSIONS
#endif

#define COMPOSITOR_COMMON_VULKAN_EXTENSIONS                                    \
	COMPOSITOR_DEBUG_VULKAN_EXTENSIONS                                     \
	VK_KHR_SURFACE_EXTENSION_NAME,                                         \
	VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,                \
	VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,                    \
	VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME,                     \
	VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME

static const char *instance_extensions_none[] = {
    COMPOSITOR_COMMON_VULKAN_EXTENSIONS};

#ifdef VK_USE_PLATFORM_XCB_KHR
static const char *instance_extensions_xcb[] = {
    COMPOSITOR_COMMON_VULKAN_EXTENSIONS,
    VK_KHR_XCB_SURFACE_EXTENSION_NAME,
};
#endif

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
static const char *instance_extensions_wayland[] = {
    COMPOSITOR_COMMON_VULKAN_EXTENSIONS,
    VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME,
};
#endif

#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
static const char *instance_extensions_direct_mode[] = {
    COMPOSITOR_COMMON_VULKAN_EXTENSIONS,
    VK_KHR_DISPLAY_EXTENSION_NAME,
    VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME,
    VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME,
};
#endif

static VkResult
select_instances_extensions(struct comp_compositor *c,
                            const char ***out_exts,
                            uint32_t *out_num)
{
	switch (c->settings.window_type) {
	case WINDOW_NONE:
		*out_exts = instance_extensions_none;
		*out_num = ARRAY_SIZE(instance_extensions_none);
		break;
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
	case WINDOW_WAYLAND:
		*out_exts = instance_extensions_wayland;
		*out_num = ARRAY_SIZE(instance_extensions_wayland);
		break;
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
	case WINDOW_XCB:
		*out_exts = instance_extensions_xcb;
		*out_num = ARRAY_SIZE(instance_extensions_xcb);
		break;
#endif
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
	case WINDOW_DIRECT_RANDR:
	case WINDOW_DIRECT_NVIDIA:
		*out_exts = instance_extensions_direct_mode;
		*out_num = ARRAY_SIZE(instance_extensions_direct_mode);
		break;
#endif
	default: return VK_ERROR_INITIALIZATION_FAILED;
	}

	return VK_SUCCESS;
}

static VkResult
create_instance(struct comp_compositor *c)
{
	const char **instance_extensions;
	uint32_t num_extensions;
	VkResult ret;

	VkApplicationInfo app_info = {
	    .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
	    .pNext = NULL,
	    .pApplicationName = "Collabora Compositor",
	    .applicationVersion = 0,
	    .pEngineName = "Monado",
	    .engineVersion = 0,
	    .apiVersion = VK_MAKE_VERSION(1, 0, 2),
	};

	ret = select_instances_extensions(c, &instance_extensions,
	                                  &num_extensions);
	if (ret != VK_SUCCESS) {
		COMP_ERROR(c, "Failed to select instance extensions: %s",
		           vk_result_string(ret));
		return ret;
	}

	VkInstanceCreateInfo instance_info = {
	    .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
	    .pNext = NULL,
	    .flags = 0,
	    .pApplicationInfo = &app_info,
	    .enabledLayerCount = 0,
	    .ppEnabledLayerNames = NULL,
	    .enabledExtensionCount = num_extensions,
	    .ppEnabledExtensionNames = instance_extensions,
	};

#ifdef XRT_ENABLE_VK_VALIDATION
	const char *instance_layers[] = {
	    "VK_LAYER_LUNARG_standard_validation",
	};

	if (c->settings.validate_vulkan) {
		instance_info.enabledLayerCount = ARRAY_SIZE(instance_layers);
		instance_info.ppEnabledLayerNames = instance_layers;
	}
#endif

	ret = c->vk.vkCreateInstance(&instance_info, NULL, &c->vk.instance);
	if (ret != VK_SUCCESS) {
		COMP_ERROR(c, "vkCreateInstance: %s\n", vk_result_string(ret));
		COMP_ERROR(c, "Failed to create Vulkan instance");
		return ret;
	}

	ret = vk_get_instance_functions(&c->vk);
	if (ret != VK_SUCCESS) {
		COMP_ERROR(c, "Failed to get Vulkan instance functions: %s",
		           vk_result_string(ret));
		return ret;
	}

#ifdef XRT_ENABLE_VK_VALIDATION
	if (c->settings.validate_vulkan)
		vk_init_validation_callback(&c->vk);
#endif

	return ret;
}

static bool
compositor_init_vulkan(struct comp_compositor *c)
{
	VkResult ret;

	ret = find_get_instance_proc_addr(c);
	if (ret != VK_SUCCESS) {
		return false;
	}

	ret = create_instance(c);
	if (ret != VK_SUCCESS) {
		return false;
	}

	ret = vk_create_device(&c->vk, c->settings.gpu_index);
	if (ret != VK_SUCCESS) {
		return false;
	}

	ret = vk_init_cmd_pool(&c->vk);
	return ret == VK_SUCCESS;
}


/*
 *
 * Other functions.
 *
 */

void
comp_compositor_print(struct comp_compositor *c,
                      const char *func,
                      const char *fmt,
                      ...)
{
	fprintf(stderr, "%s - ", func);

	va_list args;
	va_start(args, fmt);
	vfprintf(stderr, fmt, args);
	va_end(args);

	fprintf(stderr, "\n");
}

static bool
compositor_check_vulkan_caps(struct comp_compositor *c)
{
	VkResult ret;

	// this is duplicative, but seems to be the easiest way to
	// 'pre-check' capabilities when window creation precedes vulkan
	// instance creation. we also need to load the VK_KHR_DISPLAY
	// extension.

	if (c->settings.window_type != WINDOW_AUTO) {
		COMP_DEBUG(c, "Skipping NVIDIA detection, window type forced.");
		return true;
	}
	COMP_DEBUG(c, "Checking for NVIDIA vulkan driver.");

	struct vk_bundle temp_vk = {0};
	ret = vk_get_loader_functions(&temp_vk, vkGetInstanceProcAddr);
	if (ret != VK_SUCCESS) {
		return false;
	}

	const char *extension_names[] = {
	    VK_KHR_SURFACE_EXTENSION_NAME,
	    VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
	    VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
	    VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME,
	    VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME,
	    VK_KHR_DISPLAY_EXTENSION_NAME,
	};

	VkInstanceCreateInfo instance_create_info = {
	    .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
	    .pNext = NULL,
	    .flags = 0,
	    .pApplicationInfo = NULL,
	    .enabledLayerCount = 0,
	    .ppEnabledLayerNames = NULL,
	    .enabledExtensionCount = ARRAY_SIZE(extension_names),
	    .ppEnabledExtensionNames = extension_names,
	};

	ret = temp_vk.vkCreateInstance(&instance_create_info, NULL,
	                               &(temp_vk.instance));
	if (ret != VK_SUCCESS) {
		COMP_ERROR(c, "Failed to create VkInstance: %s",
		           vk_result_string(ret));
		return false;
	}

	ret = vk_get_instance_functions(&temp_vk);
	if (ret != VK_SUCCESS) {
		COMP_ERROR(c, "Failed to get Vulkan instance functions: %s",
		           vk_result_string(ret));
		return false;
	}

	// follow same device selection logic as subsequent calls
	ret = vk_create_device(&temp_vk, c->settings.gpu_index);
	if (ret != VK_SUCCESS) {
		COMP_ERROR(c, "Failed to create VkDevice: %s",
		           vk_result_string(ret));
		return false;
	}

	bool nvidia_tests_passed = false;

#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
	VkPhysicalDeviceProperties physical_device_properties;
	temp_vk.vkGetPhysicalDeviceProperties(temp_vk.physical_device,
	                                      &physical_device_properties);

	if (physical_device_properties.vendorID == 0x10DE) {
		// our physical device is an nvidia card, we can
		// potentially select nvidia-specific direct mode.

		// we need to also check if we are confident that we can
		// create a direct mode display, if not we need to
		// abandon the attempt here, and allow desktop-window
		// fallback to occur.

		// get a list of attached displays
		uint32_t display_count;

		if (temp_vk.vkGetPhysicalDeviceDisplayPropertiesKHR(
		        temp_vk.physical_device, &display_count, NULL) !=
		    VK_SUCCESS) {
			COMP_ERROR(c, "Failed to get vulkan display count");
			nvidia_tests_passed = false;
		}

		VkDisplayPropertiesKHR *display_props =
		    U_TYPED_ARRAY_CALLOC(VkDisplayPropertiesKHR, display_count);

		if (display_props &&
		    temp_vk.vkGetPhysicalDeviceDisplayPropertiesKHR(
		        temp_vk.physical_device, &display_count,
		        display_props) != VK_SUCCESS) {
			COMP_ERROR(c, "Failed to get display properties");
			nvidia_tests_passed = false;
		}
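
		/*
		 * A whitelist entry matches when the display name starts
		 * with the entry string (the prefix compare below), so one
		 * entry can cover several panel name variants.
		 */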
		for (uint32_t i = 0; i < display_count; i++) {
			VkDisplayPropertiesKHR disp = *(display_props + i);
			// check this display against our whitelist
			uint32_t wl_elements = sizeof(NV_DIRECT_WHITELIST) /
			                       sizeof(NV_DIRECT_WHITELIST[0]);
			for (uint32_t j = 0; j < wl_elements; j++) {
				unsigned long wl_entry_length =
				    strlen(NV_DIRECT_WHITELIST[j]);
				unsigned long disp_entry_length =
				    strlen(disp.displayName);
				if (disp_entry_length >= wl_entry_length) {
					if (strncmp(NV_DIRECT_WHITELIST[j],
					            disp.displayName,
					            wl_entry_length) == 0) {
						// we have a match with
						// this whitelist entry.
						nvidia_tests_passed = true;
					}
				}
			}
		}

		free(display_props);
	}
#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT

	if (nvidia_tests_passed) {
		c->settings.window_type = WINDOW_DIRECT_NVIDIA;
		COMP_DEBUG(c, "Selecting direct NVIDIA window type!");
	} else {
		COMP_DEBUG(c, "Keeping auto window type!");
	}

	temp_vk.vkDestroyDevice(temp_vk.device, NULL);
	temp_vk.vkDestroyInstance(temp_vk.instance, NULL);

	return true;
}

static bool
compositor_try_window(struct comp_compositor *c, struct comp_window *window)
{
	if (window == NULL) {
		return false;
	}

	if (!window->init(window)) {
		window->destroy(window);
		return false;
	}
	COMP_DEBUG(c, "Window backend %s initialized!", window->name);
	c->window = window;
	return true;
}

static bool
compositor_init_window_pre_vulkan(struct comp_compositor *c)
{
	// Setup the initial width and height from the settings.
	c->current.width = c->settings.width;
	c->current.height = c->settings.height;

	// Nothing to do for nvidia.
	if (c->settings.window_type == WINDOW_DIRECT_NVIDIA) {
		return true;
	}

	switch (c->settings.window_type) {
	case WINDOW_AUTO:
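		/*
		 * Probe the backends in order of preference: Wayland first,
		 * then XLib/RandR direct mode, then a windowed XCB fallback;
		 * the first one that initializes wins.
		 */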
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
		if (compositor_try_window(c, comp_window_wayland_create(c))) {
			c->settings.window_type = WINDOW_WAYLAND;
			return true;
		}
#endif
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
		if (compositor_try_window(c, comp_window_direct_create(c))) {
			c->settings.window_type = WINDOW_DIRECT_RANDR;
			return true;
		}
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
		if (compositor_try_window(c, comp_window_xcb_create(c))) {
			c->settings.window_type = WINDOW_XCB;
			return true;
		}
#endif
		COMP_ERROR(c, "Failed to auto detect window support!");
		break;
	case WINDOW_XCB:
#ifdef VK_USE_PLATFORM_XCB_KHR
		compositor_try_window(c, comp_window_xcb_create(c));
#else
		COMP_ERROR(c, "XCB support not compiled in!");
#endif
		break;
	case WINDOW_WAYLAND:
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
		compositor_try_window(c, comp_window_wayland_create(c));
#else
		COMP_ERROR(c, "Wayland support not compiled in!");
#endif
		break;
	case WINDOW_DIRECT_RANDR:
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
		compositor_try_window(c, comp_window_direct_create(c));
#else
		COMP_ERROR(c, "Direct mode support not compiled in!");
#endif
		break;
	default: COMP_ERROR(c, "Unknown window type!"); break;
	}

	// Failed to create?
	return c->window != NULL;
}

static bool
compositor_init_window_post_vulkan(struct comp_compositor *c)
{
	if (c->settings.window_type != WINDOW_DIRECT_NVIDIA) {
		return true;
	}

#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
	return compositor_try_window(c, comp_window_direct_create(c));
#else
	assert(false &&
	       "NVIDIA direct mode depends on the xlib/xrandr direct mode.");
	return false;
#endif
}

static void
_sc_dimension_cb(uint32_t width, uint32_t height, void *ptr)
{
	struct comp_compositor *c = (struct comp_compositor *)ptr;

	COMP_DEBUG(c, "_sc_dimension_cb %dx%d", width, height);

	c->current.width = width;
	c->current.height = height;
}

static bool
compositor_init_swapchain(struct comp_compositor *c)
{
	//! @todo Make c->window->init_swapchain call vk_swapchain_init and
	//! give _sc_dimension_cb to the window, or just have it call a
	//! function?

	vk_swapchain_init(&c->window->swapchain, &c->vk, _sc_dimension_cb,
	                  (void *)c);
	if (!c->window->init_swapchain(c->window, c->current.width,
	                               c->current.height)) {
		COMP_ERROR(c, "Window init_swapchain failed!");
		goto err_destroy;
	}

	return true;

	// Error path.
err_destroy:
	c->window->destroy(c->window);
	c->window = NULL;

	return false;
}

static bool
compositor_init_renderer(struct comp_compositor *c)
{
	c->r = comp_renderer_create(c);
	return c->r != NULL;
}


struct xrt_compositor_fd *
xrt_gfx_provider_create_fd(struct xrt_device *xdev,
                           struct time_state *timekeeping,
                           bool flip_y)
{
	struct comp_compositor *c = U_TYPED_CALLOC(struct comp_compositor);

	c->base.base.create_swapchain = comp_swapchain_create;
	c->base.base.begin_session = compositor_begin_session;
	c->base.base.end_session = compositor_end_session;
	c->base.base.wait_frame = compositor_wait_frame;
	c->base.base.begin_frame = compositor_begin_frame;
	c->base.base.discard_frame = compositor_discard_frame;
	c->base.base.end_frame = compositor_end_frame;
	c->base.base.destroy = compositor_destroy;
	c->xdev = xdev;
	c->timekeeping = timekeeping;

	COMP_DEBUG(c, "Doing init %p", (void *)c);

	// Init the settings to default.
	comp_settings_init(&c->settings, xdev);

	c->settings.flip_y = flip_y;
	c->last_frame_time_ns = time_state_get_now(c->timekeeping);
	c->frame_overhead_ns = 2000000;
	//! @todo set this to an estimate that's better than 6ms
	c->expected_app_duration_ns = 6000000;


	// Need to select the window backend before creating Vulkan, then
	// init_swapchain will initialize the window fully and the swapchain,
	// and finally the renderer is created which renders to the
	// window/swapchain.

	// clang-format off
	if (!compositor_check_vulkan_caps(c) ||
	    !compositor_init_window_pre_vulkan(c) ||
	    !compositor_init_vulkan(c) ||
	    !compositor_init_window_post_vulkan(c) ||
	    !compositor_init_swapchain(c) ||
	    !compositor_init_renderer(c)) {
		COMP_DEBUG(c, "Failed to init compositor %p", (void *)c);
		c->base.base.destroy(&c->base.base);
		return NULL;
	}
	// clang-format on

	COMP_DEBUG(c, "Done %p", (void *)c);

	return &c->base;
}