c/render: Don't cache comp_rendering

This commit is contained in:
Jakob Bornecrantz 2021-09-29 17:35:05 +01:00
parent 847e322fe5
commit 4ca35b8b3c
4 changed files with 112 additions and 108 deletions

View file

@ -71,10 +71,11 @@ struct comp_renderer
int32_t fenced_buffer;
/*!
* Array of "renderings" equal in size to the number of comp_target images.
 * Array of "rendering" target resources equal in size to the number of
 * comp_target images. Each target resource holds all of the resources
 * needed to render to that target and its views.
*/
struct comp_rendering *rrs;
struct comp_rendering_target_resources *rtss;
struct comp_rendering_target_resources *rtr_array;
/*!
* Array of fences equal in size to the number of comp_target images.
@ -194,16 +195,13 @@ calc_viewport_data(struct comp_renderer *r,
//! @pre comp_target_has_images(r->c->target)
static void
renderer_build_rendering(struct comp_renderer *r,
struct comp_rendering *rr,
struct comp_rendering_target_resources *rts,
uint32_t index)
renderer_build_rendering_target_resources(struct comp_renderer *r,
struct comp_rendering_target_resources *rtr,
uint32_t index)
{
struct comp_compositor *c = r->c;
COMP_TRACE_MARKER();
/*
* Target
*/
struct comp_compositor *c = r->c;
struct comp_target_data data;
data.format = r->c->target->format;
@ -211,7 +209,18 @@ renderer_build_rendering(struct comp_renderer *r,
data.width = r->c->target->width;
data.height = r->c->target->height;
comp_rendering_target_resources_init(rts, c, &c->nr, r->c->target->images[index].view, &data);
comp_rendering_target_resources_init(rtr, c, &c->nr, r->c->target->images[index].view, &data);
}
//! @pre comp_target_has_images(r->c->target)
static void
renderer_build_rendering(struct comp_renderer *r,
struct comp_rendering *rr,
struct comp_rendering_target_resources *rtr)
{
COMP_TRACE_MARKER();
struct comp_compositor *c = r->c;
/*
@ -290,7 +299,7 @@ renderer_build_rendering(struct comp_renderer *r,
comp_draw_begin_target( //
rr, //
rts); //
rtr); //
/*
@ -336,7 +345,6 @@ renderer_build_rendering(struct comp_renderer *r,
static void
renderer_create_renderings_and_fences(struct comp_renderer *r)
{
assert(r->rrs == NULL);
assert(r->fences == NULL);
if (r->num_buffers == 0) {
COMP_ERROR(r->c, "Requested 0 command buffers.");
@ -349,11 +357,10 @@ renderer_create_renderings_and_fences(struct comp_renderer *r)
bool use_compute = r->settings->use_compute;
if (!use_compute) {
r->rrs = U_TYPED_ARRAY_CALLOC(struct comp_rendering, r->num_buffers);
r->rtss = U_TYPED_ARRAY_CALLOC(struct comp_rendering_target_resources, r->num_buffers);
r->rtr_array = U_TYPED_ARRAY_CALLOC(struct comp_rendering_target_resources, r->num_buffers);
for (uint32_t i = 0; i < r->num_buffers; ++i) {
renderer_build_rendering(r, &r->rrs[i], &r->rtss[i], i);
renderer_build_rendering_target_resources(r, &r->rtr_array[i], i);
}
}
@ -381,16 +388,13 @@ renderer_close_renderings_and_fences(struct comp_renderer *r)
{
struct vk_bundle *vk = &r->c->vk;
// Renderings
if (r->num_buffers > 0 && r->rrs != NULL) {
if (r->num_buffers > 0 && r->rtr_array != NULL) {
for (uint32_t i = 0; i < r->num_buffers; i++) {
comp_rendering_target_resources_close(&r->rtss[i]);
comp_rendering_close(&r->rrs[i]);
comp_rendering_target_resources_close(&r->rtr_array[i]);
}
free(r->rrs);
free(r->rtss);
r->rrs = NULL;
r->rtss = NULL;
free(r->rtr_array);
r->rtr_array = NULL;
}
// Fences
@ -523,7 +527,7 @@ renderer_create(struct comp_renderer *r, struct comp_compositor *c)
r->queue = VK_NULL_HANDLE;
r->semaphores.present_complete = VK_NULL_HANDLE;
r->semaphores.render_complete = VK_NULL_HANDLE;
r->rrs = NULL;
r->rtr_array = NULL;
struct vk_bundle *vk = &r->c->vk;
@ -747,7 +751,7 @@ get_image_view(const struct comp_swapchain_image *image, enum xrt_layer_composit
}
static void
dispatch_graphics(struct comp_renderer *r)
dispatch_graphics(struct comp_renderer *r, struct comp_rendering *rr)
{
COMP_TRACE_MARKER();
@ -761,7 +765,10 @@ dispatch_graphics(struct comp_renderer *r)
comp_target_update_timings(ct);
renderer_submit_queue(r, r->rrs[r->acquired_buffer].cmd);
uint32_t i = r->acquired_buffer;
renderer_build_rendering(r, rr, &r->rtr_array[i]);
renderer_submit_queue(r, rr->cmd);
}
@ -1155,11 +1162,12 @@ comp_renderer_draw(struct comp_renderer *r)
comp_target_update_timings(ct);
bool use_compute = r->settings->use_compute;
struct comp_rendering rr = {0};
struct comp_rendering_compute crc = {0};
if (use_compute) {
dispatch_compute(r, &crc);
} else {
dispatch_graphics(r);
dispatch_graphics(r, &rr);
}
renderer_present_swapchain_image(r, c->frame.rendering.desired_present_time_ns,
@ -1179,6 +1187,8 @@ comp_renderer_draw(struct comp_renderer *r)
if (use_compute) {
comp_rendering_compute_close(&crc);
} else {
comp_rendering_close(&rr);
}
/*

View file

@ -130,9 +130,6 @@ struct comp_resources
//! Shared for all rendering.
VkPipelineCache pipeline_cache;
//! Descriptor pool for mesh rendering.
VkDescriptorPool mesh_descriptor_pool;
/*
* Static
@ -160,6 +157,12 @@ struct comp_resources
uint32_t stride;
uint32_t offset_indices[2];
uint32_t total_num_indices;
//! Descriptor pool for mesh shaders.
VkDescriptorPool descriptor_pool;
//! Info ubos, only supports two views currently.
struct comp_buffer ubos[2];
} mesh;
struct
@ -325,8 +328,6 @@ struct comp_rendering_view
{
struct
{
struct comp_buffer ubo;
VkDescriptorSet descriptor_set;
} mesh;
};

View file

@ -142,21 +142,6 @@ create_descriptor_set(struct vk_bundle *vk,
return VK_SUCCESS;
}
static void
free_descriptor_set(struct vk_bundle *vk, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set)
{
VkResult ret;
ret = vk->vkFreeDescriptorSets(vk->device, //
descriptor_pool, // descriptorPool
1, // descriptorSetCount
&descriptor_set); // pDescriptorSets
if (ret != VK_SUCCESS) {
VK_DEBUG(vk, "vkFreeDescriptorSets failed: %s", vk_result_string(ret));
}
}
static VkResult
create_framebuffer(struct vk_bundle *vk,
VkImageView image_view,
@ -457,35 +442,6 @@ create_mesh_pipeline(struct vk_bundle *vk,
return VK_SUCCESS;
}
static bool
init_mesh_ubo_buffers(struct vk_bundle *vk, struct comp_buffer *l_ubo, struct comp_buffer *r_ubo)
{
// Using the same flags for all ubos.
VkBufferUsageFlags ubo_usage_flags = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
VkMemoryPropertyFlags memory_property_flags =
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Distortion ubo size.
VkDeviceSize ubo_size = sizeof(struct comp_mesh_ubo_data);
C(comp_buffer_init(vk, //
l_ubo, //
ubo_usage_flags, //
memory_property_flags, //
ubo_size)); // size
C(comp_buffer_map(vk, l_ubo));
C(comp_buffer_init(vk, //
r_ubo, //
ubo_usage_flags, //
memory_property_flags, //
ubo_size)); // size
C(comp_buffer_map(vk, r_ubo));
return true;
}
static void
update_mesh_discriptor_set(struct vk_bundle *vk,
uint32_t src_binding,
@ -620,21 +576,17 @@ comp_rendering_init(struct comp_rendering *rr, struct comp_compositor *c, struct
* Mesh per view
*/
C(create_descriptor_set(vk, // vk_bundle
r->mesh_descriptor_pool, // descriptor_pool
r->mesh.descriptor_set_layout, // descriptor_set_layout
&rr->views[0].mesh.descriptor_set)); // descriptor_set
C(create_descriptor_set( //
vk, // vk_bundle
r->mesh.descriptor_pool, // descriptor_pool
r->mesh.descriptor_set_layout, // descriptor_set_layout
&rr->views[0].mesh.descriptor_set)); // descriptor_set
C(create_descriptor_set(vk, // vk_bundle
r->mesh_descriptor_pool, // descriptor_pool
r->mesh.descriptor_set_layout, // descriptor_set_layout
&rr->views[1].mesh.descriptor_set)); // descriptor_set
if (!init_mesh_ubo_buffers(vk, //
&rr->views[0].mesh.ubo, //
&rr->views[1].mesh.ubo)) {
return false;
}
C(create_descriptor_set( //
vk, // vk_bundle
r->mesh.descriptor_pool, // descriptor_pool
r->mesh.descriptor_set_layout, // descriptor_set_layout
&rr->views[1].mesh.descriptor_set)); // descriptor_set
return true;
}
@ -659,10 +611,14 @@ comp_rendering_close(struct comp_rendering *rr)
struct vk_bundle *vk = &rr->c->vk;
struct comp_resources *r = rr->r;
comp_buffer_close(vk, &rr->views[0].mesh.ubo);
comp_buffer_close(vk, &rr->views[1].mesh.ubo);
DD(r->mesh_descriptor_pool, rr->views[0].mesh.descriptor_set);
DD(r->mesh_descriptor_pool, rr->views[1].mesh.descriptor_set);
// Reclaimed by vkResetDescriptorPool.
rr->views[0].mesh.descriptor_set = VK_NULL_HANDLE;
rr->views[1].mesh.descriptor_set = VK_NULL_HANDLE;
vk->vkResetDescriptorPool( //
vk->device, //
r->mesh.descriptor_pool, //
0); //
U_ZERO(rr);
}
@ -850,15 +806,15 @@ comp_draw_update_distortion(struct comp_rendering *rr,
struct comp_resources *r = rr->r;
struct comp_rendering_view *v = &rr->views[view_index];
comp_buffer_write(vk, &v->mesh.ubo, data, sizeof(struct comp_mesh_ubo_data));
comp_buffer_write(vk, &r->mesh.ubos[view_index], data, sizeof(struct comp_mesh_ubo_data));
update_mesh_discriptor_set( //
vk, // vk_bundle
r->mesh.src_binding, // src_binding
sampler, // sampler
image_view, // image_view
r->mesh.ubo_binding, // ubo_binding
v->mesh.ubo.buffer, // buffer
VK_WHOLE_SIZE, // size
v->mesh.descriptor_set); // descriptor_set
update_mesh_discriptor_set( //
vk, // vk_bundle
r->mesh.src_binding, // src_binding
sampler, // sampler
image_view, // image_view
r->mesh.ubo_binding, // ubo_binding
r->mesh.ubos[view_index].buffer, // buffer
VK_WHOLE_SIZE, // size
v->mesh.descriptor_set); // descriptor_set
}

View file

@ -262,6 +262,35 @@ init_mesh_vertex_buffers(struct vk_bundle *vk,
return true;
}
static bool
init_mesh_ubo_buffers(struct vk_bundle *vk, struct comp_buffer *l_ubo, struct comp_buffer *r_ubo)
{
// Using the same flags for all ubos.
VkBufferUsageFlags ubo_usage_flags = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
VkMemoryPropertyFlags memory_property_flags =
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Distortion ubo size.
VkDeviceSize ubo_size = sizeof(struct comp_mesh_ubo_data);
C(comp_buffer_init(vk, //
l_ubo, //
ubo_usage_flags, //
memory_property_flags, //
ubo_size)); // size
C(comp_buffer_map(vk, l_ubo));
C(comp_buffer_init(vk, //
r_ubo, //
ubo_usage_flags, //
memory_property_flags, //
ubo_size)); // size
C(comp_buffer_map(vk, r_ubo));
return true;
}
/*
*
@ -647,8 +676,8 @@ comp_resources_init(struct comp_compositor *c, struct comp_resources *r)
1, // num_sampler_per_desc
0, // num_storage_per_desc
16 * 2, // num_descs
true, // freeable
&r->mesh_descriptor_pool)); // out_descriptor_pool
false, // freeable
&r->mesh.descriptor_pool)); // out_descriptor_pool
C(create_mesh_descriptor_set_layout(vk, // vk_bundle
r->mesh.src_binding, // src_binding
@ -670,6 +699,12 @@ comp_resources_init(struct comp_compositor *c, struct comp_resources *r)
return false;
}
if (!init_mesh_ubo_buffers(vk, //
&r->mesh.ubos[0], //
&r->mesh.ubos[1])) {
return false;
}
/*
* Compute static.
@ -790,9 +825,11 @@ comp_resources_close(struct comp_compositor *c, struct comp_resources *r)
D(DescriptorSetLayout, r->mesh.descriptor_set_layout);
D(PipelineLayout, r->mesh.pipeline_layout);
D(PipelineCache, r->pipeline_cache);
D(DescriptorPool, r->mesh_descriptor_pool);
D(DescriptorPool, r->mesh.descriptor_pool);
comp_buffer_close(vk, &r->mesh.vbo);
comp_buffer_close(vk, &r->mesh.ibo);
comp_buffer_close(vk, &r->mesh.ubos[0]);
comp_buffer_close(vk, &r->mesh.ubos[1]);
D(DescriptorPool, r->compute.descriptor_pool);
D(DescriptorSetLayout, r->compute.descriptor_set_layout);