vk: Allow barriers to optionally skip renderpass splitting

- This is allowed by spec when we don't care about what happens outside the renderpass
kd-11 authored 2024-05-11 03:41:39 +03:00 (committed by kd-11)
parent 37c9392e59, commit 62eb9c2012
2 changed files with 33 additions and 15 deletions
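For context, a minimal sketch of how a caller might use the new flag (the call site below is hypothetical and not part of this diff; `cmd` and `tex` are assumed to be a vk::command_buffer with a renderpass currently open and a vk::image* that is read and written within it). Passing preserve_renderpass = true records the barrier in place and leaves the renderpass open; the default (false) keeps the old behaviour of ending the renderpass first.

	// Hypothetical call site, not part of this commit.
	// With preserve_renderpass = true the open renderpass is not split around the barrier.
	vk::insert_texture_barrier(cmd, tex, VK_IMAGE_LAYOUT_GENERAL, /* preserve_renderpass */ true);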


@@ -12,9 +12,10 @@ namespace vk
 		VkImageLayout current_layout, VkImageLayout new_layout,
 		VkPipelineStageFlags src_stage, VkPipelineStageFlags dst_stage,
 		VkAccessFlags src_mask, VkAccessFlags dst_mask,
-		const VkImageSubresourceRange& range)
+		const VkImageSubresourceRange& range,
+		bool preserve_renderpass)
 	{
-		if (vk::is_renderpass_open(cmd))
+		if (!preserve_renderpass && vk::is_renderpass_open(cmd))
 		{
 			vk::end_renderpass(cmd);
 		}
@@ -33,9 +34,15 @@ namespace vk
 		vkCmdPipelineBarrier(cmd, src_stage, dst_stage, 0, 0, nullptr, 0, nullptr, 1, &barrier);
 	}
 
-	void insert_buffer_memory_barrier(const vk::command_buffer& cmd, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize length, VkPipelineStageFlags src_stage, VkPipelineStageFlags dst_stage, VkAccessFlags src_mask, VkAccessFlags dst_mask)
+	void insert_buffer_memory_barrier(
+		const vk::command_buffer& cmd,
+		VkBuffer buffer,
+		VkDeviceSize offset, VkDeviceSize length,
+		VkPipelineStageFlags src_stage, VkPipelineStageFlags dst_stage,
+		VkAccessFlags src_mask, VkAccessFlags dst_mask,
+		bool preserve_renderpass)
 	{
-		if (vk::is_renderpass_open(cmd))
+		if (!preserve_renderpass && vk::is_renderpass_open(cmd))
 		{
 			vk::end_renderpass(cmd);
 		}
@@ -53,9 +60,13 @@ namespace vk
 		vkCmdPipelineBarrier(cmd, src_stage, dst_stage, 0, 0, nullptr, 1, &barrier, 0, nullptr);
 	}
 
-	void insert_global_memory_barrier(const vk::command_buffer& cmd, VkPipelineStageFlags src_stage, VkPipelineStageFlags dst_stage, VkAccessFlags src_access, VkAccessFlags dst_access)
+	void insert_global_memory_barrier(
+		const vk::command_buffer& cmd,
+		VkPipelineStageFlags src_stage, VkPipelineStageFlags dst_stage,
+		VkAccessFlags src_access, VkAccessFlags dst_access,
+		bool preserve_renderpass)
 	{
-		if (vk::is_renderpass_open(cmd))
+		if (!preserve_renderpass && vk::is_renderpass_open(cmd))
 		{
 			vk::end_renderpass(cmd);
 		}
@@ -67,13 +78,18 @@ namespace vk
 		vkCmdPipelineBarrier(cmd, src_stage, dst_stage, 0, 1, &barrier, 0, nullptr, 0, nullptr);
 	}
 
-	void insert_texture_barrier(const vk::command_buffer& cmd, VkImage image, VkImageLayout current_layout, VkImageLayout new_layout, VkImageSubresourceRange range)
+	void insert_texture_barrier(
+		const vk::command_buffer& cmd,
+		VkImage image,
+		VkImageLayout current_layout, VkImageLayout new_layout,
+		VkImageSubresourceRange range,
+		bool preserve_renderpass)
 	{
 		// NOTE: Sampling from an attachment in ATTACHMENT_OPTIMAL layout on some hw ends up with garbage output
 		// Transition to GENERAL if this resource is both input and output
 		// TODO: This implicitly makes the target incompatible with the renderpass declaration; investigate a proper workaround
 		// TODO: This likely throws out hw optimizations on the rest of the renderpass, manage carefully
-		if (vk::is_renderpass_open(cmd))
+		if (!preserve_renderpass && vk::is_renderpass_open(cmd))
 		{
 			vk::end_renderpass(cmd);
 		}
@@ -105,9 +121,9 @@ namespace vk
 		vkCmdPipelineBarrier(cmd, src_stage, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &barrier);
 	}
 
-	void insert_texture_barrier(const vk::command_buffer& cmd, vk::image* image, VkImageLayout new_layout)
+	void insert_texture_barrier(const vk::command_buffer& cmd, vk::image* image, VkImageLayout new_layout, bool preserve_renderpass)
 	{
-		insert_texture_barrier(cmd, image->value, image->current_layout, new_layout, { image->aspect(), 0, 1, 0, 1 });
+		insert_texture_barrier(cmd, image->value, image->current_layout, new_layout, { image->aspect(), 0, 1, 0, 1 }, preserve_renderpass);
 		image->current_layout = new_layout;
 	}
 }


@@ -8,19 +8,21 @@ namespace vk
 	class command_buffer;
 
 	//Texture barrier applies to a texture to ensure writes to it are finished before any reads are attempted to avoid RAW hazards
-	void insert_texture_barrier(const vk::command_buffer& cmd, VkImage image, VkImageLayout current_layout, VkImageLayout new_layout, VkImageSubresourceRange range);
-	void insert_texture_barrier(const vk::command_buffer& cmd, vk::image* image, VkImageLayout new_layout);
+	void insert_texture_barrier(const vk::command_buffer& cmd, VkImage image, VkImageLayout current_layout, VkImageLayout new_layout, VkImageSubresourceRange range, bool preserve_renderpass = false);
+	void insert_texture_barrier(const vk::command_buffer& cmd, vk::image* image, VkImageLayout new_layout, bool preserve_renderpass = false);
 
 	void insert_buffer_memory_barrier(const vk::command_buffer& cmd, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize length,
-		VkPipelineStageFlags src_stage, VkPipelineStageFlags dst_stage, VkAccessFlags src_mask, VkAccessFlags dst_mask);
+		VkPipelineStageFlags src_stage, VkPipelineStageFlags dst_stage, VkAccessFlags src_mask, VkAccessFlags dst_mask,
+		bool preserve_renderpass = false);
 
 	void insert_image_memory_barrier(const vk::command_buffer& cmd, VkImage image, VkImageLayout current_layout, VkImageLayout new_layout,
		VkPipelineStageFlags src_stage, VkPipelineStageFlags dst_stage, VkAccessFlags src_mask, VkAccessFlags dst_mask,
-		const VkImageSubresourceRange& range);
+		const VkImageSubresourceRange& range, bool preserve_renderpass = false);
 
 	void insert_global_memory_barrier(const vk::command_buffer& cmd,
 		VkPipelineStageFlags src_stage = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
 		VkPipelineStageFlags dst_stage = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
 		VkAccessFlags src_access = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
-		VkAccessFlags dst_access = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT);
+		VkAccessFlags dst_access = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
+		bool preserve_renderpass = false);
 }
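
Because every new parameter defaults to false, existing call sites compile unchanged and keep the renderpass-splitting behaviour; only callers that explicitly opt in skip the split. A sketch against the declarations above (the `cmd` variable is assumed; stage and access masks use the declared defaults):

	// Existing behaviour: if a renderpass is open, it is ended before the barrier is recorded.
	vk::insert_global_memory_barrier(cmd);

	// Opt-in path: the same barrier, but a currently open renderpass is preserved.
	vk::insert_global_memory_barrier(cmd,
		VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
		VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
		VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
		/* preserve_renderpass */ true);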