vk: Implement support for VK_EXT_attachment_feedback_loop_layout

This commit is contained in:
kd-11 2022-08-18 22:42:41 +03:00 committed by kd-11
parent 2e504b2dac
commit 71e35c8b4d
8 changed files with 125 additions and 308 deletions

View File

@ -40,6 +40,60 @@ namespace vk
fmt::throw_exception("Unknown compare op: 0x%x", static_cast<u32>(op));
}
}
void validate_image_layout_for_read_access(vk::command_buffer& cmd, vk::image_view* view, const rsx::sampled_image_descriptor_base* sampler_state)
{
switch (auto raw = view->image(); raw->current_layout)
{
default:
//case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
break;
case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
//ensure(sampler_state->upload_context == rsx::texture_upload_context::blit_engine_dst);
raw->change_layout(cmd, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
break;
case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
ensure(sampler_state->upload_context == rsx::texture_upload_context::blit_engine_src);
raw->change_layout(cmd, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
break;
case VK_IMAGE_LAYOUT_GENERAL:
case VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT:
ensure(sampler_state->upload_context == rsx::texture_upload_context::framebuffer_storage);
if (!sampler_state->is_cyclic_reference)
{
// This was used in a cyclic ref before, but is missing a barrier
// No need for a full stall, use a custom barrier instead
VkPipelineStageFlags src_stage;
VkAccessFlags src_access;
if (raw->aspect() == VK_IMAGE_ASPECT_COLOR_BIT)
{
src_stage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
src_access = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
}
else
{
src_stage = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
src_access = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
}
vk::insert_image_memory_barrier(
cmd,
raw->value,
raw->current_layout, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
src_stage, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
src_access, VK_ACCESS_SHADER_READ_BIT,
{ raw->aspect(), 0, 1, 0, 1 });
raw->current_layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
}
break;
case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
ensure(sampler_state->upload_context == rsx::texture_upload_context::framebuffer_storage);
raw->change_layout(cmd, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
break;
}
}
}
void VKGSRender::begin_render_pass()
@ -423,55 +477,7 @@ bool VKGSRender::bind_texture_env()
}
else
{
switch (auto raw = view->image(); raw->current_layout)
{
default:
//case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
break;
case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
//ensure(sampler_state->upload_context == rsx::texture_upload_context::blit_engine_dst);
raw->change_layout(*m_current_command_buffer, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
break;
case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
ensure(sampler_state->upload_context == rsx::texture_upload_context::blit_engine_src);
raw->change_layout(*m_current_command_buffer, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
break;
case VK_IMAGE_LAYOUT_GENERAL:
ensure(sampler_state->upload_context == rsx::texture_upload_context::framebuffer_storage);
if (!sampler_state->is_cyclic_reference)
{
// This was used in a cyclic ref before, but is missing a barrier
// No need for a full stall, use a custom barrier instead
VkPipelineStageFlags src_stage;
VkAccessFlags src_access;
if (raw->aspect() == VK_IMAGE_ASPECT_COLOR_BIT)
{
src_stage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
src_access = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
}
else
{
src_stage = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
src_access = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
}
vk::insert_image_memory_barrier(
*m_current_command_buffer,
raw->value,
VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
src_stage, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
src_access, VK_ACCESS_SHADER_READ_BIT,
{ raw->aspect(), 0, 1, 0, 1 });
raw->current_layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
}
break;
case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
ensure(sampler_state->upload_context == rsx::texture_upload_context::framebuffer_storage);
raw->change_layout(*m_current_command_buffer, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
break;
}
validate_image_layout_for_read_access(*m_current_command_buffer, view, sampler_state);
}
}
@ -565,54 +571,7 @@ bool VKGSRender::bind_texture_env()
continue;
}
switch (auto raw = image_ptr->image(); raw->current_layout)
{
default:
//case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
break;
case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
//ensure(sampler_state->upload_context == rsx::texture_upload_context::blit_engine_dst);
raw->change_layout(*m_current_command_buffer, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
break;
case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
ensure(sampler_state->upload_context == rsx::texture_upload_context::blit_engine_src);
raw->change_layout(*m_current_command_buffer, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
break;
case VK_IMAGE_LAYOUT_GENERAL:
ensure(sampler_state->upload_context == rsx::texture_upload_context::framebuffer_storage);
if (!sampler_state->is_cyclic_reference)
{
// Custom barrier, see similar block in FS stage
VkPipelineStageFlags src_stage;
VkAccessFlags src_access;
if (raw->aspect() == VK_IMAGE_ASPECT_COLOR_BIT)
{
src_stage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
src_access = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
}
else
{
src_stage = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
src_access = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
}
vk::insert_image_memory_barrier(
*m_current_command_buffer,
raw->value,
VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
src_stage, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
src_access, VK_ACCESS_SHADER_READ_BIT,
{ raw->aspect(), 0, 1, 0, 1 });
raw->current_layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
}
break;
case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
ensure(sampler_state->upload_context == rsx::texture_upload_context::framebuffer_storage);
raw->change_layout(*m_current_command_buffer, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
break;
}
validate_image_layout_for_read_access(*m_current_command_buffer, image_ptr, sampler_state);
m_program->bind_uniform({ vs_sampler_handles[i]->value, image_ptr->value, image_ptr->image()->current_layout },
i,
@ -680,56 +639,7 @@ bool VKGSRender::bind_interpreter_texture_env()
}
else
{
switch (auto raw = view->image(); raw->current_layout)
{
default:
//case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
break;
case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
//ensure(sampler_state->upload_context == rsx::texture_upload_context::blit_engine_dst);
raw->change_layout(*m_current_command_buffer, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
break;
case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
ensure(sampler_state->upload_context == rsx::texture_upload_context::blit_engine_src);
raw->change_layout(*m_current_command_buffer, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
break;
case VK_IMAGE_LAYOUT_GENERAL:
ensure(sampler_state->upload_context == rsx::texture_upload_context::framebuffer_storage);
if (!sampler_state->is_cyclic_reference)
{
// This was used in a cyclic ref before, but is missing a barrier
// No need for a full stall, use a custom barrier instead
VkPipelineStageFlags src_stage;
VkAccessFlags src_access;
if (raw->aspect() == VK_IMAGE_ASPECT_COLOR_BIT)
{
src_stage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
src_access = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
}
else
{
src_stage = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
src_access = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
}
vk::insert_image_memory_barrier(
*m_current_command_buffer,
raw->value,
VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
src_stage, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
src_access, VK_ACCESS_SHADER_READ_BIT,
{ raw->aspect(), 0, 1, 0, 1 });
raw->current_layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
}
break;
case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
ensure(sampler_state->upload_context == rsx::texture_upload_context::framebuffer_storage);
ensure(!sampler_state->is_cyclic_reference);
raw->change_layout(*m_current_command_buffer, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
break;
}
validate_image_layout_for_read_access(*m_current_command_buffer, view, sampler_state);
}
}

View File

@ -37,6 +37,8 @@ namespace vk
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
return static_cast<u64>(layout);
case VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT:
return 4ull;
default:
fmt::throw_exception("Unsupported layout 0x%llx here", static_cast<usz>(layout));
}
@ -50,6 +52,8 @@ namespace vk
case 2:
case 3:
return static_cast<VkImageLayout>(encoded);
case 4:
return VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT;
default:
fmt::throw_exception("Unsupported layout encoding 0x%llx here", encoded);
}
@ -75,6 +79,7 @@ namespace vk
{
switch (layout)
{
case VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT:
case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
case VK_IMAGE_LAYOUT_GENERAL:

View File

@ -796,7 +796,10 @@ namespace vk
if (!write_barrier_sync_tag) write_barrier_sync_tag++; // Activate barrier sync
cyclic_reference_sync_tag = write_barrier_sync_tag; // Match tags
vk::insert_texture_barrier(cmd, this, VK_IMAGE_LAYOUT_GENERAL);
const auto supports_fbo_loops = cmd.get_command_pool().get_owner().get_framebuffer_loops_support();
const auto optimal_layout = supports_fbo_loops ? VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT
: VK_IMAGE_LAYOUT_GENERAL;
vk::insert_texture_barrier(cmd, this, optimal_layout);
}
void render_target::reset_surface_counters()
@ -853,7 +856,7 @@ namespace vk
if (access == rsx::surface_access::shader_write && write_barrier_sync_tag != 0)
{
if (current_layout == VK_IMAGE_LAYOUT_GENERAL)
if (current_layout == VK_IMAGE_LAYOUT_GENERAL || current_layout == VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT)
{
if (write_barrier_sync_tag != cyclic_reference_sync_tag)
{
@ -877,7 +880,7 @@ namespace vk
dst_access = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
}
vk::insert_image_memory_barrier(cmd, value, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
vk::insert_image_memory_barrier(cmd, value, current_layout, current_layout,
src_stage, dst_stage, src_access, dst_access, { aspect(), 0, 1, 0, 1 });
write_barrier_sync_tag = 0; // Disable for next draw

View File

@ -112,7 +112,8 @@ namespace vk
break;
case driver_vendor::AMD:
case driver_vendor::RADV:
if (vk::get_chip_family() >= chip_class::AMD_navi1x)
if ((vk::get_chip_family() >= chip_class::AMD_navi1x) &&
!vk::get_current_renderer()->get_framebuffer_loops_support())
{
// Only needed for GFX10+
return { 0, VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT };

View File

@ -21,3 +21,18 @@
#endif
#include <util/types.hpp>
// Compatibility shim: VK_EXT_attachment_feedback_loop_layout was added to the
// Vulkan headers in version 224. When building against older headers, define the
// extension's enums and feature struct manually. The numeric values mirror the
// official vulkan_core.h definitions (extension number 340 => enum base 1000339000).
#if VK_HEADER_VERSION < 224
#define VK_EXT_attachment_feedback_loop_layout 1
#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_EXTENSION_NAME "VK_EXT_attachment_feedback_loop_layout"
#define VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT static_cast<VkImageLayout>(1000339000)
#define VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT static_cast<VkStructureType>(1000339000)

// Feature query struct, chained into VkPhysicalDeviceFeatures2::pNext to detect
// attachmentFeedbackLoopLayout support on the physical device.
typedef struct VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT {
VkStructureType sType;
void* pNext;
VkBool32 attachmentFeedbackLoopLayout;
} VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
#endif

View File

@ -34,7 +34,8 @@ namespace vk
features2.pNext = nullptr;
VkPhysicalDeviceFloat16Int8FeaturesKHR shader_support_info{};
VkPhysicalDeviceDescriptorIndexingFeatures descriptor_indexing_info{};
VkPhysicalDeviceDescriptorIndexingFeatures descriptor_indexing_info{};
VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT fbo_loops_info{};
if (device_extensions.is_supported(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME))
{
@ -57,6 +58,13 @@ namespace vk
descriptor_indexing_support = true;
}
if (device_extensions.is_supported(VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_EXTENSION_NAME))
{
fbo_loops_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT;
fbo_loops_info.pNext = features2.pNext;
features2.pNext = &fbo_loops_info;
}
auto _vkGetPhysicalDeviceFeatures2KHR = reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2KHR>(vkGetInstanceProcAddr(parent, "vkGetPhysicalDeviceFeatures2KHR"));
ensure(_vkGetPhysicalDeviceFeatures2KHR); // "vkGetInstanceProcAddress failed to find entry point!"
_vkGetPhysicalDeviceFeatures2KHR(dev, &features2);
@ -64,6 +72,7 @@ namespace vk
shader_types_support.allow_float64 = !!features2.features.shaderFloat64;
shader_types_support.allow_float16 = !!shader_support_info.shaderFloat16;
shader_types_support.allow_int8 = !!shader_support_info.shaderInt8;
framebuffer_loops_support = !!fbo_loops_info.attachmentFeedbackLoopLayout;
features = features2.features;
if (descriptor_indexing_support)
@ -669,36 +678,6 @@ namespace vk
}
}
// Raw handle accessors for the queues acquired at device creation time.
VkQueue render_device::get_present_queue() const
{
return m_present_queue;
}

VkQueue render_device::get_graphics_queue() const
{
return m_graphics_queue;
}

VkQueue render_device::get_transfer_queue() const
{
return m_transfer_queue;
}

u32 render_device::get_graphics_queue_family() const
{
return m_graphics_queue_family;
}

u32 render_device::get_present_queue_family() const
{
// NOTE(review): intentionally returns m_graphics_queue_family rather than a
// dedicated present family — presumably presentation happens on the graphics
// queue; confirm there is no separate present queue family tracked elsewhere.
return m_graphics_queue_family;
}

u32 render_device::get_transfer_queue_family() const
{
return m_transfer_queue_family;
}
const VkFormatProperties render_device::get_format_properties(VkFormat format) const
{
auto found = pgpu->format_properties.find(format);
@ -737,106 +716,6 @@ namespace vk
return false;
}
// Trivial accessors exposing the underlying physical device ('pgpu') capabilities
// and per-device cached state. All are pure reads with no side effects.
const physical_device& render_device::gpu() const
{
return *pgpu;
}

const memory_type_mapping& render_device::get_memory_mapping() const
{
return memory_map;
}

const gpu_formats_support& render_device::get_formats_support() const
{
return m_formats_support;
}

const pipeline_binding_table& render_device::get_pipeline_binding_table() const
{
return m_pipeline_binding_table;
}

const gpu_shader_types_support& render_device::get_shader_types_support() const
{
return pgpu->shader_types_support;
}

bool render_device::get_shader_stencil_export_support() const
{
return pgpu->stencil_export_support;
}

bool render_device::get_depth_bounds_support() const
{
return pgpu->features.depthBounds != VK_FALSE;
}

bool render_device::get_alpha_to_one_support() const
{
return pgpu->features.alphaToOne != VK_FALSE;
}

bool render_device::get_anisotropic_filtering_support() const
{
return pgpu->features.samplerAnisotropy != VK_FALSE;
}

bool render_device::get_wide_lines_support() const
{
return pgpu->features.wideLines != VK_FALSE;
}

bool render_device::get_conditional_render_support() const
{
return pgpu->conditional_render_support;
}

bool render_device::get_unrestricted_depth_range_support() const
{
return pgpu->unrestricted_depth_range_support;
}

bool render_device::get_external_memory_host_support() const
{
return pgpu->external_memory_host_support;
}

bool render_device::get_surface_capabilities_2_support() const
{
return pgpu->surface_capabilities_2_support;
}

bool render_device::get_debug_utils_support() const
{
// Debug utils are gated behind the renderdoc-compatibility config switch in
// addition to hardware support. ('renderdoc_compatiblity' spelling matches the
// config field declaration elsewhere in the project.)
return g_cfg.video.renderdoc_compatiblity && pgpu->debug_utils_support;
}

bool render_device::get_descriptor_indexing_support() const
{
return pgpu->descriptor_indexing_support;
}

u64 render_device::get_descriptor_update_after_bind_support() const
{
// Bitmask of descriptor types supporting update-after-bind, not a bool.
return pgpu->descriptor_update_after_bind_mask;
}

u32 render_device::get_descriptor_max_draw_calls() const
{
return pgpu->descriptor_max_draw_calls;
}

mem_allocator_base* render_device::get_allocator() const
{
// Non-owning pointer; the device retains ownership of the allocator.
return m_allocator.get();
}

render_device::operator VkDevice() const
{
return dev;
}
void render_device::rebalance_memory_type_usage()
{
// Rebalance device local memory types

View File

@ -62,6 +62,7 @@ namespace vk
bool debug_utils_support : 1 = false;
bool sampler_mirror_clamped_support : 1 = false;
bool descriptor_indexing_support : 1 = false;
bool framebuffer_loops_support : 1 = false;
u32 descriptor_max_draw_calls = DESCRIPTOR_MAX_DRAW_CALLS;
u64 descriptor_update_after_bind_mask = 0;
@ -132,37 +133,38 @@ namespace vk
bool get_compatible_memory_type(u32 typeBits, u32 desired_mask, u32* type_index) const;
void rebalance_memory_type_usage();
const physical_device& gpu() const;
const memory_type_mapping& get_memory_mapping() const;
const gpu_formats_support& get_formats_support() const;
const pipeline_binding_table& get_pipeline_binding_table() const;
const gpu_shader_types_support& get_shader_types_support() const;
const physical_device& gpu() const { return *pgpu; }
const memory_type_mapping& get_memory_mapping() const { return memory_map; }
const gpu_formats_support& get_formats_support() const { return m_formats_support; }
const pipeline_binding_table& get_pipeline_binding_table() const { return m_pipeline_binding_table; }
const gpu_shader_types_support& get_shader_types_support() const { return pgpu->shader_types_support; }
bool get_shader_stencil_export_support() const;
bool get_depth_bounds_support() const;
bool get_alpha_to_one_support() const;
bool get_anisotropic_filtering_support() const;
bool get_wide_lines_support() const;
bool get_conditional_render_support() const;
bool get_unrestricted_depth_range_support() const;
bool get_external_memory_host_support() const;
bool get_surface_capabilities_2_support() const;
bool get_debug_utils_support() const;
bool get_descriptor_indexing_support() const;
bool get_shader_stencil_export_support() const { return pgpu->stencil_export_support; }
bool get_depth_bounds_support() const { return pgpu->features.depthBounds != VK_FALSE; }
bool get_alpha_to_one_support() const { return pgpu->features.alphaToOne != VK_FALSE; }
bool get_anisotropic_filtering_support() const { return pgpu->features.samplerAnisotropy != VK_FALSE; }
bool get_wide_lines_support() const { return pgpu->features.wideLines != VK_FALSE; }
bool get_conditional_render_support() const { return pgpu->conditional_render_support; }
bool get_unrestricted_depth_range_support() const { return pgpu->unrestricted_depth_range_support; }
bool get_external_memory_host_support() const { return pgpu->external_memory_host_support; }
bool get_surface_capabilities_2_support() const { return pgpu->surface_capabilities_2_support; }
bool get_debug_utils_support() const { return g_cfg.video.renderdoc_compatiblity && pgpu->debug_utils_support; }
bool get_descriptor_indexing_support() const { return pgpu->descriptor_indexing_support; }
bool get_framebuffer_loops_support() const { return pgpu->framebuffer_loops_support; }
u64 get_descriptor_update_after_bind_support() const;
u32 get_descriptor_max_draw_calls() const;
u64 get_descriptor_update_after_bind_support() const { return pgpu->descriptor_update_after_bind_mask; }
u32 get_descriptor_max_draw_calls() const { return pgpu->descriptor_max_draw_calls; }
VkQueue get_present_queue() const;
VkQueue get_graphics_queue() const;
VkQueue get_transfer_queue() const;
u32 get_graphics_queue_family() const;
u32 get_present_queue_family() const;
u32 get_transfer_queue_family() const;
VkQueue get_present_queue() const { return m_present_queue; }
VkQueue get_graphics_queue() const { return m_graphics_queue; }
VkQueue get_transfer_queue() const { return m_transfer_queue; }
u32 get_graphics_queue_family() const { return m_graphics_queue_family; }
u32 get_present_queue_family() const { return m_graphics_queue_family; }
u32 get_transfer_queue_family() const { return m_transfer_queue_family; }
mem_allocator_base* get_allocator() const;
mem_allocator_base* get_allocator() const { return m_allocator.get(); }
operator VkDevice() const;
operator VkDevice() const { return dev; }
};
memory_type_mapping get_memory_mapping(const physical_device& dev);

View File

@ -117,6 +117,7 @@ namespace vk
dst_stage = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT;
break;
case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
case VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT:
barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
dst_stage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
break;
@ -189,6 +190,7 @@ namespace vk
src_stage = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
break;
case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
case VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT:
barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
src_stage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
break;