VideoBackends:Vulkan: Dynamic vertex loader support

TellowKrinkle 2022-06-18 22:49:03 -05:00
parent 7edc8b4e79
commit 936b4d5d0d
7 changed files with 71 additions and 21 deletions

View File

@@ -39,6 +39,8 @@ enum DESCRIPTOR_SET_LAYOUT
// - Per-stage UBO (VS/GS/PS, VS constants accessible from PS) [set=0, binding=0-2]
// - 8 combined image samplers (accessible from PS) [set=1, binding=0-7]
// - 1 SSBO accessible from PS if supported [set=2, binding=0]
// - Uber
// - Like standard, plus 1 SSBO accessible from VS if supported [set=2, binding=1]
// - Utility
// - 1 combined UBO, accessible from VS/GS/PS [set=0, binding=0]
// - 8 combined image samplers (accessible from PS) [set=1, binding=0-7]
@@ -55,6 +57,7 @@ enum DESCRIPTOR_SET_LAYOUT
enum PIPELINE_LAYOUT
{
PIPELINE_LAYOUT_STANDARD,
PIPELINE_LAYOUT_UBER,
PIPELINE_LAYOUT_UTILITY,
PIPELINE_LAYOUT_COMPUTE,
NUM_PIPELINE_LAYOUTS

View File

@@ -123,8 +123,10 @@ bool ObjectCache::CreateDescriptorSetLayouts()
VK_SHADER_STAGE_FRAGMENT_BIT},
}};
static const std::array<VkDescriptorSetLayoutBinding, 1> standard_ssbo_bindings{{
// The dynamic vertex loader's vertex buffer must be last here, for similar reasons
static const std::array<VkDescriptorSetLayoutBinding, 2> standard_ssbo_bindings{{
{0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT},
{1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_VERTEX_BIT},
}};
static const std::array<VkDescriptorSetLayoutBinding, 1> utility_ubo_bindings{{
@@ -173,6 +175,10 @@ bool ObjectCache::CreateDescriptorSetLayouts()
if (!g_ActiveConfig.backend_info.bSupportsGeometryShaders)
create_infos[DESCRIPTOR_SET_LAYOUT_STANDARD_UNIFORM_BUFFERS].bindingCount--;
// Remove the dynamic vertex loader's buffer if it'll never be needed
if (!g_ActiveConfig.backend_info.bSupportsDynamicVertexLoader)
create_infos[DESCRIPTOR_SET_LAYOUT_STANDARD_SHADER_STORAGE_BUFFERS].bindingCount--;
for (size_t i = 0; i < create_infos.size(); i++)
{
VkResult res = vkCreateDescriptorSetLayout(g_vulkan_context->GetDevice(), &create_infos[i],
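The two bindingCount-- trims above (geometry shader UBOs and the dynamic vertex loader SSBO) only work because the optional binding is the last entry of its array, which is what the new comment means by "must be last here". A minimal sketch of the trick, written with a standalone create info (ssbo_layout_info is illustrative; the real code fills the create_infos array):

// Sketch: an unsupported trailing binding is dropped by shrinking bindingCount.
// Binding 1 (the dynamic vertex loader SSBO) is last in standard_ssbo_bindings,
// so trimming it leaves binding 0 (the fragment-stage bounding-box SSBO) intact.
VkDescriptorSetLayoutCreateInfo ssbo_layout_info = {
    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, nullptr, 0,
    static_cast<u32>(standard_ssbo_bindings.size()), standard_ssbo_bindings.data()};
if (!g_ActiveConfig.backend_info.bSupportsDynamicVertexLoader)
  ssbo_layout_info.bindingCount--;  // layout ends at binding 0, matching pre-commit behaviour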
@@ -206,6 +212,11 @@ bool ObjectCache::CreatePipelineLayouts()
m_descriptor_set_layouts[DESCRIPTOR_SET_LAYOUT_STANDARD_SAMPLERS],
m_descriptor_set_layouts[DESCRIPTOR_SET_LAYOUT_STANDARD_SHADER_STORAGE_BUFFERS],
};
const std::array<VkDescriptorSetLayout, 3> uber_sets{
m_descriptor_set_layouts[DESCRIPTOR_SET_LAYOUT_STANDARD_UNIFORM_BUFFERS],
m_descriptor_set_layouts[DESCRIPTOR_SET_LAYOUT_STANDARD_SAMPLERS],
m_descriptor_set_layouts[DESCRIPTOR_SET_LAYOUT_STANDARD_SHADER_STORAGE_BUFFERS],
};
const std::array<VkDescriptorSetLayout, 2> utility_sets{
m_descriptor_set_layouts[DESCRIPTOR_SET_LAYOUT_UTILITY_UNIFORM_BUFFER],
m_descriptor_set_layouts[DESCRIPTOR_SET_LAYOUT_UTILITY_SAMPLERS],
@@ -220,6 +231,10 @@ bool ObjectCache::CreatePipelineLayouts()
{VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, nullptr, 0,
static_cast<u32>(standard_sets.size()), standard_sets.data(), 0, nullptr},
// Uber
{VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, nullptr, 0,
static_cast<u32>(uber_sets.size()), uber_sets.data(), 0, nullptr},
// Utility
{VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, nullptr, 0,
static_cast<u32>(utility_sets.size()), utility_sets.data(), 0, nullptr},
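Not shown in this hunk: each of these create infos presumably becomes a VkPipelineLayout in a loop along the following lines, so PIPELINE_LAYOUT_UBER ends up with its own handle next to the standard one. The names pipeline_layout_info and m_pipeline_layouts are assumptions, not taken from the diff:

// Sketch (assumed): create one VkPipelineLayout per entry, indexed by PIPELINE_LAYOUT.
for (size_t i = 0; i < pipeline_layout_info.size(); i++)
{
  VkResult res = vkCreatePipelineLayout(g_vulkan_context->GetDevice(), &pipeline_layout_info[i],
                                        nullptr, &m_pipeline_layouts[i]);
  if (res != VK_SUCCESS)
    return false;
}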

View File

@@ -77,14 +77,23 @@ bool StateTracker::Initialize()
return true;
}
void StateTracker::SetVertexBuffer(VkBuffer buffer, VkDeviceSize offset)
void StateTracker::SetVertexBuffer(VkBuffer buffer, VkDeviceSize offset, u32 size)
{
if (m_vertex_buffer == buffer && m_vertex_buffer_offset == offset)
return;
m_vertex_buffer = buffer;
m_vertex_buffer_offset = offset;
m_dirty_flags |= DIRTY_FLAG_VERTEX_BUFFER;
if (m_vertex_buffer != buffer || m_vertex_buffer_offset != offset)
{
m_vertex_buffer = buffer;
m_vertex_buffer_offset = offset;
m_dirty_flags |= DIRTY_FLAG_VERTEX_BUFFER;
}
if (m_bindings.gx_uber_vertex_ssbo.buffer != buffer ||
m_bindings.gx_uber_vertex_ssbo.offset != offset ||
m_bindings.gx_uber_vertex_ssbo.range != size)
{
m_bindings.gx_uber_vertex_ssbo.buffer = buffer;
m_bindings.gx_uber_vertex_ssbo.offset = offset;
m_bindings.gx_uber_vertex_ssbo.range = size;
m_dirty_flags |= DIRTY_FLAG_GX_SSBO;
}
}
void StateTracker::SetIndexBuffer(VkBuffer buffer, VkDeviceSize offset, VkIndexType type)
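The new size parameter lets callers publish the full extent of the buffer, so the SSBO view can cover any vertex an uber draw might index. This mirrors how VertexManager::CommitBuffer calls it later in this commit:

// Usage (taken from the VertexManager hunk below): whole stream buffer, zero offset,
// so the gx_uber_vertex_ssbo binding only changes when the VkBuffer is recreated.
StateTracker::GetInstance()->SetVertexBuffer(m_vertex_stream_buffer->GetBuffer(), 0,
                                             VERTEX_STREAM_BUFFER_SIZE);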
@@ -366,8 +375,12 @@ bool StateTracker::Bind()
// Re-bind parts of the pipeline
const VkCommandBuffer command_buffer = g_command_buffer_mgr->GetCurrentCommandBuffer();
if (m_dirty_flags & DIRTY_FLAG_VERTEX_BUFFER)
if (m_pipeline->GetUsage() != AbstractPipelineUsage::GXUber &&
(m_dirty_flags & DIRTY_FLAG_VERTEX_BUFFER))
{
vkCmdBindVertexBuffers(command_buffer, 0, 1, &m_vertex_buffer, &m_vertex_buffer_offset);
m_dirty_flags &= ~DIRTY_FLAG_VERTEX_BUFFER;
}
if (m_dirty_flags & DIRTY_FLAG_INDEX_BUFFER)
vkCmdBindIndexBuffer(command_buffer, m_index_buffer, m_index_buffer_offset, m_index_type);
@@ -381,8 +394,8 @@ bool StateTracker::Bind()
if (m_dirty_flags & DIRTY_FLAG_SCISSOR)
vkCmdSetScissor(command_buffer, 0, 1, &m_scissor);
m_dirty_flags &= ~(DIRTY_FLAG_VERTEX_BUFFER | DIRTY_FLAG_INDEX_BUFFER | DIRTY_FLAG_PIPELINE |
DIRTY_FLAG_VIEWPORT | DIRTY_FLAG_SCISSOR);
m_dirty_flags &=
~(DIRTY_FLAG_INDEX_BUFFER | DIRTY_FLAG_PIPELINE | DIRTY_FLAG_VIEWPORT | DIRTY_FLAG_SCISSOR);
return true;
}
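Note that DIRTY_FLAG_VERTEX_BUFFER is now cleared only when a vertex buffer is actually bound, so the binding survives uber draws that skip vkCmdBindVertexBuffers. A hypothetical draw sequence (sketch; buffer, size and the two pipeline objects are illustrative, not from the diff):

StateTracker* st = StateTracker::GetInstance();
st->SetVertexBuffer(buffer, 0, size);  // sets DIRTY_FLAG_VERTEX_BUFFER
st->SetPipeline(uber_pipeline);        // GXUber: vertices read through the SSBO
st->Bind();                            // no vkCmdBindVertexBuffers, flag stays set
st->SetPipeline(gx_pipeline);          // regular GX pipeline
st->Bind();                            // vkCmdBindVertexBuffers runs here, flag cleared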
@@ -462,7 +475,7 @@ bool StateTracker::UpdateGXDescriptorSet()
{
const size_t MAX_DESCRIPTOR_WRITES = NUM_UBO_DESCRIPTOR_SET_BINDINGS + // UBO
1 + // Samplers
1; // SSBO
2; // SSBO
std::array<VkWriteDescriptorSet, MAX_DESCRIPTOR_WRITES> writes;
u32 num_writes = 0;
@@ -516,7 +529,11 @@ bool StateTracker::UpdateGXDescriptorSet()
m_dirty_flags = (m_dirty_flags & ~DIRTY_FLAG_GX_SAMPLERS) | DIRTY_FLAG_DESCRIPTOR_SETS;
}
if (g_ActiveConfig.backend_info.bSupportsBBox &&
const bool needs_bbox_ssbo = g_ActiveConfig.backend_info.bSupportsBBox;
const bool needs_vertex_ssbo = m_pipeline->GetUsage() == AbstractPipelineUsage::GXUber;
const bool needs_ssbo = needs_bbox_ssbo || needs_vertex_ssbo;
if (needs_ssbo &&
(m_dirty_flags & DIRTY_FLAG_GX_SSBO || m_gx_descriptor_sets[2] == VK_NULL_HANDLE))
{
m_gx_descriptor_sets[2] =
@@ -528,6 +545,18 @@ bool StateTracker::UpdateGXDescriptorSet()
writes[num_writes++] = {
VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, nullptr, m_gx_descriptor_sets[2], 0, 0, 1,
VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, nullptr, &m_bindings.ssbo, nullptr};
writes[num_writes++] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
nullptr,
m_gx_descriptor_sets[2],
1,
0,
1,
VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
nullptr,
&m_bindings.gx_uber_vertex_ssbo,
nullptr};
m_dirty_flags = (m_dirty_flags & ~DIRTY_FLAG_GX_SSBO) | DIRTY_FLAG_DESCRIPTOR_SETS;
}
@@ -538,9 +567,7 @@ bool StateTracker::UpdateGXDescriptorSet()
{
vkCmdBindDescriptorSets(g_command_buffer_mgr->GetCurrentCommandBuffer(),
VK_PIPELINE_BIND_POINT_GRAPHICS, m_pipeline->GetVkPipelineLayout(), 0,
g_ActiveConfig.backend_info.bSupportsBBox ?
NUM_GX_DESCRIPTOR_SETS :
(NUM_GX_DESCRIPTOR_SETS - 1),
needs_ssbo ? NUM_GX_DESCRIPTOR_SETS : (NUM_GX_DESCRIPTOR_SETS - 1),
m_gx_descriptor_sets.data(),
g_ActiveConfig.backend_info.bSupportsGeometryShaders ?
NUM_UBO_DESCRIPTOR_SET_BINDINGS :
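Outside this hunk, the accumulated writes are presumably flushed with a single vkUpdateDescriptorSets call once every dirty binding has been handled (sketch; the surrounding code is not part of the diff):

// Sketch (assumed): applies the UBO, sampler and both SSBO writes in one go,
// including the new dynamic vertex loader write at set 2, binding 1.
if (num_writes > 0)
  vkUpdateDescriptorSets(g_vulkan_context->GetDevice(), num_writes, writes.data(), 0, nullptr);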

View File

@@ -32,7 +32,7 @@ public:
VKFramebuffer* GetFramebuffer() const { return m_framebuffer; }
const VKPipeline* GetPipeline() const { return m_pipeline; }
void SetVertexBuffer(VkBuffer buffer, VkDeviceSize offset);
void SetVertexBuffer(VkBuffer buffer, VkDeviceSize offset, u32 size);
void SetIndexBuffer(VkBuffer buffer, VkDeviceSize offset, VkIndexType type);
void SetFramebuffer(VKFramebuffer* framebuffer);
void SetPipeline(const VKPipeline* pipeline);
@@ -145,6 +145,7 @@ private:
std::array<VkDescriptorImageInfo, NUM_PIXEL_SHADER_SAMPLERS> samplers;
std::array<VkBufferView, NUM_COMPUTE_TEXEL_BUFFERS> texel_buffers;
VkDescriptorBufferInfo ssbo;
VkDescriptorBufferInfo gx_uber_vertex_ssbo;
VkDescriptorImageInfo image_texture;
} m_bindings = {};
std::array<VkDescriptorSet, NUM_GX_DESCRIPTOR_SETS> m_gx_descriptor_sets = {};

View File

@@ -249,9 +249,11 @@ std::unique_ptr<VKPipeline> VKPipeline::Create(const AbstractPipelineConfig& con
switch (config.usage)
{
case AbstractPipelineUsage::GX:
case AbstractPipelineUsage::GXUber:
pipeline_layout = g_object_cache->GetPipelineLayout(PIPELINE_LAYOUT_STANDARD);
break;
case AbstractPipelineUsage::GXUber:
pipeline_layout = g_object_cache->GetPipelineLayout(PIPELINE_LAYOUT_UBER);
break;
case AbstractPipelineUsage::Utility:
pipeline_layout = g_object_cache->GetPipelineLayout(PIPELINE_LAYOUT_UTILITY);
break;

View File

@@ -62,7 +62,8 @@ bool VertexManager::Initialize()
return false;
m_vertex_stream_buffer =
StreamBuffer::Create(VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, VERTEX_STREAM_BUFFER_SIZE);
StreamBuffer::Create(VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
VERTEX_STREAM_BUFFER_SIZE);
m_index_stream_buffer =
StreamBuffer::Create(VK_BUFFER_USAGE_INDEX_BUFFER_BIT, INDEX_STREAM_BUFFER_SIZE);
m_uniform_stream_buffer =
@@ -186,7 +187,8 @@ void VertexManager::CommitBuffer(u32 num_vertices, u32 vertex_stride, u32 num_in
ADDSTAT(g_stats.this_frame.bytes_vertex_streamed, static_cast<int>(vertex_data_size));
ADDSTAT(g_stats.this_frame.bytes_index_streamed, static_cast<int>(index_data_size));
StateTracker::GetInstance()->SetVertexBuffer(m_vertex_stream_buffer->GetBuffer(), 0);
StateTracker::GetInstance()->SetVertexBuffer(m_vertex_stream_buffer->GetBuffer(), 0,
VERTEX_STREAM_BUFFER_SIZE);
StateTracker::GetInstance()->SetIndexBuffer(m_index_stream_buffer->GetBuffer(), 0,
VK_INDEX_TYPE_UINT16);
}
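Passing VERTEX_STREAM_BUFFER_SIZE rather than the size of the current draw's data makes the uber vertex SSBO a whole-buffer view. The descriptor StateTracker ends up storing is roughly:

// Equivalent descriptor contents (sketch): same VkBuffer as the vertex binding,
// zero offset, full-buffer range, so it only goes dirty when the stream buffer
// itself is recreated rather than on every CommitBuffer call.
const VkDescriptorBufferInfo gx_uber_vertex_ssbo = {
    m_vertex_stream_buffer->GetBuffer(),  // buffer
    0,                                    // offset
    VERTEX_STREAM_BUFFER_SIZE,            // range
};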

View File

@@ -295,7 +295,7 @@ void VulkanContext::PopulateBackendInfo(VideoConfig* config)
config->backend_info.bSupportsLodBiasInSampler = false; // Dependent on OS.
config->backend_info.bSupportsSettingObjectNames = false; // Dependent on features.
config->backend_info.bSupportsPartialMultisampleResolve = true; // Assumed support.
config->backend_info.bSupportsDynamicVertexLoader = false; // Not yet supported
config->backend_info.bSupportsDynamicVertexLoader = true; // Assumed support.
}
void VulkanContext::PopulateBackendInfoAdapters(VideoConfig* config, const GPUList& gpu_list)