// Copyright 2016 Dolphin Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later

#include "VideoBackends/Vulkan/StateTracker.h"

#include "Common/Assert.h"

#include "VideoBackends/Vulkan/CommandBufferManager.h"
#include "VideoBackends/Vulkan/ObjectCache.h"
#include "VideoBackends/Vulkan/VKGfx.h"
#include "VideoBackends/Vulkan/VKPipeline.h"
#include "VideoBackends/Vulkan/VKShader.h"
#include "VideoBackends/Vulkan/VKTexture.h"
#include "VideoBackends/Vulkan/VKVertexFormat.h"
#include "VideoBackends/Vulkan/VulkanContext.h"

#include "VideoCommon/Constants.h"

namespace Vulkan
{
static std::unique_ptr<StateTracker> s_state_tracker;

StateTracker::StateTracker() = default;

StateTracker::~StateTracker() = default;

StateTracker* StateTracker::GetInstance()
{
  return s_state_tracker.get();
}

bool StateTracker::CreateInstance()
{
  ASSERT(!s_state_tracker);
  s_state_tracker = std::make_unique<StateTracker>();
  if (!s_state_tracker->Initialize())
  {
    s_state_tracker.reset();
    return false;
  }
  return true;
}

void StateTracker::DestroyInstance()
{
  if (!s_state_tracker)
    return;

  // When the dummy texture is destroyed, it unbinds itself, then references itself.
  // Clear everything out so this doesn't happen.
  for (auto& it : s_state_tracker->m_bindings.samplers)
    it.imageView = VK_NULL_HANDLE;
  s_state_tracker->m_bindings.image_texture.imageView = VK_NULL_HANDLE;
  s_state_tracker->m_dummy_texture.reset();

  s_state_tracker.reset();
}

bool StateTracker::Initialize()
{
  // Create a dummy texture which can be used in place of a real binding.
  m_dummy_texture =
      VKTexture::Create(TextureConfig(1, 1, 1, 1, 1, AbstractTextureFormat::RGBA8, 0), "");
  if (!m_dummy_texture)
    return false;
  m_dummy_texture->TransitionToLayout(g_command_buffer_mgr->GetCurrentInitCommandBuffer(),
                                      VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);

  // Initialize all samplers to point by default
  for (size_t i = 0; i < VideoCommon::MAX_PIXEL_SHADER_SAMPLERS; i++)
  {
    m_bindings.samplers[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    m_bindings.samplers[i].imageView = m_dummy_texture->GetView();
    m_bindings.samplers[i].sampler = g_object_cache->GetPointSampler();
  }

  // Default dirty flags include all descriptors
  InvalidateCachedState();
  return true;
}
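
// The vertex stream is also mirrored into the gx_uber_vertex_ssbo binding, which the
// dynamic-vertex-loader / uber-shader path reads as a storage buffer instead of a vertex buffer.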
void StateTracker::SetVertexBuffer(VkBuffer buffer, VkDeviceSize offset, u32 size)
{
  if (m_vertex_buffer != buffer || m_vertex_buffer_offset != offset)
  {
    m_vertex_buffer = buffer;
    m_vertex_buffer_offset = offset;
    m_dirty_flags |= DIRTY_FLAG_VERTEX_BUFFER;
  }
  if (m_bindings.gx_uber_vertex_ssbo.buffer != buffer ||
      m_bindings.gx_uber_vertex_ssbo.offset != offset ||
      m_bindings.gx_uber_vertex_ssbo.range != size)
  {
    m_bindings.gx_uber_vertex_ssbo.buffer = buffer;
    m_bindings.gx_uber_vertex_ssbo.offset = offset;
    m_bindings.gx_uber_vertex_ssbo.range = size;
    m_dirty_flags |= DIRTY_FLAG_GX_SSBO;
  }
}

void StateTracker::SetIndexBuffer(VkBuffer buffer, VkDeviceSize offset, VkIndexType type)
{
  if (m_index_buffer == buffer && m_index_buffer_offset == offset && m_index_type == type)
    return;

  m_index_buffer = buffer;
  m_index_buffer_offset = offset;
  m_index_type = type;
  m_dirty_flags |= DIRTY_FLAG_INDEX_BUFFER;
}

void StateTracker::SetFramebuffer(VKFramebuffer* framebuffer)
{
  // Should not be changed within a render pass.
  ASSERT(!InRenderPass());
  m_framebuffer = framebuffer;
}

void StateTracker::SetPipeline(const VKPipeline* pipeline)
{
  if (m_pipeline == pipeline)
    return;

  // If the usage changes, we need to re-bind everything, as the layout is different.
  const bool new_usage =
      pipeline && (!m_pipeline || m_pipeline->GetUsage() != pipeline->GetUsage());

  m_pipeline = pipeline;
  m_dirty_flags |= DIRTY_FLAG_PIPELINE;
  if (new_usage)
    m_dirty_flags |= DIRTY_FLAG_DESCRIPTOR_SETS;
}

void StateTracker::SetComputeShader(const VKShader* shader)
{
  if (m_compute_shader == shader)
    return;

  m_compute_shader = shader;
  m_dirty_flags |= DIRTY_FLAG_COMPUTE_SHADER;
}

void StateTracker::SetGXUniformBuffer(u32 index, VkBuffer buffer, u32 offset, u32 size)
{
  auto& binding = m_bindings.gx_ubo_bindings[index];
  if (binding.buffer != buffer || binding.range != size)
  {
    binding.buffer = buffer;
    binding.range = size;
    m_dirty_flags |= DIRTY_FLAG_GX_UBOS;
  }

  if (m_bindings.gx_ubo_offsets[index] != offset)
  {
    m_bindings.gx_ubo_offsets[index] = offset;
    m_dirty_flags |= DIRTY_FLAG_GX_UBO_OFFSETS;
  }
}

void StateTracker::SetUtilityUniformBuffer(VkBuffer buffer, u32 offset, u32 size)
{
  auto& binding = m_bindings.utility_ubo_binding;
  if (binding.buffer != buffer || binding.range != size)
  {
    binding.buffer = buffer;
    binding.range = size;
    m_dirty_flags |= DIRTY_FLAG_UTILITY_UBO;
  }

  if (m_bindings.utility_ubo_offset != offset)
  {
    m_bindings.utility_ubo_offset = offset;
    m_dirty_flags |= DIRTY_FLAG_UTILITY_UBO_OFFSET | DIRTY_FLAG_COMPUTE_DESCRIPTOR_SET;
  }
}

void StateTracker::SetTexture(u32 index, VkImageView view)
{
  if (m_bindings.samplers[index].imageView == view)
    return;

  m_bindings.samplers[index].imageView = view;
  m_bindings.samplers[index].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  m_dirty_flags |=
      DIRTY_FLAG_GX_SAMPLERS | DIRTY_FLAG_UTILITY_BINDINGS | DIRTY_FLAG_COMPUTE_BINDINGS;
}

void StateTracker::SetSampler(u32 index, VkSampler sampler)
{
  if (m_bindings.samplers[index].sampler == sampler)
    return;

  m_bindings.samplers[index].sampler = sampler;
  m_dirty_flags |=
      DIRTY_FLAG_GX_SAMPLERS | DIRTY_FLAG_UTILITY_BINDINGS | DIRTY_FLAG_COMPUTE_BINDINGS;
}

void StateTracker::SetSSBO(VkBuffer buffer, VkDeviceSize offset, VkDeviceSize range)
{
  if (m_bindings.ssbo.buffer == buffer && m_bindings.ssbo.offset == offset &&
      m_bindings.ssbo.range == range)
  {
    return;
  }

  m_bindings.ssbo.buffer = buffer;
  m_bindings.ssbo.offset = offset;
  m_bindings.ssbo.range = range;
  m_dirty_flags |= DIRTY_FLAG_GX_SSBO;
}

void StateTracker::SetTexelBuffer(u32 index, VkBufferView view)
{
  if (m_bindings.texel_buffers[index] == view)
    return;

  m_bindings.texel_buffers[index] = view;
  m_dirty_flags |= DIRTY_FLAG_UTILITY_BINDINGS | DIRTY_FLAG_COMPUTE_BINDINGS;
}

void StateTracker::SetImageTexture(VkImageView view)
{
  if (m_bindings.image_texture.imageView == view)
    return;

  m_bindings.image_texture.imageView = view;
  m_bindings.image_texture.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
  m_dirty_flags |= DIRTY_FLAG_COMPUTE_BINDINGS;
}
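
// Replaces any sampler or image binding that still references the given view with the dummy
// texture, so descriptors never point at a destroyed image.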
void StateTracker::UnbindTexture(VkImageView view)
{
  for (VkDescriptorImageInfo& it : m_bindings.samplers)
  {
    if (it.imageView == view)
    {
      it.imageView = m_dummy_texture->GetView();
      it.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    }
  }

  if (m_bindings.image_texture.imageView == view)
  {
    m_bindings.image_texture.imageView = m_dummy_texture->GetView();
    m_bindings.image_texture.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  }
}
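
// Marks every cached descriptor set and piece of dynamic state as dirty so it is re-applied on
// the next Bind()/BindCompute(), e.g. after a new command buffer has been started.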
void StateTracker::InvalidateCachedState()
{
  m_gx_descriptor_sets.fill(VK_NULL_HANDLE);
  m_utility_descriptor_sets.fill(VK_NULL_HANDLE);
  m_compute_descriptor_set = VK_NULL_HANDLE;
  m_dirty_flags |= DIRTY_FLAG_ALL_DESCRIPTORS | DIRTY_FLAG_VIEWPORT | DIRTY_FLAG_SCISSOR |
                   DIRTY_FLAG_PIPELINE | DIRTY_FLAG_COMPUTE_SHADER | DIRTY_FLAG_DESCRIPTOR_SETS |
                   DIRTY_FLAG_COMPUTE_DESCRIPTOR_SET;
  if (m_vertex_buffer != VK_NULL_HANDLE)
    m_dirty_flags |= DIRTY_FLAG_VERTEX_BUFFER;
  if (m_index_buffer != VK_NULL_HANDLE)
    m_dirty_flags |= DIRTY_FLAG_INDEX_BUFFER;
}

void StateTracker::BeginRenderPass()
{
  if (InRenderPass())
    return;

  m_current_render_pass = m_framebuffer->GetLoadRenderPass();
  m_framebuffer_render_area = m_framebuffer->GetRect();

  VkRenderPassBeginInfo begin_info = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
                                      nullptr,
                                      m_current_render_pass,
                                      m_framebuffer->GetFB(),
                                      m_framebuffer_render_area,
                                      0,
                                      nullptr};

  vkCmdBeginRenderPass(g_command_buffer_mgr->GetCurrentCommandBuffer(), &begin_info,
                       VK_SUBPASS_CONTENTS_INLINE);
}

void StateTracker::BeginDiscardRenderPass()
{
  if (InRenderPass())
    return;

  m_current_render_pass = m_framebuffer->GetDiscardRenderPass();
  m_framebuffer_render_area = m_framebuffer->GetRect();

  VkRenderPassBeginInfo begin_info = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
                                      nullptr,
                                      m_current_render_pass,
                                      m_framebuffer->GetFB(),
                                      m_framebuffer_render_area,
                                      0,
                                      nullptr};

  vkCmdBeginRenderPass(g_command_buffer_mgr->GetCurrentCommandBuffer(), &begin_info,
                       VK_SUBPASS_CONTENTS_INLINE);
}

void StateTracker::EndRenderPass()
{
  if (!InRenderPass())
    return;

  vkCmdEndRenderPass(g_command_buffer_mgr->GetCurrentCommandBuffer());
  m_current_render_pass = VK_NULL_HANDLE;
}

void StateTracker::BeginClearRenderPass(const VkRect2D& area, const VkClearValue* clear_values,
                                        u32 num_clear_values)
{
  ASSERT(!InRenderPass());

  m_current_render_pass = m_framebuffer->GetClearRenderPass();
  m_framebuffer_render_area = area;

  VkRenderPassBeginInfo begin_info = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
                                      nullptr,
                                      m_current_render_pass,
                                      m_framebuffer->GetFB(),
                                      m_framebuffer_render_area,
                                      num_clear_values,
                                      clear_values};

  vkCmdBeginRenderPass(g_command_buffer_mgr->GetCurrentCommandBuffer(), &begin_info,
                       VK_SUBPASS_CONTENTS_INLINE);
}

void StateTracker::SetViewport(const VkViewport& viewport)
{
  if (memcmp(&m_viewport, &viewport, sizeof(viewport)) == 0)
    return;

  m_viewport = viewport;
  m_dirty_flags |= DIRTY_FLAG_VIEWPORT;
}

void StateTracker::SetScissor(const VkRect2D& scissor)
{
  if (memcmp(&m_scissor, &scissor, sizeof(scissor)) == 0)
    return;

  m_scissor = scissor;
  m_dirty_flags |= DIRTY_FLAG_SCISSOR;
}
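
// Applies all pending graphics state (descriptor sets, vertex/index buffers, pipeline, viewport
// and scissor) to the current command buffer, starting a render pass if necessary. Returns false
// when no pipeline is currently set, in which case nothing can be drawn.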
bool StateTracker::Bind()
{
  // Must have a pipeline.
  if (!m_pipeline)
    return false;

  // Check the render area if we were in a clear pass.
  if (m_current_render_pass == m_framebuffer->GetClearRenderPass() && !IsViewportWithinRenderArea())
    EndRenderPass();

  // Get a new descriptor set if any parts have changed
  UpdateDescriptorSet();

  // Start render pass if not already started
  if (!InRenderPass())
    BeginRenderPass();

  // Re-bind parts of the pipeline
  const VkCommandBuffer command_buffer = g_command_buffer_mgr->GetCurrentCommandBuffer();
  const bool needs_vertex_buffer = !g_ActiveConfig.backend_info.bSupportsDynamicVertexLoader ||
                                   m_pipeline->GetUsage() != AbstractPipelineUsage::GXUber;
  if (needs_vertex_buffer && (m_dirty_flags & DIRTY_FLAG_VERTEX_BUFFER))
  {
    vkCmdBindVertexBuffers(command_buffer, 0, 1, &m_vertex_buffer, &m_vertex_buffer_offset);
    m_dirty_flags &= ~DIRTY_FLAG_VERTEX_BUFFER;
  }

  if (m_dirty_flags & DIRTY_FLAG_INDEX_BUFFER)
    vkCmdBindIndexBuffer(command_buffer, m_index_buffer, m_index_buffer_offset, m_index_type);

  if (m_dirty_flags & DIRTY_FLAG_PIPELINE)
    vkCmdBindPipeline(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, m_pipeline->GetVkPipeline());

  if (m_dirty_flags & DIRTY_FLAG_VIEWPORT)
    vkCmdSetViewport(command_buffer, 0, 1, &m_viewport);

  if (m_dirty_flags & DIRTY_FLAG_SCISSOR)
    vkCmdSetScissor(command_buffer, 0, 1, &m_scissor);

  m_dirty_flags &=
      ~(DIRTY_FLAG_INDEX_BUFFER | DIRTY_FLAG_PIPELINE | DIRTY_FLAG_VIEWPORT | DIRTY_FLAG_SCISSOR);
  return true;
}
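
// Applies pending compute state. Compute dispatches cannot happen inside a render pass, so any
// active pass is ended first. Returns false when no compute shader is bound.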
bool StateTracker::BindCompute()
{
  if (!m_compute_shader)
    return false;

  // Can't kick compute in a render pass.
  if (InRenderPass())
    EndRenderPass();

  const VkCommandBuffer command_buffer = g_command_buffer_mgr->GetCurrentCommandBuffer();
  if (m_dirty_flags & DIRTY_FLAG_COMPUTE_SHADER)
  {
    vkCmdBindPipeline(command_buffer, VK_PIPELINE_BIND_POINT_COMPUTE,
                      m_compute_shader->GetComputePipeline());
  }

  UpdateComputeDescriptorSet();
  m_dirty_flags &= ~DIRTY_FLAG_COMPUTE_SHADER;
  return true;
}

bool StateTracker::IsWithinRenderArea(s32 x, s32 y, u32 width, u32 height) const
{
  // Check that the viewport does not lie outside the render area.
  // If it does, we need to switch to a normal load/store render pass.
  s32 left = m_framebuffer_render_area.offset.x;
  s32 top = m_framebuffer_render_area.offset.y;
  s32 right = left + static_cast<s32>(m_framebuffer_render_area.extent.width);
  s32 bottom = top + static_cast<s32>(m_framebuffer_render_area.extent.height);
  s32 test_left = x;
  s32 test_top = y;
  s32 test_right = test_left + static_cast<s32>(width);
  s32 test_bottom = test_top + static_cast<s32>(height);
  return test_left >= left && test_right <= right && test_top >= top && test_bottom <= bottom;
}

bool StateTracker::IsViewportWithinRenderArea() const
{
  return IsWithinRenderArea(static_cast<s32>(m_viewport.x), static_cast<s32>(m_viewport.y),
                            static_cast<u32>(m_viewport.width),
                            static_cast<u32>(m_viewport.height));
}

void StateTracker::EndClearRenderPass()
{
  if (m_current_render_pass != m_framebuffer->GetClearRenderPass())
    return;

  // End clear render pass. Bind() will call BeginRenderPass() which
  // will switch to the load/store render pass.
  EndRenderPass();
}

void StateTracker::UpdateDescriptorSet()
{
  if (m_pipeline->GetUsage() != AbstractPipelineUsage::Utility)
    UpdateGXDescriptorSet();
  else
    UpdateUtilityDescriptorSet();
}
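
// Re-allocates and writes the GX descriptor sets (uniform buffers, samplers, and storage
// buffers) for whichever bindings are dirty, then binds them together with the dynamic UBO
// offsets.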
void StateTracker::UpdateGXDescriptorSet()
{
  const size_t MAX_DESCRIPTOR_WRITES = NUM_UBO_DESCRIPTOR_SET_BINDINGS +  // UBO
                                       1 +                                // Samplers
                                       2;                                 // SSBO
  std::array<VkWriteDescriptorSet, MAX_DESCRIPTOR_WRITES> writes;
  u32 num_writes = 0;

  const bool needs_gs_ubo = g_ActiveConfig.backend_info.bSupportsGeometryShaders ||
                            g_ActiveConfig.UseVSForLinePointExpand();

  if (m_dirty_flags & DIRTY_FLAG_GX_UBOS || m_gx_descriptor_sets[0] == VK_NULL_HANDLE)
  {
    m_gx_descriptor_sets[0] = g_command_buffer_mgr->AllocateDescriptorSet(
        g_object_cache->GetDescriptorSetLayout(DESCRIPTOR_SET_LAYOUT_STANDARD_UNIFORM_BUFFERS));

    for (size_t i = 0; i < NUM_UBO_DESCRIPTOR_SET_BINDINGS; i++)
    {
      if (i == UBO_DESCRIPTOR_SET_BINDING_GS && !needs_gs_ubo)
      {
        continue;
      }

      writes[num_writes++] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                              nullptr,
                              m_gx_descriptor_sets[0],
                              static_cast<uint32_t>(i),
                              0,
                              1,
                              VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
                              nullptr,
                              &m_bindings.gx_ubo_bindings[i],
                              nullptr};
    }

    m_dirty_flags = (m_dirty_flags & ~DIRTY_FLAG_GX_UBOS) | DIRTY_FLAG_DESCRIPTOR_SETS;
  }

  if (m_dirty_flags & DIRTY_FLAG_GX_SAMPLERS || m_gx_descriptor_sets[1] == VK_NULL_HANDLE)
  {
    m_gx_descriptor_sets[1] = g_command_buffer_mgr->AllocateDescriptorSet(
        g_object_cache->GetDescriptorSetLayout(DESCRIPTOR_SET_LAYOUT_STANDARD_SAMPLERS));

    writes[num_writes++] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                            nullptr,
                            m_gx_descriptor_sets[1],
                            0,
                            0,
                            static_cast<u32>(VideoCommon::MAX_PIXEL_SHADER_SAMPLERS),
                            VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                            m_bindings.samplers.data(),
                            nullptr,
                            nullptr};
    m_dirty_flags = (m_dirty_flags & ~DIRTY_FLAG_GX_SAMPLERS) | DIRTY_FLAG_DESCRIPTOR_SETS;
  }

  const bool needs_bbox_ssbo = g_ActiveConfig.backend_info.bSupportsBBox;
  const bool needs_vertex_ssbo = (g_ActiveConfig.backend_info.bSupportsDynamicVertexLoader &&
                                  m_pipeline->GetUsage() == AbstractPipelineUsage::GXUber) ||
                                 g_ActiveConfig.UseVSForLinePointExpand();
  const bool needs_ssbo = needs_bbox_ssbo || needs_vertex_ssbo;

  if (needs_ssbo &&
      (m_dirty_flags & DIRTY_FLAG_GX_SSBO || m_gx_descriptor_sets[2] == VK_NULL_HANDLE))
  {
    m_gx_descriptor_sets[2] =
        g_command_buffer_mgr->AllocateDescriptorSet(g_object_cache->GetDescriptorSetLayout(
            DESCRIPTOR_SET_LAYOUT_STANDARD_SHADER_STORAGE_BUFFERS));

    writes[num_writes++] = {
        VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, nullptr, m_gx_descriptor_sets[2], 0, 0, 1,
        VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, nullptr, &m_bindings.ssbo, nullptr};

    if (g_ActiveConfig.backend_info.bSupportsDynamicVertexLoader ||
        g_ActiveConfig.UseVSForLinePointExpand())
    {
      writes[num_writes++] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                              nullptr,
                              m_gx_descriptor_sets[2],
                              1,
                              0,
                              1,
                              VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
                              nullptr,
                              &m_bindings.gx_uber_vertex_ssbo,
                              nullptr};
    }

    m_dirty_flags = (m_dirty_flags & ~DIRTY_FLAG_GX_SSBO) | DIRTY_FLAG_DESCRIPTOR_SETS;
  }

  if (num_writes > 0)
    vkUpdateDescriptorSets(g_vulkan_context->GetDevice(), num_writes, writes.data(), 0, nullptr);

  if (m_dirty_flags & DIRTY_FLAG_DESCRIPTOR_SETS)
  {
    vkCmdBindDescriptorSets(g_command_buffer_mgr->GetCurrentCommandBuffer(),
                            VK_PIPELINE_BIND_POINT_GRAPHICS, m_pipeline->GetVkPipelineLayout(), 0,
                            needs_ssbo ? NUM_GX_DESCRIPTOR_SETS : (NUM_GX_DESCRIPTOR_SETS - 1),
                            m_gx_descriptor_sets.data(),
                            needs_gs_ubo ? NUM_UBO_DESCRIPTOR_SET_BINDINGS :
                                           (NUM_UBO_DESCRIPTOR_SET_BINDINGS - 1),
                            m_bindings.gx_ubo_offsets.data());
    m_dirty_flags &= ~(DIRTY_FLAG_DESCRIPTOR_SETS | DIRTY_FLAG_GX_UBO_OFFSETS);
  }
  else if (m_dirty_flags & DIRTY_FLAG_GX_UBO_OFFSETS)
  {
    vkCmdBindDescriptorSets(
        g_command_buffer_mgr->GetCurrentCommandBuffer(), VK_PIPELINE_BIND_POINT_GRAPHICS,
        m_pipeline->GetVkPipelineLayout(), 0, 1, m_gx_descriptor_sets.data(),
        needs_gs_ubo ? NUM_UBO_DESCRIPTOR_SET_BINDINGS : (NUM_UBO_DESCRIPTOR_SET_BINDINGS - 1),
        m_bindings.gx_ubo_offsets.data());
    m_dirty_flags &= ~DIRTY_FLAG_GX_UBO_OFFSETS;
  }
}
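
// Utility draws use a smaller layout: one dynamic uniform buffer set plus a combined
// sampler/texel-buffer set.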
void StateTracker::UpdateUtilityDescriptorSet()
{
  // Max number of updates - UBO, Samplers, TexelBuffer
  std::array<VkWriteDescriptorSet, 3> dswrites;
  u32 writes = 0;

  // Allocate descriptor sets.
  if (m_dirty_flags & DIRTY_FLAG_UTILITY_UBO || m_utility_descriptor_sets[0] == VK_NULL_HANDLE)
  {
    m_utility_descriptor_sets[0] = g_command_buffer_mgr->AllocateDescriptorSet(
        g_object_cache->GetDescriptorSetLayout(DESCRIPTOR_SET_LAYOUT_UTILITY_UNIFORM_BUFFER));

    dswrites[writes++] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                          nullptr,
                          m_utility_descriptor_sets[0],
                          0,
                          0,
                          1,
                          VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
                          nullptr,
                          &m_bindings.utility_ubo_binding,
                          nullptr};

    m_dirty_flags = (m_dirty_flags & ~DIRTY_FLAG_UTILITY_UBO) | DIRTY_FLAG_DESCRIPTOR_SETS;
  }

  if (m_dirty_flags & DIRTY_FLAG_UTILITY_BINDINGS || m_utility_descriptor_sets[1] == VK_NULL_HANDLE)
  {
    m_utility_descriptor_sets[1] = g_command_buffer_mgr->AllocateDescriptorSet(
        g_object_cache->GetDescriptorSetLayout(DESCRIPTOR_SET_LAYOUT_UTILITY_SAMPLERS));

    dswrites[writes++] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                          nullptr,
                          m_utility_descriptor_sets[1],
                          0,
                          0,
                          NUM_UTILITY_PIXEL_SAMPLERS,
                          VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                          m_bindings.samplers.data(),
                          nullptr,
                          nullptr};
    dswrites[writes++] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                          nullptr,
                          m_utility_descriptor_sets[1],
                          8,
                          0,
                          1,
                          VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
                          nullptr,
                          nullptr,
                          m_bindings.texel_buffers.data()};

    m_dirty_flags = (m_dirty_flags & ~DIRTY_FLAG_UTILITY_BINDINGS) | DIRTY_FLAG_DESCRIPTOR_SETS;
  }

  if (writes > 0)
    vkUpdateDescriptorSets(g_vulkan_context->GetDevice(), writes, dswrites.data(), 0, nullptr);

  if (m_dirty_flags & DIRTY_FLAG_DESCRIPTOR_SETS)
  {
    vkCmdBindDescriptorSets(g_command_buffer_mgr->GetCurrentCommandBuffer(),
                            VK_PIPELINE_BIND_POINT_GRAPHICS, m_pipeline->GetVkPipelineLayout(), 0,
                            NUM_UTILITY_DESCRIPTOR_SETS, m_utility_descriptor_sets.data(), 1,
                            &m_bindings.utility_ubo_offset);
    m_dirty_flags &= ~(DIRTY_FLAG_DESCRIPTOR_SETS | DIRTY_FLAG_UTILITY_UBO_OFFSET);
  }
  else if (m_dirty_flags & DIRTY_FLAG_UTILITY_UBO_OFFSET)
  {
    vkCmdBindDescriptorSets(g_command_buffer_mgr->GetCurrentCommandBuffer(),
                            VK_PIPELINE_BIND_POINT_GRAPHICS, m_pipeline->GetVkPipelineLayout(), 0,
                            1, m_utility_descriptor_sets.data(), 1, &m_bindings.utility_ubo_offset);
    m_dirty_flags &= ~(DIRTY_FLAG_DESCRIPTOR_SETS | DIRTY_FLAG_UTILITY_UBO_OFFSET);
  }
}
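
// Compute uses a single descriptor set containing the utility uniform buffer, samplers, texel
// buffers, and a storage image.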
void StateTracker::UpdateComputeDescriptorSet()
{
  // Max number of updates - UBO, Samplers, TexelBuffer, Image
  std::array<VkWriteDescriptorSet, 4> dswrites;

  // Allocate descriptor sets.
  if (m_dirty_flags & DIRTY_FLAG_COMPUTE_BINDINGS)
  {
    m_compute_descriptor_set = g_command_buffer_mgr->AllocateDescriptorSet(
        g_object_cache->GetDescriptorSetLayout(DESCRIPTOR_SET_LAYOUT_COMPUTE));
    dswrites[0] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                   nullptr,
                   m_compute_descriptor_set,
                   0,
                   0,
                   1,
                   VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
                   nullptr,
                   &m_bindings.utility_ubo_binding,
                   nullptr};
    dswrites[1] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                   nullptr,
                   m_compute_descriptor_set,
                   1,
                   0,
                   NUM_COMPUTE_SHADER_SAMPLERS,
                   VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                   m_bindings.samplers.data(),
                   nullptr,
                   nullptr};
    dswrites[2] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                   nullptr,
                   m_compute_descriptor_set,
                   3,
                   0,
                   NUM_COMPUTE_TEXEL_BUFFERS,
                   VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
                   nullptr,
                   nullptr,
                   m_bindings.texel_buffers.data()};
    dswrites[3] = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                   nullptr,
                   m_compute_descriptor_set,
                   5,
                   0,
                   1,
                   VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
                   &m_bindings.image_texture,
                   nullptr,
                   nullptr};

    vkUpdateDescriptorSets(g_vulkan_context->GetDevice(), static_cast<uint32_t>(dswrites.size()),
                           dswrites.data(), 0, nullptr);
    m_dirty_flags =
        (m_dirty_flags & ~DIRTY_FLAG_COMPUTE_BINDINGS) | DIRTY_FLAG_COMPUTE_DESCRIPTOR_SET;
  }

  if (m_dirty_flags & DIRTY_FLAG_COMPUTE_DESCRIPTOR_SET)
  {
    vkCmdBindDescriptorSets(g_command_buffer_mgr->GetCurrentCommandBuffer(),
                            VK_PIPELINE_BIND_POINT_COMPUTE,
                            g_object_cache->GetPipelineLayout(PIPELINE_LAYOUT_COMPUTE), 0, 1,
                            &m_compute_descriptor_set, 1, &m_bindings.utility_ubo_offset);
    m_dirty_flags &= ~DIRTY_FLAG_COMPUTE_DESCRIPTOR_SET;
  }
}

}  // namespace Vulkan