[Vulkan] Merge texture and sampler descriptors into a single descriptor set

Put all descriptors used by translated shaders in up to 4 descriptor sets. 4 is the minimum required value of the `maxBoundDescriptorSets` device limit, and the most common value of that limit on Android devices.

parent ff35a4b3a1
commit b3edc56576
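Within each merged per-stage "textures" descriptor set, sampled images occupy bindings [0, texture_count) and samplers follow at [texture_count, texture_count + sampler_count). A minimal sketch of that arithmetic (illustration only, not part of the commit; SamplerBinding is a hypothetical helper):

#include <cstddef>

// The binding index of sampler i is offset by the number of sampled images
// that share the descriptor set with it.
constexpr std::size_t SamplerBinding(std::size_t texture_count,
                                     std::size_t sampler_index) {
  return texture_count + sampler_index;
}
static_assert(SamplerBinding(3, 1) == 4,
              "with 3 images, the second sampler lands at binding 4");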
@@ -642,6 +642,16 @@ std::vector<uint8_t> SpirvShaderTranslator::CompleteTranslation() {
    entry_point->addIdOperand(interface_id);
  }

  // Specify the binding indices for samplers when the number of textures is
  // known, as samplers are located after images in the texture descriptor set.
  size_t texture_binding_count = texture_bindings_.size();
  size_t sampler_binding_count = sampler_bindings_.size();
  for (size_t i = 0; i < sampler_binding_count; ++i) {
    builder_->addDecoration(sampler_bindings_[i].variable,
                            spv::DecorationBinding,
                            int(texture_binding_count + i));
  }

  // TODO(Triang3l): Avoid copy?
  std::vector<unsigned int> module_uints;
  builder_->dump(module_uints);
@@ -176,14 +176,21 @@ class SpirvShaderTranslator : public ShaderTranslator {
    kDescriptorSetMutableLayoutsStart,

    // Rarely used at all, but may be changed at an unpredictable rate when
    // vertex textures are used.
    kDescriptorSetSamplersVertex = kDescriptorSetMutableLayoutsStart,
    kDescriptorSetTexturesVertex,
    // vertex textures are used (for example, for bones of an object, which may
    // consist of multiple draw commands with different materials).
    kDescriptorSetTexturesVertex = kDescriptorSetMutableLayoutsStart,
    // Per-material textures.
    kDescriptorSetSamplersPixel,
    kDescriptorSetTexturesPixel,

    kDescriptorSetCount,
  };
  static_assert(
      kDescriptorSetCount <= 4,
      "The number of descriptor sets used by translated shaders must be within "
      "the minimum Vulkan maxBoundDescriptorSets requirement of 4, which is "
      "the limit on most GPUs used in Android devices - Arm Mali, Imagination "
      "PowerVR, Qualcomm Adreno 6xx and older, as well as on old PC Nvidia "
      "drivers");

  // "Xenia Emulator Microcode Translator".
  // https://github.com/KhronosGroup/SPIRV-Headers/blob/c43a43c7cc3af55910b9bec2a71e3e8a622443cf/include/spirv/spir-v.xml#L79
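For reference, the limit that the static_assert above guards against can be read from the device properties; a sketch of the query (assuming an already selected VkPhysicalDevice):

#include <vulkan/vulkan.h>

// Reads maxBoundDescriptorSets; the Vulkan specification guarantees at least
// 4, and translated shaders must fit all of their sets within that minimum.
uint32_t QueryMaxBoundDescriptorSets(VkPhysicalDevice physical_device) {
  VkPhysicalDeviceProperties properties;
  vkGetPhysicalDeviceProperties(physical_device, &properties);
  return properties.limits.maxBoundDescriptorSets;
}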
@@ -2573,10 +2573,10 @@ size_t SpirvShaderTranslator::FindOrAddSamplerBinding(
      builder_->makeSamplerType(), name.str().c_str());
  builder_->addDecoration(
      new_sampler_binding.variable, spv::DecorationDescriptorSet,
      int(is_vertex_shader() ? kDescriptorSetSamplersVertex
                             : kDescriptorSetSamplersPixel));
  builder_->addDecoration(new_sampler_binding.variable, spv::DecorationBinding,
                          int(new_sampler_binding_index));
      int(is_vertex_shader() ? kDescriptorSetTexturesVertex
                             : kDescriptorSetTexturesPixel));
  // The binding indices will be specified later after all textures are added as
  // samplers are located after images in the descriptor set.
  if (features_.spirv_version >= spv::Spv_1_4) {
    main_interface_.push_back(new_sampler_binding.variable);
  }
@@ -49,6 +49,24 @@ namespace shaders {
#include "xenia/gpu/shaders/bytecode/vulkan_spirv/fullscreen_cw_vs.h"
}  // namespace shaders

const VkDescriptorPoolSize
    VulkanCommandProcessor::kDescriptorPoolSizeUniformBuffer = {
        VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
        SpirvShaderTranslator::kConstantBufferCount *
            kLinkedTypeDescriptorPoolSetCount};

const VkDescriptorPoolSize
    VulkanCommandProcessor::kDescriptorPoolSizeStorageBuffer = {
        VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, kLinkedTypeDescriptorPoolSetCount};

// 2x descriptors for texture images because of unsigned and signed bindings.
const VkDescriptorPoolSize
    VulkanCommandProcessor::kDescriptorPoolSizeTextures[2] = {
        {VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
         2 * kLinkedTypeDescriptorPoolSetCount},
        {VK_DESCRIPTOR_TYPE_SAMPLER, kLinkedTypeDescriptorPoolSetCount},
};

// No specific reason for 32768 descriptors, just the "too much" amount from
// Direct3D 12 PIX warnings. 2x descriptors for textures because of unsigned and
// signed bindings.
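These pool sizes are per-page totals: the number of descriptors of each type that one allocator page must provide for kLinkedTypeDescriptorPoolSetCount sets. An illustration under assumed values (the kConstantBufferCount value below is a hypothetical placeholder; the real constant is defined in SpirvShaderTranslator):

#include <cstdint>

#include <vulkan/vulkan.h>

constexpr uint32_t kLinkedTypeDescriptorPoolSetCount = 32768;
constexpr uint32_t kConstantBufferCount = 4;  // hypothetical placeholder

// One uniform-buffer page: each of the 32768 sets in the page may consume
// kConstantBufferCount uniform-buffer descriptors.
constexpr VkDescriptorPoolSize kUniformBufferPoolSize = {
    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
    kConstantBufferCount * kLinkedTypeDescriptorPoolSetCount};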
@@ -59,20 +77,19 @@ VulkanCommandProcessor::VulkanCommandProcessor(
      transient_descriptor_allocator_uniform_buffer_(
          *static_cast<const ui::vulkan::VulkanProvider*>(
              graphics_system->provider()),
          VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
          SpirvShaderTranslator::kConstantBufferCount * 32768, 32768),
          &kDescriptorPoolSizeUniformBuffer, 1,
          kLinkedTypeDescriptorPoolSetCount),
      transient_descriptor_allocator_storage_buffer_(
          *static_cast<const ui::vulkan::VulkanProvider*>(
              graphics_system->provider()),
          VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 32768, 32768),
      transient_descriptor_allocator_sampled_image_(
          &kDescriptorPoolSizeStorageBuffer, 1,
          kLinkedTypeDescriptorPoolSetCount),
      transient_descriptor_allocator_textures_(
          *static_cast<const ui::vulkan::VulkanProvider*>(
              graphics_system->provider()),
          VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 2 * 32768, 32768),
      transient_descriptor_allocator_sampler_(
          *static_cast<const ui::vulkan::VulkanProvider*>(
              graphics_system->provider()),
          VK_DESCRIPTOR_TYPE_SAMPLER, 32768, 32768) {}
          kDescriptorPoolSizeTextures,
          uint32_t(xe::countof(kDescriptorPoolSizeTextures)),
          kLinkedTypeDescriptorPoolSetCount) {}

VulkanCommandProcessor::~VulkanCommandProcessor() = default;
@@ -1735,14 +1752,21 @@ VkDescriptorSet VulkanCommandProcessor::AllocateSingleTransientDescriptor(
    const ui::vulkan::VulkanProvider& provider = GetVulkanProvider();
    const ui::vulkan::VulkanProvider::DeviceFunctions& dfn = provider.dfn();
    VkDevice device = provider.device();
    ui::vulkan::SingleTypeDescriptorSetAllocator&
        transfer_descriptor_allocator =
            transient_descriptor_layout ==
                    SingleTransientDescriptorLayout::kStorageBufferCompute
                ? transient_descriptor_allocator_storage_buffer_
                : transient_descriptor_allocator_uniform_buffer_;
    descriptor_set = transfer_descriptor_allocator.Allocate(
        GetSingleTransientDescriptorLayout(transient_descriptor_layout), 1);
    bool is_storage_buffer =
        transient_descriptor_layout ==
        SingleTransientDescriptorLayout::kStorageBufferCompute;
    ui::vulkan::LinkedTypeDescriptorSetAllocator&
        transient_descriptor_allocator =
            is_storage_buffer ? transient_descriptor_allocator_storage_buffer_
                              : transient_descriptor_allocator_uniform_buffer_;
    VkDescriptorPoolSize descriptor_count;
    descriptor_count.type = is_storage_buffer
                                ? VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
                                : VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    descriptor_count.descriptorCount = 1;
    descriptor_set = transient_descriptor_allocator.Allocate(
        GetSingleTransientDescriptorLayout(transient_descriptor_layout),
        &descriptor_count, 1);
    if (descriptor_set == VK_NULL_HANDLE) {
      return VK_NULL_HANDLE;
    }
@@ -1756,15 +1780,16 @@ VkDescriptorSet VulkanCommandProcessor::AllocateSingleTransientDescriptor(
}

VkDescriptorSetLayout VulkanCommandProcessor::GetTextureDescriptorSetLayout(
    bool is_samplers, bool is_vertex, size_t binding_count) {
    bool is_vertex, size_t texture_count, size_t sampler_count) {
  size_t binding_count = texture_count + sampler_count;
  if (!binding_count) {
    return descriptor_set_layout_empty_;
  }

  TextureDescriptorSetLayoutKey texture_descriptor_set_layout_key;
  texture_descriptor_set_layout_key.is_samplers = uint32_t(is_samplers);
  texture_descriptor_set_layout_key.texture_count = uint32_t(texture_count);
  texture_descriptor_set_layout_key.sampler_count = uint32_t(sampler_count);
  texture_descriptor_set_layout_key.is_vertex = uint32_t(is_vertex);
  texture_descriptor_set_layout_key.binding_count = uint32_t(binding_count);
  auto it_existing =
      descriptor_set_layouts_textures_.find(texture_descriptor_set_layout_key);
  if (it_existing != descriptor_set_layouts_textures_.end()) {
@@ -1777,16 +1802,22 @@ VkDescriptorSetLayout VulkanCommandProcessor::GetTextureDescriptorSetLayout(

  descriptor_set_layout_bindings_.clear();
  descriptor_set_layout_bindings_.reserve(binding_count);
  VkDescriptorType descriptor_type = is_samplers
                                         ? VK_DESCRIPTOR_TYPE_SAMPLER
                                         : VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
  VkShaderStageFlags stage_flags =
      is_vertex ? guest_shader_vertex_stages_ : VK_SHADER_STAGE_FRAGMENT_BIT;
  for (size_t i = 0; i < binding_count; ++i) {
  for (size_t i = 0; i < texture_count; ++i) {
    VkDescriptorSetLayoutBinding& descriptor_set_layout_binding =
        descriptor_set_layout_bindings_.emplace_back();
    descriptor_set_layout_binding.binding = uint32_t(i);
    descriptor_set_layout_binding.descriptorType = descriptor_type;
    descriptor_set_layout_binding.descriptorType =
        VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
    descriptor_set_layout_binding.descriptorCount = 1;
    descriptor_set_layout_binding.stageFlags = stage_flags;
  }
  for (size_t i = 0; i < sampler_count; ++i) {
    VkDescriptorSetLayoutBinding& descriptor_set_layout_binding =
        descriptor_set_layout_bindings_.emplace_back();
    descriptor_set_layout_binding.binding = uint32_t(texture_count + i);
    descriptor_set_layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
    descriptor_set_layout_binding.descriptorCount = 1;
    descriptor_set_layout_binding.stageFlags = stage_flags;
  }
@@ -1826,40 +1857,24 @@ VulkanCommandProcessor::GetPipelineLayout(size_t texture_count_pixel,
    }
  }

  VkDescriptorSetLayout descriptor_set_layout_textures_pixel =
      GetTextureDescriptorSetLayout(false, false, texture_count_pixel);
  if (descriptor_set_layout_textures_pixel == VK_NULL_HANDLE) {
    XELOGE(
        "Failed to obtain a Vulkan descriptor set layout for {} sampled images "
        "for guest pixel shaders",
        texture_count_pixel);
    return nullptr;
  }
  VkDescriptorSetLayout descriptor_set_layout_samplers_pixel =
      GetTextureDescriptorSetLayout(true, false, sampler_count_pixel);
  if (descriptor_set_layout_samplers_pixel == VK_NULL_HANDLE) {
    XELOGE(
        "Failed to obtain a Vulkan descriptor set layout for {} samplers for "
        "guest pixel shaders",
        sampler_count_pixel);
    return nullptr;
  }
  VkDescriptorSetLayout descriptor_set_layout_textures_vertex =
      GetTextureDescriptorSetLayout(false, true, texture_count_vertex);
      GetTextureDescriptorSetLayout(true, texture_count_vertex,
                                    sampler_count_vertex);
  if (descriptor_set_layout_textures_vertex == VK_NULL_HANDLE) {
    XELOGE(
        "Failed to obtain a Vulkan descriptor set layout for {} sampled images "
        "for guest vertex shaders",
        texture_count_vertex);
        "and {} samplers for guest vertex shaders",
        texture_count_vertex, sampler_count_vertex);
    return nullptr;
  }
  VkDescriptorSetLayout descriptor_set_layout_samplers_vertex =
      GetTextureDescriptorSetLayout(true, true, sampler_count_vertex);
  if (descriptor_set_layout_samplers_vertex == VK_NULL_HANDLE) {
  VkDescriptorSetLayout descriptor_set_layout_textures_pixel =
      GetTextureDescriptorSetLayout(false, texture_count_pixel,
                                    sampler_count_pixel);
  if (descriptor_set_layout_textures_pixel == VK_NULL_HANDLE) {
    XELOGE(
        "Failed to obtain a Vulkan descriptor set layout for {} samplers for "
        "guest vertex shaders",
        sampler_count_vertex);
        "Failed to obtain a Vulkan descriptor set layout for {} sampled images "
        "and {} samplers for guest pixel shaders",
        texture_count_pixel, sampler_count_pixel);
    return nullptr;
  }
@@ -1872,12 +1887,8 @@ VulkanCommandProcessor::GetPipelineLayout(size_t texture_count_pixel,
  descriptor_set_layouts[SpirvShaderTranslator::kDescriptorSetConstants] =
      descriptor_set_layout_constants_;
  // Mutable layouts.
  descriptor_set_layouts[SpirvShaderTranslator::kDescriptorSetSamplersVertex] =
      descriptor_set_layout_samplers_vertex;
  descriptor_set_layouts[SpirvShaderTranslator::kDescriptorSetTexturesVertex] =
      descriptor_set_layout_textures_vertex;
  descriptor_set_layouts[SpirvShaderTranslator::kDescriptorSetSamplersPixel] =
      descriptor_set_layout_samplers_pixel;
  descriptor_set_layouts[SpirvShaderTranslator::kDescriptorSetTexturesPixel] =
      descriptor_set_layout_textures_pixel;
@@ -1908,9 +1919,7 @@ VulkanCommandProcessor::GetPipelineLayout(size_t texture_count_pixel,
          std::piecewise_construct, std::forward_as_tuple(pipeline_layout_key),
          std::forward_as_tuple(pipeline_layout,
                                descriptor_set_layout_textures_vertex,
                                descriptor_set_layout_samplers_vertex,
                                descriptor_set_layout_textures_pixel,
                                descriptor_set_layout_samplers_pixel));
                                descriptor_set_layout_textures_pixel));
  // unordered_map insertion doesn't invalidate element references.
  return &emplaced_pair.first->second;
}
@@ -2309,13 +2318,6 @@ bool VulkanCommandProcessor::IssueDraw(xenos::PrimitiveType prim_type,
    // set N if set layouts 0 through N are compatible).
    uint32_t descriptor_sets_kept =
        uint32_t(SpirvShaderTranslator::kDescriptorSetCount);
    if (current_guest_graphics_pipeline_layout_
            ->descriptor_set_layout_samplers_vertex_ref() !=
        pipeline_layout->descriptor_set_layout_samplers_vertex_ref()) {
      descriptor_sets_kept = std::min(
          descriptor_sets_kept,
          uint32_t(SpirvShaderTranslator::kDescriptorSetSamplersVertex));
    }
    if (current_guest_graphics_pipeline_layout_
            ->descriptor_set_layout_textures_vertex_ref() !=
        pipeline_layout->descriptor_set_layout_textures_vertex_ref()) {
@@ -2323,13 +2325,6 @@ bool VulkanCommandProcessor::IssueDraw(xenos::PrimitiveType prim_type,
          descriptor_sets_kept,
          uint32_t(SpirvShaderTranslator::kDescriptorSetTexturesVertex));
    }
    if (current_guest_graphics_pipeline_layout_
            ->descriptor_set_layout_samplers_pixel_ref() !=
        pipeline_layout->descriptor_set_layout_samplers_pixel_ref()) {
      descriptor_sets_kept = std::min(
          descriptor_sets_kept,
          uint32_t(SpirvShaderTranslator::kDescriptorSetSamplersPixel));
    }
    if (current_guest_graphics_pipeline_layout_
            ->descriptor_set_layout_textures_pixel_ref() !=
        pipeline_layout->descriptor_set_layout_textures_pixel_ref()) {
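These checks follow Vulkan's pipeline layout compatibility rule: a previously bound set N stays valid only while set layouts 0 through N match between the old and the new pipeline layout, so invalidation starts at the first mismatching set. A self-contained sketch of that idea (a hypothetical helper, not the member code above):

#include <cstdint>

// Given per-set "layout matches" flags in set index order, returns how many
// leading descriptor sets survive a pipeline layout switch; everything from
// the first mismatch onwards must be rebound.
uint32_t CountCompatibleLeadingSets(const bool* set_layout_matches,
                                    uint32_t set_count) {
  uint32_t kept = 0;
  while (kept < set_count && set_layout_matches[kept]) {
    ++kept;
  }
  return kept;
}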
@@ -3063,8 +3058,7 @@ bool VulkanCommandProcessor::EndSubmission(bool is_swap) {
void VulkanCommandProcessor::ClearTransientDescriptorPools() {
  texture_transient_descriptor_sets_free_.clear();
  texture_transient_descriptor_sets_used_.clear();
  transient_descriptor_allocator_sampler_.Reset();
  transient_descriptor_allocator_sampled_image_.Reset();
  transient_descriptor_allocator_textures_.Reset();

  constants_transient_descriptors_free_.clear();
  constants_transient_descriptors_used_.clear();
@@ -3719,9 +3713,7 @@ bool VulkanCommandProcessor::UpdateBindings(const VulkanShader* vertex_shader,
  }
  // TODO(Triang3l): Reuse texture and sampler bindings if not changed.
  current_graphics_descriptor_set_values_up_to_date_ &=
      ~((UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetSamplersVertex) |
        (UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetTexturesVertex) |
        (UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetSamplersPixel) |
      ~((UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetTexturesVertex) |
        (UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetTexturesPixel));

  // Make sure new descriptor sets are bound to the command buffer.
@@ -3731,39 +3723,21 @@ bool VulkanCommandProcessor::UpdateBindings(const VulkanShader* vertex_shader,

  // Fill the texture and sampler write image infos.

  bool write_vertex_samplers =
      sampler_count_vertex &&
      !(current_graphics_descriptor_set_values_up_to_date_ &
        (UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetSamplersVertex));
  bool write_vertex_textures =
      texture_count_vertex &&
      (texture_count_vertex || sampler_count_vertex) &&
      !(current_graphics_descriptor_set_values_up_to_date_ &
        (UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetTexturesVertex));
  bool write_pixel_samplers =
      sampler_count_pixel &&
      !(current_graphics_descriptor_set_values_up_to_date_ &
        (UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetSamplersPixel));
  bool write_pixel_textures =
      texture_count_pixel &&
      (texture_count_pixel || sampler_count_pixel) &&
      !(current_graphics_descriptor_set_values_up_to_date_ &
        (UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetTexturesPixel));
  descriptor_write_image_info_.clear();
  descriptor_write_image_info_.reserve(
      (write_vertex_samplers ? sampler_count_vertex : 0) +
      (write_vertex_textures ? texture_count_vertex : 0) +
      (write_pixel_samplers ? sampler_count_pixel : 0) +
      (write_pixel_textures ? texture_count_pixel : 0));
  size_t vertex_sampler_image_info_offset = descriptor_write_image_info_.size();
  if (write_vertex_samplers) {
    for (const std::pair<VulkanTextureCache::SamplerParameters, VkSampler>&
             sampler_pair : current_samplers_vertex_) {
      VkDescriptorImageInfo& descriptor_image_info =
          descriptor_write_image_info_.emplace_back();
      descriptor_image_info.sampler = sampler_pair.second;
    }
  }
      (write_vertex_textures ? texture_count_vertex + sampler_count_vertex
                             : 0) +
      (write_pixel_textures ? texture_count_pixel + sampler_count_pixel : 0));
  size_t vertex_texture_image_info_offset = descriptor_write_image_info_.size();
  if (write_vertex_textures) {
  if (write_vertex_textures && texture_count_vertex) {
    for (const VulkanShader::TextureBinding& texture_binding :
         textures_vertex) {
      VkDescriptorImageInfo& descriptor_image_info =
@@ -3776,17 +3750,17 @@ bool VulkanCommandProcessor::UpdateBindings(const VulkanShader* vertex_shader,
          VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    }
  }
  size_t pixel_sampler_image_info_offset = descriptor_write_image_info_.size();
  if (write_pixel_samplers) {
  size_t vertex_sampler_image_info_offset = descriptor_write_image_info_.size();
  if (write_vertex_textures && sampler_count_vertex) {
    for (const std::pair<VulkanTextureCache::SamplerParameters, VkSampler>&
             sampler_pair : current_samplers_pixel_) {
             sampler_pair : current_samplers_vertex_) {
      VkDescriptorImageInfo& descriptor_image_info =
          descriptor_write_image_info_.emplace_back();
      descriptor_image_info.sampler = sampler_pair.second;
    }
  }
  size_t pixel_texture_image_info_offset = descriptor_write_image_info_.size();
  if (write_pixel_textures) {
  if (write_pixel_textures && texture_count_pixel) {
    for (const VulkanShader::TextureBinding& texture_binding :
         *textures_pixel) {
      VkDescriptorImageInfo& descriptor_image_info =
@@ -3799,14 +3773,23 @@ bool VulkanCommandProcessor::UpdateBindings(const VulkanShader* vertex_shader,
          VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    }
  }
  size_t pixel_sampler_image_info_offset = descriptor_write_image_info_.size();
  if (write_pixel_textures && sampler_count_pixel) {
    for (const std::pair<VulkanTextureCache::SamplerParameters, VkSampler>&
             sampler_pair : current_samplers_pixel_) {
      VkDescriptorImageInfo& descriptor_image_info =
          descriptor_write_image_info_.emplace_back();
      descriptor_image_info.sampler = sampler_pair.second;
    }
  }

  // Write the new descriptor sets.

  // Consecutive bindings updated via a single VkWriteDescriptorSet must have
  // identical stage flags, but for the constants they vary.
  // identical stage flags, but for the constants they vary. Plus vertex and
  // pixel texture images and samplers.
  std::array<VkWriteDescriptorSet,
             SpirvShaderTranslator::kDescriptorSetCount - 1 +
                 SpirvShaderTranslator::kConstantBufferCount>
                 SpirvShaderTranslator::kConstantBufferCount + 2 * 2>
      write_descriptor_sets;
  uint32_t write_descriptor_set_count = 0;
  uint32_t write_descriptor_set_bits = 0;
@@ -3822,10 +3805,13 @@ bool VulkanCommandProcessor::UpdateBindings(const VulkanShader* vertex_shader,
    constants_descriptor_set = constants_transient_descriptors_free_.back();
    constants_transient_descriptors_free_.pop_back();
  } else {
    VkDescriptorPoolSize constants_descriptor_count;
    constants_descriptor_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    constants_descriptor_count.descriptorCount =
        SpirvShaderTranslator::kConstantBufferCount;
    constants_descriptor_set =
        transient_descriptor_allocator_uniform_buffer_.Allocate(
            descriptor_set_layout_constants_,
            SpirvShaderTranslator::kConstantBufferCount);
            descriptor_set_layout_constants_, &constants_descriptor_count, 1);
    if (constants_descriptor_set == VK_NULL_HANDLE) {
      return false;
    }
@@ -3854,81 +3840,47 @@ bool VulkanCommandProcessor::UpdateBindings(const VulkanShader* vertex_shader,
        [SpirvShaderTranslator::kDescriptorSetConstants] =
            constants_descriptor_set;
  }
  // Vertex shader samplers.
  if (write_vertex_samplers) {
    VkWriteDescriptorSet& write_samplers =
        write_descriptor_sets[write_descriptor_set_count++];
    if (!WriteTransientTextureBindings(
            true, true, sampler_count_vertex,
            current_guest_graphics_pipeline_layout_
                ->descriptor_set_layout_samplers_vertex_ref(),
            descriptor_write_image_info_.data() +
                vertex_sampler_image_info_offset,
            write_samplers)) {
      return false;
    }
    write_descriptor_set_bits |=
        UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetSamplersVertex;
    current_graphics_descriptor_sets_
        [SpirvShaderTranslator::kDescriptorSetSamplersVertex] =
            write_samplers.dstSet;
  }
  // Vertex shader textures.
  // Vertex shader textures and samplers.
  if (write_vertex_textures) {
    VkWriteDescriptorSet& write_textures =
        write_descriptor_sets[write_descriptor_set_count++];
    if (!WriteTransientTextureBindings(
            false, true, texture_count_vertex,
            current_guest_graphics_pipeline_layout_
                ->descriptor_set_layout_textures_vertex_ref(),
            descriptor_write_image_info_.data() +
                vertex_texture_image_info_offset,
            write_textures)) {
    VkWriteDescriptorSet* write_textures =
        write_descriptor_sets.data() + write_descriptor_set_count;
    uint32_t texture_descriptor_set_write_count = WriteTransientTextureBindings(
        true, texture_count_vertex, sampler_count_vertex,
        current_guest_graphics_pipeline_layout_
            ->descriptor_set_layout_textures_vertex_ref(),
        descriptor_write_image_info_.data() + vertex_texture_image_info_offset,
        descriptor_write_image_info_.data() + vertex_sampler_image_info_offset,
        write_textures);
    if (!texture_descriptor_set_write_count) {
      return false;
    }
    write_descriptor_set_count += texture_descriptor_set_write_count;
    write_descriptor_set_bits |=
        UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetTexturesVertex;
    current_graphics_descriptor_sets_
        [SpirvShaderTranslator::kDescriptorSetTexturesVertex] =
            write_textures.dstSet;
            write_textures[0].dstSet;
  }
  // Pixel shader samplers.
  if (write_pixel_samplers) {
    VkWriteDescriptorSet& write_samplers =
        write_descriptor_sets[write_descriptor_set_count++];
    if (!WriteTransientTextureBindings(
            true, false, sampler_count_pixel,
            current_guest_graphics_pipeline_layout_
                ->descriptor_set_layout_samplers_pixel_ref(),
            descriptor_write_image_info_.data() +
                pixel_sampler_image_info_offset,
            write_samplers)) {
      return false;
    }
    write_descriptor_set_bits |=
        UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetSamplersPixel;
    current_graphics_descriptor_sets_
        [SpirvShaderTranslator::kDescriptorSetSamplersPixel] =
            write_samplers.dstSet;
  }
  // Pixel shader textures.
  // Pixel shader textures and samplers.
  if (write_pixel_textures) {
    VkWriteDescriptorSet& write_textures =
        write_descriptor_sets[write_descriptor_set_count++];
    if (!WriteTransientTextureBindings(
            false, false, texture_count_pixel,
            current_guest_graphics_pipeline_layout_
                ->descriptor_set_layout_textures_pixel_ref(),
            descriptor_write_image_info_.data() +
                pixel_texture_image_info_offset,
            write_textures)) {
    VkWriteDescriptorSet* write_textures =
        write_descriptor_sets.data() + write_descriptor_set_count;
    uint32_t texture_descriptor_set_write_count = WriteTransientTextureBindings(
        false, texture_count_pixel, sampler_count_pixel,
        current_guest_graphics_pipeline_layout_
            ->descriptor_set_layout_textures_pixel_ref(),
        descriptor_write_image_info_.data() + pixel_texture_image_info_offset,
        descriptor_write_image_info_.data() + pixel_sampler_image_info_offset,
        write_textures);
    if (!texture_descriptor_set_write_count) {
      return false;
    }
    write_descriptor_set_count += texture_descriptor_set_write_count;
    write_descriptor_set_bits |=
        UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetTexturesPixel;
    current_graphics_descriptor_sets_
        [SpirvShaderTranslator::kDescriptorSetTexturesPixel] =
            write_textures.dstSet;
            write_textures[0].dstSet;
  }
  // Write.
  if (write_descriptor_set_count) {
@@ -3943,19 +3895,11 @@ bool VulkanCommandProcessor::UpdateBindings(const VulkanShader* vertex_shader,
  // Bind the new descriptor sets.
  uint32_t descriptor_sets_needed =
      (UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetCount) - 1;
  if (!sampler_count_vertex) {
    descriptor_sets_needed &=
        ~(UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetSamplersVertex);
  }
  if (!texture_count_vertex) {
  if (!texture_count_vertex && !sampler_count_vertex) {
    descriptor_sets_needed &=
        ~(UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetTexturesVertex);
  }
  if (!sampler_count_pixel) {
    descriptor_sets_needed &=
        ~(UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetSamplersPixel);
  }
  if (!texture_count_pixel) {
  if (!texture_count_pixel && !sampler_count_pixel) {
    descriptor_sets_needed &=
        ~(UINT32_C(1) << SpirvShaderTranslator::kDescriptorSetTexturesPixel);
  }
@@ -4038,17 +3982,20 @@ uint8_t* VulkanCommandProcessor::WriteTransientUniformBufferBinding(
  return mapping;
}

bool VulkanCommandProcessor::WriteTransientTextureBindings(
    bool is_samplers, bool is_vertex, uint32_t binding_count,
uint32_t VulkanCommandProcessor::WriteTransientTextureBindings(
    bool is_vertex, uint32_t texture_count, uint32_t sampler_count,
    VkDescriptorSetLayout descriptor_set_layout,
    const VkDescriptorImageInfo* image_info,
    VkWriteDescriptorSet& write_descriptor_set_out) {
  assert_not_zero(binding_count);
    const VkDescriptorImageInfo* texture_image_info,
    const VkDescriptorImageInfo* sampler_image_info,
    VkWriteDescriptorSet* descriptor_set_writes_out) {
  assert_true(frame_open_);
  if (!texture_count && !sampler_count) {
    return 0;
  }
  TextureDescriptorSetLayoutKey texture_descriptor_set_layout_key;
  texture_descriptor_set_layout_key.is_samplers = uint32_t(is_samplers);
  texture_descriptor_set_layout_key.texture_count = texture_count;
  texture_descriptor_set_layout_key.sampler_count = sampler_count;
  texture_descriptor_set_layout_key.is_vertex = uint32_t(is_vertex);
  texture_descriptor_set_layout_key.binding_count = binding_count;
  VkDescriptorSet texture_descriptor_set;
  auto textures_free_it = texture_transient_descriptor_sets_free_.find(
      texture_descriptor_set_layout_key);
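A sketch of a call site for the new signature (the free-standing declaration below merely mirrors the member function so the fragment stands alone): up to two writes come back, sampled images first and samplers second, and however many were produced are flushed with one vkUpdateDescriptorSets call.

#include <cstdint>

#include <vulkan/vulkan.h>

// Assumed mirror of the member function's signature, for illustration only.
uint32_t WriteTransientTextureBindings(
    bool is_vertex, uint32_t texture_count, uint32_t sampler_count,
    VkDescriptorSetLayout descriptor_set_layout,
    const VkDescriptorImageInfo* texture_image_info,
    const VkDescriptorImageInfo* sampler_image_info,
    VkWriteDescriptorSet* descriptor_set_writes_out);

void FlushPixelTextureSet(VkDevice device, VkDescriptorSetLayout layout,
                          uint32_t texture_count, uint32_t sampler_count,
                          const VkDescriptorImageInfo* texture_image_info,
                          const VkDescriptorImageInfo* sampler_image_info) {
  VkWriteDescriptorSet writes[2];
  uint32_t write_count = WriteTransientTextureBindings(
      false, texture_count, sampler_count, layout, texture_image_info,
      sampler_image_info, writes);
  if (write_count) {
    vkUpdateDescriptorSets(device, write_count, writes, 0, nullptr);
  }
}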
@@ -4057,12 +4004,26 @@ bool VulkanCommandProcessor::WriteTransientTextureBindings(
    texture_descriptor_set = textures_free_it->second.back();
    textures_free_it->second.pop_back();
  } else {
    texture_descriptor_set =
        (is_samplers ? transient_descriptor_allocator_sampler_
                     : transient_descriptor_allocator_sampled_image_)
            .Allocate(descriptor_set_layout, binding_count);
    std::array<VkDescriptorPoolSize, 2> texture_descriptor_counts;
    uint32_t texture_descriptor_counts_count = 0;
    if (texture_count) {
      VkDescriptorPoolSize& texture_descriptor_count =
          texture_descriptor_counts[texture_descriptor_counts_count++];
      texture_descriptor_count.type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
      texture_descriptor_count.descriptorCount = texture_count;
    }
    if (sampler_count) {
      VkDescriptorPoolSize& texture_descriptor_count =
          texture_descriptor_counts[texture_descriptor_counts_count++];
      texture_descriptor_count.type = VK_DESCRIPTOR_TYPE_SAMPLER;
      texture_descriptor_count.descriptorCount = sampler_count;
    }
    assert_not_zero(texture_descriptor_counts_count);
    texture_descriptor_set = transient_descriptor_allocator_textures_.Allocate(
        descriptor_set_layout, texture_descriptor_counts.data(),
        texture_descriptor_counts_count);
    if (texture_descriptor_set == VK_NULL_HANDLE) {
      return false;
      return 0;
    }
  }
  UsedTextureTransientDescriptorSet& used_texture_descriptor_set =
@@ -4070,19 +4031,37 @@ bool VulkanCommandProcessor::WriteTransientTextureBindings(
  used_texture_descriptor_set.frame = frame_current_;
  used_texture_descriptor_set.layout = texture_descriptor_set_layout_key;
  used_texture_descriptor_set.set = texture_descriptor_set;
  write_descriptor_set_out.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  write_descriptor_set_out.pNext = nullptr;
  write_descriptor_set_out.dstSet = texture_descriptor_set;
  write_descriptor_set_out.dstBinding = 0;
  write_descriptor_set_out.dstArrayElement = 0;
  write_descriptor_set_out.descriptorCount = binding_count;
  write_descriptor_set_out.descriptorType =
      is_samplers ? VK_DESCRIPTOR_TYPE_SAMPLER
                  : VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
  write_descriptor_set_out.pImageInfo = image_info;
  write_descriptor_set_out.pBufferInfo = nullptr;
  write_descriptor_set_out.pTexelBufferView = nullptr;
  return true;
  uint32_t descriptor_set_write_count = 0;
  if (texture_count) {
    VkWriteDescriptorSet& descriptor_set_write =
        descriptor_set_writes_out[descriptor_set_write_count++];
    descriptor_set_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_set_write.pNext = nullptr;
    descriptor_set_write.dstSet = texture_descriptor_set;
    descriptor_set_write.dstBinding = 0;
    descriptor_set_write.dstArrayElement = 0;
    descriptor_set_write.descriptorCount = texture_count;
    descriptor_set_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
    descriptor_set_write.pImageInfo = texture_image_info;
    descriptor_set_write.pBufferInfo = nullptr;
    descriptor_set_write.pTexelBufferView = nullptr;
  }
  if (sampler_count) {
    VkWriteDescriptorSet& descriptor_set_write =
        descriptor_set_writes_out[descriptor_set_write_count++];
    descriptor_set_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_set_write.pNext = nullptr;
    descriptor_set_write.dstSet = texture_descriptor_set;
    descriptor_set_write.dstBinding = texture_count;
    descriptor_set_write.dstArrayElement = 0;
    descriptor_set_write.descriptorCount = sampler_count;
    descriptor_set_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
    descriptor_set_write.pImageInfo = sampler_image_info;
    descriptor_set_write.pBufferInfo = nullptr;
    descriptor_set_write.pTexelBufferView = nullptr;
  }
  assert_not_zero(descriptor_set_write_count);
  return descriptor_set_write_count;
}

}  // namespace vulkan
@@ -36,7 +36,7 @@
#include "xenia/gpu/vulkan/vulkan_texture_cache.h"
#include "xenia/gpu/xenos.h"
#include "xenia/kernel/kernel_state.h"
#include "xenia/ui/vulkan/single_type_descriptor_set_allocator.h"
#include "xenia/ui/vulkan/linked_type_descriptor_set_allocator.h"
#include "xenia/ui/vulkan/vulkan_presenter.h"
#include "xenia/ui/vulkan/vulkan_provider.h"
#include "xenia/ui/vulkan/vulkan_upload_buffer_pool.h"
@@ -227,9 +227,9 @@ class VulkanCommandProcessor : public CommandProcessor {
                                           VkDescriptorSet& descriptor_set_out);

  // The returned reference is valid until a cache clear.
  VkDescriptorSetLayout GetTextureDescriptorSetLayout(bool is_samplers,
                                                      bool is_vertex,
                                                      size_t binding_count);
  VkDescriptorSetLayout GetTextureDescriptorSetLayout(bool is_vertex,
                                                      size_t texture_count,
                                                      size_t sampler_count);
  // The returned reference is valid until a cache clear.
  const VulkanPipelineCache::PipelineLayoutProvider* GetPipelineLayout(
      size_t texture_count_pixel, size_t sampler_count_pixel,
@@ -294,12 +294,11 @@ class VulkanCommandProcessor : public CommandProcessor {
  union TextureDescriptorSetLayoutKey {
    uint32_t key;
    struct {
      // 0 - sampled image descriptors, 1 - sampler descriptors.
      uint32_t is_samplers : 1;
      // If texture and sampler counts are both 0, use
      // descriptor_set_layout_empty_ instead as these are owning references.
      uint32_t texture_count : 16;
      uint32_t sampler_count : 15;
      uint32_t is_vertex : 1;
      // For 0, use descriptor_set_layout_empty_ instead as these are owning
      // references.
      uint32_t binding_count : 30;
    };

    TextureDescriptorSetLayoutKey() : key(0) {
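The reworked key drops is_samplers and the single binding_count in favor of the two separate counts plus the stage bit; together they fill the 32-bit key exactly, which keeps map lookups a single integer comparison. A trivial compile-time check of that bit budget:

// 16 bits of texture count, 15 bits of sampler count and 1 stage bit exactly
// cover the uint32_t that aliases the bitfields.
static_assert(16 + 15 + 1 == 32,
              "TextureDescriptorSetLayoutKey bitfields must fill one uint32_t");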
@@ -350,40 +349,26 @@ class VulkanCommandProcessor : public CommandProcessor {
    explicit PipelineLayout(
        VkPipelineLayout pipeline_layout,
        VkDescriptorSetLayout descriptor_set_layout_textures_vertex_ref,
        VkDescriptorSetLayout descriptor_set_layout_samplers_vertex_ref,
        VkDescriptorSetLayout descriptor_set_layout_textures_pixel_ref,
        VkDescriptorSetLayout descriptor_set_layout_samplers_pixel_ref)
        VkDescriptorSetLayout descriptor_set_layout_textures_pixel_ref)
        : pipeline_layout_(pipeline_layout),
          descriptor_set_layout_textures_vertex_ref_(
              descriptor_set_layout_textures_vertex_ref),
          descriptor_set_layout_samplers_vertex_ref_(
              descriptor_set_layout_samplers_vertex_ref),
          descriptor_set_layout_textures_pixel_ref_(
              descriptor_set_layout_textures_pixel_ref),
          descriptor_set_layout_samplers_pixel_ref_(
              descriptor_set_layout_samplers_pixel_ref) {}
              descriptor_set_layout_textures_pixel_ref) {}
    VkPipelineLayout GetPipelineLayout() const override {
      return pipeline_layout_;
    }
    VkDescriptorSetLayout descriptor_set_layout_textures_vertex_ref() const {
      return descriptor_set_layout_textures_vertex_ref_;
    }
    VkDescriptorSetLayout descriptor_set_layout_samplers_vertex_ref() const {
      return descriptor_set_layout_samplers_vertex_ref_;
    }
    VkDescriptorSetLayout descriptor_set_layout_textures_pixel_ref() const {
      return descriptor_set_layout_textures_pixel_ref_;
    }
    VkDescriptorSetLayout descriptor_set_layout_samplers_pixel_ref() const {
      return descriptor_set_layout_samplers_pixel_ref_;
    }

   private:
    VkPipelineLayout pipeline_layout_;
    VkDescriptorSetLayout descriptor_set_layout_textures_vertex_ref_;
    VkDescriptorSetLayout descriptor_set_layout_samplers_vertex_ref_;
    VkDescriptorSetLayout descriptor_set_layout_textures_pixel_ref_;
    VkDescriptorSetLayout descriptor_set_layout_samplers_pixel_ref_;
  };

  struct UsedSingleTransientDescriptor {
@@ -454,16 +439,20 @@ class VulkanCommandProcessor : public CommandProcessor {
                             uint32_t used_texture_mask);
  bool UpdateBindings(const VulkanShader* vertex_shader,
                      const VulkanShader* pixel_shader);
  // Allocates a descriptor set and fills the VkWriteDescriptorSet structure.
  // The descriptor set layout must be the one for the given is_samplers,
  // is_vertex, binding_count (from GetTextureDescriptorSetLayout - may be
  // Allocates a descriptor set and fills one or two VkWriteDescriptorSet
  // structure instances (for images and samplers).
  // The descriptor set layout must be the one for the given is_vertex,
  // texture_count, sampler_count (from GetTextureDescriptorSetLayout - may be
  // already available at the moment of the call, no need to locate it again).
  // Returns whether the allocation was successful.
  bool WriteTransientTextureBindings(
      bool is_samplers, bool is_vertex, uint32_t binding_count,
  // Returns how many VkWriteDescriptorSet structure instances have been
  // written, or 0 if there was a failure to allocate the descriptor set or no
  // bindings were requested.
  uint32_t WriteTransientTextureBindings(
      bool is_vertex, uint32_t texture_count, uint32_t sampler_count,
      VkDescriptorSetLayout descriptor_set_layout,
      const VkDescriptorImageInfo* image_info,
      VkWriteDescriptorSet& write_descriptor_set_out);
      const VkDescriptorImageInfo* texture_image_info,
      const VkDescriptorImageInfo* sampler_image_info,
      VkWriteDescriptorSet* descriptor_set_writes_out);

  bool device_lost_ = false;
@@ -540,9 +529,15 @@ class VulkanCommandProcessor : public CommandProcessor {
                     PipelineLayoutKey::Hasher>
      pipeline_layouts_;

  ui::vulkan::SingleTypeDescriptorSetAllocator
  // No specific reason for 32768, just the "too much" descriptor count from
  // Direct3D 12 PIX warnings.
  static constexpr uint32_t kLinkedTypeDescriptorPoolSetCount = 32768;
  static const VkDescriptorPoolSize kDescriptorPoolSizeUniformBuffer;
  static const VkDescriptorPoolSize kDescriptorPoolSizeStorageBuffer;
  static const VkDescriptorPoolSize kDescriptorPoolSizeTextures[2];
  ui::vulkan::LinkedTypeDescriptorSetAllocator
      transient_descriptor_allocator_uniform_buffer_;
  ui::vulkan::SingleTypeDescriptorSetAllocator
  ui::vulkan::LinkedTypeDescriptorSetAllocator
      transient_descriptor_allocator_storage_buffer_;
  std::deque<UsedSingleTransientDescriptor> single_transient_descriptors_used_;
  std::array<std::vector<VkDescriptorSet>,
@@ -553,10 +548,8 @@ class VulkanCommandProcessor : public CommandProcessor {
      constants_transient_descriptors_used_;
  std::vector<VkDescriptorSet> constants_transient_descriptors_free_;

  ui::vulkan::SingleTypeDescriptorSetAllocator
      transient_descriptor_allocator_sampled_image_;
  ui::vulkan::SingleTypeDescriptorSetAllocator
      transient_descriptor_allocator_sampler_;
  ui::vulkan::LinkedTypeDescriptorSetAllocator
      transient_descriptor_allocator_textures_;
  std::deque<UsedTextureTransientDescriptorSet>
      texture_transient_descriptor_sets_used_;
  std::unordered_map<TextureDescriptorSetLayoutKey,
@ -0,0 +1,415 @@
|
|||
/**
|
||||
******************************************************************************
|
||||
* Xenia : Xbox 360 Emulator Research Project *
|
||||
******************************************************************************
|
||||
* Copyright 2022 Ben Vanik. All rights reserved. *
|
||||
* Released under the BSD license - see LICENSE in the root for more details. *
|
||||
******************************************************************************
|
||||
*/
|
||||
|
||||
#include "xenia/ui/vulkan/linked_type_descriptor_set_allocator.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <iterator>
|
||||
#include <utility>
|
||||
|
||||
#include "xenia/base/assert.h"
|
||||
#include "xenia/base/logging.h"
|
||||
#include "xenia/ui/vulkan/vulkan_util.h"
|
||||
|
||||
namespace xe {
|
||||
namespace ui {
|
||||
namespace vulkan {
|
||||
|
||||
void LinkedTypeDescriptorSetAllocator::Reset() {
|
||||
const ui::vulkan::VulkanProvider::DeviceFunctions& dfn = provider_.dfn();
|
||||
VkDevice device = provider_.device();
|
||||
ui::vulkan::util::DestroyAndNullHandle(dfn.vkDestroyDescriptorPool, device,
|
||||
page_usable_latest_.pool);
|
||||
page_usable_latest_.descriptors_remaining.reset();
|
||||
for (const std::pair<const uint32_t, Page>& page_pair : pages_usable_) {
|
||||
dfn.vkDestroyDescriptorPool(device, page_pair.second.pool, nullptr);
|
||||
}
|
||||
pages_usable_.clear();
|
||||
for (VkDescriptorPool pool : pages_full_) {
|
||||
dfn.vkDestroyDescriptorPool(device, pool, nullptr);
|
||||
}
|
||||
pages_full_.clear();
|
||||
}
|
||||
|
||||
VkDescriptorSet LinkedTypeDescriptorSetAllocator::Allocate(
|
||||
VkDescriptorSetLayout descriptor_set_layout,
|
||||
const VkDescriptorPoolSize* descriptor_counts,
|
||||
uint32_t descriptor_type_count) {
|
||||
assert_not_zero(descriptor_type_count);
|
||||
#ifndef NDEBUG
|
||||
for (uint32_t i = 0; i < descriptor_type_count; ++i) {
|
||||
const VkDescriptorPoolSize& descriptor_count_for_type =
|
||||
descriptor_counts[i];
|
||||
assert_not_zero(descriptor_count_for_type.descriptorCount);
|
||||
for (uint32_t j = 0; j < i; ++j) {
|
||||
assert_true(descriptor_counts[j].type != descriptor_count_for_type.type);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
const ui::vulkan::VulkanProvider::DeviceFunctions& dfn = provider_.dfn();
|
||||
VkDevice device = provider_.device();
|
||||
|
||||
VkDescriptorSetAllocateInfo descriptor_set_allocate_info;
|
||||
descriptor_set_allocate_info.sType =
|
||||
VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
|
||||
descriptor_set_allocate_info.pNext = nullptr;
|
||||
descriptor_set_allocate_info.descriptorSetCount = 1;
|
||||
descriptor_set_allocate_info.pSetLayouts = &descriptor_set_layout;
|
||||
VkDescriptorSet descriptor_set;
|
||||
|
||||
// Check if more descriptors have been requested than a page can hold, or
|
||||
// descriptors of types not provided by this allocator, and if that's the
|
||||
// case, create a dedicated pool for this allocation.
|
||||
bool dedicated_descriptor_pool_needed = false;
|
||||
for (uint32_t i = 0; i < descriptor_type_count; ++i) {
|
||||
const VkDescriptorPoolSize& descriptor_count_for_type =
|
||||
descriptor_counts[i];
|
||||
// If the type is one that's not supported by the allocator, a dedicated
|
||||
// pool is required. If it's supported, and the allocator has large enough
|
||||
// pools to hold the requested number of descriptors,
|
||||
// dedicated_descriptor_pool_needed will be set to false for this iteration,
|
||||
// and the loop will continue. Otherwise, if that doesn't happen, a
|
||||
// dedicated pool is required.
|
||||
dedicated_descriptor_pool_needed = true;
|
||||
for (uint32_t j = 0; j < descriptor_pool_size_count_; ++j) {
|
||||
const VkDescriptorPoolSize& descriptor_pool_size =
|
||||
descriptor_pool_sizes_[j];
|
||||
if (descriptor_count_for_type.type != descriptor_pool_size.type) {
|
||||
continue;
|
||||
}
|
||||
if (descriptor_count_for_type.descriptorCount <=
|
||||
descriptor_pool_size.descriptorCount) {
|
||||
// For this type, pages can hold enough descriptors.
|
||||
dedicated_descriptor_pool_needed = false;
|
||||
}
|
||||
break;
|
||||
}
|
||||
if (dedicated_descriptor_pool_needed) {
|
||||
// For at least one requested type, pages can't hold enough descriptors.
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (dedicated_descriptor_pool_needed) {
|
||||
VkDescriptorPoolCreateInfo dedicated_descriptor_pool_create_info;
|
||||
dedicated_descriptor_pool_create_info.sType =
|
||||
VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
|
||||
dedicated_descriptor_pool_create_info.pNext = nullptr;
|
||||
dedicated_descriptor_pool_create_info.flags = 0;
|
||||
dedicated_descriptor_pool_create_info.maxSets = 1;
|
||||
dedicated_descriptor_pool_create_info.poolSizeCount = descriptor_type_count;
|
||||
dedicated_descriptor_pool_create_info.pPoolSizes = descriptor_counts;
|
||||
VkDescriptorPool dedicated_descriptor_pool;
|
||||
if (dfn.vkCreateDescriptorPool(
|
||||
device, &dedicated_descriptor_pool_create_info, nullptr,
|
||||
&dedicated_descriptor_pool) != VK_SUCCESS) {
|
||||
XELOGE(
|
||||
"LinkedTypeDescriptorSetAllocator: Failed to create a dedicated "
|
||||
"descriptor pool for a descriptor set that is too large for a pool "
|
||||
"page");
|
||||
return VK_NULL_HANDLE;
|
||||
}
|
||||
descriptor_set_allocate_info.descriptorPool = dedicated_descriptor_pool;
|
||||
if (dfn.vkAllocateDescriptorSets(device, &descriptor_set_allocate_info,
|
||||
&descriptor_set) != VK_SUCCESS) {
|
||||
XELOGE(
|
||||
"LinkedTypeDescriptorSetAllocator: Failed to allocate descriptors in "
|
||||
"a dedicated pool");
|
||||
dfn.vkDestroyDescriptorPool(device, dedicated_descriptor_pool, nullptr);
|
||||
return VK_NULL_HANDLE;
|
||||
}
|
||||
pages_full_.push_back(dedicated_descriptor_pool);
|
||||
return descriptor_set;
|
||||
}
|
||||
|
||||
// Try allocating from the latest page an allocation has happened from, to
|
||||
// avoid detaching from the map and re-attaching for every allocation.
|
||||
if (page_usable_latest_.pool != VK_NULL_HANDLE) {
|
||||
assert_not_zero(page_usable_latest_.descriptor_sets_remaining);
|
||||
bool allocate_from_latest_page = true;
|
||||
bool latest_page_becomes_full =
|
||||
page_usable_latest_.descriptor_sets_remaining == 1;
|
||||
for (uint32_t i = 0; i < descriptor_type_count; ++i) {
|
||||
const VkDescriptorPoolSize& descriptor_count_for_type =
|
||||
descriptor_counts[i];
|
||||
for (uint32_t j = 0; j < descriptor_pool_size_count_; ++j) {
|
||||
const VkDescriptorPoolSize& descriptors_remaining_for_type =
|
||||
page_usable_latest_.descriptors_remaining[j];
|
||||
if (descriptor_count_for_type.type !=
|
||||
descriptors_remaining_for_type.type) {
|
||||
continue;
|
||||
}
|
||||
if (descriptor_count_for_type.descriptorCount >=
|
||||
descriptors_remaining_for_type.descriptorCount) {
|
||||
if (descriptor_count_for_type.descriptorCount >
|
||||
descriptors_remaining_for_type.descriptorCount) {
|
||||
allocate_from_latest_page = false;
|
||||
break;
|
||||
}
|
||||
latest_page_becomes_full = true;
|
||||
}
|
||||
}
|
||||
if (!allocate_from_latest_page) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (allocate_from_latest_page) {
|
||||
descriptor_set_allocate_info.descriptorPool = page_usable_latest_.pool;
|
||||
if (dfn.vkAllocateDescriptorSets(device, &descriptor_set_allocate_info,
|
||||
&descriptor_set) != VK_SUCCESS) {
|
||||
descriptor_set = VK_NULL_HANDLE;
|
||||
// Failed to allocate internally even though there should be enough
|
||||
// space, don't try to allocate from this pool again at all.
|
||||
latest_page_becomes_full = true;
|
||||
}
|
||||
if (latest_page_becomes_full) {
|
||||
pages_full_.push_back(page_usable_latest_.pool);
|
||||
page_usable_latest_.pool = VK_NULL_HANDLE;
|
||||
page_usable_latest_.descriptors_remaining.reset();
|
||||
} else {
|
||||
--page_usable_latest_.descriptor_sets_remaining;
|
||||
for (uint32_t i = 0; i < descriptor_type_count; ++i) {
|
||||
const VkDescriptorPoolSize& descriptor_count_for_type =
|
||||
descriptor_counts[i];
|
||||
for (uint32_t j = 0; j < descriptor_pool_size_count_; ++j) {
|
||||
VkDescriptorPoolSize& descriptors_remaining_for_type =
|
||||
page_usable_latest_.descriptors_remaining[j];
|
||||
if (descriptor_count_for_type.type !=
|
||||
descriptors_remaining_for_type.type) {
|
||||
continue;
|
||||
}
|
||||
descriptors_remaining_for_type.descriptorCount -=
|
||||
descriptor_count_for_type.descriptorCount;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (descriptor_set != VK_NULL_HANDLE) {
|
||||
return descriptor_set;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Count the maximum number of descriptors requested for any type to stop
|
||||
// searching for pages once they can't satisfy this requirement.
|
||||
uint32_t max_descriptors_per_type = descriptor_counts[0].descriptorCount;
|
||||
for (uint32_t i = 1; i < descriptor_type_count; ++i) {
|
||||
max_descriptors_per_type = std::max(max_descriptors_per_type,
|
||||
descriptor_counts[i].descriptorCount);
|
||||
}
|
||||
|
||||
// If allocating from the latest pool wasn't possible, pick any that has
|
||||
// enough free space. Prefer filling pages that have the most free space as
|
||||
// they can more likely be used for more allocations later.
|
||||
auto page_usable_it_next = pages_usable_.rbegin();
|
||||
while (page_usable_it_next != pages_usable_.rend()) {
|
||||
auto page_usable_it = page_usable_it_next;
|
||||
++page_usable_it_next;
|
||||
if (page_usable_it->first < max_descriptors_per_type) {
|
||||
// All other pages_usable_ entries have smaller maximum number of free
|
||||
// descriptor for any type (it's the map key).
|
||||
break;
|
||||
}
|
||||
// Check if the page has enough free descriptors for all requested types,
|
||||
// and whether allocating the requested number of descriptors in it will
|
||||
// result in the page becoming full.
|
||||
bool map_page_has_sufficient_space = true;
|
||||
bool map_page_becomes_full =
|
||||
page_usable_it->second.descriptor_sets_remaining == 1;
|
||||
for (uint32_t i = 0; i < descriptor_type_count; ++i) {
|
||||
const VkDescriptorPoolSize& descriptor_count_for_type =
|
||||
descriptor_counts[i];
|
||||
for (uint32_t j = 0; j < descriptor_pool_size_count_; ++j) {
|
||||
const VkDescriptorPoolSize& descriptors_remaining_for_type =
|
||||
page_usable_it->second.descriptors_remaining[j];
|
||||
if (descriptor_count_for_type.type !=
|
||||
descriptors_remaining_for_type.type) {
|
||||
continue;
|
||||
}
|
||||
if (descriptor_count_for_type.descriptorCount >=
|
||||
descriptors_remaining_for_type.descriptorCount) {
|
||||
if (descriptor_count_for_type.descriptorCount >
|
||||
descriptors_remaining_for_type.descriptorCount) {
|
||||
map_page_has_sufficient_space = false;
|
||||
break;
|
||||
}
|
||||
map_page_becomes_full = true;
|
||||
}
|
||||
}
|
||||
if (!map_page_has_sufficient_space) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!map_page_has_sufficient_space) {
|
||||
// Even though the coarse (maximum number of descriptors for any type)
|
||||
// check has passed, for the exact types requested this page doesn't have
|
||||
// sufficient space - try another one.
|
||||
continue;
|
||||
}
|
||||
// Remove the page from the map unconditionally - in case of a successful
|
||||
// allocation, it will have a different number of free descriptors for
|
||||
// different types, thus potentially a new map key (but it will also become
|
||||
// page_usable_latest_ instead even), or will become full, and in case of a
|
||||
// failure to allocate internally even though there still should be enough
|
||||
// space, it should never be allocated from again.
|
||||
Page map_page = std::move(page_usable_it->second);
|
||||
// Convert the reverse iterator to a forward iterator for erasing.
|
||||
pages_usable_.erase(std::next(page_usable_it).base());
|
||||
descriptor_set_allocate_info.descriptorPool = map_page.pool;
|
||||
if (dfn.vkAllocateDescriptorSets(device, &descriptor_set_allocate_info,
|
||||
&descriptor_set) != VK_SUCCESS) {
|
||||
descriptor_set = VK_NULL_HANDLE;
|
||||
// Failed to allocate internally even though there should be enough space,
|
||||
// don't try to allocate from this pool again at all.
|
||||
map_page_becomes_full = true;
|
||||
}
|
||||
if (map_page_becomes_full) {
|
||||
map_page.descriptors_remaining.reset();
|
||||
pages_full_.push_back(map_page.pool);
|
||||
} else {
|
||||
--map_page.descriptor_sets_remaining;
|
||||
for (uint32_t i = 0; i < descriptor_type_count; ++i) {
|
||||
const VkDescriptorPoolSize& descriptor_count_for_type =
|
||||
descriptor_counts[i];
|
||||
for (uint32_t j = 0; j < descriptor_pool_size_count_; ++j) {
|
||||
VkDescriptorPoolSize& descriptors_remaining_for_type =
|
||||
map_page.descriptors_remaining[j];
|
||||
if (descriptor_count_for_type.type !=
|
||||
descriptors_remaining_for_type.type) {
|
||||
continue;
|
||||
}
|
||||
descriptors_remaining_for_type.descriptorCount -=
|
||||
descriptor_count_for_type.descriptorCount;
|
||||
}
|
||||
}
|
||||
// Move the latest page that allocation couldn't be done in to the usable
|
||||
// pages to replace it with the new one.
|
||||
if (page_usable_latest_.pool != VK_NULL_HANDLE) {
|
||||
// Calculate the map key (the maximum number of remaining descriptors of
|
||||
// any type).
|
||||
uint32_t latest_page_max_descriptors_remaining =
|
||||
page_usable_latest_.descriptors_remaining[0].descriptorCount;
|
||||
for (uint32_t i = 1; i < descriptor_pool_size_count_; ++i) {
|
||||
latest_page_max_descriptors_remaining = std::max(
|
||||
latest_page_max_descriptors_remaining,
|
||||
page_usable_latest_.descriptors_remaining[i].descriptorCount);
|
||||
}
|
||||
assert_not_zero(latest_page_max_descriptors_remaining);
|
||||
pages_usable_.emplace(latest_page_max_descriptors_remaining,
|
||||
std::move(page_usable_latest_));
|
||||
}
|
||||
page_usable_latest_ = std::move(map_page);
|
||||
}
|
||||
if (descriptor_set != VK_NULL_HANDLE) {
|
||||
return descriptor_set;
|
||||
}
|
||||
}
|
||||
|
||||
// Try allocating from a new page.
|
||||
// See if the new page has instantly become full.
|
||||
bool new_page_becomes_full = descriptor_sets_per_page_ == 1;
|
||||
for (uint32_t i = 0; !new_page_becomes_full && i < descriptor_type_count;
|
||||
++i) {
|
||||
const VkDescriptorPoolSize& descriptor_count_for_type =
|
||||
descriptor_counts[i];
|
||||
for (uint32_t j = 0; j < descriptor_pool_size_count_; ++j) {
|
||||
const VkDescriptorPoolSize& descriptors_remaining_for_type =
|
||||
descriptor_pool_sizes_[j];
|
||||
if (descriptor_count_for_type.type !=
|
||||
descriptors_remaining_for_type.type) {
|
||||
continue;
|
||||
}
|
||||
assert_true(descriptor_count_for_type.descriptorCount <=
|
||||
descriptors_remaining_for_type.descriptorCount);
|
||||
if (descriptor_count_for_type.descriptorCount >=
|
||||
descriptors_remaining_for_type.descriptorCount) {
|
||||
new_page_becomes_full = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
  // Allocate from a new page. However, if the new page would become full
  // immediately, create a dedicated pool instead for the exact number of
  // descriptors, so no unused space is left in the pool.
  VkDescriptorPoolCreateInfo new_descriptor_pool_create_info;
  new_descriptor_pool_create_info.sType =
      VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
  new_descriptor_pool_create_info.pNext = nullptr;
  new_descriptor_pool_create_info.flags = 0;
  if (new_page_becomes_full) {
    new_descriptor_pool_create_info.maxSets = 1;
    new_descriptor_pool_create_info.poolSizeCount = descriptor_type_count;
    new_descriptor_pool_create_info.pPoolSizes = descriptor_counts;
  } else {
    new_descriptor_pool_create_info.maxSets = descriptor_sets_per_page_;
    new_descriptor_pool_create_info.poolSizeCount = descriptor_pool_size_count_;
    new_descriptor_pool_create_info.pPoolSizes = descriptor_pool_sizes_.get();
  }
  VkDescriptorPool new_descriptor_pool;
  if (dfn.vkCreateDescriptorPool(device, &new_descriptor_pool_create_info,
                                 nullptr, &new_descriptor_pool) != VK_SUCCESS) {
    XELOGE(
        "LinkedTypeDescriptorSetAllocator: Failed to create a descriptor pool");
    return VK_NULL_HANDLE;
  }
  descriptor_set_allocate_info.descriptorPool = new_descriptor_pool;
  if (dfn.vkAllocateDescriptorSets(device, &descriptor_set_allocate_info,
                                   &descriptor_set) != VK_SUCCESS) {
    XELOGE("LinkedTypeDescriptorSetAllocator: Failed to allocate descriptors");
    dfn.vkDestroyDescriptorPool(device, new_descriptor_pool, nullptr);
    return VK_NULL_HANDLE;
  }
  if (new_page_becomes_full) {
    pages_full_.push_back(new_descriptor_pool);
  } else {
    // Move the latest page that allocation couldn't be done in to the usable
    // pages to replace it with the new one.
    if (page_usable_latest_.pool != VK_NULL_HANDLE) {
      // Calculate the map key (the maximum number of remaining descriptors of
      // any type).
      uint32_t latest_page_max_descriptors_remaining =
          page_usable_latest_.descriptors_remaining[0].descriptorCount;
      for (uint32_t i = 1; i < descriptor_pool_size_count_; ++i) {
        latest_page_max_descriptors_remaining = std::max(
            latest_page_max_descriptors_remaining,
            page_usable_latest_.descriptors_remaining[i].descriptorCount);
      }
      assert_not_zero(latest_page_max_descriptors_remaining);
      pages_usable_.emplace(latest_page_max_descriptors_remaining,
                            std::move(page_usable_latest_));
    }
    page_usable_latest_.pool = new_descriptor_pool;
    page_usable_latest_.descriptors_remaining =
        std::unique_ptr<VkDescriptorPoolSize[]>(
            new VkDescriptorPoolSize[descriptor_pool_size_count_]);
    for (uint32_t i = 0; i < descriptor_pool_size_count_; ++i) {
      const VkDescriptorPoolSize& descriptor_pool_size_for_type =
          descriptor_pool_sizes_[i];
      page_usable_latest_.descriptors_remaining[i] =
          descriptor_pool_size_for_type;
      for (uint32_t j = 0; j < descriptor_type_count; ++j) {
        const VkDescriptorPoolSize& descriptor_count_for_type =
            descriptor_counts[j];
        if (descriptor_count_for_type.type !=
            descriptor_pool_size_for_type.type) {
          continue;
        }
        page_usable_latest_.descriptors_remaining[i].descriptorCount -=
            descriptor_count_for_type.descriptorCount;
        break;
      }
    }
    page_usable_latest_.descriptor_sets_remaining =
        descriptor_sets_per_page_ - 1;
  }
  return descriptor_set;
}

} // namespace vulkan
} // namespace ui
} // namespace xe
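For reference, a minimal sketch of how a caller might request a combined image-plus-sampler set through the Allocate signature declared in the header below; the allocator instance, the set layout, and the counts are hypothetical and only illustrate the calling convention:

// Hypothetical usage sketch - not part of this commit. Assumes `allocator` is
// a LinkedTypeDescriptorSetAllocator constructed with pool sizes covering both
// types, and `texture_set_layout` is a set layout with 4 sampled images
// followed by 2 samplers.
VkDescriptorPoolSize texture_set_counts[2];
texture_set_counts[0].type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
texture_set_counts[0].descriptorCount = 4;
texture_set_counts[1].type = VK_DESCRIPTOR_TYPE_SAMPLER;
texture_set_counts[1].descriptorCount = 2;
// Both types are drawn from the same page, so the set either fits entirely in
// one pool or triggers the creation of a new page (or a dedicated pool).
VkDescriptorSet texture_set =
    allocator.Allocate(texture_set_layout, texture_set_counts, 2);
if (texture_set == VK_NULL_HANDLE) {
  // Pool creation or vkAllocateDescriptorSets failed; the caller must recover.
}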
@ -0,0 +1,125 @@
/**
 ******************************************************************************
 * Xenia : Xbox 360 Emulator Research Project *
 ******************************************************************************
 * Copyright 2022 Ben Vanik. All rights reserved. *
 * Released under the BSD license - see LICENSE in the root for more details. *
 ******************************************************************************
 */

#ifndef XENIA_UI_VULKAN_LINKED_TYPE_DESCRIPTOR_SET_ALLOCATOR_H_
#define XENIA_UI_VULKAN_LINKED_TYPE_DESCRIPTOR_SET_ALLOCATOR_H_

#include <algorithm>
#include <cstdint>
#include <cstring>
#include <map>
#include <memory>
#include <vector>

#include "xenia/base/assert.h"
#include "xenia/ui/vulkan/vulkan_provider.h"

namespace xe {
namespace ui {
namespace vulkan {

// Allocates multiple descriptors in descriptor set layouts consisting of
// descriptors of the types specified during initialization.
//
// "LinkedType" means that the allocator is designed for allocating descriptor
// sets containing descriptors of multiple types together - for instance, it
// will mark the entire page as full even if no space is left in it for just
// one of the descriptor types (not for all of them at once).
//
// The primary usage scenario for this kind of an allocator is allocating image
// and sampler descriptors in a single descriptor set if both are actually used
// in one. It is expected that the ratio of the numbers of descriptors per type
// specified during initialization will roughly correspond to the ratio of the
// numbers of descriptors that will actually be allocated. For instance, if
// there are approximately 2 images for every 1 sampler, it's recommended to
// make the image count per page twice the sampler count per page.
//
// If some allocations use just one type and some use just another, completely
// independently, it's preferable to use separate allocators rather than a
// single one.
//
// This allocator is also suitable for allocating variable-length descriptor
// sets containing descriptors of just a single type.
//
// There's no way to free these descriptors within the allocator object itself;
// per-layout free lists should be used externally.
class LinkedTypeDescriptorSetAllocator {
 public:
  // Multiple descriptor sizes for the same descriptor type, as well as zero
  // sizes, are not allowed.
  explicit LinkedTypeDescriptorSetAllocator(
      const ui::vulkan::VulkanProvider& provider,
      const VkDescriptorPoolSize* descriptor_sizes,
      uint32_t descriptor_size_count, uint32_t descriptor_sets_per_page)
      : provider_(provider),
        descriptor_pool_sizes_(new VkDescriptorPoolSize[descriptor_size_count]),
        descriptor_pool_size_count_(descriptor_size_count),
        descriptor_sets_per_page_(descriptor_sets_per_page) {
    assert_not_zero(descriptor_size_count);
    assert_not_zero(descriptor_sets_per_page_);
#ifndef NDEBUG
    for (uint32_t i = 0; i < descriptor_size_count; ++i) {
      const VkDescriptorPoolSize& descriptor_size = descriptor_sizes[i];
      assert_not_zero(descriptor_size.descriptorCount);
      for (uint32_t j = 0; j < i; ++j) {
        assert_true(descriptor_sizes[j].type != descriptor_size.type);
      }
    }
#endif
    std::memcpy(descriptor_pool_sizes_.get(), descriptor_sizes,
                sizeof(VkDescriptorPoolSize) * descriptor_size_count);
  }
  LinkedTypeDescriptorSetAllocator(
      const LinkedTypeDescriptorSetAllocator& allocator) = delete;
  LinkedTypeDescriptorSetAllocator& operator=(
      const LinkedTypeDescriptorSetAllocator& allocator) = delete;
  ~LinkedTypeDescriptorSetAllocator() { Reset(); }

  void Reset();

  VkDescriptorSet Allocate(VkDescriptorSetLayout descriptor_set_layout,
                           const VkDescriptorPoolSize* descriptor_counts,
                           uint32_t descriptor_type_count);

 private:
  struct Page {
    VkDescriptorPool pool;
    std::unique_ptr<VkDescriptorPoolSize[]> descriptors_remaining;
    uint32_t descriptor_sets_remaining;
  };

  const ui::vulkan::VulkanProvider& provider_;

  std::unique_ptr<VkDescriptorPoolSize[]> descriptor_pool_sizes_;
  uint32_t descriptor_pool_size_count_;
  uint32_t descriptor_sets_per_page_;

  std::vector<VkDescriptorPool> pages_full_;
  // Because allocations must be contiguous, overflow may happen even if a page
  // still has free descriptors, so multiple pages may have free space.
  // The map key is the maximum number of free descriptors remaining across all
  // types. Lookups are made with the maximum of the requested descriptor
  // counts across all types, since it's pointless to check pages that can't
  // even potentially fit the largest requested count of a single type; unlike
  // using the minimum as the key, this doesn't degenerate if, for example, 0
  // descriptors are requested for some type.
  // The key changes at every allocation from a page. To avoid removing and
  // re-adding a page to the sorted map at every allocation, one page is
  // maintained outside the map, and allocation attempts are made from that
  // page first.
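  // Illustrative example (hypothetical numbers): a page with per-type
  // remaining counts of {images: 6, samplers: 1} gets the key 6. A request for
  // {images: 4, samplers: 2} is looked up with the key 4 (the maximum of the
  // requested counts), so pages whose key is below 4 are skipped outright,
  // while this page remains a candidate whose per-type remaining counts can
  // then be checked individually.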
  std::multimap<uint32_t, Page> pages_usable_;
  // Doesn't exist if page_usable_latest_.pool == VK_NULL_HANDLE.
  Page page_usable_latest_ = {};
};

} // namespace vulkan
} // namespace ui
} // namespace xe

#endif // XENIA_UI_VULKAN_LINKED_TYPE_DESCRIPTOR_SET_ALLOCATOR_H_
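As recommended in the LinkedTypeDescriptorSetAllocator comment above, the pool sizes should roughly match the expected per-type allocation ratio. A construction sketch under that assumption; the provider variable and all counts here are hypothetical:

// Hypothetical configuration sketch - not part of this commit. Assumes
// `provider` is an initialized xe::ui::vulkan::VulkanProvider. Expecting
// roughly 2 sampled images per sampler, the image count per page is twice the
// sampler count per page.
const VkDescriptorPoolSize kTextureSetPoolSizes[] = {
    {VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 2048},
    {VK_DESCRIPTOR_TYPE_SAMPLER, 1024},
};
xe::ui::vulkan::LinkedTypeDescriptorSetAllocator texture_set_allocator(
    provider, kTextureSetPoolSizes, 2, 256);  // 256 descriptor sets per page.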
@ -1,216 +0,0 @@
/**
 ******************************************************************************
 * Xenia : Xbox 360 Emulator Research Project *
 ******************************************************************************
 * Copyright 2022 Ben Vanik. All rights reserved. *
 * Released under the BSD license - see LICENSE in the root for more details. *
 ******************************************************************************
 */

#include "xenia/ui/vulkan/single_type_descriptor_set_allocator.h"

#include "xenia/base/logging.h"
#include "xenia/ui/vulkan/vulkan_util.h"

namespace xe {
namespace ui {
namespace vulkan {

void SingleTypeDescriptorSetAllocator::Reset() {
  const ui::vulkan::VulkanProvider::DeviceFunctions& dfn = provider_.dfn();
  VkDevice device = provider_.device();
  ui::vulkan::util::DestroyAndNullHandle(dfn.vkDestroyDescriptorPool, device,
                                         page_usable_latest_.pool);
  for (const std::pair<uint32_t, Page>& page_pair : pages_usable_) {
    dfn.vkDestroyDescriptorPool(device, page_pair.second.pool, nullptr);
  }
  pages_usable_.clear();
  for (VkDescriptorPool pool : pages_full_) {
    dfn.vkDestroyDescriptorPool(device, pool, nullptr);
  }
  pages_full_.clear();
}

VkDescriptorSet SingleTypeDescriptorSetAllocator::Allocate(
    VkDescriptorSetLayout descriptor_set_layout, uint32_t descriptor_count) {
  assert_not_zero(descriptor_count);
  if (descriptor_count == 0) {
    return VK_NULL_HANDLE;
  }

  const ui::vulkan::VulkanProvider::DeviceFunctions& dfn = provider_.dfn();
  VkDevice device = provider_.device();

  VkDescriptorSetAllocateInfo descriptor_set_allocate_info;
  descriptor_set_allocate_info.sType =
      VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
  descriptor_set_allocate_info.pNext = nullptr;
  descriptor_set_allocate_info.descriptorSetCount = 1;
  descriptor_set_allocate_info.pSetLayouts = &descriptor_set_layout;
  VkDescriptorSet descriptor_set;

  if (descriptor_count > descriptor_pool_size_.descriptorCount) {
    // Can't allocate in the pool, need a dedicated allocation.
    VkDescriptorPoolSize dedicated_descriptor_pool_size;
    dedicated_descriptor_pool_size.type = descriptor_pool_size_.type;
    dedicated_descriptor_pool_size.descriptorCount = descriptor_count;
    VkDescriptorPoolCreateInfo dedicated_descriptor_pool_create_info;
    dedicated_descriptor_pool_create_info.sType =
        VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    dedicated_descriptor_pool_create_info.pNext = nullptr;
    dedicated_descriptor_pool_create_info.flags = 0;
    dedicated_descriptor_pool_create_info.maxSets = 1;
    dedicated_descriptor_pool_create_info.poolSizeCount = 1;
    dedicated_descriptor_pool_create_info.pPoolSizes =
        &dedicated_descriptor_pool_size;
    VkDescriptorPool dedicated_descriptor_pool;
    if (dfn.vkCreateDescriptorPool(
            device, &dedicated_descriptor_pool_create_info, nullptr,
            &dedicated_descriptor_pool) != VK_SUCCESS) {
      XELOGE(
          "SingleTypeDescriptorSetAllocator: Failed to create a dedicated "
          "pool for {} descriptors",
          dedicated_descriptor_pool_size.descriptorCount);
      return VK_NULL_HANDLE;
    }
    descriptor_set_allocate_info.descriptorPool = dedicated_descriptor_pool;
    if (dfn.vkAllocateDescriptorSets(device, &descriptor_set_allocate_info,
                                     &descriptor_set) != VK_SUCCESS) {
      XELOGE(
          "SingleTypeDescriptorSetAllocator: Failed to allocate {} "
          "descriptors in a dedicated pool",
          descriptor_count);
      dfn.vkDestroyDescriptorPool(device, dedicated_descriptor_pool, nullptr);
      return VK_NULL_HANDLE;
    }
    pages_full_.push_back(dedicated_descriptor_pool);
    return descriptor_set;
  }

  // Try allocating from the latest page an allocation has happened from, to
  // avoid detaching from the map and re-attaching for every allocation.
  if (page_usable_latest_.pool != VK_NULL_HANDLE) {
    assert_not_zero(page_usable_latest_.descriptors_remaining);
    assert_not_zero(page_usable_latest_.descriptor_sets_remaining);
    if (page_usable_latest_.descriptors_remaining >= descriptor_count) {
      descriptor_set_allocate_info.descriptorPool = page_usable_latest_.pool;
      if (dfn.vkAllocateDescriptorSets(device, &descriptor_set_allocate_info,
                                       &descriptor_set) == VK_SUCCESS) {
        page_usable_latest_.descriptors_remaining -= descriptor_count;
        --page_usable_latest_.descriptor_sets_remaining;
        if (!page_usable_latest_.descriptors_remaining ||
            !page_usable_latest_.descriptor_sets_remaining) {
          pages_full_.push_back(page_usable_latest_.pool);
          page_usable_latest_.pool = VK_NULL_HANDLE;
        }
        return descriptor_set;
      }
      // Failed to allocate internally even though there should be enough
      // space, don't try to allocate from this pool again at all.
      pages_full_.push_back(page_usable_latest_.pool);
      page_usable_latest_.pool = VK_NULL_HANDLE;
    }
  }

  // If allocating from the latest pool wasn't possible, pick any that has
  // free space. Prefer filling the pages that have the most free space, as
  // they can more likely be used for more allocations later.
  while (!pages_usable_.empty()) {
    auto page_usable_last_it = std::prev(pages_usable_.cend());
    if (page_usable_last_it->second.descriptors_remaining < descriptor_count) {
      // All other pages_usable_ entries have fewer free descriptors too (the
      // remaining count is the map key).
      break;
    }
    // Remove the page from the map unconditionally - in case of a successful
    // allocation, it will have a different number of free descriptors, thus a
    // new map key (though it will become page_usable_latest_ instead), or it
    // will become full; and in case of a failure to allocate internally, even
    // though there still should be enough space, it should never be allocated
    // from again.
    Page map_page = page_usable_last_it->second;
    pages_usable_.erase(page_usable_last_it);
    descriptor_set_allocate_info.descriptorPool = map_page.pool;
    if (dfn.vkAllocateDescriptorSets(device, &descriptor_set_allocate_info,
                                     &descriptor_set) != VK_SUCCESS) {
      pages_full_.push_back(map_page.pool);
      continue;
    }
    map_page.descriptors_remaining -= descriptor_count;
    --map_page.descriptor_sets_remaining;
    if (!map_page.descriptors_remaining ||
        !map_page.descriptor_sets_remaining) {
      pages_full_.push_back(map_page.pool);
    } else {
      if (page_usable_latest_.pool != VK_NULL_HANDLE) {
        // Make the page with more free descriptors the next one to allocate
        // from.
        if (map_page.descriptors_remaining >
            page_usable_latest_.descriptors_remaining) {
          pages_usable_.emplace(page_usable_latest_.descriptors_remaining,
                                page_usable_latest_);
          page_usable_latest_ = map_page;
        } else {
          pages_usable_.emplace(map_page.descriptors_remaining, map_page);
        }
      } else {
        page_usable_latest_ = map_page;
      }
    }
    return descriptor_set;
  }

  // Try allocating from a new page.
  VkDescriptorPoolCreateInfo new_descriptor_pool_create_info;
  new_descriptor_pool_create_info.sType =
      VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
  new_descriptor_pool_create_info.pNext = nullptr;
  new_descriptor_pool_create_info.flags = 0;
  new_descriptor_pool_create_info.maxSets = descriptor_sets_per_page_;
  new_descriptor_pool_create_info.poolSizeCount = 1;
  new_descriptor_pool_create_info.pPoolSizes = &descriptor_pool_size_;
  VkDescriptorPool new_descriptor_pool;
  if (dfn.vkCreateDescriptorPool(device, &new_descriptor_pool_create_info,
                                 nullptr, &new_descriptor_pool) != VK_SUCCESS) {
    XELOGE(
        "SingleTypeDescriptorSetAllocator: Failed to create a pool for {} "
        "sets with {} descriptors",
        descriptor_sets_per_page_, descriptor_pool_size_.descriptorCount);
    return VK_NULL_HANDLE;
  }
  descriptor_set_allocate_info.descriptorPool = new_descriptor_pool;
  if (dfn.vkAllocateDescriptorSets(device, &descriptor_set_allocate_info,
                                   &descriptor_set) != VK_SUCCESS) {
    XELOGE(
        "SingleTypeDescriptorSetAllocator: Failed to allocate {} descriptors",
        descriptor_count);
    dfn.vkDestroyDescriptorPool(device, new_descriptor_pool, nullptr);
    return VK_NULL_HANDLE;
  }
  Page new_page;
  new_page.pool = new_descriptor_pool;
  new_page.descriptors_remaining =
      descriptor_pool_size_.descriptorCount - descriptor_count;
  new_page.descriptor_sets_remaining = descriptor_sets_per_page_ - 1;
  if (!new_page.descriptors_remaining || !new_page.descriptor_sets_remaining) {
    pages_full_.push_back(new_page.pool);
  } else {
    if (page_usable_latest_.pool != VK_NULL_HANDLE) {
      // Make the page with more free descriptors the next one to allocate
      // from.
      if (new_page.descriptors_remaining >
          page_usable_latest_.descriptors_remaining) {
        pages_usable_.emplace(page_usable_latest_.descriptors_remaining,
                              page_usable_latest_);
        page_usable_latest_ = new_page;
      } else {
        pages_usable_.emplace(new_page.descriptors_remaining, new_page);
      }
    } else {
      page_usable_latest_ = new_page;
    }
  }
  return descriptor_set;
}

} // namespace vulkan
} // namespace ui
} // namespace xe
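For contrast with the linked-type sketch above, the removed single-type allocator took a bare descriptor count, so image and sampler sets had to come from two separate allocators; the allocator instance and `sampler_set_layout` here are hypothetical:

// Hypothetical sketch of the removed API - the layout contains 2 descriptors,
// all of the single type the allocator was constructed with.
VkDescriptorSet sampler_set =
    single_type_allocator.Allocate(sampler_set_layout, 2);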
@ -1,84 +0,0 @@
/**
 ******************************************************************************
 * Xenia : Xbox 360 Emulator Research Project *
 ******************************************************************************
 * Copyright 2022 Ben Vanik. All rights reserved. *
 * Released under the BSD license - see LICENSE in the root for more details. *
 ******************************************************************************
 */

#ifndef XENIA_UI_VULKAN_SINGLE_TYPE_DESCRIPTOR_SET_ALLOCATOR_H_
#define XENIA_UI_VULKAN_SINGLE_TYPE_DESCRIPTOR_SET_ALLOCATOR_H_

#include <algorithm>
#include <cstdint>
#include <map>
#include <vector>

#include "xenia/base/assert.h"
#include "xenia/ui/vulkan/vulkan_provider.h"

namespace xe {
namespace ui {
namespace vulkan {

// Allocates multiple descriptors of a single type in descriptor set layouts
// consisting of descriptors of only that type. There's no way to free these
// descriptors within the SingleTypeDescriptorSetAllocator; per-layout free
// lists should be used externally.
class SingleTypeDescriptorSetAllocator {
 public:
  explicit SingleTypeDescriptorSetAllocator(
      const ui::vulkan::VulkanProvider& provider,
      VkDescriptorType descriptor_type, uint32_t descriptors_per_page,
      uint32_t descriptor_sets_per_page)
      : provider_(provider),
        descriptor_sets_per_page_(descriptor_sets_per_page) {
    assert_not_zero(descriptor_sets_per_page_);
    descriptor_pool_size_.type = descriptor_type;
    // Sets with 0 descriptors are not allocated using the allocator, so it's
    // pointless to have the descriptor count below the set count.
    descriptor_pool_size_.descriptorCount =
        std::max(descriptors_per_page, descriptor_sets_per_page);
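    // Illustrative arithmetic (hypothetical numbers): with
    // descriptors_per_page = 100 and descriptor_sets_per_page = 128, the pool
    // is sized for max(100, 128) = 128 descriptors, since each of the 128 sets
    // allocatable from the page consumes at least one descriptor.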
  }
  SingleTypeDescriptorSetAllocator(
      const SingleTypeDescriptorSetAllocator& allocator) = delete;
  SingleTypeDescriptorSetAllocator& operator=(
      const SingleTypeDescriptorSetAllocator& allocator) = delete;
  ~SingleTypeDescriptorSetAllocator() { Reset(); }

  void Reset();

  VkDescriptorSet Allocate(VkDescriptorSetLayout descriptor_set_layout,
                           uint32_t descriptor_count);

 private:
  struct Page {
    VkDescriptorPool pool;
    uint32_t descriptors_remaining;
    uint32_t descriptor_sets_remaining;
  };

  const ui::vulkan::VulkanProvider& provider_;

  VkDescriptorPoolSize descriptor_pool_size_;
  uint32_t descriptor_sets_per_page_;

  std::vector<VkDescriptorPool> pages_full_;
  // Because allocations must be contiguous, overflow may happen even if a page
  // still has free descriptors, so multiple pages may have free space.
  // The map key is the number of free descriptors remaining, which changes at
  // every allocation from a page. To avoid removing and re-adding a page to
  // the map at every allocation, one page is maintained outside the map, and
  // allocation attempts are made from that page first.
  std::multimap<uint32_t, Page> pages_usable_;
  // Doesn't exist if page_usable_latest_.pool == VK_NULL_HANDLE.
  Page page_usable_latest_ = {};
};

} // namespace vulkan
} // namespace ui
} // namespace xe

#endif // XENIA_UI_VULKAN_SINGLE_TYPE_DESCRIPTOR_SET_ALLOCATOR_H_