diff --git a/cmake/BuildParameters.cmake b/cmake/BuildParameters.cmake
index f556b1802c..bb61086193 100644
--- a/cmake/BuildParameters.cmake
+++ b/cmake/BuildParameters.cmake
@@ -38,6 +38,7 @@ option(USE_VTUNE "Plug VTUNE to profile GS JIT.")
# Graphical option
#-------------------------------------------------------------------------------
option(BUILD_REPLAY_LOADERS "Build GS replayer to ease testing (developer option)")
+option(USE_VULKAN "Enable Vulkan GS renderer" ON)
#-------------------------------------------------------------------------------
# Path and lib option
@@ -234,6 +235,10 @@ if(USE_VTUNE)
list(APPEND PCSX2_DEFS ENABLE_VTUNE)
endif()
+if(USE_VULKAN)
+ list(APPEND PCSX2_DEFS ENABLE_VULKAN)
+endif()
+
if(X11_API)
list(APPEND PCSX2_DEFS X11_API)
endif()
diff --git a/cmake/SearchForStuff.cmake b/cmake/SearchForStuff.cmake
index 8bfc82639c..1e4e073332 100644
--- a/cmake/SearchForStuff.cmake
+++ b/cmake/SearchForStuff.cmake
@@ -244,10 +244,14 @@ else()
endif()
add_subdirectory(3rdparty/glad EXCLUDE_FROM_ALL)
-add_subdirectory(3rdparty/glslang EXCLUDE_FROM_ALL)
add_subdirectory(3rdparty/simpleini EXCLUDE_FROM_ALL)
add_subdirectory(3rdparty/imgui EXCLUDE_FROM_ALL)
+if(USE_VULKAN)
+ add_subdirectory(3rdparty/glslang EXCLUDE_FROM_ALL)
+ add_subdirectory(3rdparty/vulkan-headers EXCLUDE_FROM_ALL)
+endif()
+
if(CUBEB_API)
add_subdirectory(3rdparty/cubeb EXCLUDE_FROM_ALL)
endif()
diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt
index d8e8a02a5f..d527304b23 100644
--- a/common/CMakeLists.txt
+++ b/common/CMakeLists.txt
@@ -62,7 +62,8 @@ target_sources(common PRIVATE
Linux/LnxMisc.cpp
Windows/WinThreads.cpp
Windows/WinHostSys.cpp
- Windows/WinMisc.cpp)
+ Windows/WinMisc.cpp
+)
# x86emitter headers
target_sources(common PRIVATE
@@ -130,7 +131,35 @@ target_sources(common PRIVATE
emitter/tools.h
emitter/x86emitter.h
emitter/x86types.h
+)
+
+if(USE_VULKAN)
+ target_link_libraries(common PUBLIC
+ Vulkan-Headers glslang
)
+ target_sources(common PRIVATE
+ Vulkan/ShaderCache.cpp
+ Vulkan/Texture.cpp
+ Vulkan/Loader.cpp
+ Vulkan/ShaderCompiler.cpp
+ Vulkan/Util.cpp
+ Vulkan/SwapChain.cpp
+ Vulkan/StreamBuffer.cpp
+ Vulkan/Context.cpp
+ Vulkan/Builders.cpp
+ Vulkan/vk_mem_alloc.cpp
+ Vulkan/Context.h
+ Vulkan/Texture.h
+ Vulkan/ShaderCompiler.h
+ Vulkan/SwapChain.h
+ Vulkan/Builders.h
+ Vulkan/StreamBuffer.h
+ Vulkan/ShaderCache.h
+ Vulkan/EntryPoints.h
+ Vulkan/Loader.h
+ Vulkan/Util.h
+ )
+endif()
if(USE_VTUNE)
target_link_libraries(common PUBLIC Vtune::Vtune)
@@ -168,6 +197,7 @@ else()
GL/ContextEGLX11.cpp
GL/ContextEGLX11.h
)
+ target_compile_definitions(common PUBLIC "VULKAN_USE_X11=1")
if(TARGET PkgConfig::XRANDR)
target_link_libraries(common PRIVATE PkgConfig::XRANDR)
target_compile_definitions(common PRIVATE "HAS_XRANDR=1")
@@ -179,7 +209,8 @@ else()
GL/ContextEGLWayland.cpp
GL/ContextEGLWayland.h
)
- target_link_libraries(common PRIVATE ${WAYLAND_EGL_LIBRARIES})
+ target_link_libraries(common PRIVATE ${WAYLAND_EGL_LIBRARIES})
+ target_compile_definitions(common PUBLIC "VULKAN_USE_WAYLAND=1")
endif()
endif()
diff --git a/common/Vulkan/Builders.cpp b/common/Vulkan/Builders.cpp
new file mode 100644
index 0000000000..64a3aa8eaa
--- /dev/null
+++ b/common/Vulkan/Builders.cpp
@@ -0,0 +1,869 @@
+/* PCSX2 - PS2 Emulator for PCs
+ * Copyright (C) 2002-2021 PCSX2 Dev Team
+ *
+ * PCSX2 is free software: you can redistribute it and/or modify it under the terms
+ * of the GNU Lesser General Public License as published by the Free Software Found-
+ * ation, either version 3 of the License, or (at your option) any later version.
+ *
+ * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+ * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ * PURPOSE. See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along with PCSX2.
+ * If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "common/Vulkan/Builders.h"
+#include "common/Vulkan/Util.h"
+#include "common/Assertions.h"
+
+namespace Vulkan
+{
+ DescriptorSetLayoutBuilder::DescriptorSetLayoutBuilder() { Clear(); }
+
+ void DescriptorSetLayoutBuilder::Clear()
+ {
+ m_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+ m_ci.pNext = nullptr;
+ m_ci.flags = 0;
+ m_ci.pBindings = nullptr;
+ m_ci.bindingCount = 0;
+ }
+
+ VkDescriptorSetLayout DescriptorSetLayoutBuilder::Create(VkDevice device)
+ {
+ VkDescriptorSetLayout layout;
+ VkResult res = vkCreateDescriptorSetLayout(device, &m_ci, nullptr, &layout);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkCreateDescriptorSetLayout() failed: ");
+ return VK_NULL_HANDLE;
+ }
+
+ Clear();
+ return layout;
+ }
+
+ void DescriptorSetLayoutBuilder::AddBinding(
+ u32 binding, VkDescriptorType dtype, u32 dcount, VkShaderStageFlags stages)
+ {
+ pxAssert(m_ci.bindingCount < MAX_BINDINGS);
+
+ VkDescriptorSetLayoutBinding& b = m_bindings[m_ci.bindingCount];
+ b.binding = binding;
+ b.descriptorType = dtype;
+ b.descriptorCount = dcount;
+ b.stageFlags = stages;
+ b.pImmutableSamplers = nullptr;
+
+ m_ci.pBindings = m_bindings.data();
+ m_ci.bindingCount++;
+ }
+
+ PipelineLayoutBuilder::PipelineLayoutBuilder() { Clear(); }
+
+ void PipelineLayoutBuilder::Clear()
+ {
+ m_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+ m_ci.pNext = nullptr;
+ m_ci.flags = 0;
+ m_ci.pSetLayouts = nullptr;
+ m_ci.setLayoutCount = 0;
+ m_ci.pPushConstantRanges = nullptr;
+ m_ci.pushConstantRangeCount = 0;
+ }
+
+ VkPipelineLayout PipelineLayoutBuilder::Create(VkDevice device)
+ {
+ VkPipelineLayout layout;
+ VkResult res = vkCreatePipelineLayout(device, &m_ci, nullptr, &layout);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkCreatePipelineLayout() failed: ");
+ return VK_NULL_HANDLE;
+ }
+
+ Clear();
+ return layout;
+ }
+
+ void PipelineLayoutBuilder::AddDescriptorSet(VkDescriptorSetLayout layout)
+ {
+ pxAssert(m_ci.setLayoutCount < MAX_SETS);
+
+ m_sets[m_ci.setLayoutCount] = layout;
+
+ m_ci.setLayoutCount++;
+ m_ci.pSetLayouts = m_sets.data();
+ }
+
+ void PipelineLayoutBuilder::AddPushConstants(VkShaderStageFlags stages, u32 offset, u32 size)
+ {
+ pxAssert(m_ci.pushConstantRangeCount < MAX_PUSH_CONSTANTS);
+
+ VkPushConstantRange& r = m_push_constants[m_ci.pushConstantRangeCount];
+ r.stageFlags = stages;
+ r.offset = offset;
+ r.size = size;
+
+ m_ci.pushConstantRangeCount++;
+ m_ci.pPushConstantRanges = m_push_constants.data();
+ }
+
+ GraphicsPipelineBuilder::GraphicsPipelineBuilder() { Clear(); }
+
+ void GraphicsPipelineBuilder::Clear()
+ {
+ m_ci = {};
+ m_ci.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+
+ m_shader_stages = {};
+
+ m_vertex_input_state = {};
+ m_vertex_input_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+ m_ci.pVertexInputState = &m_vertex_input_state;
+ m_vertex_attributes = {};
+ m_vertex_buffers = {};
+
+ m_input_assembly = {};
+ m_input_assembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+
+ m_rasterization_state = {};
+ m_rasterization_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+ m_rasterization_state.lineWidth = 1.0f;
+ m_depth_state = {};
+ m_depth_state.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
+ m_blend_state = {};
+ m_blend_state.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+ m_blend_attachments = {};
+
+ m_viewport_state = {};
+ m_viewport_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+ m_viewport = {};
+ m_scissor = {};
+
+ m_dynamic_state = {};
+ m_dynamic_state.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+ m_dynamic_state_values = {};
+
+ m_multisample_state = {};
+ m_multisample_state.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+
+ // set defaults
+ SetNoCullRasterizationState();
+ SetNoDepthTestState();
+ SetNoBlendingState();
+ SetPrimitiveTopology(VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST);
+
+ // have to be specified even if dynamic
+ SetViewport(0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f);
+ SetScissorRect(0, 0, 1, 1);
+ SetMultisamples(VK_SAMPLE_COUNT_1_BIT);
+ }
+
+ VkPipeline GraphicsPipelineBuilder::Create(VkDevice device, VkPipelineCache pipeline_cache, bool clear /* = true */)
+ {
+ VkPipeline pipeline;
+ VkResult res = vkCreateGraphicsPipelines(device, pipeline_cache, 1, &m_ci, nullptr, &pipeline);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkCreateGraphicsPipelines() failed: ");
+ return VK_NULL_HANDLE;
+ }
+
+ if (clear)
+ Clear();
+
+ return pipeline;
+ }
+
+ void GraphicsPipelineBuilder::SetShaderStage(
+ VkShaderStageFlagBits stage, VkShaderModule module, const char* entry_point)
+ {
+ pxAssert(m_ci.stageCount < MAX_SHADER_STAGES);
+
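+ // Reuse the existing entry if this stage has already been set, otherwise append a new stage.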
+ u32 index = 0;
+ for (; index < m_ci.stageCount; index++)
+ {
+ if (m_shader_stages[index].stage == stage)
+ break;
+ }
+ if (index == m_ci.stageCount)
+ {
+ m_ci.stageCount++;
+ m_ci.pStages = m_shader_stages.data();
+ }
+
+ VkPipelineShaderStageCreateInfo& s = m_shader_stages[index];
+ s.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+ s.stage = stage;
+ s.module = module;
+ s.pName = entry_point;
+ }
+
+ void GraphicsPipelineBuilder::AddVertexBuffer(
+ u32 binding, u32 stride, VkVertexInputRate input_rate /*= VK_VERTEX_INPUT_RATE_VERTEX*/)
+ {
+ pxAssert(m_vertex_input_state.vertexBindingDescriptionCount < MAX_VERTEX_BUFFERS);
+
+ VkVertexInputBindingDescription& b = m_vertex_buffers[m_vertex_input_state.vertexBindingDescriptionCount];
+ b.binding = binding;
+ b.stride = stride;
+ b.inputRate = input_rate;
+
+ m_vertex_input_state.vertexBindingDescriptionCount++;
+ m_vertex_input_state.pVertexBindingDescriptions = m_vertex_buffers.data();
+ m_ci.pVertexInputState = &m_vertex_input_state;
+ }
+
+ void GraphicsPipelineBuilder::AddVertexAttribute(u32 location, u32 binding, VkFormat format, u32 offset)
+ {
+ pxAssert(m_vertex_input_state.vertexAttributeDescriptionCount < MAX_VERTEX_ATTRIBUTES);
+
+ VkVertexInputAttributeDescription& a =
+ m_vertex_attributes[m_vertex_input_state.vertexAttributeDescriptionCount];
+ a.location = location;
+ a.binding = binding;
+ a.format = format;
+ a.offset = offset;
+
+ m_vertex_input_state.vertexAttributeDescriptionCount++;
+ m_vertex_input_state.pVertexAttributeDescriptions = m_vertex_attributes.data();
+ m_ci.pVertexInputState = &m_vertex_input_state;
+ }
+
+ void GraphicsPipelineBuilder::SetPrimitiveTopology(
+ VkPrimitiveTopology topology, bool enable_primitive_restart /*= false*/)
+ {
+ m_input_assembly.topology = topology;
+ m_input_assembly.primitiveRestartEnable = enable_primitive_restart;
+
+ m_ci.pInputAssemblyState = &m_input_assembly;
+ }
+
+ void GraphicsPipelineBuilder::SetRasterizationState(
+ VkPolygonMode polygon_mode, VkCullModeFlags cull_mode, VkFrontFace front_face)
+ {
+ m_rasterization_state.polygonMode = polygon_mode;
+ m_rasterization_state.cullMode = cull_mode;
+ m_rasterization_state.frontFace = front_face;
+
+ m_ci.pRasterizationState = &m_rasterization_state;
+ }
+
+ void GraphicsPipelineBuilder::SetLineWidth(float width) { m_rasterization_state.lineWidth = width; }
+
+ void GraphicsPipelineBuilder::SetMultisamples(u32 multisamples, bool per_sample_shading)
+ {
+ m_multisample_state.rasterizationSamples = static_cast<VkSampleCountFlagBits>(multisamples);
+ m_multisample_state.sampleShadingEnable = per_sample_shading;
+ m_multisample_state.minSampleShading = (multisamples > 1) ? 1.0f : 0.0f;
+ }
+
+ void GraphicsPipelineBuilder::SetNoCullRasterizationState()
+ {
+ SetRasterizationState(VK_POLYGON_MODE_FILL, VK_CULL_MODE_NONE, VK_FRONT_FACE_CLOCKWISE);
+ }
+
+ void GraphicsPipelineBuilder::SetDepthState(bool depth_test, bool depth_write, VkCompareOp compare_op)
+ {
+ m_depth_state.depthTestEnable = depth_test;
+ m_depth_state.depthWriteEnable = depth_write;
+ m_depth_state.depthCompareOp = compare_op;
+
+ m_ci.pDepthStencilState = &m_depth_state;
+ }
+
+ void GraphicsPipelineBuilder::SetStencilState(
+ bool stencil_test, const VkStencilOpState& front, const VkStencilOpState& back)
+ {
+ m_depth_state.stencilTestEnable = stencil_test;
+ m_depth_state.front = front;
+ m_depth_state.back = back;
+ }
+
+ void GraphicsPipelineBuilder::SetNoStencilState()
+ {
+ m_depth_state.stencilTestEnable = VK_FALSE;
+ m_depth_state.front = {};
+ m_depth_state.back = {};
+ }
+
+ void GraphicsPipelineBuilder::SetNoDepthTestState() { SetDepthState(false, false, VK_COMPARE_OP_ALWAYS); }
+
+ void GraphicsPipelineBuilder::SetBlendConstants(float r, float g, float b, float a)
+ {
+ m_blend_state.blendConstants[0] = r;
+ m_blend_state.blendConstants[1] = g;
+ m_blend_state.blendConstants[2] = b;
+ m_blend_state.blendConstants[3] = a;
+ m_ci.pColorBlendState = &m_blend_state;
+ }
+
+ void GraphicsPipelineBuilder::AddBlendAttachment(bool blend_enable, VkBlendFactor src_factor,
+ VkBlendFactor dst_factor, VkBlendOp op, VkBlendFactor alpha_src_factor, VkBlendFactor alpha_dst_factor,
+ VkBlendOp alpha_op,
+ VkColorComponentFlags
+ write_mask /* = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT */)
+ {
+ pxAssert(m_blend_state.attachmentCount < MAX_ATTACHMENTS);
+
+ VkPipelineColorBlendAttachmentState& bs = m_blend_attachments[m_blend_state.attachmentCount];
+ bs.blendEnable = blend_enable;
+ bs.srcColorBlendFactor = src_factor;
+ bs.dstColorBlendFactor = dst_factor;
+ bs.colorBlendOp = op;
+ bs.srcAlphaBlendFactor = alpha_src_factor;
+ bs.dstAlphaBlendFactor = alpha_dst_factor;
+ bs.alphaBlendOp = alpha_op;
+ bs.colorWriteMask = write_mask;
+
+ m_blend_state.attachmentCount++;
+ m_blend_state.pAttachments = m_blend_attachments.data();
+ m_ci.pColorBlendState = &m_blend_state;
+ }
+
+ void GraphicsPipelineBuilder::SetBlendAttachment(u32 attachment, bool blend_enable, VkBlendFactor src_factor,
+ VkBlendFactor dst_factor, VkBlendOp op, VkBlendFactor alpha_src_factor, VkBlendFactor alpha_dst_factor,
+ VkBlendOp alpha_op,
+ VkColorComponentFlags
+ write_mask /*= VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT*/)
+ {
+ pxAssert(attachment < MAX_ATTACHMENTS);
+
+ VkPipelineColorBlendAttachmentState& bs = m_blend_attachments[attachment];
+ bs.blendEnable = blend_enable;
+ bs.srcColorBlendFactor = src_factor;
+ bs.dstColorBlendFactor = dst_factor;
+ bs.colorBlendOp = op;
+ bs.srcAlphaBlendFactor = alpha_src_factor;
+ bs.dstAlphaBlendFactor = alpha_dst_factor;
+ bs.alphaBlendOp = alpha_op;
+ bs.colorWriteMask = write_mask;
+
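+ // Grow the attachment count when this index lies past the attachments added so far.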
+ if (attachment >= m_blend_state.attachmentCount)
+ {
+ m_blend_state.attachmentCount = attachment + 1u;
+ m_blend_state.pAttachments = m_blend_attachments.data();
+ m_ci.pColorBlendState = &m_blend_state;
+ }
+ }
+
+ void GraphicsPipelineBuilder::ClearBlendAttachments()
+ {
+ m_blend_attachments = {};
+ m_blend_state.attachmentCount = 0;
+ }
+
+ void GraphicsPipelineBuilder::SetNoBlendingState()
+ {
+ ClearBlendAttachments();
+ SetBlendAttachment(0, false, VK_BLEND_FACTOR_ONE, VK_BLEND_FACTOR_ZERO, VK_BLEND_OP_ADD, VK_BLEND_FACTOR_ONE,
+ VK_BLEND_FACTOR_ZERO, VK_BLEND_OP_ADD,
+ VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT);
+ }
+
+ void GraphicsPipelineBuilder::AddDynamicState(VkDynamicState state)
+ {
+ pxAssert(m_dynamic_state.dynamicStateCount < MAX_DYNAMIC_STATE);
+
+ m_dynamic_state_values[m_dynamic_state.dynamicStateCount] = state;
+ m_dynamic_state.dynamicStateCount++;
+ m_dynamic_state.pDynamicStates = m_dynamic_state_values.data();
+ m_ci.pDynamicState = &m_dynamic_state;
+ }
+
+ void GraphicsPipelineBuilder::SetDynamicViewportAndScissorState()
+ {
+ AddDynamicState(VK_DYNAMIC_STATE_VIEWPORT);
+ AddDynamicState(VK_DYNAMIC_STATE_SCISSOR);
+ }
+
+ void GraphicsPipelineBuilder::SetViewport(
+ float x, float y, float width, float height, float min_depth, float max_depth)
+ {
+ m_viewport.x = x;
+ m_viewport.y = y;
+ m_viewport.width = width;
+ m_viewport.height = height;
+ m_viewport.minDepth = min_depth;
+ m_viewport.maxDepth = max_depth;
+
+ m_viewport_state.pViewports = &m_viewport;
+ m_viewport_state.viewportCount = 1u;
+ m_ci.pViewportState = &m_viewport_state;
+ }
+
+ void GraphicsPipelineBuilder::SetScissorRect(s32 x, s32 y, u32 width, u32 height)
+ {
+ m_scissor.offset.x = x;
+ m_scissor.offset.y = y;
+ m_scissor.extent.width = width;
+ m_scissor.extent.height = height;
+
+ m_viewport_state.pScissors = &m_scissor;
+ m_viewport_state.scissorCount = 1u;
+ m_ci.pViewportState = &m_viewport_state;
+ }
+
+ void GraphicsPipelineBuilder::SetMultisamples(VkSampleCountFlagBits samples)
+ {
+ m_multisample_state.rasterizationSamples = samples;
+ m_ci.pMultisampleState = &m_multisample_state;
+ }
+
+ void GraphicsPipelineBuilder::SetPipelineLayout(VkPipelineLayout layout) { m_ci.layout = layout; }
+
+ void GraphicsPipelineBuilder::SetRenderPass(VkRenderPass render_pass, u32 subpass)
+ {
+ m_ci.renderPass = render_pass;
+ m_ci.subpass = subpass;
+ }
+
+ SamplerBuilder::SamplerBuilder() { Clear(); }
+
+ void SamplerBuilder::Clear()
+ {
+ m_ci = {};
+ m_ci.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
+ }
+
+ VkSampler SamplerBuilder::Create(VkDevice device, bool clear /* = true */)
+ {
+ VkSampler sampler;
+ VkResult res = vkCreateSampler(device, &m_ci, nullptr, &sampler);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkCreateSampler() failed: ");
+ return VK_NULL_HANDLE;
+ }
+
+ return sampler;
+ }
+
+ void SamplerBuilder::SetFilter(VkFilter mag_filter, VkFilter min_filter, VkSamplerMipmapMode mip_filter)
+ {
+ m_ci.magFilter = mag_filter;
+ m_ci.minFilter = min_filter;
+ m_ci.mipmapMode = mip_filter;
+ }
+
+ void SamplerBuilder::SetAddressMode(VkSamplerAddressMode u, VkSamplerAddressMode v, VkSamplerAddressMode w)
+ {
+ m_ci.addressModeU = u;
+ m_ci.addressModeV = v;
+ m_ci.addressModeW = w;
+ }
+
+ void SamplerBuilder::SetPointSampler(
+ VkSamplerAddressMode address_mode /* = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER */)
+ {
+ Clear();
+ SetFilter(VK_FILTER_NEAREST, VK_FILTER_NEAREST, VK_SAMPLER_MIPMAP_MODE_NEAREST);
+ SetAddressMode(address_mode, address_mode, address_mode);
+ }
+
+ void SamplerBuilder::SetLinearSampler(
+ bool mipmaps, VkSamplerAddressMode address_mode /* = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER */)
+ {
+ Clear();
+ SetFilter(VK_FILTER_LINEAR, VK_FILTER_LINEAR,
+ mipmaps ? VK_SAMPLER_MIPMAP_MODE_LINEAR : VK_SAMPLER_MIPMAP_MODE_NEAREST);
+ SetAddressMode(address_mode, address_mode, address_mode);
+
+ if (mipmaps)
+ {
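+ // Allow sampling across the full mip chain when mipmapping is requested.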
+ m_ci.minLod = std::numeric_limits<float>::min();
+ m_ci.maxLod = std::numeric_limits<float>::max();
+ }
+ }
+
+ DescriptorSetUpdateBuilder::DescriptorSetUpdateBuilder() { Clear(); }
+
+ void DescriptorSetUpdateBuilder::Clear()
+ {
+ m_writes = {};
+ m_num_writes = 0;
+ }
+
+ void DescriptorSetUpdateBuilder::Update(VkDevice device, bool clear /*= true*/)
+ {
+ pxAssert(m_num_writes > 0);
+
+ vkUpdateDescriptorSets(device, m_num_writes, (m_num_writes > 0) ? m_writes.data() : nullptr, 0, nullptr);
+
+ if (clear)
+ Clear();
+ }
+
+ void DescriptorSetUpdateBuilder::AddImageDescriptorWrite(VkDescriptorSet set, u32 binding, VkImageView view,
+ VkImageLayout layout /*= VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL*/)
+ {
+ pxAssert(m_num_writes < MAX_WRITES && m_num_image_infos < MAX_IMAGE_INFOS);
+
+ VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++];
+ ii.imageView = view;
+ ii.imageLayout = layout;
+ ii.sampler = VK_NULL_HANDLE;
+
+ VkWriteDescriptorSet& dw = m_writes[m_num_writes++];
+ dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ dw.dstSet = set;
+ dw.dstBinding = binding;
+ dw.descriptorCount = 1;
+ dw.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+ dw.pImageInfo = &ii;
+ }
+
+
+ void DescriptorSetUpdateBuilder::AddImageDescriptorWrites(VkDescriptorSet set, u32 binding,
+ const VkImageView* views, u32 num_views, VkImageLayout layout /*= VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL*/)
+ {
+ pxAssert(m_num_writes < MAX_WRITES && (m_num_image_infos + num_views) < MAX_IMAGE_INFOS);
+
+#if 1
+ // NOTE: This is deliberately split up - updating multiple descriptors in one write is broken on Adreno.
+ for (u32 i = 0; i < num_views; i++)
+ AddImageDescriptorWrite(set, binding + i, views[i], layout);
+#else
+ VkWriteDescriptorSet& dw = m_writes[m_num_writes++];
+ dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ dw.dstSet = set;
+ dw.dstBinding = binding;
+ dw.descriptorCount = num_views;
+ dw.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+ dw.pImageInfo = &m_image_infos[m_num_image_infos];
+
+ for (u32 i = 0; i < num_views; i++)
+ {
+ VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++];
+ ii.imageView = views[i];
+ ii.imageLayout = layout;
+ ii.sampler = VK_NULL_HANDLE;
+ }
+#endif
+ }
+
+ void DescriptorSetUpdateBuilder::AddSamplerDescriptorWrite(VkDescriptorSet set, u32 binding, VkSampler sampler)
+ {
+ pxAssert(m_num_writes < MAX_WRITES && m_num_image_infos < MAX_IMAGE_INFOS);
+
+ VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++];
+ ii.imageView = VK_NULL_HANDLE;
+ ii.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ ii.sampler = sampler;
+
+ VkWriteDescriptorSet& dw = m_writes[m_num_writes++];
+ dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ dw.dstSet = set;
+ dw.dstBinding = binding;
+ dw.descriptorCount = 1;
+ dw.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+ dw.pImageInfo = &ii;
+ }
+
+
+ void DescriptorSetUpdateBuilder::AddSamplerDescriptorWrites(
+ VkDescriptorSet set, u32 binding, const VkSampler* samplers, u32 num_samplers)
+ {
+ pxAssert(m_num_writes < MAX_WRITES && (m_num_image_infos + num_samplers) < MAX_IMAGE_INFOS);
+
+ VkWriteDescriptorSet& dw = m_writes[m_num_writes++];
+ dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ dw.dstSet = set;
+ dw.dstBinding = binding;
+ dw.descriptorCount = num_samplers;
+ dw.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+ dw.pImageInfo = &m_image_infos[m_num_image_infos];
+
+ for (u32 i = 0; i < num_samplers; i++)
+ {
+ VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++];
+ ii.imageView = VK_NULL_HANDLE;
+ ii.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ ii.sampler = samplers[i];
+ }
+ }
+
+ void DescriptorSetUpdateBuilder::AddCombinedImageSamplerDescriptorWrite(VkDescriptorSet set, u32 binding,
+ VkImageView view, VkSampler sampler, VkImageLayout layout /*= VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL*/)
+ {
+ pxAssert(m_num_writes < MAX_WRITES && m_num_image_infos < MAX_IMAGE_INFOS);
+
+ VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++];
+ ii.imageView = view;
+ ii.imageLayout = layout;
+ ii.sampler = sampler;
+
+ VkWriteDescriptorSet& dw = m_writes[m_num_writes++];
+ dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ dw.dstSet = set;
+ dw.dstBinding = binding;
+ dw.descriptorCount = 1;
+ dw.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ dw.pImageInfo = &ii;
+ }
+
+ void DescriptorSetUpdateBuilder::AddCombinedImageSamplerDescriptorWrites(VkDescriptorSet set, u32 binding,
+ const VkImageView* views, const VkSampler* samplers, u32 num_views,
+ VkImageLayout layout /* = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL */)
+ {
+ pxAssert(m_num_writes < MAX_WRITES && (m_num_image_infos + num_views) < MAX_IMAGE_INFOS);
+
+#if 1
+ // NOTE: This is deliberately split up - updating multiple descriptors in one write is broken on Adreno.
+ for (u32 i = 0; i < num_views; i++)
+ AddCombinedImageSamplerDescriptorWrite(set, binding + i, views[i], samplers[i], layout);
+#else
+ VkWriteDescriptorSet& dw = m_writes[m_num_writes++];
+ dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ dw.dstSet = set;
+ dw.dstBinding = binding;
+ dw.descriptorCount = num_views;
+ dw.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ dw.pImageInfo = &m_image_infos[m_num_image_infos];
+
+ for (u32 i = 0; i < num_views; i++)
+ {
+ VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++];
+ ii.imageView = views[i];
+ ii.sampler = samplers[i];
+ ii.imageLayout = layout;
+ }
+#endif
+ }
+
+ void DescriptorSetUpdateBuilder::AddBufferDescriptorWrite(
+ VkDescriptorSet set, u32 binding, VkDescriptorType dtype, VkBuffer buffer, u32 offset, u32 size)
+ {
+ pxAssert(m_num_writes < MAX_WRITES && m_num_buffer_infos < MAX_BUFFER_INFOS);
+
+ VkDescriptorBufferInfo& bi = m_buffer_infos[m_num_buffer_infos++];
+ bi.buffer = buffer;
+ bi.offset = offset;
+ bi.range = size;
+
+ VkWriteDescriptorSet& dw = m_writes[m_num_writes++];
+ dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ dw.dstSet = set;
+ dw.dstBinding = binding;
+ dw.descriptorCount = 1;
+ dw.descriptorType = dtype;
+ dw.pBufferInfo = &bi;
+ }
+
+ void DescriptorSetUpdateBuilder::AddBufferViewDescriptorWrite(
+ VkDescriptorSet set, u32 binding, VkDescriptorType dtype, VkBufferView view)
+ {
+ pxAssert(m_num_writes < MAX_WRITES && m_num_views < MAX_VIEWS);
+
+ VkBufferView& bi = m_views[m_num_views++];
+ bi = view;
+
+ VkWriteDescriptorSet& dw = m_writes[m_num_writes++];
+ dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ dw.dstSet = set;
+ dw.dstBinding = binding;
+ dw.descriptorCount = 1;
+ dw.descriptorType = dtype;
+ dw.pTexelBufferView = &bi;
+ }
+
+ void DescriptorSetUpdateBuilder::AddInputAttachmentDescriptorWrite(
+ VkDescriptorSet set, u32 binding, VkImageView view, VkImageLayout layout /*= VK_IMAGE_LAYOUT_GENERAL*/)
+ {
+ pxAssert(m_num_writes < MAX_WRITES && m_num_image_infos < MAX_IMAGE_INFOS);
+
+ VkWriteDescriptorSet& dw = m_writes[m_num_writes++];
+ dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ dw.dstSet = set;
+ dw.dstBinding = binding;
+ dw.descriptorCount = 1;
+ dw.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
+ dw.pImageInfo = &m_image_infos[m_num_image_infos];
+
+ VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++];
+ ii.imageView = view;
+ ii.imageLayout = layout;
+ ii.sampler = VK_NULL_HANDLE;
+ }
+
+ void DescriptorSetUpdateBuilder::AddStorageImageDescriptorWrite(
+ VkDescriptorSet set, u32 binding, VkImageView view, VkImageLayout layout /*= VK_IMAGE_LAYOUT_GENERAL*/)
+ {
+ pxAssert(m_num_writes < MAX_WRITES && m_num_image_infos < MAX_IMAGE_INFOS);
+
+ VkWriteDescriptorSet& dw = m_writes[m_num_writes++];
+ dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ dw.dstSet = set;
+ dw.dstBinding = binding;
+ dw.descriptorCount = 1;
+ dw.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+ dw.pImageInfo = &m_image_infos[m_num_image_infos];
+
+ VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++];
+ ii.imageView = view;
+ ii.imageLayout = layout;
+ ii.sampler = VK_NULL_HANDLE;
+ }
+
+ FramebufferBuilder::FramebufferBuilder() { Clear(); }
+
+ void FramebufferBuilder::Clear()
+ {
+ m_ci = {};
+ m_ci.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+ m_images = {};
+ }
+
+ VkFramebuffer FramebufferBuilder::Create(VkDevice device, bool clear /*= true*/)
+ {
+ VkFramebuffer fb;
+ VkResult res = vkCreateFramebuffer(device, &m_ci, nullptr, &fb);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkCreateFramebuffer() failed: ");
+ return VK_NULL_HANDLE;
+ }
+
+ if (clear)
+ Clear();
+
+ return fb;
+ }
+
+ void FramebufferBuilder::AddAttachment(VkImageView image)
+ {
+ pxAssert(m_ci.attachmentCount < MAX_ATTACHMENTS);
+
+ m_images[m_ci.attachmentCount] = image;
+
+ m_ci.attachmentCount++;
+ m_ci.pAttachments = m_images.data();
+ }
+
+ void FramebufferBuilder::SetSize(u32 width, u32 height, u32 layers)
+ {
+ m_ci.width = width;
+ m_ci.height = height;
+ m_ci.layers = layers;
+ }
+
+ void FramebufferBuilder::SetRenderPass(VkRenderPass render_pass) { m_ci.renderPass = render_pass; }
+
+ RenderPassBuilder::RenderPassBuilder() { Clear(); }
+
+ void RenderPassBuilder::Clear()
+ {
+ m_ci = {};
+ m_ci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+ m_attachments = {};
+ m_attachment_references = {};
+ m_num_attachment_references = 0;
+ m_subpasses = {};
+ }
+
+ VkRenderPass RenderPassBuilder::Create(VkDevice device, bool clear /*= true*/)
+ {
+ VkRenderPass rp;
+ VkResult res = vkCreateRenderPass(device, &m_ci, nullptr, &rp);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkCreateRenderPass() failed: ");
+ return VK_NULL_HANDLE;
+ }
+
+ return rp;
+ }
+
+ u32 RenderPassBuilder::AddAttachment(VkFormat format, VkSampleCountFlagBits samples, VkAttachmentLoadOp load_op,
+ VkAttachmentStoreOp store_op, VkImageLayout initial_layout, VkImageLayout final_layout)
+ {
+ pxAssert(m_ci.attachmentCount < MAX_ATTACHMENTS);
+
+ const u32 index = m_ci.attachmentCount;
+ VkAttachmentDescription& ad = m_attachments[index];
+ ad.format = format;
+ ad.samples = samples;
+ ad.loadOp = load_op;
+ ad.storeOp = store_op;
+ ad.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+ ad.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+ ad.initialLayout = initial_layout;
+ ad.finalLayout = final_layout;
+
+ m_ci.attachmentCount++;
+ m_ci.pAttachments = m_attachments.data();
+
+ return index;
+ }
+
+ u32 RenderPassBuilder::AddSubpass()
+ {
+ pxAssert(m_ci.subpassCount < MAX_SUBPASSES);
+
+ const u32 index = m_ci.subpassCount;
+ VkSubpassDescription& sp = m_subpasses[index];
+ sp.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+
+ m_ci.subpassCount++;
+ m_ci.pSubpasses = m_subpasses.data();
+
+ return index;
+ }
+
+ void RenderPassBuilder::AddSubpassColorAttachment(u32 subpass, u32 attachment, VkImageLayout layout)
+ {
+ pxAssert(subpass < m_ci.subpassCount && m_num_attachment_references < MAX_ATTACHMENT_REFERENCES);
+
+ VkAttachmentReference& ar = m_attachment_references[m_num_attachment_references++];
+ ar.attachment = attachment;
+ ar.layout = layout;
+
+ VkSubpassDescription& sp = m_subpasses[subpass];
+ if (sp.colorAttachmentCount == 0)
+ sp.pColorAttachments = &ar;
+ sp.colorAttachmentCount++;
+ }
+
+ void RenderPassBuilder::AddSubpassDepthAttachment(u32 subpass, u32 attachment, VkImageLayout layout)
+ {
+ pxAssert(subpass < m_ci.subpassCount && m_num_attachment_references < MAX_ATTACHMENT_REFERENCES);
+
+ VkAttachmentReference& ar = m_attachment_references[m_num_attachment_references++];
+ ar.attachment = attachment;
+ ar.layout = layout;
+
+ VkSubpassDescription& sp = m_subpasses[subpass];
+ sp.pDepthStencilAttachment = &ar;
+ }
+
+ BufferViewBuilder::BufferViewBuilder() { Clear(); }
+
+ void BufferViewBuilder::Clear()
+ {
+ m_ci = {};
+ m_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+ }
+
+ VkBufferView BufferViewBuilder::Create(VkDevice device, bool clear /*= true*/)
+ {
+ VkBufferView bv;
+ VkResult res = vkCreateBufferView(device, &m_ci, nullptr, &bv);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkCreateBufferView() failed: ");
+ return VK_NULL_HANDLE;
+ }
+
+ return bv;
+ }
+
+ void BufferViewBuilder::Set(VkBuffer buffer, VkFormat format, u32 offset, u32 size)
+ {
+ m_ci.buffer = buffer;
+ m_ci.format = format;
+ m_ci.offset = offset;
+ m_ci.range = size;
+ }
+} // namespace Vulkan
\ No newline at end of file
diff --git a/common/Vulkan/Builders.h b/common/Vulkan/Builders.h
new file mode 100644
index 0000000000..f3ffbb68a0
--- /dev/null
+++ b/common/Vulkan/Builders.h
@@ -0,0 +1,295 @@
+/* PCSX2 - PS2 Emulator for PCs
+ * Copyright (C) 2002-2021 PCSX2 Dev Team
+ *
+ * PCSX2 is free software: you can redistribute it and/or modify it under the terms
+ * of the GNU Lesser General Public License as published by the Free Software Found-
+ * ation, either version 3 of the License, or (at your option) any later version.
+ *
+ * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+ * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ * PURPOSE. See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along with PCSX2.
+ * If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+#include "common/Pcsx2Defs.h"
+#include "common/Vulkan/Loader.h"
+#include <array>
+
+namespace Vulkan
+{
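+ // Thin builder helpers that incrementally fill out Vulkan create-info structures
+ // and hand back the created object, keeping call sites short.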
+ class DescriptorSetLayoutBuilder
+ {
+ public:
+ enum : u32
+ {
+ MAX_BINDINGS = 16,
+ };
+
+ DescriptorSetLayoutBuilder();
+
+ void Clear();
+
+ VkDescriptorSetLayout Create(VkDevice device);
+
+ void AddBinding(u32 binding, VkDescriptorType dtype, u32 dcount, VkShaderStageFlags stages);
+
+ private:
+ VkDescriptorSetLayoutCreateInfo m_ci{};
+ std::array<VkDescriptorSetLayoutBinding, MAX_BINDINGS> m_bindings{};
+ };
+
+ class PipelineLayoutBuilder
+ {
+ public:
+ enum : u32
+ {
+ MAX_SETS = 8,
+ MAX_PUSH_CONSTANTS = 1
+ };
+
+ PipelineLayoutBuilder();
+
+ void Clear();
+
+ VkPipelineLayout Create(VkDevice device);
+
+ void AddDescriptorSet(VkDescriptorSetLayout layout);
+
+ void AddPushConstants(VkShaderStageFlags stages, u32 offset, u32 size);
+
+ private:
+ VkPipelineLayoutCreateInfo m_ci{};
+ std::array<VkDescriptorSetLayout, MAX_SETS> m_sets{};
+ std::array<VkPushConstantRange, MAX_PUSH_CONSTANTS> m_push_constants{};
+ };
+
+ class GraphicsPipelineBuilder
+ {
+ public:
+ enum : u32
+ {
+ MAX_SHADER_STAGES = 3,
+ MAX_VERTEX_ATTRIBUTES = 16,
+ MAX_VERTEX_BUFFERS = 8,
+ MAX_ATTACHMENTS = 2,
+ MAX_DYNAMIC_STATE = 8
+ };
+
+ GraphicsPipelineBuilder();
+
+ void Clear();
+
+ VkPipeline Create(VkDevice device, VkPipelineCache pipeline_cache = VK_NULL_HANDLE, bool clear = true);
+
+ void SetShaderStage(VkShaderStageFlagBits stage, VkShaderModule module, const char* entry_point);
+ void SetVertexShader(VkShaderModule module) { SetShaderStage(VK_SHADER_STAGE_VERTEX_BIT, module, "main"); }
+ void SetGeometryShader(VkShaderModule module) { SetShaderStage(VK_SHADER_STAGE_GEOMETRY_BIT, module, "main"); }
+ void SetFragmentShader(VkShaderModule module) { SetShaderStage(VK_SHADER_STAGE_FRAGMENT_BIT, module, "main"); }
+
+ void AddVertexBuffer(u32 binding, u32 stride, VkVertexInputRate input_rate = VK_VERTEX_INPUT_RATE_VERTEX);
+ void AddVertexAttribute(u32 location, u32 binding, VkFormat format, u32 offset);
+
+ void SetPrimitiveTopology(VkPrimitiveTopology topology, bool enable_primitive_restart = false);
+
+ void SetRasterizationState(VkPolygonMode polygon_mode, VkCullModeFlags cull_mode, VkFrontFace front_face);
+ void SetLineWidth(float width);
+ void SetMultisamples(u32 multisamples, bool per_sample_shading);
+ void SetNoCullRasterizationState();
+
+ void SetDepthState(bool depth_test, bool depth_write, VkCompareOp compare_op);
+ void SetStencilState(bool stencil_test, const VkStencilOpState& front, const VkStencilOpState& back);
+ void SetNoDepthTestState();
+ void SetNoStencilState();
+
+ void AddBlendAttachment(bool blend_enable, VkBlendFactor src_factor, VkBlendFactor dst_factor, VkBlendOp op,
+ VkBlendFactor alpha_src_factor, VkBlendFactor alpha_dst_factor, VkBlendOp alpha_op,
+ VkColorComponentFlags write_mask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |
+ VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT);
+ void SetBlendAttachment(u32 attachment, bool blend_enable, VkBlendFactor src_factor, VkBlendFactor dst_factor,
+ VkBlendOp op, VkBlendFactor alpha_src_factor, VkBlendFactor alpha_dst_factor, VkBlendOp alpha_op,
+ VkColorComponentFlags write_mask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |
+ VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT);
+ void ClearBlendAttachments();
+
+ void SetBlendConstants(float r, float g, float b, float a);
+ void SetNoBlendingState();
+
+ void AddDynamicState(VkDynamicState state);
+
+ void SetDynamicViewportAndScissorState();
+ void SetViewport(float x, float y, float width, float height, float min_depth, float max_depth);
+ void SetScissorRect(s32 x, s32 y, u32 width, u32 height);
+
+ void SetMultisamples(VkSampleCountFlagBits samples);
+
+ void SetPipelineLayout(VkPipelineLayout layout);
+ void SetRenderPass(VkRenderPass render_pass, u32 subpass);
+
+ private:
+ VkGraphicsPipelineCreateInfo m_ci;
+ std::array<VkPipelineShaderStageCreateInfo, MAX_SHADER_STAGES> m_shader_stages;
+
+ VkPipelineVertexInputStateCreateInfo m_vertex_input_state;
+ std::array<VkVertexInputBindingDescription, MAX_VERTEX_BUFFERS> m_vertex_buffers;
+ std::array<VkVertexInputAttributeDescription, MAX_VERTEX_ATTRIBUTES> m_vertex_attributes;
+
+ VkPipelineInputAssemblyStateCreateInfo m_input_assembly;
+
+ VkPipelineRasterizationStateCreateInfo m_rasterization_state;
+ VkPipelineDepthStencilStateCreateInfo m_depth_state;
+
+ VkPipelineColorBlendStateCreateInfo m_blend_state;
+ std::array<VkPipelineColorBlendAttachmentState, MAX_ATTACHMENTS> m_blend_attachments;
+
+ VkPipelineViewportStateCreateInfo m_viewport_state;
+ VkViewport m_viewport;
+ VkRect2D m_scissor;
+
+ VkPipelineDynamicStateCreateInfo m_dynamic_state;
+ std::array<VkDynamicState, MAX_DYNAMIC_STATE> m_dynamic_state_values;
+
+ VkPipelineMultisampleStateCreateInfo m_multisample_state;
+ };
+
+ class SamplerBuilder
+ {
+ public:
+ SamplerBuilder();
+
+ void Clear();
+
+ VkSampler Create(VkDevice device, bool clear = true);
+
+ void SetFilter(VkFilter mag_filter, VkFilter min_filter, VkSamplerMipmapMode mip_filter);
+ void SetAddressMode(VkSamplerAddressMode u, VkSamplerAddressMode v, VkSamplerAddressMode w);
+
+ void SetPointSampler(VkSamplerAddressMode address_mode = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER);
+ void SetLinearSampler(
+ bool mipmaps, VkSamplerAddressMode address_mode = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER);
+
+ private:
+ VkSamplerCreateInfo m_ci;
+ };
+
+ class DescriptorSetUpdateBuilder
+ {
+ enum : u32
+ {
+ MAX_WRITES = 16,
+ MAX_IMAGE_INFOS = 8,
+ MAX_BUFFER_INFOS = 4,
+ MAX_VIEWS = 4,
+ };
+
+ public:
+ DescriptorSetUpdateBuilder();
+
+ void Clear();
+
+ void Update(VkDevice device, bool clear = true);
+
+ void AddImageDescriptorWrite(VkDescriptorSet set, u32 binding, VkImageView view,
+ VkImageLayout layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
+ void AddImageDescriptorWrites(VkDescriptorSet set, u32 binding, const VkImageView* views, u32 num_views,
+ VkImageLayout layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
+ void AddSamplerDescriptorWrite(VkDescriptorSet set, u32 binding, VkSampler sampler);
+ void AddSamplerDescriptorWrites(VkDescriptorSet set, u32 binding, const VkSampler* samplers, u32 num_samplers);
+ void AddCombinedImageSamplerDescriptorWrite(VkDescriptorSet set, u32 binding, VkImageView view,
+ VkSampler sampler, VkImageLayout layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
+ void AddCombinedImageSamplerDescriptorWrites(VkDescriptorSet set, u32 binding, const VkImageView* views,
+ const VkSampler* samplers, u32 num_views, VkImageLayout layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
+ void AddBufferDescriptorWrite(
+ VkDescriptorSet set, u32 binding, VkDescriptorType dtype, VkBuffer buffer, u32 offset, u32 size);
+ void AddBufferViewDescriptorWrite(VkDescriptorSet set, u32 binding, VkDescriptorType dtype, VkBufferView view);
+ void AddInputAttachmentDescriptorWrite(
+ VkDescriptorSet set, u32 binding, VkImageView view, VkImageLayout layout = VK_IMAGE_LAYOUT_GENERAL);
+ void AddStorageImageDescriptorWrite(
+ VkDescriptorSet set, u32 binding, VkImageView view, VkImageLayout layout = VK_IMAGE_LAYOUT_GENERAL);
+
+ private:
+ std::array<VkWriteDescriptorSet, MAX_WRITES> m_writes;
+ u32 m_num_writes = 0;
+
+ std::array<VkDescriptorBufferInfo, MAX_BUFFER_INFOS> m_buffer_infos;
+ std::array<VkDescriptorImageInfo, MAX_IMAGE_INFOS> m_image_infos;
+ std::array<VkBufferView, MAX_VIEWS> m_views;
+ u32 m_num_buffer_infos = 0;
+ u32 m_num_image_infos = 0;
+ u32 m_num_views = 0;
+ };
+
+ class FramebufferBuilder
+ {
+ enum : u32
+ {
+ MAX_ATTACHMENTS = 2,
+ };
+
+ public:
+ FramebufferBuilder();
+
+ void Clear();
+
+ VkFramebuffer Create(VkDevice device, bool clear = true);
+
+ void AddAttachment(VkImageView image);
+
+ void SetSize(u32 width, u32 height, u32 layers);
+
+ void SetRenderPass(VkRenderPass render_pass);
+
+ private:
+ VkFramebufferCreateInfo m_ci;
+ std::array<VkImageView, MAX_ATTACHMENTS> m_images;
+ };
+
+ class RenderPassBuilder
+ {
+ enum : u32
+ {
+ MAX_ATTACHMENTS = 2,
+ MAX_ATTACHMENT_REFERENCES = 2,
+ MAX_SUBPASSES = 1,
+ };
+
+ public:
+ RenderPassBuilder();
+
+ void Clear();
+
+ VkRenderPass Create(VkDevice device, bool clear = true);
+
+ u32 AddAttachment(VkFormat format, VkSampleCountFlagBits samples, VkAttachmentLoadOp load_op,
+ VkAttachmentStoreOp store_op, VkImageLayout initial_layout, VkImageLayout final_layout);
+
+ u32 AddSubpass();
+ void AddSubpassColorAttachment(u32 subpass, u32 attachment, VkImageLayout layout);
+ void AddSubpassDepthAttachment(u32 subpass, u32 attachment, VkImageLayout layout);
+
+ private:
+ VkRenderPassCreateInfo m_ci;
+ std::array<VkAttachmentDescription, MAX_ATTACHMENTS> m_attachments;
+ std::array<VkAttachmentReference, MAX_ATTACHMENT_REFERENCES> m_attachment_references;
+ u32 m_num_attachment_references = 0;
+ std::array<VkSubpassDescription, MAX_SUBPASSES> m_subpasses;
+ };
+
+ class BufferViewBuilder
+ {
+ public:
+ BufferViewBuilder();
+
+ void Clear();
+
+ VkBufferView Create(VkDevice device, bool clear = true);
+
+ void Set(VkBuffer buffer, VkFormat format, u32 offset, u32 size);
+
+ private:
+ VkBufferViewCreateInfo m_ci;
+ };
+
+} // namespace Vulkan
\ No newline at end of file
diff --git a/common/Vulkan/Context.cpp b/common/Vulkan/Context.cpp
new file mode 100644
index 0000000000..fadd3f30a2
--- /dev/null
+++ b/common/Vulkan/Context.cpp
@@ -0,0 +1,1416 @@
+/* PCSX2 - PS2 Emulator for PCs
+ * Copyright (C) 2002-2021 PCSX2 Dev Team
+ *
+ * PCSX2 is free software: you can redistribute it and/or modify it under the terms
+ * of the GNU Lesser General Public License as published by the Free Software Found-
+ * ation, either version 3 of the License, or (at your option) any later version.
+ *
+ * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+ * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ * PURPOSE. See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along with PCSX2.
+ * If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "common/Vulkan/Context.h"
+#include "common/Assertions.h"
+#include "common/Console.h"
+#include "common/StringUtil.h"
+#include "common/Vulkan/SwapChain.h"
+#include "common/Vulkan/Util.h"
+#include <algorithm>
+#include <array>
+#include <cstring>
+
+std::unique_ptr<Vulkan::Context> g_vulkan_context;
+
+// Tweakables
+enum : u32
+{
+ MAX_DRAW_CALLS_PER_FRAME = 8192,
+ MAX_COMBINED_IMAGE_SAMPLER_DESCRIPTORS_PER_FRAME = 2 * MAX_DRAW_CALLS_PER_FRAME,
+ MAX_SAMPLED_IMAGE_DESCRIPTORS_PER_FRAME =
+ MAX_DRAW_CALLS_PER_FRAME, // assume at least half our draws aren't going to be shuffle/blending
+ MAX_STORAGE_IMAGE_DESCRIPTORS_PER_FRAME = MAX_DRAW_CALLS_PER_FRAME,
+ MAX_INPUT_ATTACHMENT_IMAGE_DESCRIPTORS_PER_FRAME = MAX_DRAW_CALLS_PER_FRAME,
+ MAX_DESCRIPTOR_SETS_PER_FRAME = MAX_DRAW_CALLS_PER_FRAME * 2
+};
+
+namespace Vulkan
+{
+ Context::Context(VkInstance instance, VkPhysicalDevice physical_device)
+ : m_instance(instance)
+ , m_physical_device(physical_device)
+ {
+ // Read device physical memory properties, we need it for allocating buffers
+ vkGetPhysicalDeviceProperties(physical_device, &m_device_properties);
+ vkGetPhysicalDeviceMemoryProperties(physical_device, &m_device_memory_properties);
+
+ // Would any drivers be this silly? I hope not...
+ m_device_properties.limits.minUniformBufferOffsetAlignment =
+ std::max(m_device_properties.limits.minUniformBufferOffsetAlignment, static_cast<VkDeviceSize>(1));
+ m_device_properties.limits.minTexelBufferOffsetAlignment =
+ std::max(m_device_properties.limits.minTexelBufferOffsetAlignment, static_cast<VkDeviceSize>(1));
+ m_device_properties.limits.optimalBufferCopyOffsetAlignment =
+ std::max(m_device_properties.limits.optimalBufferCopyOffsetAlignment, static_cast<VkDeviceSize>(1));
+ m_device_properties.limits.optimalBufferCopyRowPitchAlignment =
+ std::max(m_device_properties.limits.optimalBufferCopyRowPitchAlignment, static_cast<VkDeviceSize>(1));
+ }
+
+ Context::~Context() = default;
+
+ bool Context::CheckValidationLayerAvailablility()
+ {
+ u32 extension_count = 0;
+ VkResult res = vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, nullptr);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkEnumerateInstanceExtensionProperties failed: ");
+ return false;
+ }
+
+ std::vector<VkExtensionProperties> extension_list(extension_count);
+ res = vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, extension_list.data());
+ pxAssert(res == VK_SUCCESS);
+
+ u32 layer_count = 0;
+ res = vkEnumerateInstanceLayerProperties(&layer_count, nullptr);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkEnumerateInstanceExtensionProperties failed: ");
+ return false;
+ }
+
+ std::vector<VkLayerProperties> layer_list(layer_count);
+ res = vkEnumerateInstanceLayerProperties(&layer_count, layer_list.data());
+ pxAssert(res == VK_SUCCESS);
+
+ // Check for both VK_EXT_debug_utils and VK_LAYER_LUNARG_standard_validation
+ return (std::find_if(extension_list.begin(), extension_list.end(),
+ [](const auto& it) { return strcmp(it.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0; }) !=
+ extension_list.end() &&
+ std::find_if(layer_list.begin(), layer_list.end(), [](const auto& it) {
+ return strcmp(it.layerName, "VK_LAYER_KHRONOS_validation") == 0;
+ }) != layer_list.end());
+ }
+
+ VkInstance Context::CreateVulkanInstance(
+ const WindowInfo* wi, bool enable_debug_utils, bool enable_validation_layer)
+ {
+ ExtensionList enabled_extensions;
+ if (!SelectInstanceExtensions(&enabled_extensions, wi, enable_debug_utils))
+ return VK_NULL_HANDLE;
+
+ VkApplicationInfo app_info = {};
+ app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
+ app_info.pNext = nullptr;
+ app_info.pApplicationName = "DuckStation";
+ app_info.applicationVersion = VK_MAKE_VERSION(0, 1, 0);
+ app_info.pEngineName = "DuckStation";
+ app_info.engineVersion = VK_MAKE_VERSION(0, 1, 0);
+ app_info.apiVersion = VK_MAKE_VERSION(1, 1, 0);
+
+ VkInstanceCreateInfo instance_create_info = {};
+ instance_create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
+ instance_create_info.pNext = nullptr;
+ instance_create_info.flags = 0;
+ instance_create_info.pApplicationInfo = &app_info;
+ instance_create_info.enabledExtensionCount = static_cast<u32>(enabled_extensions.size());
+ instance_create_info.ppEnabledExtensionNames = enabled_extensions.data();
+ instance_create_info.enabledLayerCount = 0;
+ instance_create_info.ppEnabledLayerNames = nullptr;
+
+ // Enable debug layer on debug builds
+ if (enable_validation_layer)
+ {
+ static const char* layer_names[] = {"VK_LAYER_KHRONOS_validation"};
+ instance_create_info.enabledLayerCount = 1;
+ instance_create_info.ppEnabledLayerNames = layer_names;
+ }
+
+ VkInstance instance;
+ VkResult res = vkCreateInstance(&instance_create_info, nullptr, &instance);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkCreateInstance failed: ");
+ return nullptr;
+ }
+
+ return instance;
+ }
+
+ bool Context::SelectInstanceExtensions(ExtensionList* extension_list, const WindowInfo* wi, bool enable_debug_utils)
+ {
+ u32 extension_count = 0;
+ VkResult res = vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, nullptr);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkEnumerateInstanceExtensionProperties failed: ");
+ return false;
+ }
+
+ if (extension_count == 0)
+ {
+ Console.Error("Vulkan: No extensions supported by instance.");
+ return false;
+ }
+
+ std::vector<VkExtensionProperties> available_extension_list(extension_count);
+ res = vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, available_extension_list.data());
+ pxAssert(res == VK_SUCCESS);
+
+ auto SupportsExtension = [&](const char* name, bool required) {
+ if (std::find_if(available_extension_list.begin(), available_extension_list.end(),
+ [&](const VkExtensionProperties& properties) { return !strcmp(name, properties.extensionName); }) !=
+ available_extension_list.end())
+ {
+ DevCon.WriteLn("Enabling extension: %s", name);
+ extension_list->push_back(name);
+ return true;
+ }
+
+ if (required)
+ Console.Error("Vulkan: Missing required extension %s.", name);
+
+ return false;
+ };
+
+ // Common extensions
+ if (wi && wi->type != WindowInfo::Type::Surfaceless && !SupportsExtension(VK_KHR_SURFACE_EXTENSION_NAME, true))
+ return false;
+
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+ if (wi && wi->type == WindowInfo::Type::Win32 && !SupportsExtension(VK_KHR_WIN32_SURFACE_EXTENSION_NAME, true))
+ return false;
+#endif
+#if defined(VK_USE_PLATFORM_XLIB_KHR)
+ if (wi && wi->type == WindowInfo::Type::X11 && !SupportsExtension(VK_KHR_XLIB_SURFACE_EXTENSION_NAME, true))
+ return false;
+#endif
+#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
+ if (wi && wi->type == WindowInfo::Type::Wayland &&
+ !SupportsExtension(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, true))
+ return false;
+#endif
+#if defined(VK_USE_PLATFORM_ANDROID_KHR)
+ if (wi && wi->type == WindowInfo::Type::Android &&
+ !SupportsExtension(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME, true))
+ return false;
+#endif
+#if defined(VK_USE_PLATFORM_METAL_EXT)
+ if (wi && wi->type == WindowInfo::Type::MacOS && !SupportsExtension(VK_EXT_METAL_SURFACE_EXTENSION_NAME, true))
+ return false;
+#endif
+
+#if 0
+ if (wi && wi->type == WindowInfo::Type::Display && !SupportsExtension(VK_KHR_DISPLAY_EXTENSION_NAME, true))
+ return false;
+#endif
+
+ // VK_EXT_debug_utils
+ if (enable_debug_utils && !SupportsExtension(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, false))
+ Console.Warning("Vulkan: Debug report requested, but extension is not available.");
+
+ return true;
+ }
+
+ Context::GPUList Context::EnumerateGPUs(VkInstance instance)
+ {
+ u32 gpu_count = 0;
+ VkResult res = vkEnumeratePhysicalDevices(instance, &gpu_count, nullptr);
+ if (res != VK_SUCCESS || gpu_count == 0)
+ {
+ LOG_VULKAN_ERROR(res, "vkEnumeratePhysicalDevices failed: ");
+ return {};
+ }
+
+ GPUList gpus;
+ gpus.resize(gpu_count);
+
+ res = vkEnumeratePhysicalDevices(instance, &gpu_count, gpus.data());
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkEnumeratePhysicalDevices failed: ");
+ return {};
+ }
+
+ return gpus;
+ }
+
+ Context::GPUNameList Context::EnumerateGPUNames(VkInstance instance)
+ {
+ u32 gpu_count = 0;
+ VkResult res = vkEnumeratePhysicalDevices(instance, &gpu_count, nullptr);
+ if (res != VK_SUCCESS || gpu_count == 0)
+ {
+ LOG_VULKAN_ERROR(res, "vkEnumeratePhysicalDevices failed: ");
+ return {};
+ }
+
+ GPUList gpus;
+ gpus.resize(gpu_count);
+
+ res = vkEnumeratePhysicalDevices(instance, &gpu_count, gpus.data());
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkEnumeratePhysicalDevices failed: ");
+ return {};
+ }
+
+ GPUNameList gpu_names;
+ gpu_names.reserve(gpu_count);
+ for (u32 i = 0; i < gpu_count; i++)
+ {
+ VkPhysicalDeviceProperties props = {};
+ vkGetPhysicalDeviceProperties(gpus[i], &props);
+
+ std::string gpu_name(props.deviceName);
+
+ // handle duplicate adapter names
+ if (std::any_of(gpu_names.begin(), gpu_names.end(),
+ [&gpu_name](const std::string& other) { return (gpu_name == other); }))
+ {
+ std::string original_adapter_name = std::move(gpu_name);
+
+ u32 current_extra = 2;
+ do
+ {
+ gpu_name = StringUtil::StdStringFromFormat("%s (%u)", original_adapter_name.c_str(), current_extra);
+ current_extra++;
+ } while (std::any_of(gpu_names.begin(), gpu_names.end(),
+ [&gpu_name](const std::string& other) { return (gpu_name == other); }));
+ }
+
+ gpu_names.push_back(std::move(gpu_name));
+ }
+
+ return gpu_names;
+ }
+
+ bool Context::Create(std::string_view gpu_name, const WindowInfo* wi, std::unique_ptr<SwapChain>* out_swap_chain,
+ bool threaded_presentation, bool enable_debug_utils, bool enable_validation_layer)
+ {
+ pxAssertMsg(!g_vulkan_context, "Has no current context");
+
+ if (!Vulkan::LoadVulkanLibrary())
+ {
+ Console.Error("Failed to load Vulkan library");
+ return false;
+ }
+
+ const bool enable_surface = (wi && wi->type != WindowInfo::Type::Surfaceless);
+ VkInstance instance = CreateVulkanInstance(wi, enable_debug_utils, enable_validation_layer);
+ if (instance == VK_NULL_HANDLE)
+ {
+ Vulkan::UnloadVulkanLibrary();
+ return false;
+ }
+
+ if (!Vulkan::LoadVulkanInstanceFunctions(instance))
+ {
+ Console.Error("Failed to load Vulkan instance functions");
+ vkDestroyInstance(instance, nullptr);
+ Vulkan::UnloadVulkanLibrary();
+ return false;
+ }
+
+ GPUList gpus = EnumerateGPUs(instance);
+ if (gpus.empty())
+ {
+ vkDestroyInstance(instance, nullptr);
+ Vulkan::UnloadVulkanLibrary();
+ return false;
+ }
+
+ u32 gpu_index = 0;
+ GPUNameList gpu_names = EnumerateGPUNames(instance);
+ if (!gpu_name.empty())
+ {
+ for (; gpu_index < static_cast<u32>(gpu_names.size()); gpu_index++)
+ {
+ Console.WriteLn("GPU %u: %s", static_cast(gpu_index), gpu_names[gpu_index].c_str());
+ if (gpu_names[gpu_index] == gpu_name)
+ break;
+ }
+
+ if (gpu_index == static_cast<u32>(gpu_names.size()))
+ {
+ Console.Warning("Requested GPU '%s' not found, using first (%s)", std::string(gpu_name).c_str(),
+ gpu_names[0].c_str());
+ gpu_index = 0;
+ }
+ }
+ else
+ {
+ Console.WriteLn("No GPU requested, using first (%s)", gpu_names[0].c_str());
+ }
+
+ VkSurfaceKHR surface = VK_NULL_HANDLE;
+ WindowInfo wi_copy;
+ if (wi)
+ wi_copy = *wi;
+
+ if (enable_surface &&
+ (surface = SwapChain::CreateVulkanSurface(instance, gpus[gpu_index], &wi_copy)) == VK_NULL_HANDLE)
+ {
+ vkDestroyInstance(instance, nullptr);
+ Vulkan::UnloadVulkanLibrary();
+ return false;
+ }
+
+ g_vulkan_context.reset(new Context(instance, gpus[gpu_index]));
+
+ if (enable_debug_utils)
+ g_vulkan_context->EnableDebugUtils();
+
+ // Attempt to create the device.
+ if (!g_vulkan_context->CreateDevice(surface, enable_validation_layer, nullptr, 0, nullptr, 0, nullptr) ||
+ !g_vulkan_context->CreateAllocator() || !g_vulkan_context->CreateGlobalDescriptorPool() ||
+ !g_vulkan_context->CreateCommandBuffers() || !g_vulkan_context->CreateTextureStreamBuffer() ||
+ (enable_surface && (*out_swap_chain = SwapChain::Create(wi_copy, surface, true)) == nullptr))
+ {
+ // Since we are destroying the instance, we're also responsible for destroying the surface.
+ if (surface != VK_NULL_HANDLE)
+ vkDestroySurfaceKHR(instance, surface, nullptr);
+
+ g_vulkan_context.reset();
+ return false;
+ }
+
+ if (threaded_presentation)
+ g_vulkan_context->StartPresentThread();
+
+ return true;
+ }
+
+ void Context::Destroy()
+ {
+ pxAssertMsg(g_vulkan_context, "Has context");
+
+ g_vulkan_context->StopPresentThread();
+
+ if (g_vulkan_context->m_device != VK_NULL_HANDLE)
+ g_vulkan_context->WaitForGPUIdle();
+
+ g_vulkan_context->m_texture_upload_buffer.Destroy(false);
+
+ g_vulkan_context->DestroyRenderPassCache();
+ g_vulkan_context->DestroyGlobalDescriptorPool();
+ g_vulkan_context->DestroyCommandBuffers();
+ g_vulkan_context->DestroyAllocator();
+
+ if (g_vulkan_context->m_device != VK_NULL_HANDLE)
+ vkDestroyDevice(g_vulkan_context->m_device, nullptr);
+
+ if (g_vulkan_context->m_debug_messenger_callback != VK_NULL_HANDLE)
+ g_vulkan_context->DisableDebugUtils();
+
+ vkDestroyInstance(g_vulkan_context->m_instance, nullptr);
+ Vulkan::UnloadVulkanLibrary();
+
+ g_vulkan_context.reset();
+ }
+
+ bool Context::SelectDeviceExtensions(ExtensionList* extension_list, bool enable_surface)
+ {
+ u32 extension_count = 0;
+ VkResult res = vkEnumerateDeviceExtensionProperties(m_physical_device, nullptr, &extension_count, nullptr);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkEnumerateDeviceExtensionProperties failed: ");
+ return false;
+ }
+
+ if (extension_count == 0)
+ {
+ Console.Error("Vulkan: No extensions supported by device.");
+ return false;
+ }
+
+ std::vector<VkExtensionProperties> available_extension_list(extension_count);
+ res = vkEnumerateDeviceExtensionProperties(
+ m_physical_device, nullptr, &extension_count, available_extension_list.data());
+ pxAssert(res == VK_SUCCESS);
+
+ auto SupportsExtension = [&](const char* name, bool required) {
+ if (std::find_if(available_extension_list.begin(), available_extension_list.end(),
+ [&](const VkExtensionProperties& properties) { return !strcmp(name, properties.extensionName); }) !=
+ available_extension_list.end())
+ {
+ if (std::none_of(extension_list->begin(), extension_list->end(),
+ [&](const char* existing_name) { return (std::strcmp(existing_name, name) == 0); }))
+ {
+ DevCon.WriteLn("Enabling extension: %s", name);
+ extension_list->push_back(name);
+ }
+
+ return true;
+ }
+
+ if (required)
+ Console.Error("Vulkan: Missing required extension %s.", name);
+
+ return false;
+ };
+
+ if (enable_surface && !SupportsExtension(VK_KHR_SWAPCHAIN_EXTENSION_NAME, true))
+ return false;
+
+ return true;
+ }
+
+ bool Context::SelectDeviceFeatures(const VkPhysicalDeviceFeatures* required_features)
+ {
+ VkPhysicalDeviceFeatures available_features;
+ vkGetPhysicalDeviceFeatures(m_physical_device, &available_features);
+
+ if (required_features)
+ std::memcpy(&m_device_features, required_features, sizeof(m_device_features));
+
+ // Enable the features we use.
+ m_device_features.dualSrcBlend = available_features.dualSrcBlend;
+ m_device_features.geometryShader = available_features.geometryShader;
+ m_device_features.largePoints = available_features.largePoints;
+ m_device_features.wideLines = available_features.wideLines;
+ m_device_features.fragmentStoresAndAtomics = available_features.fragmentStoresAndAtomics;
+
+ return true;
+ }
+
+ bool Context::CreateDevice(VkSurfaceKHR surface, bool enable_validation_layer,
+ const char** required_device_extensions, u32 num_required_device_extensions,
+ const char** required_device_layers, u32 num_required_device_layers,
+ const VkPhysicalDeviceFeatures* required_features)
+ {
+ u32 queue_family_count;
+ vkGetPhysicalDeviceQueueFamilyProperties(m_physical_device, &queue_family_count, nullptr);
+ if (queue_family_count == 0)
+ {
+ Console.Error("No queue families found on specified vulkan physical device.");
+ return false;
+ }
+
+ std::vector<VkQueueFamilyProperties> queue_family_properties(queue_family_count);
+ vkGetPhysicalDeviceQueueFamilyProperties(
+ m_physical_device, &queue_family_count, queue_family_properties.data());
+ Console.WriteLn("%u vulkan queue families", queue_family_count);
+
+ // Find graphics and present queues.
+ m_graphics_queue_family_index = queue_family_count;
+ m_present_queue_family_index = queue_family_count;
+ for (uint32_t i = 0; i < queue_family_count; i++)
+ {
+ VkBool32 graphics_supported = queue_family_properties[i].queueFlags & VK_QUEUE_GRAPHICS_BIT;
+ if (graphics_supported)
+ {
+ m_graphics_queue_family_index = i;
+ // Quit now, no need for a present queue.
+ if (!surface)
+ {
+ break;
+ }
+ }
+
+ if (surface)
+ {
+ VkBool32 present_supported;
+ VkResult res = vkGetPhysicalDeviceSurfaceSupportKHR(m_physical_device, i, surface, &present_supported);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkGetPhysicalDeviceSurfaceSupportKHR failed: ");
+ return false;
+ }
+
+ if (present_supported)
+ {
+ m_present_queue_family_index = i;
+ }
+
+ // Prefer one queue family index that does both graphics and present.
+ if (graphics_supported && present_supported)
+ {
+ break;
+ }
+ }
+ }
+ if (m_graphics_queue_family_index == queue_family_count)
+ {
+ Console.Error("Vulkan: Failed to find an acceptable graphics queue.");
+ return false;
+ }
+ if (surface && m_present_queue_family_index == queue_family_count)
+ {
+ Console.Error("Vulkan: Failed to find an acceptable present queue.");
+ return false;
+ }
+
+ VkDeviceCreateInfo device_info = {};
+ device_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+ device_info.pNext = nullptr;
+ device_info.flags = 0;
+
+ static constexpr float queue_priorities[] = {1.0f};
+ VkDeviceQueueCreateInfo graphics_queue_info = {};
+ graphics_queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+ graphics_queue_info.pNext = nullptr;
+ graphics_queue_info.flags = 0;
+ graphics_queue_info.queueFamilyIndex = m_graphics_queue_family_index;
+ graphics_queue_info.queueCount = 1;
+ graphics_queue_info.pQueuePriorities = queue_priorities;
+
+ VkDeviceQueueCreateInfo present_queue_info = {};
+ present_queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+ present_queue_info.pNext = nullptr;
+ present_queue_info.flags = 0;
+ present_queue_info.queueFamilyIndex = m_present_queue_family_index;
+ present_queue_info.queueCount = 1;
+ present_queue_info.pQueuePriorities = queue_priorities;
+
+ std::array<VkDeviceQueueCreateInfo, 2> queue_infos = {{
+ graphics_queue_info,
+ present_queue_info,
+ }};
+
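+ // Use a single queue create info when the graphics and present queues share a family;
+ // otherwise request one queue from each family.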
+ device_info.queueCreateInfoCount = 1;
+ if (m_graphics_queue_family_index != m_present_queue_family_index)
+ {
+ device_info.queueCreateInfoCount = 2;
+ }
+ device_info.pQueueCreateInfos = queue_infos.data();
+
+ ExtensionList enabled_extensions;
+ for (u32 i = 0; i < num_required_device_extensions; i++)
+ enabled_extensions.emplace_back(required_device_extensions[i]);
+ if (!SelectDeviceExtensions(&enabled_extensions, surface != VK_NULL_HANDLE))
+ return false;
+
+ device_info.enabledLayerCount = num_required_device_layers;
+ device_info.ppEnabledLayerNames = required_device_layers;
+ device_info.enabledExtensionCount = static_cast<uint32_t>(enabled_extensions.size());
+ device_info.ppEnabledExtensionNames = enabled_extensions.data();
+
+ // Check for required features before creating.
+ if (!SelectDeviceFeatures(required_features))
+ return false;
+
+ device_info.pEnabledFeatures = &m_device_features;
+
+ // Enable debug layer on debug builds
+ if (enable_validation_layer)
+ {
+ static const char* layer_names[] = {"VK_LAYER_LUNARG_standard_validation"};
+ device_info.enabledLayerCount = 1;
+ device_info.ppEnabledLayerNames = layer_names;
+ }
+
+ VkResult res = vkCreateDevice(m_physical_device, &device_info, nullptr, &m_device);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkCreateDevice failed: ");
+ return false;
+ }
+
+ // With the device created, we can fill the remaining entry points.
+ if (!LoadVulkanDeviceFunctions(m_device))
+ return false;
+
+ // Grab the graphics and present queues.
+ vkGetDeviceQueue(m_device, m_graphics_queue_family_index, 0, &m_graphics_queue);
+ if (surface)
+ {
+ vkGetDeviceQueue(m_device, m_present_queue_family_index, 0, &m_present_queue);
+ }
+ return true;
+ }
+
+ bool Context::CreateAllocator()
+ {
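+ // VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT disables VMA's internal locking;
+ // callers are responsible for serializing access to the allocator.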
+ VmaAllocatorCreateInfo ci = {};
+ ci.vulkanApiVersion = VK_API_VERSION_1_1;
+ ci.flags = VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT;
+ ci.physicalDevice = m_physical_device;
+ ci.device = m_device;
+ ci.instance = m_instance;
+
+ VkResult res = vmaCreateAllocator(&ci, &m_allocator);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vmaCreateAllocator failed: ");
+ return false;
+ }
+
+ return true;
+ }
+
+ void Context::DestroyAllocator()
+ {
+ if (m_allocator == VK_NULL_HANDLE)
+ return;
+
+ vmaDestroyAllocator(m_allocator);
+ m_allocator = VK_NULL_HANDLE;
+ }
+
+ bool Context::CreateCommandBuffers()
+ {
+ VkResult res;
+
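+ // Each in-flight frame gets its own command pool, fence and descriptor pool, so the
+ // whole set can be reset in one go when the frame slot is reused.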
+ uint32_t frame_index = 0;
+ for (FrameResources& resources : m_frame_resources)
+ {
+ resources.needs_fence_wait = false;
+
+ VkCommandPoolCreateInfo pool_info = {
+ VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, nullptr, 0, m_graphics_queue_family_index};
+ res = vkCreateCommandPool(m_device, &pool_info, nullptr, &resources.command_pool);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkCreateCommandPool failed: ");
+ return false;
+ }
+ Vulkan::Util::SetObjectName(
+ g_vulkan_context->GetDevice(), resources.command_pool, "Frame Command Pool %u", frame_index);
+
+ VkCommandBufferAllocateInfo buffer_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, nullptr,
+ resources.command_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+ static_cast<u32>(resources.command_buffers.size())};
+
+ res = vkAllocateCommandBuffers(m_device, &buffer_info, resources.command_buffers.data());
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkAllocateCommandBuffers failed: ");
+ return false;
+ }
+ for (u32 i = 0; i < resources.command_buffers.size(); i++)
+ {
+ Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), resources.command_buffers[i],
+ "Frame %u %sCommand Buffer", frame_index, (i == 0) ? "Init" : "");
+ }
+
+ VkFenceCreateInfo fence_info = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, VK_FENCE_CREATE_SIGNALED_BIT};
+
+ res = vkCreateFence(m_device, &fence_info, nullptr, &resources.fence);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkCreateFence failed: ");
+ return false;
+ }
+ Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), resources.fence, "Frame Fence %u", frame_index);
+ // TODO: A better way to choose the number of descriptors.
+ VkDescriptorPoolSize pool_sizes[] = {
+ {VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, MAX_COMBINED_IMAGE_SAMPLER_DESCRIPTORS_PER_FRAME},
+ {VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, MAX_SAMPLED_IMAGE_DESCRIPTORS_PER_FRAME},
+ {VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, MAX_STORAGE_IMAGE_DESCRIPTORS_PER_FRAME},
+ {VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, MAX_STORAGE_IMAGE_DESCRIPTORS_PER_FRAME},
+ };
+
+ VkDescriptorPoolCreateInfo pool_create_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, nullptr, 0,
+ MAX_DESCRIPTOR_SETS_PER_FRAME, static_cast<u32>(std::size(pool_sizes)), pool_sizes};
+
+ res = vkCreateDescriptorPool(m_device, &pool_create_info, nullptr, &resources.descriptor_pool);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkCreateDescriptorPool failed: ");
+ return false;
+ }
+ Vulkan::Util::SetObjectName(
+ g_vulkan_context->GetDevice(), resources.descriptor_pool, "Frame Descriptor Pool %u", frame_index);
+
+ ++frame_index;
+ }
+
+ ActivateCommandBuffer(0);
+ return true;
+ }
+
+ void Context::DestroyCommandBuffers()
+ {
+ for (FrameResources& resources : m_frame_resources)
+ {
+ for (auto& it : resources.cleanup_resources)
+ it();
+ resources.cleanup_resources.clear();
+
+ if (resources.fence != VK_NULL_HANDLE)
+ {
+ vkDestroyFence(m_device, resources.fence, nullptr);
+ resources.fence = VK_NULL_HANDLE;
+ }
+ if (resources.descriptor_pool != VK_NULL_HANDLE)
+ {
+ vkDestroyDescriptorPool(m_device, resources.descriptor_pool, nullptr);
+ resources.descriptor_pool = VK_NULL_HANDLE;
+ }
+ if (resources.command_buffers[0] != VK_NULL_HANDLE)
+ {
+ vkFreeCommandBuffers(m_device, resources.command_pool,
+ static_cast<u32>(resources.command_buffers.size()), resources.command_buffers.data());
+ resources.command_buffers.fill(VK_NULL_HANDLE);
+ }
+ if (resources.command_pool != VK_NULL_HANDLE)
+ {
+ vkDestroyCommandPool(m_device, resources.command_pool, nullptr);
+ resources.command_pool = VK_NULL_HANDLE;
+ }
+ }
+ }
+
+ bool Context::CreateGlobalDescriptorPool()
+ {
+ // TODO: A better way to choose the number of descriptors.
+ VkDescriptorPoolSize pool_sizes[] = {
+ {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1024},
+ {VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1024},
+ };
+
+ VkDescriptorPoolCreateInfo pool_create_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, nullptr,
+ VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
+ 1024, // TODO: tweak this
+ static_cast<u32>(std::size(pool_sizes)), pool_sizes};
+
+ VkResult res = vkCreateDescriptorPool(m_device, &pool_create_info, nullptr, &m_global_descriptor_pool);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkCreateDescriptorPool failed: ");
+ return false;
+ }
+ Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_global_descriptor_pool, "Global Descriptor Pool");
+ return true;
+ }
+
+ void Context::DestroyGlobalDescriptorPool()
+ {
+ if (m_global_descriptor_pool != VK_NULL_HANDLE)
+ {
+ vkDestroyDescriptorPool(m_device, m_global_descriptor_pool, nullptr);
+ m_global_descriptor_pool = VK_NULL_HANDLE;
+ }
+ }
+
+ bool Context::CreateTextureStreamBuffer()
+ {
+ if (!m_texture_upload_buffer.Create(VK_BUFFER_USAGE_TRANSFER_SRC_BIT, TEXTURE_BUFFER_SIZE))
+ {
+ Console.Error("Failed to allocate texture upload buffer");
+ return false;
+ }
+
+ return true;
+ }
+
+ VkCommandBuffer Context::GetCurrentInitCommandBuffer()
+ {
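+ // The init buffer records one-off uploads and barriers; it is begun lazily the first
+ // time it is requested each frame, and skipped at submit time if it was never used.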
+ FrameResources& res = m_frame_resources[m_current_frame];
+ VkCommandBuffer buf = res.command_buffers[0];
+ if (res.init_buffer_used)
+ return buf;
+
+ VkCommandBufferBeginInfo bi{
+ VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, nullptr};
+ vkBeginCommandBuffer(buf, &bi);
+ res.init_buffer_used = true;
+ return buf;
+ }
+
+ VkDescriptorSet Context::AllocateDescriptorSet(VkDescriptorSetLayout set_layout)
+ {
+ VkDescriptorSetAllocateInfo allocate_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, nullptr,
+ m_frame_resources[m_current_frame].descriptor_pool, 1, &set_layout};
+
+ VkDescriptorSet descriptor_set;
+ VkResult res = vkAllocateDescriptorSets(m_device, &allocate_info, &descriptor_set);
+ if (res != VK_SUCCESS)
+ {
+ // Failing to allocate a descriptor set is not a fatal error, we can
+ // recover by moving to the next command buffer.
+ return VK_NULL_HANDLE;
+ }
+
+ return descriptor_set;
+ }
+
+ VkDescriptorSet Context::AllocatePersistentDescriptorSet(VkDescriptorSetLayout set_layout)
+ {
+ VkDescriptorSetAllocateInfo allocate_info = {
+ VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, nullptr, m_global_descriptor_pool, 1, &set_layout};
+
+ VkDescriptorSet descriptor_set;
+ VkResult res = vkAllocateDescriptorSets(m_device, &allocate_info, &descriptor_set);
+ if (res != VK_SUCCESS)
+ return VK_NULL_HANDLE;
+
+ return descriptor_set;
+ }
+
+ void Context::FreeGlobalDescriptorSet(VkDescriptorSet set)
+ {
+ vkFreeDescriptorSets(m_device, m_global_descriptor_pool, 1, &set);
+ }
+
+ void Context::WaitForFenceCounter(u64 fence_counter)
+ {
+ if (m_completed_fence_counter >= fence_counter)
+ return;
+
+ // Find the first command buffer which covers this counter value.
+ u32 index = (m_current_frame + 1) % NUM_COMMAND_BUFFERS;
+ while (index != m_current_frame)
+ {
+ if (m_frame_resources[index].fence_counter >= fence_counter)
+ break;
+
+ index = (index + 1) % NUM_COMMAND_BUFFERS;
+ }
+
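+ // The counter must belong to one of the in-flight buffers; the current buffer's work
+ // has not been submitted yet, so we should never wrap back around to it.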
+ pxAssert(index != m_current_frame);
+ WaitForCommandBufferCompletion(index);
+ }
+
+ void Context::WaitForGPUIdle()
+ {
+ WaitForPresentComplete();
+ vkDeviceWaitIdle(m_device);
+ }
+
+ void Context::WaitForCommandBufferCompletion(u32 index)
+ {
+ // Wait for this command buffer to be completed.
+ VkResult res = vkWaitForFences(m_device, 1, &m_frame_resources[index].fence, VK_TRUE, UINT64_MAX);
+ if (res != VK_SUCCESS)
+ LOG_VULKAN_ERROR(res, "vkWaitForFences failed: ");
+
+ // Clean up any resources for command buffers between the last known completed buffer and this
+ // now-completed command buffer. If we use >2 buffers, this may be more than one buffer.
+ const u64 now_completed_counter = m_frame_resources[index].fence_counter;
+ u32 cleanup_index = (m_current_frame + 1) % NUM_COMMAND_BUFFERS;
+ while (cleanup_index != m_current_frame)
+ {
+ FrameResources& resources = m_frame_resources[cleanup_index];
+ if (resources.fence_counter > now_completed_counter)
+ break;
+
+ if (resources.fence_counter > m_completed_fence_counter)
+ {
+ for (auto& it : resources.cleanup_resources)
+ it();
+ resources.cleanup_resources.clear();
+ }
+
+ cleanup_index = (cleanup_index + 1) % NUM_COMMAND_BUFFERS;
+ }
+
+ m_completed_fence_counter = now_completed_counter;
+ }
+
+ void Context::SubmitCommandBuffer(VkSemaphore wait_semaphore /* = VK_NULL_HANDLE */,
+ VkSemaphore signal_semaphore /* = VK_NULL_HANDLE */, VkSwapchainKHR present_swap_chain /* = VK_NULL_HANDLE */,
+ uint32_t present_image_index /* = 0xFFFFFFFF */, bool submit_on_thread /* = false */)
+ {
+ FrameResources& resources = m_frame_resources[m_current_frame];
+
+ // End the current command buffer.
+ VkResult res;
+ if (resources.init_buffer_used)
+ {
+ res = vkEndCommandBuffer(resources.command_buffers[0]);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkEndCommandBuffer failed: ");
+ pxFailRel("Failed to end command buffer");
+ }
+ }
+
+ res = vkEndCommandBuffer(resources.command_buffers[1]);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkEndCommandBuffer failed: ");
+ pxFailRel("Failed to end command buffer");
+ }
+
+ // This command buffer now has commands, so can't be re-used without waiting.
+ resources.needs_fence_wait = true;
+
+ std::unique_lock<std::mutex> lock(m_present_mutex);
+ WaitForPresentComplete(lock);
+
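+ // Either submit and present synchronously on this thread, or hand the work off to the
+ // present thread and return immediately.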
+ if (!submit_on_thread || !m_present_thread.joinable())
+ {
+ DoSubmitCommandBuffer(m_current_frame, wait_semaphore, signal_semaphore);
+ if (present_swap_chain != VK_NULL_HANDLE)
+ DoPresent(signal_semaphore, present_swap_chain, present_image_index);
+ return;
+ }
+
+ m_queued_present.command_buffer_index = m_current_frame;
+ m_queued_present.present_swap_chain = present_swap_chain;
+ m_queued_present.present_image_index = present_image_index;
+ m_queued_present.wait_semaphore = wait_semaphore;
+ m_queued_present.signal_semaphore = signal_semaphore;
+ m_present_done.store(false);
+ m_present_queued_cv.notify_one();
+ }
+
+ void Context::DoSubmitCommandBuffer(u32 index, VkSemaphore wait_semaphore, VkSemaphore signal_semaphore)
+ {
+ FrameResources& resources = m_frame_resources[index];
+
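+ // Include the init command buffer in the submission only if it recorded anything this frame.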
+ uint32_t wait_bits = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+ VkSubmitInfo submit_info = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &wait_bits,
+ resources.init_buffer_used ? 2u : 1u,
+ resources.init_buffer_used ? resources.command_buffers.data() : &resources.command_buffers[1], 0, nullptr};
+
+ if (wait_semaphore != VK_NULL_HANDLE)
+ {
+ submit_info.pWaitSemaphores = &wait_semaphore;
+ submit_info.waitSemaphoreCount = 1;
+ }
+
+ if (signal_semaphore != VK_NULL_HANDLE)
+ {
+ submit_info.signalSemaphoreCount = 1;
+ submit_info.pSignalSemaphores = &signal_semaphore;
+ }
+
+ VkResult res = vkQueueSubmit(m_graphics_queue, 1, &submit_info, resources.fence);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkQueueSubmit failed: ");
+ pxFailRel("Failed to submit command buffer.");
+ }
+ }
+
+ void Context::DoPresent(VkSemaphore wait_semaphore, VkSwapchainKHR present_swap_chain, uint32_t present_image_index)
+ {
+ // Should have a signal semaphore.
+ pxAssert(wait_semaphore != VK_NULL_HANDLE);
+ VkPresentInfoKHR present_info = {VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, nullptr, 1, &wait_semaphore, 1,
+ &present_swap_chain, &present_image_index, nullptr};
+
+ VkResult res = vkQueuePresentKHR(m_present_queue, &present_info);
+ if (res != VK_SUCCESS)
+ {
+ // VK_ERROR_OUT_OF_DATE_KHR is not fatal, just means we need to recreate our swap chain.
+ if (res != VK_ERROR_OUT_OF_DATE_KHR && res != VK_SUBOPTIMAL_KHR)
+ LOG_VULKAN_ERROR(res, "vkQueuePresentKHR failed: ");
+
+ m_last_present_failed.store(true);
+ }
+ }
+
+ void Context::WaitForPresentComplete()
+ {
+ if (m_present_done.load())
+ return;
+
+ std::unique_lock<std::mutex> lock(m_present_mutex);
+ WaitForPresentComplete(lock);
+ }
+
+ void Context::WaitForPresentComplete(std::unique_lock<std::mutex>& lock)
+ {
+ if (m_present_done.load())
+ return;
+
+ m_present_done_cv.wait(lock, [this]() { return m_present_done.load(); });
+ }
+
+ void Context::PresentThread()
+ {
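+ // Worker loop: sleeps until a present is queued (or shutdown is requested), then submits
+ // and presents on behalf of the main thread.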
+ std::unique_lock<std::mutex> lock(m_present_mutex);
+ while (!m_present_thread_done.load())
+ {
+ m_present_queued_cv.wait(lock, [this]() { return !m_present_done.load() || m_present_thread_done.load(); });
+
+ if (m_present_done.load())
+ continue;
+
+ DoSubmitCommandBuffer(m_queued_present.command_buffer_index, m_queued_present.wait_semaphore,
+ m_queued_present.signal_semaphore);
+ DoPresent(m_queued_present.signal_semaphore, m_queued_present.present_swap_chain,
+ m_queued_present.present_image_index);
+ m_present_done.store(true);
+ m_present_done_cv.notify_one();
+ }
+ }
+
+ void Context::StartPresentThread()
+ {
+ pxAssert(!m_present_thread.joinable());
+ m_present_thread_done.store(false);
+ m_present_thread = std::thread(&Context::PresentThread, this);
+ }
+
+ void Context::StopPresentThread()
+ {
+ if (!m_present_thread.joinable())
+ return;
+
+ {
+ std::unique_lock<std::mutex> lock(m_present_mutex);
+ WaitForPresentComplete(lock);
+ m_present_thread_done.store(true);
+ m_present_queued_cv.notify_one();
+ }
+
+ m_present_thread.join();
+ }
+
+ void Context::MoveToNextCommandBuffer() { ActivateCommandBuffer((m_current_frame + 1) % NUM_COMMAND_BUFFERS); }
+
+ void Context::ActivateCommandBuffer(u32 index)
+ {
+ FrameResources& resources = m_frame_resources[index];
+
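+ // If this command buffer is still queued on the present thread, wait for that submission
+ // to complete before touching its resources.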
+ if (!m_present_done.load() && m_queued_present.command_buffer_index == index)
+ WaitForPresentComplete();
+
+ // Wait for the GPU to finish with all resources for this command buffer.
+ if (resources.fence_counter > m_completed_fence_counter)
+ WaitForCommandBufferCompletion(index);
+
+ // Reset fence to unsignaled before starting.
+ VkResult res = vkResetFences(m_device, 1, &resources.fence);
+ if (res != VK_SUCCESS)
+ LOG_VULKAN_ERROR(res, "vkResetFences failed: ");
+
+ // Reset command pools to beginning since we can re-use the memory now
+ res = vkResetCommandPool(m_device, resources.command_pool, 0);
+ if (res != VK_SUCCESS)
+ LOG_VULKAN_ERROR(res, "vkResetCommandPool failed: ");
+
+ // Enable commands to be recorded to the two buffers again.
+ VkCommandBufferBeginInfo begin_info = {
+ VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, nullptr};
+ res = vkBeginCommandBuffer(resources.command_buffers[1], &begin_info);
+ if (res != VK_SUCCESS)
+ LOG_VULKAN_ERROR(res, "vkBeginCommandBuffer failed: ");
+
+ // Also can do the same for the descriptor pools
+ res = vkResetDescriptorPool(m_device, resources.descriptor_pool, 0);
+ if (res != VK_SUCCESS)
+ LOG_VULKAN_ERROR(res, "vkResetDescriptorPool failed: ");
+
+ m_current_frame = index;
+ m_current_command_buffer = resources.command_buffers[1];
+ resources.fence_counter = m_next_fence_counter++;
+ resources.init_buffer_used = false;
+ }
+
+ void Context::ExecuteCommandBuffer(bool wait_for_completion)
+ {
+ // If we're waiting for completion, don't bother waking the worker thread.
+ const u32 current_frame = m_current_frame;
+ SubmitCommandBuffer();
+ MoveToNextCommandBuffer();
+
+ if (wait_for_completion)
+ WaitForCommandBufferCompletion(current_frame);
+ }
+
+ bool Context::CheckLastPresentFail()
+ {
+ bool res = m_last_present_failed;
+ m_last_present_failed = false;
+ return res;
+ }
+
+ void Context::DeferBufferDestruction(VkBuffer object)
+ {
+ FrameResources& resources = m_frame_resources[m_current_frame];
+ resources.cleanup_resources.push_back([this, object]() { vkDestroyBuffer(m_device, object, nullptr); });
+ }
+
+ void Context::DeferBufferViewDestruction(VkBufferView object)
+ {
+ FrameResources& resources = m_frame_resources[m_current_frame];
+ resources.cleanup_resources.push_back([this, object]() { vkDestroyBufferView(m_device, object, nullptr); });
+ }
+
+ void Context::DeferDeviceMemoryDestruction(VkDeviceMemory object)
+ {
+ FrameResources& resources = m_frame_resources[m_current_frame];
+ resources.cleanup_resources.push_back([this, object]() { vkFreeMemory(m_device, object, nullptr); });
+ }
+
+ void Context::DeferFramebufferDestruction(VkFramebuffer object)
+ {
+ FrameResources& resources = m_frame_resources[m_current_frame];
+ resources.cleanup_resources.push_back([this, object]() { vkDestroyFramebuffer(m_device, object, nullptr); });
+ }
+
+ void Context::DeferImageDestruction(VkImage object)
+ {
+ FrameResources& resources = m_frame_resources[m_current_frame];
+ resources.cleanup_resources.push_back([this, object]() { vkDestroyImage(m_device, object, nullptr); });
+ }
+
+ void Context::DeferImageDestruction(VkImage object, VmaAllocation allocation)
+ {
+ FrameResources& resources = m_frame_resources[m_current_frame];
+ resources.cleanup_resources.push_back(
+ [this, object, allocation]() { vmaDestroyImage(m_allocator, object, allocation); });
+ }
+
+ void Context::DeferImageViewDestruction(VkImageView object)
+ {
+ FrameResources& resources = m_frame_resources[m_current_frame];
+ resources.cleanup_resources.push_back([this, object]() { vkDestroyImageView(m_device, object, nullptr); });
+ }
+
+ void Context::DeferPipelineDestruction(VkPipeline pipeline)
+ {
+ FrameResources& resources = m_frame_resources[m_current_frame];
+ resources.cleanup_resources.push_back([this, pipeline]() { vkDestroyPipeline(m_device, pipeline, nullptr); });
+ }
+
+ void Context::DeferSamplerDestruction(VkSampler sampler)
+ {
+ FrameResources& resources = m_frame_resources[m_current_frame];
+ resources.cleanup_resources.push_back([this, sampler]() { vkDestroySampler(m_device, sampler, nullptr); });
+ }
+
+ VKAPI_ATTR VkBool32 VKAPI_CALL DebugMessengerCallback(VkDebugUtilsMessageSeverityFlagBitsEXT severity,
+ VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData,
+ void* pUserData)
+ {
+ if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT)
+ {
+ Console.Error("Vulkan debug report: (%s) %s",
+ pCallbackData->pMessageIdName ? pCallbackData->pMessageIdName : "", pCallbackData->pMessage);
+ }
+ else if (severity & (VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT))
+ {
+ Console.Warning("Vulkan debug report: (%s) %s",
+ pCallbackData->pMessageIdName ? pCallbackData->pMessageIdName : "", pCallbackData->pMessage);
+ }
+ else if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT)
+ {
+ Console.WriteLn("Vulkan debug report: (%s) %s",
+ pCallbackData->pMessageIdName ? pCallbackData->pMessageIdName : "", pCallbackData->pMessage);
+ }
+ else
+ {
+ DevCon.WriteLn("Vulkan debug report: (%s) %s",
+ pCallbackData->pMessageIdName ? pCallbackData->pMessageIdName : "", pCallbackData->pMessage);
+ }
+
+ return VK_FALSE;
+ }
+
+ bool Context::EnableDebugUtils()
+ {
+ // Already enabled?
+ if (m_debug_messenger_callback != VK_NULL_HANDLE)
+ return true;
+
+ // Check for presence of the functions before calling
+ if (!vkCreateDebugUtilsMessengerEXT || !vkDestroyDebugUtilsMessengerEXT || !vkSubmitDebugUtilsMessageEXT)
+ {
+ return false;
+ }
+
+ VkDebugUtilsMessengerCreateInfoEXT messenger_info = {VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
+ nullptr, 0,
+ VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
+ VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT,
+ VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT |
+ VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
+ DebugMessengerCallback, nullptr};
+
+ VkResult res =
+ vkCreateDebugUtilsMessengerEXT(m_instance, &messenger_info, nullptr, &m_debug_messenger_callback);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkCreateDebugUtilsMessengerEXT failed: ");
+ return false;
+ }
+
+ return true;
+ }
+
+ void Context::DisableDebugUtils()
+ {
+ if (m_debug_messenger_callback != VK_NULL_HANDLE)
+ {
+ vkDestroyDebugUtilsMessengerEXT(m_instance, m_debug_messenger_callback, nullptr);
+ m_debug_messenger_callback = VK_NULL_HANDLE;
+ }
+ }
+
+ bool Context::GetMemoryType(u32 bits, VkMemoryPropertyFlags properties, u32* out_type_index)
+ {
+ for (u32 i = 0; i < VK_MAX_MEMORY_TYPES; i++)
+ {
+ if ((bits & (1 << i)) != 0)
+ {
+ u32 supported = m_device_memory_properties.memoryTypes[i].propertyFlags & properties;
+ if (supported == properties)
+ {
+ *out_type_index = i;
+ return true;
+ }
+ }
+ }
+
+ return false;
+ }
+
+ u32 Context::GetMemoryType(u32 bits, VkMemoryPropertyFlags properties)
+ {
+ u32 type_index = VK_MAX_MEMORY_TYPES;
+ if (!GetMemoryType(bits, properties, &type_index))
+ {
+ Console.Error("Unable to find memory type for %x:%x", bits, properties);
+ pxFailRel("Unable to find memory type");
+ }
+
+ return type_index;
+ }
+
+ u32 Context::GetUploadMemoryType(u32 bits, bool* is_coherent)
+ {
+ // Try for coherent memory first.
+ VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+
+ u32 type_index;
+ if (!GetMemoryType(bits, flags, &type_index))
+ {
+ Console.Warning("Vulkan: Failed to find a coherent memory type for uploads, this will affect performance.");
+
+ // Try non-coherent memory.
+ flags &= ~VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+ if (!GetMemoryType(bits, flags, &type_index))
+ {
+ // We shouldn't have any memory types that aren't host-visible.
+ pxFailRel("Unable to get memory type for upload.");
+ type_index = 0;
+ }
+ }
+
+ if (is_coherent)
+ *is_coherent = ((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0);
+
+ return type_index;
+ }
+
+ u32 Context::GetReadbackMemoryType(u32 bits, bool* is_coherent, bool* is_cached)
+ {
+ // Try for cached and coherent memory first.
+ VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT |
+ VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+
+ u32 type_index;
+ if (!GetMemoryType(bits, flags, &type_index))
+ {
+ // For readbacks, caching is more important than coherency.
+ flags &= ~VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+ if (!GetMemoryType(bits, flags, &type_index))
+ {
+ Console.Warning("Vulkan: Failed to find a cached memory type for readbacks, this will affect "
+ "performance.");
+
+ // Remove the cached bit as well.
+ flags &= ~VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
+ if (!GetMemoryType(bits, flags, &type_index))
+ {
+ // We shouldn't have any memory types that aren't host-visible.
+ pxFailRel("Unable to get memory type for upload.");
+ type_index = 0;
+ }
+ }
+ }
+
+ if (is_coherent)
+ *is_coherent = ((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0);
+ if (is_cached)
+ *is_cached = ((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0);
+
+ return type_index;
+ }
+
+ VkRenderPass Context::CreateCachedRenderPass(RenderPassCacheKey key)
+ {
+ VkAttachmentReference color_reference;
+ VkAttachmentReference* color_reference_ptr = nullptr;
+ VkAttachmentReference depth_reference;
+ VkAttachmentReference* depth_reference_ptr = nullptr;
+ VkAttachmentReference input_reference;
+ VkAttachmentReference* input_reference_ptr = nullptr;
+ VkSubpassDependency subpass_dependency;
+ VkSubpassDependency* subpass_dependency_ptr = nullptr;
+ std::array<VkAttachmentDescription, 2> attachments;
+ u32 num_attachments = 0;
+ if (key.color_format != VK_FORMAT_UNDEFINED)
+ {
+ attachments[num_attachments] = {0, static_cast<VkFormat>(key.color_format), VK_SAMPLE_COUNT_1_BIT,
+ static_cast<VkAttachmentLoadOp>(key.color_load_op),
+ static_cast<VkAttachmentStoreOp>(key.color_store_op), VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+ VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ key.color_feedback_loop ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ key.color_feedback_loop ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+ color_reference.attachment = num_attachments;
+ color_reference.layout =
+ key.color_feedback_loop ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ color_reference_ptr = &color_reference;
+
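+ // A colour feedback loop reads the render target as an input attachment, which requires
+ // GENERAL layout and a by-region self-dependency within the subpass.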
+ if (key.color_feedback_loop)
+ {
+ input_reference.attachment = num_attachments;
+ input_reference.layout = VK_IMAGE_LAYOUT_GENERAL;
+ input_reference_ptr = &input_reference;
+
+ subpass_dependency.srcSubpass = 0;
+ subpass_dependency.dstSubpass = 0;
+ subpass_dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+ subpass_dependency.dstStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
+ subpass_dependency.srcAccessMask =
+ VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+ subpass_dependency.dstAccessMask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
+ subpass_dependency.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;
+ subpass_dependency_ptr = &subpass_dependency;
+ }
+
+ num_attachments++;
+ }
+ if (key.depth_format != VK_FORMAT_UNDEFINED)
+ {
+ attachments[num_attachments] = {0, static_cast<VkFormat>(key.depth_format), VK_SAMPLE_COUNT_1_BIT,
+ static_cast<VkAttachmentLoadOp>(key.depth_load_op),
+ static_cast<VkAttachmentStoreOp>(key.depth_store_op),
+ static_cast<VkAttachmentLoadOp>(key.stencil_load_op),
+ static_cast<VkAttachmentStoreOp>(key.stencil_store_op),
+ VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+ depth_reference.attachment = num_attachments;
+ depth_reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+ depth_reference_ptr = &depth_reference;
+ num_attachments++;
+ }
+
+ const VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, input_reference_ptr ? 1u : 0u,
+ input_reference_ptr ? input_reference_ptr : nullptr, color_reference_ptr ? 1u : 0u,
+ color_reference_ptr ? color_reference_ptr : nullptr, nullptr, depth_reference_ptr, 0, nullptr};
+ const VkRenderPassCreateInfo pass_info = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0u,
+ num_attachments, attachments.data(), 1u, &subpass, subpass_dependency_ptr ? 1u : 0u,
+ subpass_dependency_ptr};
+
+ VkRenderPass pass;
+ VkResult res = vkCreateRenderPass(m_device, &pass_info, nullptr, &pass);
+ if (res != VK_SUCCESS)
+ {
+ LOG_VULKAN_ERROR(res, "vkCreateRenderPass failed: ");
+ return VK_NULL_HANDLE;
+ }
+
+ m_render_pass_cache.emplace(key.key, pass);
+ return pass;
+ }
+
+ void Context::DestroyRenderPassCache()
+ {
+ for (auto& it : m_render_pass_cache)
+ vkDestroyRenderPass(m_device, it.second, nullptr);
+
+ m_render_pass_cache.clear();
+ }
+} // namespace Vulkan
diff --git a/common/Vulkan/Context.h b/common/Vulkan/Context.h
new file mode 100644
index 0000000000..7b126f666e
--- /dev/null
+++ b/common/Vulkan/Context.h
@@ -0,0 +1,329 @@
+/* PCSX2 - PS2 Emulator for PCs
+ * Copyright (C) 2002-2021 PCSX2 Dev Team
+ *
+ * PCSX2 is free software: you can redistribute it and/or modify it under the terms
+ * of the GNU Lesser General Public License as published by the Free Software Found-
+ * ation, either version 3 of the License, or (at your option) any later version.
+ *
+ * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+ * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ * PURPOSE. See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along with PCSX2.
+ * If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+
+#include "common/Pcsx2Defs.h"
+
+#include "common/Vulkan/Loader.h"
+#include "common/Vulkan/StreamBuffer.h"
+
+#include <array>
+#include <atomic>
+#include <condition_variable>
+#include <functional>
+#include <map>