From a27b6a113aedfd3ca6a759e210f404ef39881934 Mon Sep 17 00:00:00 2001 From: Connor McLaughlin Date: Sat, 6 Nov 2021 16:53:01 +1000 Subject: [PATCH] Common: Add Vulkan helper classes --- cmake/BuildParameters.cmake | 5 + cmake/SearchForStuff.cmake | 6 +- common/CMakeLists.txt | 35 +- common/Vulkan/Builders.cpp | 869 ++++++++++++++++++ common/Vulkan/Builders.h | 295 ++++++ common/Vulkan/Context.cpp | 1416 +++++++++++++++++++++++++++++ common/Vulkan/Context.h | 329 +++++++ common/Vulkan/EntryPoints.h | 216 +++++ common/Vulkan/EntryPoints.inl | 239 +++++ common/Vulkan/Loader.cpp | 260 ++++++ common/Vulkan/Loader.h | 117 +++ common/Vulkan/ShaderCache.cpp | 534 +++++++++++ common/Vulkan/ShaderCache.h | 118 +++ common/Vulkan/ShaderCompiler.cpp | 192 ++++ common/Vulkan/ShaderCompiler.h | 53 ++ common/Vulkan/StreamBuffer.cpp | 379 ++++++++ common/Vulkan/StreamBuffer.h | 75 ++ common/Vulkan/SwapChain.cpp | 866 ++++++++++++++++++ common/Vulkan/SwapChain.h | 118 +++ common/Vulkan/Texture.cpp | 381 ++++++++ common/Vulkan/Texture.h | 92 ++ common/Vulkan/Util.cpp | 352 +++++++ common/Vulkan/Util.h | 135 +++ common/Vulkan/vk_mem_alloc.cpp | 20 + common/WindowInfo.h | 3 + common/common.vcxproj | 29 +- common/common.vcxproj.filters | 71 +- common/vsprops/3rdpartyDeps.props | 2 +- 28 files changed, 7200 insertions(+), 7 deletions(-) create mode 100644 common/Vulkan/Builders.cpp create mode 100644 common/Vulkan/Builders.h create mode 100644 common/Vulkan/Context.cpp create mode 100644 common/Vulkan/Context.h create mode 100644 common/Vulkan/EntryPoints.h create mode 100644 common/Vulkan/EntryPoints.inl create mode 100644 common/Vulkan/Loader.cpp create mode 100644 common/Vulkan/Loader.h create mode 100644 common/Vulkan/ShaderCache.cpp create mode 100644 common/Vulkan/ShaderCache.h create mode 100644 common/Vulkan/ShaderCompiler.cpp create mode 100644 common/Vulkan/ShaderCompiler.h create mode 100644 common/Vulkan/StreamBuffer.cpp create mode 100644 common/Vulkan/StreamBuffer.h create mode 100644 common/Vulkan/SwapChain.cpp create mode 100644 common/Vulkan/SwapChain.h create mode 100644 common/Vulkan/Texture.cpp create mode 100644 common/Vulkan/Texture.h create mode 100644 common/Vulkan/Util.cpp create mode 100644 common/Vulkan/Util.h create mode 100644 common/Vulkan/vk_mem_alloc.cpp diff --git a/cmake/BuildParameters.cmake b/cmake/BuildParameters.cmake index f556b1802c..bb61086193 100644 --- a/cmake/BuildParameters.cmake +++ b/cmake/BuildParameters.cmake @@ -38,6 +38,7 @@ option(USE_VTUNE "Plug VTUNE to profile GS JIT.") # Graphical option #------------------------------------------------------------------------------- option(BUILD_REPLAY_LOADERS "Build GS replayer to ease testing (developer option)") +option(USE_VULKAN "Enable Vulkan GS renderer" ON) #------------------------------------------------------------------------------- # Path and lib option @@ -234,6 +235,10 @@ if(USE_VTUNE) list(APPEND PCSX2_DEFS ENABLE_VTUNE) endif() +if(USE_VULKAN) + list(APPEND PCSX2_DEFS ENABLE_VULKAN) +endif() + if(X11_API) list(APPEND PCSX2_DEFS X11_API) endif() diff --git a/cmake/SearchForStuff.cmake b/cmake/SearchForStuff.cmake index 8bfc82639c..1e4e073332 100644 --- a/cmake/SearchForStuff.cmake +++ b/cmake/SearchForStuff.cmake @@ -244,10 +244,14 @@ else() endif() add_subdirectory(3rdparty/glad EXCLUDE_FROM_ALL) -add_subdirectory(3rdparty/glslang EXCLUDE_FROM_ALL) add_subdirectory(3rdparty/simpleini EXCLUDE_FROM_ALL) add_subdirectory(3rdparty/imgui EXCLUDE_FROM_ALL) +if(USE_VULKAN) + add_subdirectory(3rdparty/glslang 
EXCLUDE_FROM_ALL) + add_subdirectory(3rdparty/vulkan-headers EXCLUDE_FROM_ALL) +endif() + if(CUBEB_API) add_subdirectory(3rdparty/cubeb EXCLUDE_FROM_ALL) endif() diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt index d8e8a02a5f..d527304b23 100644 --- a/common/CMakeLists.txt +++ b/common/CMakeLists.txt @@ -62,7 +62,8 @@ target_sources(common PRIVATE Linux/LnxMisc.cpp Windows/WinThreads.cpp Windows/WinHostSys.cpp - Windows/WinMisc.cpp) + Windows/WinMisc.cpp +) # x86emitter headers target_sources(common PRIVATE @@ -130,7 +131,35 @@ target_sources(common PRIVATE emitter/tools.h emitter/x86emitter.h emitter/x86types.h +) + +if(USE_VULKAN) + target_link_libraries(common PUBLIC + Vulkan-Headers glslang ) + target_sources(common PRIVATE + Vulkan/ShaderCache.cpp + Vulkan/Texture.cpp + Vulkan/Loader.cpp + Vulkan/ShaderCompiler.cpp + Vulkan/Util.cpp + Vulkan/SwapChain.cpp + Vulkan/StreamBuffer.cpp + Vulkan/Context.cpp + Vulkan/Builders.cpp + Vulkan/vk_mem_alloc.cpp + Vulkan/Context.h + Vulkan/Texture.h + Vulkan/ShaderCompiler.h + Vulkan/SwapChain.h + Vulkan/Builders.h + Vulkan/StreamBuffer.h + Vulkan/ShaderCache.h + Vulkan/EntryPoints.h + Vulkan/Loader.h + Vulkan/Util.h + ) +endif() if(USE_VTUNE) target_link_libraries(common PUBLIC Vtune::Vtune) @@ -168,6 +197,7 @@ else() GL/ContextEGLX11.cpp GL/ContextEGLX11.h ) + target_compile_definitions(common PUBLIC "VULKAN_USE_X11=1") if(TARGET PkgConfig::XRANDR) target_link_libraries(common PRIVATE PkgConfig::XRANDR) target_compile_definitions(common PRIVATE "HAS_XRANDR=1") @@ -179,7 +209,8 @@ else() GL/ContextEGLWayland.cpp GL/ContextEGLWayland.h ) - target_link_libraries(common PRIVATE ${WAYLAND_EGL_LIBRARIES}) + target_link_libraries(common PRIVATE ${WAYLAND_EGL_LIBRARIES}) + target_compile_definitions(common PUBLIC "VULKAN_USE_WAYLAND=1") endif() endif() diff --git a/common/Vulkan/Builders.cpp b/common/Vulkan/Builders.cpp new file mode 100644 index 0000000000..64a3aa8eaa --- /dev/null +++ b/common/Vulkan/Builders.cpp @@ -0,0 +1,869 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . 
+ */ + +#include "common/Vulkan/Builders.h" +#include "common/Vulkan/Util.h" +#include "common/Assertions.h" + +namespace Vulkan +{ + DescriptorSetLayoutBuilder::DescriptorSetLayoutBuilder() { Clear(); } + + void DescriptorSetLayoutBuilder::Clear() + { + m_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO; + m_ci.pNext = nullptr; + m_ci.flags = 0; + m_ci.pBindings = nullptr; + m_ci.bindingCount = 0; + } + + VkDescriptorSetLayout DescriptorSetLayoutBuilder::Create(VkDevice device) + { + VkDescriptorSetLayout layout; + VkResult res = vkCreateDescriptorSetLayout(device, &m_ci, nullptr, &layout); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateDescriptorSetLayout() failed: "); + return VK_NULL_HANDLE; + } + + Clear(); + return layout; + } + + void DescriptorSetLayoutBuilder::AddBinding( + u32 binding, VkDescriptorType dtype, u32 dcount, VkShaderStageFlags stages) + { + pxAssert(m_ci.bindingCount < MAX_BINDINGS); + + VkDescriptorSetLayoutBinding& b = m_bindings[m_ci.bindingCount]; + b.binding = binding; + b.descriptorType = dtype; + b.descriptorCount = dcount; + b.stageFlags = stages; + b.pImmutableSamplers = nullptr; + + m_ci.pBindings = m_bindings.data(); + m_ci.bindingCount++; + } + + PipelineLayoutBuilder::PipelineLayoutBuilder() { Clear(); } + + void PipelineLayoutBuilder::Clear() + { + m_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; + m_ci.pNext = nullptr; + m_ci.flags = 0; + m_ci.pSetLayouts = nullptr; + m_ci.setLayoutCount = 0; + m_ci.pPushConstantRanges = nullptr; + m_ci.pushConstantRangeCount = 0; + } + + VkPipelineLayout PipelineLayoutBuilder::Create(VkDevice device) + { + VkPipelineLayout layout; + VkResult res = vkCreatePipelineLayout(device, &m_ci, nullptr, &layout); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreatePipelineLayout() failed: "); + return VK_NULL_HANDLE; + } + + Clear(); + return layout; + } + + void PipelineLayoutBuilder::AddDescriptorSet(VkDescriptorSetLayout layout) + { + pxAssert(m_ci.setLayoutCount < MAX_SETS); + + m_sets[m_ci.setLayoutCount] = layout; + + m_ci.setLayoutCount++; + m_ci.pSetLayouts = m_sets.data(); + } + + void PipelineLayoutBuilder::AddPushConstants(VkShaderStageFlags stages, u32 offset, u32 size) + { + pxAssert(m_ci.pushConstantRangeCount < MAX_PUSH_CONSTANTS); + + VkPushConstantRange& r = m_push_constants[m_ci.pushConstantRangeCount]; + r.stageFlags = stages; + r.offset = offset; + r.size = size; + + m_ci.pushConstantRangeCount++; + m_ci.pPushConstantRanges = m_push_constants.data(); + } + + GraphicsPipelineBuilder::GraphicsPipelineBuilder() { Clear(); } + + void GraphicsPipelineBuilder::Clear() + { + m_ci = {}; + m_ci.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; + + m_shader_stages = {}; + + m_vertex_input_state = {}; + m_vertex_input_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; + m_ci.pVertexInputState = &m_vertex_input_state; + m_vertex_attributes = {}; + m_vertex_buffers = {}; + + m_input_assembly = {}; + m_input_assembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; + + m_rasterization_state = {}; + m_rasterization_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; + m_rasterization_state.lineWidth = 1.0f; + m_depth_state = {}; + m_depth_state.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO; + m_blend_state = {}; + m_blend_state.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; + m_blend_attachments = {}; + + m_viewport_state = {}; + m_viewport_state.sType = 
VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; + m_viewport = {}; + m_scissor = {}; + + m_dynamic_state = {}; + m_dynamic_state.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; + m_dynamic_state_values = {}; + + m_multisample_state = {}; + m_multisample_state.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; + + // set defaults + SetNoCullRasterizationState(); + SetNoDepthTestState(); + SetNoBlendingState(); + SetPrimitiveTopology(VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST); + + // have to be specified even if dynamic + SetViewport(0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f); + SetScissorRect(0, 0, 1, 1); + SetMultisamples(VK_SAMPLE_COUNT_1_BIT); + } + + VkPipeline GraphicsPipelineBuilder::Create(VkDevice device, VkPipelineCache pipeline_cache, bool clear /* = true */) + { + VkPipeline pipeline; + VkResult res = vkCreateGraphicsPipelines(device, pipeline_cache, 1, &m_ci, nullptr, &pipeline); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateGraphicsPipelines() failed: "); + return VK_NULL_HANDLE; + } + + if (clear) + Clear(); + + return pipeline; + } + + void GraphicsPipelineBuilder::SetShaderStage( + VkShaderStageFlagBits stage, VkShaderModule module, const char* entry_point) + { + pxAssert(m_ci.stageCount < MAX_SHADER_STAGES); + + u32 index = 0; + for (; index < m_ci.stageCount; index++) + { + if (m_shader_stages[index].stage == stage) + break; + } + if (index == m_ci.stageCount) + { + m_ci.stageCount++; + m_ci.pStages = m_shader_stages.data(); + } + + VkPipelineShaderStageCreateInfo& s = m_shader_stages[index]; + s.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + s.stage = stage; + s.module = module; + s.pName = entry_point; + } + + void GraphicsPipelineBuilder::AddVertexBuffer( + u32 binding, u32 stride, VkVertexInputRate input_rate /*= VK_VERTEX_INPUT_RATE_VERTEX*/) + { + pxAssert(m_vertex_input_state.vertexAttributeDescriptionCount < MAX_VERTEX_BUFFERS); + + VkVertexInputBindingDescription& b = m_vertex_buffers[m_vertex_input_state.vertexBindingDescriptionCount]; + b.binding = binding; + b.stride = stride; + b.inputRate = input_rate; + + m_vertex_input_state.vertexBindingDescriptionCount++; + m_vertex_input_state.pVertexBindingDescriptions = m_vertex_buffers.data(); + m_ci.pVertexInputState = &m_vertex_input_state; + } + + void GraphicsPipelineBuilder::AddVertexAttribute(u32 location, u32 binding, VkFormat format, u32 offset) + { + pxAssert(m_vertex_input_state.vertexAttributeDescriptionCount < MAX_VERTEX_BUFFERS); + + VkVertexInputAttributeDescription& a = + m_vertex_attributes[m_vertex_input_state.vertexAttributeDescriptionCount]; + a.location = location; + a.binding = binding; + a.format = format; + a.offset = offset; + + m_vertex_input_state.vertexAttributeDescriptionCount++; + m_vertex_input_state.pVertexAttributeDescriptions = m_vertex_attributes.data(); + m_ci.pVertexInputState = &m_vertex_input_state; + } + + void GraphicsPipelineBuilder::SetPrimitiveTopology( + VkPrimitiveTopology topology, bool enable_primitive_restart /*= false*/) + { + m_input_assembly.topology = topology; + m_input_assembly.primitiveRestartEnable = enable_primitive_restart; + + m_ci.pInputAssemblyState = &m_input_assembly; + } + + void GraphicsPipelineBuilder::SetRasterizationState( + VkPolygonMode polygon_mode, VkCullModeFlags cull_mode, VkFrontFace front_face) + { + m_rasterization_state.polygonMode = polygon_mode; + m_rasterization_state.cullMode = cull_mode; + m_rasterization_state.frontFace = front_face; + + m_ci.pRasterizationState = 
&m_rasterization_state; + } + + void GraphicsPipelineBuilder::SetLineWidth(float width) { m_rasterization_state.lineWidth = width; } + + void GraphicsPipelineBuilder::SetMultisamples(u32 multisamples, bool per_sample_shading) + { + m_multisample_state.rasterizationSamples = static_cast(multisamples); + m_multisample_state.sampleShadingEnable = per_sample_shading; + m_multisample_state.minSampleShading = (multisamples > 1) ? 1.0f : 0.0f; + } + + void GraphicsPipelineBuilder::SetNoCullRasterizationState() + { + SetRasterizationState(VK_POLYGON_MODE_FILL, VK_CULL_MODE_NONE, VK_FRONT_FACE_CLOCKWISE); + } + + void GraphicsPipelineBuilder::SetDepthState(bool depth_test, bool depth_write, VkCompareOp compare_op) + { + m_depth_state.depthTestEnable = depth_test; + m_depth_state.depthWriteEnable = depth_write; + m_depth_state.depthCompareOp = compare_op; + + m_ci.pDepthStencilState = &m_depth_state; + } + + void GraphicsPipelineBuilder::SetStencilState( + bool stencil_test, const VkStencilOpState& front, const VkStencilOpState& back) + { + m_depth_state.stencilTestEnable = stencil_test; + m_depth_state.front = front; + m_depth_state.back = back; + } + + void GraphicsPipelineBuilder::SetNoStencilState() + { + m_depth_state.stencilTestEnable = VK_FALSE; + m_depth_state.front = {}; + m_depth_state.back = {}; + } + + void GraphicsPipelineBuilder::SetNoDepthTestState() { SetDepthState(false, false, VK_COMPARE_OP_ALWAYS); } + + void GraphicsPipelineBuilder::SetBlendConstants(float r, float g, float b, float a) + { + m_blend_state.blendConstants[0] = r; + m_blend_state.blendConstants[1] = g; + m_blend_state.blendConstants[2] = b; + m_blend_state.blendConstants[3] = a; + m_ci.pColorBlendState = &m_blend_state; + } + + void GraphicsPipelineBuilder::AddBlendAttachment(bool blend_enable, VkBlendFactor src_factor, + VkBlendFactor dst_factor, VkBlendOp op, VkBlendFactor alpha_src_factor, VkBlendFactor alpha_dst_factor, + VkBlendOp alpha_op, + VkColorComponentFlags + write_mask /* = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT */) + { + pxAssert(m_blend_state.attachmentCount < MAX_ATTACHMENTS); + + VkPipelineColorBlendAttachmentState& bs = m_blend_attachments[m_blend_state.attachmentCount]; + bs.blendEnable = blend_enable; + bs.srcColorBlendFactor = src_factor; + bs.dstColorBlendFactor = dst_factor; + bs.colorBlendOp = op; + bs.srcAlphaBlendFactor = alpha_src_factor; + bs.dstAlphaBlendFactor = alpha_dst_factor; + bs.alphaBlendOp = alpha_op; + bs.colorWriteMask = write_mask; + + m_blend_state.attachmentCount++; + m_blend_state.pAttachments = m_blend_attachments.data(); + m_ci.pColorBlendState = &m_blend_state; + } + + void GraphicsPipelineBuilder::SetBlendAttachment(u32 attachment, bool blend_enable, VkBlendFactor src_factor, + VkBlendFactor dst_factor, VkBlendOp op, VkBlendFactor alpha_src_factor, VkBlendFactor alpha_dst_factor, + VkBlendOp alpha_op, + VkColorComponentFlags + write_mask /*= VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT*/) + { + pxAssert(attachment < MAX_ATTACHMENTS); + + VkPipelineColorBlendAttachmentState& bs = m_blend_attachments[attachment]; + bs.blendEnable = blend_enable; + bs.srcColorBlendFactor = src_factor; + bs.dstColorBlendFactor = dst_factor; + bs.colorBlendOp = op; + bs.srcAlphaBlendFactor = alpha_src_factor; + bs.dstAlphaBlendFactor = alpha_dst_factor; + bs.alphaBlendOp = alpha_op; + bs.colorWriteMask = write_mask; + + if (attachment >= 
m_blend_state.attachmentCount) + { + m_blend_state.attachmentCount = attachment + 1u; + m_blend_state.pAttachments = m_blend_attachments.data(); + m_ci.pColorBlendState = &m_blend_state; + } + } + + void GraphicsPipelineBuilder::ClearBlendAttachments() + { + m_blend_attachments = {}; + m_blend_state.attachmentCount = 0; + } + + void GraphicsPipelineBuilder::SetNoBlendingState() + { + ClearBlendAttachments(); + SetBlendAttachment(0, false, VK_BLEND_FACTOR_ONE, VK_BLEND_FACTOR_ZERO, VK_BLEND_OP_ADD, VK_BLEND_FACTOR_ONE, + VK_BLEND_FACTOR_ZERO, VK_BLEND_OP_ADD, + VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT); + } + + void GraphicsPipelineBuilder::AddDynamicState(VkDynamicState state) + { + pxAssert(m_dynamic_state.dynamicStateCount < MAX_DYNAMIC_STATE); + + m_dynamic_state_values[m_dynamic_state.dynamicStateCount] = state; + m_dynamic_state.dynamicStateCount++; + m_dynamic_state.pDynamicStates = m_dynamic_state_values.data(); + m_ci.pDynamicState = &m_dynamic_state; + } + + void GraphicsPipelineBuilder::SetDynamicViewportAndScissorState() + { + AddDynamicState(VK_DYNAMIC_STATE_VIEWPORT); + AddDynamicState(VK_DYNAMIC_STATE_SCISSOR); + } + + void GraphicsPipelineBuilder::SetViewport( + float x, float y, float width, float height, float min_depth, float max_depth) + { + m_viewport.x = x; + m_viewport.y = y; + m_viewport.width = width; + m_viewport.height = height; + m_viewport.minDepth = min_depth; + m_viewport.maxDepth = max_depth; + + m_viewport_state.pViewports = &m_viewport; + m_viewport_state.viewportCount = 1u; + m_ci.pViewportState = &m_viewport_state; + } + + void GraphicsPipelineBuilder::SetScissorRect(s32 x, s32 y, u32 width, u32 height) + { + m_scissor.offset.x = x; + m_scissor.offset.y = y; + m_scissor.extent.width = width; + m_scissor.extent.height = height; + + m_viewport_state.pScissors = &m_scissor; + m_viewport_state.scissorCount = 1u; + m_ci.pViewportState = &m_viewport_state; + } + + void GraphicsPipelineBuilder::SetMultisamples(VkSampleCountFlagBits samples) + { + m_multisample_state.rasterizationSamples = samples; + m_ci.pMultisampleState = &m_multisample_state; + } + + void GraphicsPipelineBuilder::SetPipelineLayout(VkPipelineLayout layout) { m_ci.layout = layout; } + + void GraphicsPipelineBuilder::SetRenderPass(VkRenderPass render_pass, u32 subpass) + { + m_ci.renderPass = render_pass; + m_ci.subpass = subpass; + } + + SamplerBuilder::SamplerBuilder() { Clear(); } + + void SamplerBuilder::Clear() + { + m_ci = {}; + m_ci.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO; + } + + VkSampler SamplerBuilder::Create(VkDevice device, bool clear /* = true */) + { + VkSampler sampler; + VkResult res = vkCreateSampler(device, &m_ci, nullptr, &sampler); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateSampler() failed: "); + return VK_NULL_HANDLE; + } + + return sampler; + } + + void SamplerBuilder::SetFilter(VkFilter mag_filter, VkFilter min_filter, VkSamplerMipmapMode mip_filter) + { + m_ci.magFilter = mag_filter; + m_ci.minFilter = min_filter; + m_ci.mipmapMode = mip_filter; + } + + void SamplerBuilder::SetAddressMode(VkSamplerAddressMode u, VkSamplerAddressMode v, VkSamplerAddressMode w) + { + m_ci.addressModeU = u; + m_ci.addressModeV = v; + m_ci.addressModeW = w; + } + + void SamplerBuilder::SetPointSampler( + VkSamplerAddressMode address_mode /* = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER */) + { + Clear(); + SetFilter(VK_FILTER_NEAREST, VK_FILTER_NEAREST, VK_SAMPLER_MIPMAP_MODE_NEAREST); + 
SetAddressMode(address_mode, address_mode, address_mode); + } + + void SamplerBuilder::SetLinearSampler( + bool mipmaps, VkSamplerAddressMode address_mode /* = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER */) + { + Clear(); + SetFilter(VK_FILTER_LINEAR, VK_FILTER_LINEAR, + mipmaps ? VK_SAMPLER_MIPMAP_MODE_LINEAR : VK_SAMPLER_MIPMAP_MODE_NEAREST); + SetAddressMode(address_mode, address_mode, address_mode); + + if (mipmaps) + { + m_ci.minLod = std::numeric_limits::min(); + m_ci.maxLod = std::numeric_limits::max(); + } + } + + DescriptorSetUpdateBuilder::DescriptorSetUpdateBuilder() { Clear(); } + + void DescriptorSetUpdateBuilder::Clear() + { + m_writes = {}; + m_num_writes = 0; + } + + void DescriptorSetUpdateBuilder::Update(VkDevice device, bool clear /*= true*/) + { + pxAssert(m_num_writes > 0); + + vkUpdateDescriptorSets(device, m_num_writes, (m_num_writes > 0) ? m_writes.data() : nullptr, 0, nullptr); + + if (clear) + Clear(); + } + + void DescriptorSetUpdateBuilder::AddImageDescriptorWrite(VkDescriptorSet set, u32 binding, VkImageView view, + VkImageLayout layout /*= VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL*/) + { + pxAssert(m_num_writes < MAX_WRITES && m_num_image_infos < MAX_IMAGE_INFOS); + + VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++]; + ii.imageView = view; + ii.imageLayout = layout; + ii.sampler = VK_NULL_HANDLE; + + VkWriteDescriptorSet& dw = m_writes[m_num_writes++]; + dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + dw.dstSet = set; + dw.dstBinding = binding; + dw.descriptorCount = 1; + dw.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE; + dw.pImageInfo = ⅈ + } + + + void DescriptorSetUpdateBuilder::AddImageDescriptorWrites(VkDescriptorSet set, u32 binding, + const VkImageView* views, u32 num_views, VkImageLayout layout /*= VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL*/) + { + pxAssert(m_num_writes < MAX_WRITES && (m_num_image_infos + num_views) < MAX_IMAGE_INFOS); + +#if 1 + // NOTE: This is deliberately split up - updating multiple descriptors in one write is broken on Adreno. 
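+ // The enabled path below issues one write (and one VkDescriptorImageInfo) per view via AddImageDescriptorWrite(); the disabled #else path would batch all views into a single array write.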
+ for (u32 i = 0; i < num_views; i++) + AddImageDescriptorWrite(set, binding + i, views[i], layout); +#else + VkWriteDescriptorSet& dw = m_writes[m_num_writes++]; + dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + dw.dstSet = set; + dw.dstBinding = binding; + dw.descriptorCount = num_views; + dw.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE; + dw.pImageInfo = &m_image_infos[m_num_image_infos]; + + for (u32 i = 0; i < num_views; i++) + { + VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++]; + ii.imageView = views[i]; + ii.imageLayout = layout; + ii.sampler = VK_NULL_HANDLE; + } +#endif + } + + void DescriptorSetUpdateBuilder::AddSamplerDescriptorWrite(VkDescriptorSet set, u32 binding, VkSampler sampler) + { + pxAssert(m_num_writes < MAX_WRITES && m_num_image_infos < MAX_IMAGE_INFOS); + + VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++]; + ii.imageView = VK_NULL_HANDLE; + ii.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED; + ii.sampler = sampler; + + VkWriteDescriptorSet& dw = m_writes[m_num_writes++]; + dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + dw.dstSet = set; + dw.dstBinding = binding; + dw.descriptorCount = 1; + dw.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER; + dw.pImageInfo = ⅈ + } + + + void DescriptorSetUpdateBuilder::AddSamplerDescriptorWrites( + VkDescriptorSet set, u32 binding, const VkSampler* samplers, u32 num_samplers) + { + pxAssert(m_num_writes < MAX_WRITES && (m_num_image_infos + num_samplers) < MAX_IMAGE_INFOS); + + VkWriteDescriptorSet& dw = m_writes[m_num_writes++]; + dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + dw.dstSet = set; + dw.dstBinding = binding; + dw.descriptorCount = num_samplers; + dw.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER; + dw.pImageInfo = &m_image_infos[m_num_image_infos]; + + for (u32 i = 0; i < num_samplers; i++) + { + VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++]; + ii.imageView = VK_NULL_HANDLE; + ii.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL; + ii.sampler = samplers[i]; + } + } + + void DescriptorSetUpdateBuilder::AddCombinedImageSamplerDescriptorWrite(VkDescriptorSet set, u32 binding, + VkImageView view, VkSampler sampler, VkImageLayout layout /*= VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL*/) + { + pxAssert(m_num_writes < MAX_WRITES && m_num_image_infos < MAX_IMAGE_INFOS); + + VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++]; + ii.imageView = view; + ii.imageLayout = layout; + ii.sampler = sampler; + + VkWriteDescriptorSet& dw = m_writes[m_num_writes++]; + dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + dw.dstSet = set; + dw.dstBinding = binding; + dw.descriptorCount = 1; + dw.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; + dw.pImageInfo = ⅈ + } + + void DescriptorSetUpdateBuilder::AddCombinedImageSamplerDescriptorWrites(VkDescriptorSet set, u32 binding, + const VkImageView* views, const VkSampler* samplers, u32 num_views, + VkImageLayout layout /* = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL */) + { + pxAssert(m_num_writes < MAX_WRITES && (m_num_image_infos + num_views) < MAX_IMAGE_INFOS); + +#if 1 + // NOTE: This is deliberately split up - updating multiple descriptors in one write is broken on Adreno. 
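+ // Same workaround as above: one combined image/sampler write per element instead of a single array write.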
+ for (u32 i = 0; i < num_views; i++) + AddCombinedImageSamplerDescriptorWrite(set, binding + i, views[i], samplers[i], layout); +#else + VkWriteDescriptorSet& dw = m_writes[m_num_writes++]; + dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + dw.dstSet = set; + dw.dstBinding = binding; + dw.descriptorCount = num_views; + dw.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; + dw.pImageInfo = &m_image_infos[m_num_image_infos]; + + for (u32 i = 0; i < num_views; i++) + { + VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++]; + ii.imageView = views[i]; + ii.sampler = samplers[i]; + ii.imageLayout = layout; + } +#endif + } + + void DescriptorSetUpdateBuilder::AddBufferDescriptorWrite( + VkDescriptorSet set, u32 binding, VkDescriptorType dtype, VkBuffer buffer, u32 offset, u32 size) + { + pxAssert(m_num_writes < MAX_WRITES && m_num_buffer_infos < MAX_BUFFER_INFOS); + + VkDescriptorBufferInfo& bi = m_buffer_infos[m_num_buffer_infos++]; + bi.buffer = buffer; + bi.offset = offset; + bi.range = size; + + VkWriteDescriptorSet& dw = m_writes[m_num_writes++]; + dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + dw.dstSet = set; + dw.dstBinding = binding; + dw.descriptorCount = 1; + dw.descriptorType = dtype; + dw.pBufferInfo = &bi; + } + + void DescriptorSetUpdateBuilder::AddBufferViewDescriptorWrite( + VkDescriptorSet set, u32 binding, VkDescriptorType dtype, VkBufferView view) + { + pxAssert(m_num_writes < MAX_WRITES && m_num_views < MAX_VIEWS); + + VkBufferView& bi = m_views[m_num_views++]; + bi = view; + + VkWriteDescriptorSet& dw = m_writes[m_num_writes++]; + dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + dw.dstSet = set; + dw.dstBinding = binding; + dw.descriptorCount = 1; + dw.descriptorType = dtype; + dw.pTexelBufferView = &bi; + } + + void DescriptorSetUpdateBuilder::AddInputAttachmentDescriptorWrite( + VkDescriptorSet set, u32 binding, VkImageView view, VkImageLayout layout /*= VK_IMAGE_LAYOUT_GENERAL*/) + { + pxAssert(m_num_writes < MAX_WRITES && m_num_image_infos < MAX_IMAGE_INFOS); + + VkWriteDescriptorSet& dw = m_writes[m_num_writes++]; + dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + dw.dstSet = set; + dw.dstBinding = binding; + dw.descriptorCount = 1; + dw.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT; + dw.pImageInfo = &m_image_infos[m_num_image_infos]; + + VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++]; + ii.imageView = view; + ii.imageLayout = layout; + ii.sampler = VK_NULL_HANDLE; + } + + void DescriptorSetUpdateBuilder::AddStorageImageDescriptorWrite( + VkDescriptorSet set, u32 binding, VkImageView view, VkImageLayout layout /*= VK_IMAGE_LAYOUT_GENERAL*/) + { + pxAssert(m_num_writes < MAX_WRITES && m_num_image_infos < MAX_IMAGE_INFOS); + + VkWriteDescriptorSet& dw = m_writes[m_num_writes++]; + dw.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + dw.dstSet = set; + dw.dstBinding = binding; + dw.descriptorCount = 1; + dw.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE; + dw.pImageInfo = &m_image_infos[m_num_image_infos]; + + VkDescriptorImageInfo& ii = m_image_infos[m_num_image_infos++]; + ii.imageView = view; + ii.imageLayout = layout; + ii.sampler = VK_NULL_HANDLE; + } + + FramebufferBuilder::FramebufferBuilder() { Clear(); } + + void FramebufferBuilder::Clear() + { + m_ci = {}; + m_ci.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; + m_images = {}; + } + + VkFramebuffer FramebufferBuilder::Create(VkDevice device, bool clear /*= true*/) + { + VkFramebuffer fb; + VkResult res = vkCreateFramebuffer(device, &m_ci, 
nullptr, &fb); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateFramebuffer() failed: "); + return VK_NULL_HANDLE; + } + + if (clear) + Clear(); + + return fb; + } + + void FramebufferBuilder::AddAttachment(VkImageView image) + { + pxAssert(m_ci.attachmentCount < MAX_ATTACHMENTS); + + m_images[m_ci.attachmentCount] = image; + + m_ci.attachmentCount++; + m_ci.pAttachments = m_images.data(); + } + + void FramebufferBuilder::SetSize(u32 width, u32 height, u32 layers) + { + m_ci.width = width; + m_ci.height = height; + m_ci.layers = layers; + } + + void FramebufferBuilder::SetRenderPass(VkRenderPass render_pass) { m_ci.renderPass = render_pass; } + + RenderPassBuilder::RenderPassBuilder() { Clear(); } + + void RenderPassBuilder::Clear() + { + m_ci = {}; + m_ci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; + m_attachments = {}; + m_attachment_references = {}; + m_num_attachment_references = 0; + m_subpasses = {}; + } + + VkRenderPass RenderPassBuilder::Create(VkDevice device, bool clear /*= true*/) + { + VkRenderPass rp; + VkResult res = vkCreateRenderPass(device, &m_ci, nullptr, &rp); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateRenderPass() failed: "); + return VK_NULL_HANDLE; + } + + return rp; + } + + u32 RenderPassBuilder::AddAttachment(VkFormat format, VkSampleCountFlagBits samples, VkAttachmentLoadOp load_op, + VkAttachmentStoreOp store_op, VkImageLayout initial_layout, VkImageLayout final_layout) + { + pxAssert(m_ci.attachmentCount < MAX_ATTACHMENTS); + + const u32 index = m_ci.attachmentCount; + VkAttachmentDescription& ad = m_attachments[index]; + ad.format = format; + ad.samples = samples; + ad.loadOp = load_op; + ad.storeOp = store_op; + ad.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; + ad.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; + ad.initialLayout = initial_layout; + ad.finalLayout = final_layout; + + m_ci.attachmentCount++; + m_ci.pAttachments = m_attachments.data(); + + return index; + } + + u32 RenderPassBuilder::AddSubpass() + { + pxAssert(m_ci.subpassCount < MAX_SUBPASSES); + + const u32 index = m_ci.subpassCount; + VkSubpassDescription& sp = m_subpasses[index]; + sp.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; + + m_ci.subpassCount++; + m_ci.pSubpasses = m_subpasses.data(); + + return index; + } + + void RenderPassBuilder::AddSubpassColorAttachment(u32 subpass, u32 attachment, VkImageLayout layout) + { + pxAssert(subpass < m_ci.subpassCount && m_num_attachment_references < MAX_ATTACHMENT_REFERENCES); + + VkAttachmentReference& ar = m_attachment_references[m_num_attachment_references++]; + ar.attachment = attachment; + ar.layout = layout; + + VkSubpassDescription& sp = m_subpasses[subpass]; + if (sp.colorAttachmentCount == 0) + sp.pColorAttachments = &ar; + sp.colorAttachmentCount++; + } + + void RenderPassBuilder::AddSubpassDepthAttachment(u32 subpass, u32 attachment, VkImageLayout layout) + { + pxAssert(subpass < m_ci.subpassCount && m_num_attachment_references < MAX_ATTACHMENT_REFERENCES); + + VkAttachmentReference& ar = m_attachment_references[m_num_attachment_references++]; + ar.attachment = attachment; + ar.layout = layout; + + VkSubpassDescription& sp = m_subpasses[subpass]; + sp.pDepthStencilAttachment = &ar; + } + + BufferViewBuilder::BufferViewBuilder() { Clear(); } + + void BufferViewBuilder::Clear() + { + m_ci = {}; + m_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO; + } + + VkBufferView BufferViewBuilder::Create(VkDevice device, bool clear /*= true*/) + { + VkBufferView bv; + VkResult res = 
vkCreateBufferView(device, &m_ci, nullptr, &bv); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateBufferView() failed: "); + return VK_NULL_HANDLE; + } + + return bv; + } + + void BufferViewBuilder::Set(VkBuffer buffer, VkFormat format, u32 offset, u32 size) + { + m_ci.buffer = buffer; + m_ci.format = format; + m_ci.offset = offset; + m_ci.range = size; + } +} // namespace Vulkan \ No newline at end of file diff --git a/common/Vulkan/Builders.h b/common/Vulkan/Builders.h new file mode 100644 index 0000000000..f3ffbb68a0 --- /dev/null +++ b/common/Vulkan/Builders.h @@ -0,0 +1,295 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . + */ + +#pragma once +#include "common/Pcsx2Defs.h" +#include "common/Vulkan/Loader.h" +#include + +namespace Vulkan +{ + class DescriptorSetLayoutBuilder + { + public: + enum : u32 + { + MAX_BINDINGS = 16, + }; + + DescriptorSetLayoutBuilder(); + + void Clear(); + + VkDescriptorSetLayout Create(VkDevice device); + + void AddBinding(u32 binding, VkDescriptorType dtype, u32 dcount, VkShaderStageFlags stages); + + private: + VkDescriptorSetLayoutCreateInfo m_ci{}; + std::array m_bindings{}; + }; + + class PipelineLayoutBuilder + { + public: + enum : u32 + { + MAX_SETS = 8, + MAX_PUSH_CONSTANTS = 1 + }; + + PipelineLayoutBuilder(); + + void Clear(); + + VkPipelineLayout Create(VkDevice device); + + void AddDescriptorSet(VkDescriptorSetLayout layout); + + void AddPushConstants(VkShaderStageFlags stages, u32 offset, u32 size); + + private: + VkPipelineLayoutCreateInfo m_ci{}; + std::array m_sets{}; + std::array m_push_constants{}; + }; + + class GraphicsPipelineBuilder + { + public: + enum : u32 + { + MAX_SHADER_STAGES = 3, + MAX_VERTEX_ATTRIBUTES = 16, + MAX_VERTEX_BUFFERS = 8, + MAX_ATTACHMENTS = 2, + MAX_DYNAMIC_STATE = 8 + }; + + GraphicsPipelineBuilder(); + + void Clear(); + + VkPipeline Create(VkDevice device, VkPipelineCache pipeline_cache = VK_NULL_HANDLE, bool clear = true); + + void SetShaderStage(VkShaderStageFlagBits stage, VkShaderModule module, const char* entry_point); + void SetVertexShader(VkShaderModule module) { SetShaderStage(VK_SHADER_STAGE_VERTEX_BIT, module, "main"); } + void SetGeometryShader(VkShaderModule module) { SetShaderStage(VK_SHADER_STAGE_GEOMETRY_BIT, module, "main"); } + void SetFragmentShader(VkShaderModule module) { SetShaderStage(VK_SHADER_STAGE_FRAGMENT_BIT, module, "main"); } + + void AddVertexBuffer(u32 binding, u32 stride, VkVertexInputRate input_rate = VK_VERTEX_INPUT_RATE_VERTEX); + void AddVertexAttribute(u32 location, u32 binding, VkFormat format, u32 offset); + + void SetPrimitiveTopology(VkPrimitiveTopology topology, bool enable_primitive_restart = false); + + void SetRasterizationState(VkPolygonMode polygon_mode, VkCullModeFlags cull_mode, VkFrontFace front_face); + void SetLineWidth(float width); + void SetMultisamples(u32 multisamples, bool per_sample_shading); + void 
SetNoCullRasterizationState(); + + void SetDepthState(bool depth_test, bool depth_write, VkCompareOp compare_op); + void SetStencilState(bool stencil_test, const VkStencilOpState& front, const VkStencilOpState& back); + void SetNoDepthTestState(); + void SetNoStencilState(); + + void AddBlendAttachment(bool blend_enable, VkBlendFactor src_factor, VkBlendFactor dst_factor, VkBlendOp op, + VkBlendFactor alpha_src_factor, VkBlendFactor alpha_dst_factor, VkBlendOp alpha_op, + VkColorComponentFlags write_mask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | + VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT); + void SetBlendAttachment(u32 attachment, bool blend_enable, VkBlendFactor src_factor, VkBlendFactor dst_factor, + VkBlendOp op, VkBlendFactor alpha_src_factor, VkBlendFactor alpha_dst_factor, VkBlendOp alpha_op, + VkColorComponentFlags write_mask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | + VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT); + void ClearBlendAttachments(); + + void SetBlendConstants(float r, float g, float b, float a); + void SetNoBlendingState(); + + void AddDynamicState(VkDynamicState state); + + void SetDynamicViewportAndScissorState(); + void SetViewport(float x, float y, float width, float height, float min_depth, float max_depth); + void SetScissorRect(s32 x, s32 y, u32 width, u32 height); + + void SetMultisamples(VkSampleCountFlagBits samples); + + void SetPipelineLayout(VkPipelineLayout layout); + void SetRenderPass(VkRenderPass render_pass, u32 subpass); + + private: + VkGraphicsPipelineCreateInfo m_ci; + std::array m_shader_stages; + + VkPipelineVertexInputStateCreateInfo m_vertex_input_state; + std::array m_vertex_buffers; + std::array m_vertex_attributes; + + VkPipelineInputAssemblyStateCreateInfo m_input_assembly; + + VkPipelineRasterizationStateCreateInfo m_rasterization_state; + VkPipelineDepthStencilStateCreateInfo m_depth_state; + + VkPipelineColorBlendStateCreateInfo m_blend_state; + std::array m_blend_attachments; + + VkPipelineViewportStateCreateInfo m_viewport_state; + VkViewport m_viewport; + VkRect2D m_scissor; + + VkPipelineDynamicStateCreateInfo m_dynamic_state; + std::array m_dynamic_state_values; + + VkPipelineMultisampleStateCreateInfo m_multisample_state; + }; + + class SamplerBuilder + { + public: + SamplerBuilder(); + + void Clear(); + + VkSampler Create(VkDevice device, bool clear = true); + + void SetFilter(VkFilter mag_filter, VkFilter min_filter, VkSamplerMipmapMode mip_filter); + void SetAddressMode(VkSamplerAddressMode u, VkSamplerAddressMode v, VkSamplerAddressMode w); + + void SetPointSampler(VkSamplerAddressMode address_mode = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER); + void SetLinearSampler( + bool mipmaps, VkSamplerAddressMode address_mode = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER); + + private: + VkSamplerCreateInfo m_ci; + }; + + class DescriptorSetUpdateBuilder + { + enum : u32 + { + MAX_WRITES = 16, + MAX_IMAGE_INFOS = 8, + MAX_BUFFER_INFOS = 4, + MAX_VIEWS = 4, + }; + + public: + DescriptorSetUpdateBuilder(); + + void Clear(); + + void Update(VkDevice device, bool clear = true); + + void AddImageDescriptorWrite(VkDescriptorSet set, u32 binding, VkImageView view, + VkImageLayout layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); + void AddImageDescriptorWrites(VkDescriptorSet set, u32 binding, const VkImageView* views, u32 num_views, + VkImageLayout layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); + void AddSamplerDescriptorWrite(VkDescriptorSet set, u32 binding, VkSampler sampler); + void 
AddSamplerDescriptorWrites(VkDescriptorSet set, u32 binding, const VkSampler* samplers, u32 num_samplers); + void AddCombinedImageSamplerDescriptorWrite(VkDescriptorSet set, u32 binding, VkImageView view, + VkSampler sampler, VkImageLayout layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); + void AddCombinedImageSamplerDescriptorWrites(VkDescriptorSet set, u32 binding, const VkImageView* views, + const VkSampler* samplers, u32 num_views, VkImageLayout layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); + void AddBufferDescriptorWrite( + VkDescriptorSet set, u32 binding, VkDescriptorType dtype, VkBuffer buffer, u32 offset, u32 size); + void AddBufferViewDescriptorWrite(VkDescriptorSet set, u32 binding, VkDescriptorType dtype, VkBufferView view); + void AddInputAttachmentDescriptorWrite( + VkDescriptorSet set, u32 binding, VkImageView view, VkImageLayout layout = VK_IMAGE_LAYOUT_GENERAL); + void AddStorageImageDescriptorWrite( + VkDescriptorSet set, u32 binding, VkImageView view, VkImageLayout layout = VK_IMAGE_LAYOUT_GENERAL); + + private: + std::array m_writes; + u32 m_num_writes = 0; + + std::array m_buffer_infos; + std::array m_image_infos; + std::array m_views; + u32 m_num_buffer_infos = 0; + u32 m_num_image_infos = 0; + u32 m_num_views = 0; + }; + + class FramebufferBuilder + { + enum : u32 + { + MAX_ATTACHMENTS = 2, + }; + + public: + FramebufferBuilder(); + + void Clear(); + + VkFramebuffer Create(VkDevice device, bool clear = true); + + void AddAttachment(VkImageView image); + + void SetSize(u32 width, u32 height, u32 layers); + + void SetRenderPass(VkRenderPass render_pass); + + private: + VkFramebufferCreateInfo m_ci; + std::array m_images; + }; + + class RenderPassBuilder + { + enum : u32 + { + MAX_ATTACHMENTS = 2, + MAX_ATTACHMENT_REFERENCES = 2, + MAX_SUBPASSES = 1, + }; + + public: + RenderPassBuilder(); + + void Clear(); + + VkRenderPass Create(VkDevice device, bool clear = true); + + u32 AddAttachment(VkFormat format, VkSampleCountFlagBits samples, VkAttachmentLoadOp load_op, + VkAttachmentStoreOp store_op, VkImageLayout initial_layout, VkImageLayout final_layout); + + u32 AddSubpass(); + void AddSubpassColorAttachment(u32 subpass, u32 attachment, VkImageLayout layout); + void AddSubpassDepthAttachment(u32 subpass, u32 attachment, VkImageLayout layout); + + private: + VkRenderPassCreateInfo m_ci; + std::array m_attachments; + std::array m_attachment_references; + u32 m_num_attachment_references = 0; + std::array m_subpasses; + }; + + class BufferViewBuilder + { + public: + BufferViewBuilder(); + + void Clear(); + + VkBufferView Create(VkDevice device, bool clear = true); + + void Set(VkBuffer buffer, VkFormat format, u32 offset, u32 size); + + private: + VkBufferViewCreateInfo m_ci; + }; + +} // namespace Vulkan \ No newline at end of file diff --git a/common/Vulkan/Context.cpp b/common/Vulkan/Context.cpp new file mode 100644 index 0000000000..fadd3f30a2 --- /dev/null +++ b/common/Vulkan/Context.cpp @@ -0,0 +1,1416 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. 
See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . + */ + +#include "common/Vulkan/Context.h" +#include "common/Assertions.h" +#include "common/Console.h" +#include "common/StringUtil.h" +#include "common/Vulkan/SwapChain.h" +#include "common/Vulkan/Util.h" +#include +#include +#include + +std::unique_ptr g_vulkan_context; + +// Tweakables +enum : u32 +{ + MAX_DRAW_CALLS_PER_FRAME = 8192, + MAX_COMBINED_IMAGE_SAMPLER_DESCRIPTORS_PER_FRAME = 2 * MAX_DRAW_CALLS_PER_FRAME, + MAX_SAMPLED_IMAGE_DESCRIPTORS_PER_FRAME = + MAX_DRAW_CALLS_PER_FRAME, // assume at least half our draws aren't going to be shuffle/blending + MAX_STORAGE_IMAGE_DESCRIPTORS_PER_FRAME = MAX_DRAW_CALLS_PER_FRAME, + MAX_INPUT_ATTACHMENT_IMAGE_DESCRIPTORS_PER_FRAME = MAX_DRAW_CALLS_PER_FRAME, + MAX_DESCRIPTOR_SETS_PER_FRAME = MAX_DRAW_CALLS_PER_FRAME * 2 +}; + +namespace Vulkan +{ + Context::Context(VkInstance instance, VkPhysicalDevice physical_device) + : m_instance(instance) + , m_physical_device(physical_device) + { + // Read device physical memory properties, we need it for allocating buffers + vkGetPhysicalDeviceProperties(physical_device, &m_device_properties); + vkGetPhysicalDeviceMemoryProperties(physical_device, &m_device_memory_properties); + + // Would any drivers be this silly? I hope not... + m_device_properties.limits.minUniformBufferOffsetAlignment = + std::max(m_device_properties.limits.minUniformBufferOffsetAlignment, static_cast(1)); + m_device_properties.limits.minTexelBufferOffsetAlignment = + std::max(m_device_properties.limits.minTexelBufferOffsetAlignment, static_cast(1)); + m_device_properties.limits.optimalBufferCopyOffsetAlignment = + std::max(m_device_properties.limits.optimalBufferCopyOffsetAlignment, static_cast(1)); + m_device_properties.limits.optimalBufferCopyRowPitchAlignment = + std::max(m_device_properties.limits.optimalBufferCopyRowPitchAlignment, static_cast(1)); + } + + Context::~Context() = default; + + bool Context::CheckValidationLayerAvailablility() + { + u32 extension_count = 0; + VkResult res = vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, nullptr); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkEnumerateInstanceExtensionProperties failed: "); + return false; + } + + std::vector extension_list(extension_count); + res = vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, extension_list.data()); + pxAssert(res == VK_SUCCESS); + + u32 layer_count = 0; + res = vkEnumerateInstanceLayerProperties(&layer_count, nullptr); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkEnumerateInstanceExtensionProperties failed: "); + return false; + } + + std::vector layer_list(layer_count); + res = vkEnumerateInstanceLayerProperties(&layer_count, layer_list.data()); + pxAssert(res == VK_SUCCESS); + + // Check for both VK_EXT_debug_utils and VK_LAYER_LUNARG_standard_validation + return (std::find_if(extension_list.begin(), extension_list.end(), + [](const auto& it) { return strcmp(it.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0; }) != + extension_list.end() && + std::find_if(layer_list.begin(), layer_list.end(), [](const auto& it) { + return strcmp(it.layerName, "VK_LAYER_KHRONOS_validation") == 0; + }) != layer_list.end()); + } + + VkInstance Context::CreateVulkanInstance( + const WindowInfo* wi, bool enable_debug_utils, bool enable_validation_layer) + { + ExtensionList enabled_extensions; + if 
(!SelectInstanceExtensions(&enabled_extensions, wi, enable_debug_utils)) + return VK_NULL_HANDLE; + + VkApplicationInfo app_info = {}; + app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO; + app_info.pNext = nullptr; + app_info.pApplicationName = "DuckStation"; + app_info.applicationVersion = VK_MAKE_VERSION(0, 1, 0); + app_info.pEngineName = "DuckStation"; + app_info.engineVersion = VK_MAKE_VERSION(0, 1, 0); + app_info.apiVersion = VK_MAKE_VERSION(1, 1, 0); + + VkInstanceCreateInfo instance_create_info = {}; + instance_create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; + instance_create_info.pNext = nullptr; + instance_create_info.flags = 0; + instance_create_info.pApplicationInfo = &app_info; + instance_create_info.enabledExtensionCount = static_cast(enabled_extensions.size()); + instance_create_info.ppEnabledExtensionNames = enabled_extensions.data(); + instance_create_info.enabledLayerCount = 0; + instance_create_info.ppEnabledLayerNames = nullptr; + + // Enable debug layer on debug builds + if (enable_validation_layer) + { + static const char* layer_names[] = {"VK_LAYER_KHRONOS_validation"}; + instance_create_info.enabledLayerCount = 1; + instance_create_info.ppEnabledLayerNames = layer_names; + } + + VkInstance instance; + VkResult res = vkCreateInstance(&instance_create_info, nullptr, &instance); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateInstance failed: "); + return nullptr; + } + + return instance; + } + + bool Context::SelectInstanceExtensions(ExtensionList* extension_list, const WindowInfo* wi, bool enable_debug_utils) + { + u32 extension_count = 0; + VkResult res = vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, nullptr); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkEnumerateInstanceExtensionProperties failed: "); + return false; + } + + if (extension_count == 0) + { + Console.Error("Vulkan: No extensions supported by instance."); + return false; + } + + std::vector available_extension_list(extension_count); + res = vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, available_extension_list.data()); + pxAssert(res == VK_SUCCESS); + + auto SupportsExtension = [&](const char* name, bool required) { + if (std::find_if(available_extension_list.begin(), available_extension_list.end(), + [&](const VkExtensionProperties& properties) { return !strcmp(name, properties.extensionName); }) != + available_extension_list.end()) + { + DevCon.WriteLn("Enabling extension: %s", name); + extension_list->push_back(name); + return true; + } + + if (required) + Console.Error("Vulkan: Missing required extension %s.", name); + + return false; + }; + + // Common extensions + if (wi && wi->type != WindowInfo::Type::Surfaceless && !SupportsExtension(VK_KHR_SURFACE_EXTENSION_NAME, true)) + return false; + +#if defined(VK_USE_PLATFORM_WIN32_KHR) + if (wi && wi->type == WindowInfo::Type::Win32 && !SupportsExtension(VK_KHR_WIN32_SURFACE_EXTENSION_NAME, true)) + return false; +#endif +#if defined(VK_USE_PLATFORM_XLIB_KHR) + if (wi && wi->type == WindowInfo::Type::X11 && !SupportsExtension(VK_KHR_XLIB_SURFACE_EXTENSION_NAME, true)) + return false; +#endif +#if defined(VK_USE_PLATFORM_WAYLAND_KHR) + if (wi && wi->type == WindowInfo::Type::Wayland && + !SupportsExtension(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, true)) + return false; +#endif +#if defined(VK_USE_PLATFORM_ANDROID_KHR) + if (wi && wi->type == WindowInfo::Type::Android && + !SupportsExtension(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME, true)) + return false; +#endif +#if 
defined(VK_USE_PLATFORM_METAL_EXT) + if (wi && wi->type == WindowInfo::Type::MacOS && !SupportsExtension(VK_EXT_METAL_SURFACE_EXTENSION_NAME, true)) + return false; +#endif + +#if 0 + if (wi && wi->type == WindowInfo::Type::Display && !SupportsExtension(VK_KHR_DISPLAY_EXTENSION_NAME, true)) + return false; +#endif + + // VK_EXT_debug_utils + if (enable_debug_utils && !SupportsExtension(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, false)) + Console.Warning("Vulkan: Debug report requested, but extension is not available."); + + return true; + } + + Context::GPUList Context::EnumerateGPUs(VkInstance instance) + { + u32 gpu_count = 0; + VkResult res = vkEnumeratePhysicalDevices(instance, &gpu_count, nullptr); + if (res != VK_SUCCESS || gpu_count == 0) + { + LOG_VULKAN_ERROR(res, "vkEnumeratePhysicalDevices failed: "); + return {}; + } + + GPUList gpus; + gpus.resize(gpu_count); + + res = vkEnumeratePhysicalDevices(instance, &gpu_count, gpus.data()); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkEnumeratePhysicalDevices failed: "); + return {}; + } + + return gpus; + } + + Context::GPUNameList Context::EnumerateGPUNames(VkInstance instance) + { + u32 gpu_count = 0; + VkResult res = vkEnumeratePhysicalDevices(instance, &gpu_count, nullptr); + if (res != VK_SUCCESS || gpu_count == 0) + { + LOG_VULKAN_ERROR(res, "vkEnumeratePhysicalDevices failed: "); + return {}; + } + + GPUList gpus; + gpus.resize(gpu_count); + + res = vkEnumeratePhysicalDevices(instance, &gpu_count, gpus.data()); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkEnumeratePhysicalDevices failed: "); + return {}; + } + + GPUNameList gpu_names; + gpu_names.reserve(gpu_count); + for (u32 i = 0; i < gpu_count; i++) + { + VkPhysicalDeviceProperties props = {}; + vkGetPhysicalDeviceProperties(gpus[i], &props); + + std::string gpu_name(props.deviceName); + + // handle duplicate adapter names + if (std::any_of(gpu_names.begin(), gpu_names.end(), + [&gpu_name](const std::string& other) { return (gpu_name == other); })) + { + std::string original_adapter_name = std::move(gpu_name); + + u32 current_extra = 2; + do + { + gpu_name = StringUtil::StdStringFromFormat("%s (%u)", original_adapter_name.c_str(), current_extra); + current_extra++; + } while (std::any_of(gpu_names.begin(), gpu_names.end(), + [&gpu_name](const std::string& other) { return (gpu_name == other); })); + } + + gpu_names.push_back(std::move(gpu_name)); + } + + return gpu_names; + } + + bool Context::Create(std::string_view gpu_name, const WindowInfo* wi, std::unique_ptr* out_swap_chain, + bool threaded_presentation, bool enable_debug_utils, bool enable_validation_layer) + { + pxAssertMsg(!g_vulkan_context, "Has no current context"); + + if (!Vulkan::LoadVulkanLibrary()) + { + Console.Error("Failed to load Vulkan library"); + return false; + } + + const bool enable_surface = (wi && wi->type != WindowInfo::Type::Surfaceless); + VkInstance instance = CreateVulkanInstance(wi, enable_debug_utils, enable_validation_layer); + if (instance == VK_NULL_HANDLE) + { + Vulkan::UnloadVulkanLibrary(); + return false; + } + + if (!Vulkan::LoadVulkanInstanceFunctions(instance)) + { + Console.Error("Failed to load Vulkan instance functions"); + vkDestroyInstance(instance, nullptr); + Vulkan::UnloadVulkanLibrary(); + return false; + } + + GPUList gpus = EnumerateGPUs(instance); + if (gpus.empty()) + { + vkDestroyInstance(instance, nullptr); + Vulkan::UnloadVulkanLibrary(); + return false; + } + + u32 gpu_index = 0; + GPUNameList gpu_names = EnumerateGPUNames(instance); + if 
(!gpu_name.empty()) + { + for (; gpu_index < static_cast(gpu_names.size()); gpu_index++) + { + Console.WriteLn("GPU %u: %s", static_cast(gpu_index), gpu_names[gpu_index].c_str()); + if (gpu_names[gpu_index] == gpu_name) + break; + } + + if (gpu_index == static_cast(gpu_names.size())) + { + Console.Warning("Requested GPU '%s' not found, using first (%s)", std::string(gpu_name).c_str(), + gpu_names[0].c_str()); + gpu_index = 0; + } + } + else + { + Console.WriteLn("No GPU requested, using first (%s)", gpu_names[0].c_str()); + } + + VkSurfaceKHR surface = VK_NULL_HANDLE; + WindowInfo wi_copy; + if (wi) + wi_copy = *wi; + + if (enable_surface && + (surface = SwapChain::CreateVulkanSurface(instance, gpus[gpu_index], &wi_copy)) == VK_NULL_HANDLE) + { + vkDestroyInstance(instance, nullptr); + Vulkan::UnloadVulkanLibrary(); + return false; + } + + g_vulkan_context.reset(new Context(instance, gpus[gpu_index])); + + if (enable_debug_utils) + g_vulkan_context->EnableDebugUtils(); + + // Attempt to create the device. + if (!g_vulkan_context->CreateDevice(surface, enable_validation_layer, nullptr, 0, nullptr, 0, nullptr) || + !g_vulkan_context->CreateAllocator() || !g_vulkan_context->CreateGlobalDescriptorPool() || + !g_vulkan_context->CreateCommandBuffers() || !g_vulkan_context->CreateTextureStreamBuffer() || + (enable_surface && (*out_swap_chain = SwapChain::Create(wi_copy, surface, true)) == nullptr)) + { + // Since we are destroying the instance, we're also responsible for destroying the surface. + if (surface != VK_NULL_HANDLE) + vkDestroySurfaceKHR(instance, surface, nullptr); + + g_vulkan_context.reset(); + return false; + } + + if (threaded_presentation) + g_vulkan_context->StartPresentThread(); + + return true; + } + + void Context::Destroy() + { + pxAssertMsg(g_vulkan_context, "Has context"); + + g_vulkan_context->StopPresentThread(); + + if (g_vulkan_context->m_device != VK_NULL_HANDLE) + g_vulkan_context->WaitForGPUIdle(); + + g_vulkan_context->m_texture_upload_buffer.Destroy(false); + + g_vulkan_context->DestroyRenderPassCache(); + g_vulkan_context->DestroyGlobalDescriptorPool(); + g_vulkan_context->DestroyCommandBuffers(); + g_vulkan_context->DestroyAllocator(); + + if (g_vulkan_context->m_device != VK_NULL_HANDLE) + vkDestroyDevice(g_vulkan_context->m_device, nullptr); + + if (g_vulkan_context->m_debug_messenger_callback != VK_NULL_HANDLE) + g_vulkan_context->DisableDebugUtils(); + + vkDestroyInstance(g_vulkan_context->m_instance, nullptr); + Vulkan::UnloadVulkanLibrary(); + + g_vulkan_context.reset(); + } + + bool Context::SelectDeviceExtensions(ExtensionList* extension_list, bool enable_surface) + { + u32 extension_count = 0; + VkResult res = vkEnumerateDeviceExtensionProperties(m_physical_device, nullptr, &extension_count, nullptr); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkEnumerateDeviceExtensionProperties failed: "); + return false; + } + + if (extension_count == 0) + { + Console.Error("Vulkan: No extensions supported by device."); + return false; + } + + std::vector available_extension_list(extension_count); + res = vkEnumerateDeviceExtensionProperties( + m_physical_device, nullptr, &extension_count, available_extension_list.data()); + pxAssert(res == VK_SUCCESS); + + auto SupportsExtension = [&](const char* name, bool required) { + if (std::find_if(available_extension_list.begin(), available_extension_list.end(), + [&](const VkExtensionProperties& properties) { return !strcmp(name, properties.extensionName); }) != + available_extension_list.end()) + { + if 
(std::none_of(extension_list->begin(), extension_list->end(), + [&](const char* existing_name) { return (std::strcmp(existing_name, name) == 0); })) + { + DevCon.WriteLn("Enabling extension: %s", name); + extension_list->push_back(name); + } + + return true; + } + + if (required) + Console.Error("Vulkan: Missing required extension %s.", name); + + return false; + }; + + if (enable_surface && !SupportsExtension(VK_KHR_SWAPCHAIN_EXTENSION_NAME, true)) + return false; + + return true; + } + + bool Context::SelectDeviceFeatures(const VkPhysicalDeviceFeatures* required_features) + { + VkPhysicalDeviceFeatures available_features; + vkGetPhysicalDeviceFeatures(m_physical_device, &available_features); + + if (required_features) + std::memcpy(&m_device_features, required_features, sizeof(m_device_features)); + + // Enable the features we use. + m_device_features.dualSrcBlend = available_features.dualSrcBlend; + m_device_features.geometryShader = available_features.geometryShader; + m_device_features.largePoints = available_features.largePoints; + m_device_features.wideLines = available_features.wideLines; + m_device_features.fragmentStoresAndAtomics = available_features.fragmentStoresAndAtomics; + + return true; + } + + bool Context::CreateDevice(VkSurfaceKHR surface, bool enable_validation_layer, + const char** required_device_extensions, u32 num_required_device_extensions, + const char** required_device_layers, u32 num_required_device_layers, + const VkPhysicalDeviceFeatures* required_features) + { + u32 queue_family_count; + vkGetPhysicalDeviceQueueFamilyProperties(m_physical_device, &queue_family_count, nullptr); + if (queue_family_count == 0) + { + Console.Error("No queue families found on specified vulkan physical device."); + return false; + } + + std::vector queue_family_properties(queue_family_count); + vkGetPhysicalDeviceQueueFamilyProperties( + m_physical_device, &queue_family_count, queue_family_properties.data()); + Console.WriteLn("%u vulkan queue families", queue_family_count); + + // Find graphics and present queues. + m_graphics_queue_family_index = queue_family_count; + m_present_queue_family_index = queue_family_count; + for (uint32_t i = 0; i < queue_family_count; i++) + { + VkBool32 graphics_supported = queue_family_properties[i].queueFlags & VK_QUEUE_GRAPHICS_BIT; + if (graphics_supported) + { + m_graphics_queue_family_index = i; + // Quit now, no need for a present queue. + if (!surface) + { + break; + } + } + + if (surface) + { + VkBool32 present_supported; + VkResult res = vkGetPhysicalDeviceSurfaceSupportKHR(m_physical_device, i, surface, &present_supported); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkGetPhysicalDeviceSurfaceSupportKHR failed: "); + return false; + } + + if (present_supported) + { + m_present_queue_family_index = i; + } + + // Prefer one queue family index that does both graphics and present. 
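+ // Sharing a single family keeps submission and presentation on the same queue and
+ // avoids concurrent sharing or ownership transfers for the swap chain images.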
+ if (graphics_supported && present_supported) + { + break; + } + } + } + if (m_graphics_queue_family_index == queue_family_count) + { + Console.Error("Vulkan: Failed to find an acceptable graphics queue."); + return false; + } + if (surface && m_present_queue_family_index == queue_family_count) + { + Console.Error("Vulkan: Failed to find an acceptable present queue."); + return false; + } + + VkDeviceCreateInfo device_info = {}; + device_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; + device_info.pNext = nullptr; + device_info.flags = 0; + + static constexpr float queue_priorities[] = {1.0f}; + VkDeviceQueueCreateInfo graphics_queue_info = {}; + graphics_queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + graphics_queue_info.pNext = nullptr; + graphics_queue_info.flags = 0; + graphics_queue_info.queueFamilyIndex = m_graphics_queue_family_index; + graphics_queue_info.queueCount = 1; + graphics_queue_info.pQueuePriorities = queue_priorities; + + VkDeviceQueueCreateInfo present_queue_info = {}; + present_queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + present_queue_info.pNext = nullptr; + present_queue_info.flags = 0; + present_queue_info.queueFamilyIndex = m_present_queue_family_index; + present_queue_info.queueCount = 1; + present_queue_info.pQueuePriorities = queue_priorities; + + std::array queue_infos = {{ + graphics_queue_info, + present_queue_info, + }}; + + device_info.queueCreateInfoCount = 1; + if (m_graphics_queue_family_index != m_present_queue_family_index) + { + device_info.queueCreateInfoCount = 2; + } + device_info.pQueueCreateInfos = queue_infos.data(); + + ExtensionList enabled_extensions; + for (u32 i = 0; i < num_required_device_extensions; i++) + enabled_extensions.emplace_back(required_device_extensions[i]); + if (!SelectDeviceExtensions(&enabled_extensions, surface != VK_NULL_HANDLE)) + return false; + + device_info.enabledLayerCount = num_required_device_layers; + device_info.ppEnabledLayerNames = required_device_layers; + device_info.enabledExtensionCount = static_cast(enabled_extensions.size()); + device_info.ppEnabledExtensionNames = enabled_extensions.data(); + + // Check for required features before creating. + if (!SelectDeviceFeatures(required_features)) + return false; + + device_info.pEnabledFeatures = &m_device_features; + + // Enable debug layer on debug builds + if (enable_validation_layer) + { + static const char* layer_names[] = {"VK_LAYER_LUNARG_standard_validation"}; + device_info.enabledLayerCount = 1; + device_info.ppEnabledLayerNames = layer_names; + } + + VkResult res = vkCreateDevice(m_physical_device, &device_info, nullptr, &m_device); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateDevice failed: "); + return false; + } + + // With the device created, we can fill the remaining entry points. + if (!LoadVulkanDeviceFunctions(m_device)) + return false; + + // Grab the graphics and present queues. 
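+ // If the two families matched, only one queue create info was used above, so both
+ // handles end up referring to queue 0 of the same family.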
+ vkGetDeviceQueue(m_device, m_graphics_queue_family_index, 0, &m_graphics_queue); + if (surface) + { + vkGetDeviceQueue(m_device, m_present_queue_family_index, 0, &m_present_queue); + } + return true; + } + + bool Context::CreateAllocator() + { + VmaAllocatorCreateInfo ci = {}; + ci.vulkanApiVersion = VK_API_VERSION_1_1; + ci.flags = VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT; + ci.physicalDevice = m_physical_device; + ci.device = m_device; + ci.instance = m_instance; + + VkResult res = vmaCreateAllocator(&ci, &m_allocator); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vmaCreateAllocator failed: "); + return false; + } + + return true; + } + + void Context::DestroyAllocator() + { + if (m_allocator == VK_NULL_HANDLE) + return; + + vmaDestroyAllocator(m_allocator); + m_allocator = VK_NULL_HANDLE; + } + + bool Context::CreateCommandBuffers() + { + VkResult res; + + uint32_t frame_index = 0; + for (FrameResources& resources : m_frame_resources) + { + resources.needs_fence_wait = false; + + VkCommandPoolCreateInfo pool_info = { + VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, nullptr, 0, m_graphics_queue_family_index}; + res = vkCreateCommandPool(m_device, &pool_info, nullptr, &resources.command_pool); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateCommandPool failed: "); + return false; + } + Vulkan::Util::SetObjectName( + g_vulkan_context->GetDevice(), resources.command_pool, "Frame Command Pool %u", frame_index); + + VkCommandBufferAllocateInfo buffer_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, nullptr, + resources.command_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, + static_cast(resources.command_buffers.size())}; + + res = vkAllocateCommandBuffers(m_device, &buffer_info, resources.command_buffers.data()); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkAllocateCommandBuffers failed: "); + return false; + } + for (u32 i = 0; i < resources.command_buffers.size(); i++) + { + Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), resources.command_buffers[i], + "Frame %u %sCommand Buffer", frame_index, (i == 0) ? "Init" : ""); + } + + VkFenceCreateInfo fence_info = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, VK_FENCE_CREATE_SIGNALED_BIT}; + + res = vkCreateFence(m_device, &fence_info, nullptr, &resources.fence); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateFence failed: "); + return false; + } + Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), resources.fence, "Frame Fence %u", frame_index); + // TODO: A better way to choose the number of descriptors. 
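+ // These per-frame pools are reset wholesale in ActivateCommandBuffer() once the GPU has
+ // finished with the frame, so they only need to hold one frame's worth of descriptor sets.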
+ VkDescriptorPoolSize pool_sizes[] = { + {VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, MAX_COMBINED_IMAGE_SAMPLER_DESCRIPTORS_PER_FRAME}, + {VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, MAX_SAMPLED_IMAGE_DESCRIPTORS_PER_FRAME}, + {VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, MAX_STORAGE_IMAGE_DESCRIPTORS_PER_FRAME}, + {VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, MAX_STORAGE_IMAGE_DESCRIPTORS_PER_FRAME}, + }; + + VkDescriptorPoolCreateInfo pool_create_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, nullptr, 0, + MAX_DESCRIPTOR_SETS_PER_FRAME, static_cast(std::size(pool_sizes)), pool_sizes}; + + res = vkCreateDescriptorPool(m_device, &pool_create_info, nullptr, &resources.descriptor_pool); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateDescriptorPool failed: "); + return false; + } + Vulkan::Util::SetObjectName( + g_vulkan_context->GetDevice(), resources.descriptor_pool, "Frame Descriptor Pool %u", frame_index); + + ++frame_index; + } + + ActivateCommandBuffer(0); + return true; + } + + void Context::DestroyCommandBuffers() + { + for (FrameResources& resources : m_frame_resources) + { + for (auto& it : resources.cleanup_resources) + it(); + resources.cleanup_resources.clear(); + + if (resources.fence != VK_NULL_HANDLE) + { + vkDestroyFence(m_device, resources.fence, nullptr); + resources.fence = VK_NULL_HANDLE; + } + if (resources.descriptor_pool != VK_NULL_HANDLE) + { + vkDestroyDescriptorPool(m_device, resources.descriptor_pool, nullptr); + resources.descriptor_pool = VK_NULL_HANDLE; + } + if (resources.command_buffers[0] != VK_NULL_HANDLE) + { + vkFreeCommandBuffers(m_device, resources.command_pool, + static_cast(resources.command_buffers.size()), resources.command_buffers.data()); + resources.command_buffers.fill(VK_NULL_HANDLE); + } + if (resources.command_pool != VK_NULL_HANDLE) + { + vkDestroyCommandPool(m_device, resources.command_pool, nullptr); + resources.command_pool = VK_NULL_HANDLE; + } + } + } + + bool Context::CreateGlobalDescriptorPool() + { + // TODO: A better way to choose the number of descriptors. 
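+ // Unlike the per-frame pools, this one is created with FREE_DESCRIPTOR_SET_BIT so that
+ // sets handed out by AllocatePersistentDescriptorSet() can be returned individually via
+ // FreeGlobalDescriptorSet().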
+ VkDescriptorPoolSize pool_sizes[] = { + {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1024}, + {VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1024}, + }; + + VkDescriptorPoolCreateInfo pool_create_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, nullptr, + VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, + 1024, // TODO: tweak this + static_cast(std::size(pool_sizes)), pool_sizes}; + + VkResult res = vkCreateDescriptorPool(m_device, &pool_create_info, nullptr, &m_global_descriptor_pool); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateDescriptorPool failed: "); + return false; + } + Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_global_descriptor_pool, "Global Descriptor Pool"); + return true; + } + + void Context::DestroyGlobalDescriptorPool() + { + if (m_global_descriptor_pool != VK_NULL_HANDLE) + { + vkDestroyDescriptorPool(m_device, m_global_descriptor_pool, nullptr); + m_global_descriptor_pool = VK_NULL_HANDLE; + } + } + + bool Context::CreateTextureStreamBuffer() + { + if (!m_texture_upload_buffer.Create(VK_BUFFER_USAGE_TRANSFER_SRC_BIT, TEXTURE_BUFFER_SIZE)) + { + Console.Error("Failed to allocate texture upload buffer"); + return false; + } + + return true; + } + + VkCommandBuffer Context::GetCurrentInitCommandBuffer() + { + FrameResources& res = m_frame_resources[m_current_frame]; + VkCommandBuffer buf = res.command_buffers[0]; + if (res.init_buffer_used) + return buf; + + VkCommandBufferBeginInfo bi{ + VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, nullptr}; + vkBeginCommandBuffer(buf, &bi); + res.init_buffer_used = true; + return buf; + } + + VkDescriptorSet Context::AllocateDescriptorSet(VkDescriptorSetLayout set_layout) + { + VkDescriptorSetAllocateInfo allocate_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, nullptr, + m_frame_resources[m_current_frame].descriptor_pool, 1, &set_layout}; + + VkDescriptorSet descriptor_set; + VkResult res = vkAllocateDescriptorSets(m_device, &allocate_info, &descriptor_set); + if (res != VK_SUCCESS) + { + // Failing to allocate a descriptor set is not a fatal error, we can + // recover by moving to the next command buffer. + return VK_NULL_HANDLE; + } + + return descriptor_set; + } + + VkDescriptorSet Context::AllocatePersistentDescriptorSet(VkDescriptorSetLayout set_layout) + { + VkDescriptorSetAllocateInfo allocate_info = { + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, nullptr, m_global_descriptor_pool, 1, &set_layout}; + + VkDescriptorSet descriptor_set; + VkResult res = vkAllocateDescriptorSets(m_device, &allocate_info, &descriptor_set); + if (res != VK_SUCCESS) + return VK_NULL_HANDLE; + + return descriptor_set; + } + + void Context::FreeGlobalDescriptorSet(VkDescriptorSet set) + { + vkFreeDescriptorSets(m_device, m_global_descriptor_pool, 1, &set); + } + + void Context::WaitForFenceCounter(u64 fence_counter) + { + if (m_completed_fence_counter >= fence_counter) + return; + + // Find the first command buffer which covers this counter value. 
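+ // The command buffers form a ring, so the oldest pending buffer is the one after the
+ // current frame; walk forward until a buffer whose fence counter reaches the target is found.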
+ u32 index = (m_current_frame + 1) % NUM_COMMAND_BUFFERS; + while (index != m_current_frame) + { + if (m_frame_resources[index].fence_counter >= fence_counter) + break; + + index = (index + 1) % NUM_COMMAND_BUFFERS; + } + + pxAssert(index != m_current_frame); + WaitForCommandBufferCompletion(index); + } + + void Context::WaitForGPUIdle() + { + WaitForPresentComplete(); + vkDeviceWaitIdle(m_device); + } + + void Context::WaitForCommandBufferCompletion(u32 index) + { + // Wait for this command buffer to be completed. + VkResult res = vkWaitForFences(m_device, 1, &m_frame_resources[index].fence, VK_TRUE, UINT64_MAX); + if (res != VK_SUCCESS) + LOG_VULKAN_ERROR(res, "vkWaitForFences failed: "); + + // Clean up any resources for command buffers between the last known completed buffer and this + // now-completed command buffer. If we use >2 buffers, this may be more than one buffer. + const u64 now_completed_counter = m_frame_resources[index].fence_counter; + u32 cleanup_index = (m_current_frame + 1) % NUM_COMMAND_BUFFERS; + while (cleanup_index != m_current_frame) + { + FrameResources& resources = m_frame_resources[cleanup_index]; + if (resources.fence_counter > now_completed_counter) + break; + + if (resources.fence_counter > m_completed_fence_counter) + { + for (auto& it : resources.cleanup_resources) + it(); + resources.cleanup_resources.clear(); + } + + cleanup_index = (cleanup_index + 1) % NUM_COMMAND_BUFFERS; + } + + m_completed_fence_counter = now_completed_counter; + } + + void Context::SubmitCommandBuffer(VkSemaphore wait_semaphore /* = VK_NULL_HANDLE */, + VkSemaphore signal_semaphore /* = VK_NULL_HANDLE */, VkSwapchainKHR present_swap_chain /* = VK_NULL_HANDLE */, + uint32_t present_image_index /* = 0xFFFFFFFF */, bool submit_on_thread /* = false */) + { + FrameResources& resources = m_frame_resources[m_current_frame]; + + // End the current command buffer. + VkResult res; + if (resources.init_buffer_used) + { + res = vkEndCommandBuffer(resources.command_buffers[0]); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkEndCommandBuffer failed: "); + pxFailRel("Failed to end command buffer"); + } + } + + res = vkEndCommandBuffer(resources.command_buffers[1]); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkEndCommandBuffer failed: "); + pxFailRel("Failed to end command buffer"); + } + + // This command buffer now has commands, so can't be re-used without waiting. + resources.needs_fence_wait = true; + + std::unique_lock lock(m_present_mutex); + WaitForPresentComplete(lock); + + if (!submit_on_thread || !m_present_thread.joinable()) + { + DoSubmitCommandBuffer(m_current_frame, wait_semaphore, signal_semaphore); + if (present_swap_chain != VK_NULL_HANDLE) + DoPresent(signal_semaphore, present_swap_chain, present_image_index); + return; + } + + m_queued_present.command_buffer_index = m_current_frame; + m_queued_present.present_swap_chain = present_swap_chain; + m_queued_present.present_image_index = present_image_index; + m_queued_present.wait_semaphore = wait_semaphore; + m_queued_present.signal_semaphore = signal_semaphore; + m_present_done.store(false); + m_present_queued_cv.notify_one(); + } + + void Context::DoSubmitCommandBuffer(u32 index, VkSemaphore wait_semaphore, VkSemaphore signal_semaphore) + { + FrameResources& resources = m_frame_resources[index]; + + uint32_t wait_bits = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; + VkSubmitInfo submit_info = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &wait_bits, + resources.init_buffer_used ? 
2u : 1u, + resources.init_buffer_used ? resources.command_buffers.data() : &resources.command_buffers[1], 0, nullptr}; + + if (wait_semaphore != VK_NULL_HANDLE) + { + submit_info.pWaitSemaphores = &wait_semaphore; + submit_info.waitSemaphoreCount = 1; + } + + if (signal_semaphore != VK_NULL_HANDLE) + { + submit_info.signalSemaphoreCount = 1; + submit_info.pSignalSemaphores = &signal_semaphore; + } + + VkResult res = vkQueueSubmit(m_graphics_queue, 1, &submit_info, resources.fence); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkQueueSubmit failed: "); + pxFailRel("Failed to submit command buffer."); + } + } + + void Context::DoPresent(VkSemaphore wait_semaphore, VkSwapchainKHR present_swap_chain, uint32_t present_image_index) + { + // Should have a signal semaphore. + pxAssert(wait_semaphore != VK_NULL_HANDLE); + VkPresentInfoKHR present_info = {VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, nullptr, 1, &wait_semaphore, 1, + &present_swap_chain, &present_image_index, nullptr}; + + VkResult res = vkQueuePresentKHR(m_present_queue, &present_info); + if (res != VK_SUCCESS) + { + // VK_ERROR_OUT_OF_DATE_KHR is not fatal, just means we need to recreate our swap chain. + if (res != VK_ERROR_OUT_OF_DATE_KHR && res != VK_SUBOPTIMAL_KHR) + LOG_VULKAN_ERROR(res, "vkQueuePresentKHR failed: "); + + m_last_present_failed.store(true); + } + } + + void Context::WaitForPresentComplete() + { + if (m_present_done.load()) + return; + + std::unique_lock lock(m_present_mutex); + WaitForPresentComplete(lock); + } + + void Context::WaitForPresentComplete(std::unique_lock& lock) + { + if (m_present_done.load()) + return; + + m_present_done_cv.wait(lock, [this]() { return m_present_done.load(); }); + } + + void Context::PresentThread() + { + std::unique_lock lock(m_present_mutex); + while (!m_present_thread_done.load()) + { + m_present_queued_cv.wait(lock, [this]() { return !m_present_done.load() || m_present_thread_done.load(); }); + + if (m_present_done.load()) + continue; + + DoSubmitCommandBuffer(m_queued_present.command_buffer_index, m_queued_present.wait_semaphore, + m_queued_present.signal_semaphore); + DoPresent(m_queued_present.signal_semaphore, m_queued_present.present_swap_chain, + m_queued_present.present_image_index); + m_present_done.store(true); + m_present_done_cv.notify_one(); + } + } + + void Context::StartPresentThread() + { + pxAssert(!m_present_thread.joinable()); + m_present_thread_done.store(false); + m_present_thread = std::thread(&Context::PresentThread, this); + } + + void Context::StopPresentThread() + { + if (!m_present_thread.joinable()) + return; + + { + std::unique_lock lock(m_present_mutex); + WaitForPresentComplete(lock); + m_present_thread_done.store(true); + m_present_queued_cv.notify_one(); + } + + m_present_thread.join(); + } + + void Context::MoveToNextCommandBuffer() { ActivateCommandBuffer((m_current_frame + 1) % NUM_COMMAND_BUFFERS); } + + void Context::ActivateCommandBuffer(u32 index) + { + FrameResources& resources = m_frame_resources[index]; + + if (!m_present_done.load() && m_queued_present.command_buffer_index == index) + WaitForPresentComplete(); + + // Wait for the GPU to finish with all resources for this command buffer. + if (resources.fence_counter > m_completed_fence_counter) + WaitForCommandBufferCompletion(index); + + // Reset fence to unsignaled before starting. 
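+ // The per-frame fences are created signaled in CreateCommandBuffers(); after this reset,
+ // the fence is only signaled again when the buffer is submitted in DoSubmitCommandBuffer().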
+ VkResult res = vkResetFences(m_device, 1, &resources.fence); + if (res != VK_SUCCESS) + LOG_VULKAN_ERROR(res, "vkResetFences failed: "); + + // Reset command pools to beginning since we can re-use the memory now + res = vkResetCommandPool(m_device, resources.command_pool, 0); + if (res != VK_SUCCESS) + LOG_VULKAN_ERROR(res, "vkResetCommandPool failed: "); + + // Enable commands to be recorded to the two buffers again. + VkCommandBufferBeginInfo begin_info = { + VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, nullptr}; + res = vkBeginCommandBuffer(resources.command_buffers[1], &begin_info); + if (res != VK_SUCCESS) + LOG_VULKAN_ERROR(res, "vkBeginCommandBuffer failed: "); + + // Also can do the same for the descriptor pools + res = vkResetDescriptorPool(m_device, resources.descriptor_pool, 0); + if (res != VK_SUCCESS) + LOG_VULKAN_ERROR(res, "vkResetDescriptorPool failed: "); + + m_current_frame = index; + m_current_command_buffer = resources.command_buffers[1]; + resources.fence_counter = m_next_fence_counter++; + resources.init_buffer_used = false; + } + + void Context::ExecuteCommandBuffer(bool wait_for_completion) + { + // If we're waiting for completion, don't bother waking the worker thread. + const u32 current_frame = m_current_frame; + SubmitCommandBuffer(); + MoveToNextCommandBuffer(); + + if (wait_for_completion) + WaitForCommandBufferCompletion(current_frame); + } + + bool Context::CheckLastPresentFail() + { + bool res = m_last_present_failed; + m_last_present_failed = false; + return res; + } + + void Context::DeferBufferDestruction(VkBuffer object) + { + FrameResources& resources = m_frame_resources[m_current_frame]; + resources.cleanup_resources.push_back([this, object]() { vkDestroyBuffer(m_device, object, nullptr); }); + } + + void Context::DeferBufferViewDestruction(VkBufferView object) + { + FrameResources& resources = m_frame_resources[m_current_frame]; + resources.cleanup_resources.push_back([this, object]() { vkDestroyBufferView(m_device, object, nullptr); }); + } + + void Context::DeferDeviceMemoryDestruction(VkDeviceMemory object) + { + FrameResources& resources = m_frame_resources[m_current_frame]; + resources.cleanup_resources.push_back([this, object]() { vkFreeMemory(m_device, object, nullptr); }); + } + + void Context::DeferFramebufferDestruction(VkFramebuffer object) + { + FrameResources& resources = m_frame_resources[m_current_frame]; + resources.cleanup_resources.push_back([this, object]() { vkDestroyFramebuffer(m_device, object, nullptr); }); + } + + void Context::DeferImageDestruction(VkImage object) + { + FrameResources& resources = m_frame_resources[m_current_frame]; + resources.cleanup_resources.push_back([this, object]() { vkDestroyImage(m_device, object, nullptr); }); + } + + void Context::DeferImageDestruction(VkImage object, VmaAllocation allocation) + { + FrameResources& resources = m_frame_resources[m_current_frame]; + resources.cleanup_resources.push_back( + [this, object, allocation]() { vmaDestroyImage(m_allocator, object, allocation); }); + } + + void Context::DeferImageViewDestruction(VkImageView object) + { + FrameResources& resources = m_frame_resources[m_current_frame]; + resources.cleanup_resources.push_back([this, object]() { vkDestroyImageView(m_device, object, nullptr); }); + } + + void Context::DeferPipelineDestruction(VkPipeline pipeline) + { + FrameResources& resources = m_frame_resources[m_current_frame]; + resources.cleanup_resources.push_back([this, pipeline]() { 
vkDestroyPipeline(m_device, pipeline, nullptr); }); + } + + void Context::DeferSamplerDestruction(VkSampler sampler) + { + FrameResources& resources = m_frame_resources[m_current_frame]; + resources.cleanup_resources.push_back([this, sampler]() { vkDestroySampler(m_device, sampler, nullptr); }); + } + + VKAPI_ATTR VkBool32 VKAPI_CALL DebugMessengerCallback(VkDebugUtilsMessageSeverityFlagBitsEXT severity, + VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, + void* pUserData) + { + if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) + { + Console.Error("Vulkan debug report: (%s) %s", + pCallbackData->pMessageIdName ? pCallbackData->pMessageIdName : "", pCallbackData->pMessage); + } + else if (severity & (VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT)) + { + Console.Warning("Vulkan debug report: (%s) %s", + pCallbackData->pMessageIdName ? pCallbackData->pMessageIdName : "", pCallbackData->pMessage); + } + else if (severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT) + { + Console.WriteLn("Vulkan debug report: (%s) %s", + pCallbackData->pMessageIdName ? pCallbackData->pMessageIdName : "", pCallbackData->pMessage); + } + else + { + DevCon.WriteLn("Vulkan debug report: (%s) %s", + pCallbackData->pMessageIdName ? pCallbackData->pMessageIdName : "", pCallbackData->pMessage); + } + + return VK_FALSE; + } + + bool Context::EnableDebugUtils() + { + // Already enabled? + if (m_debug_messenger_callback != VK_NULL_HANDLE) + return true; + + // Check for presence of the functions before calling + if (!vkCreateDebugUtilsMessengerEXT || !vkDestroyDebugUtilsMessengerEXT || !vkSubmitDebugUtilsMessageEXT) + { + return false; + } + + VkDebugUtilsMessengerCreateInfoEXT messenger_info = {VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, + nullptr, 0, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, + VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, + DebugMessengerCallback, nullptr}; + + VkResult res = + vkCreateDebugUtilsMessengerEXT(m_instance, &messenger_info, nullptr, &m_debug_messenger_callback); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateDebugUtilsMessengerEXT failed: "); + return false; + } + + return true; + } + + void Context::DisableDebugUtils() + { + if (m_debug_messenger_callback != VK_NULL_HANDLE) + { + vkDestroyDebugUtilsMessengerEXT(m_instance, m_debug_messenger_callback, nullptr); + m_debug_messenger_callback = VK_NULL_HANDLE; + } + } + + bool Context::GetMemoryType(u32 bits, VkMemoryPropertyFlags properties, u32* out_type_index) + { + for (u32 i = 0; i < VK_MAX_MEMORY_TYPES; i++) + { + if ((bits & (1 << i)) != 0) + { + u32 supported = m_device_memory_properties.memoryTypes[i].propertyFlags & properties; + if (supported == properties) + { + *out_type_index = i; + return true; + } + } + } + + return false; + } + + u32 Context::GetMemoryType(u32 bits, VkMemoryPropertyFlags properties) + { + u32 type_index = VK_MAX_MEMORY_TYPES; + if (!GetMemoryType(bits, properties, &type_index)) + { + Console.Error("Unable to find memory type for %x:%x", bits, properties); + pxFailRel("Unable to find memory type"); + } + + return type_index; + } + + u32 Context::GetUploadMemoryType(u32 bits, bool* is_coherent) + { + // Try for coherent memory first. 
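+ // Coherent memory does not require an explicit vkFlushMappedMemoryRanges() after CPU
+ // writes, which is why it is preferred for the upload path.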
+ VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; + + u32 type_index; + if (!GetMemoryType(bits, flags, &type_index)) + { + Console.Warning("Vulkan: Failed to find a coherent memory type for uploads, this will affect performance."); + + // Try non-coherent memory. + flags &= ~VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; + if (!GetMemoryType(bits, flags, &type_index)) + { + // We shouldn't have any memory types that aren't host-visible. + pxFailRel("Unable to get memory type for upload."); + type_index = 0; + } + } + + if (is_coherent) + *is_coherent = ((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0); + + return type_index; + } + + u32 Context::GetReadbackMemoryType(u32 bits, bool* is_coherent, bool* is_cached) + { + // Try for cached and coherent memory first. + VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT | + VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; + + u32 type_index; + if (!GetMemoryType(bits, flags, &type_index)) + { + // For readbacks, caching is more important than coherency. + flags &= ~VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; + if (!GetMemoryType(bits, flags, &type_index)) + { + Console.Warning("Vulkan: Failed to find a cached memory type for readbacks, this will affect " + "performance."); + + // Remove the cached bit as well. + flags &= ~VK_MEMORY_PROPERTY_HOST_CACHED_BIT; + if (!GetMemoryType(bits, flags, &type_index)) + { + // We shouldn't have any memory types that aren't host-visible. + pxFailRel("Unable to get memory type for upload."); + type_index = 0; + } + } + } + + if (is_coherent) + *is_coherent = ((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0); + if (is_cached) + *is_cached = ((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0); + + return type_index; + } + + VkRenderPass Context::CreateCachedRenderPass(RenderPassCacheKey key) + { + VkAttachmentReference color_reference; + VkAttachmentReference* color_reference_ptr = nullptr; + VkAttachmentReference depth_reference; + VkAttachmentReference* depth_reference_ptr = nullptr; + VkAttachmentReference input_reference; + VkAttachmentReference* input_reference_ptr = nullptr; + VkSubpassDependency subpass_dependency; + VkSubpassDependency* subpass_dependency_ptr = nullptr; + std::array attachments; + u32 num_attachments = 0; + if (key.color_format != VK_FORMAT_UNDEFINED) + { + attachments[num_attachments] = {0, static_cast(key.color_format), VK_SAMPLE_COUNT_1_BIT, + static_cast(key.color_load_op), + static_cast(key.color_store_op), VK_ATTACHMENT_LOAD_OP_DONT_CARE, + VK_ATTACHMENT_STORE_OP_DONT_CARE, + key.color_feedback_loop ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, + key.color_feedback_loop ? VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL}; + color_reference.attachment = num_attachments; + color_reference.layout = + key.color_feedback_loop ? 
VK_IMAGE_LAYOUT_GENERAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + color_reference_ptr = &color_reference; + + if (key.color_feedback_loop) + { + input_reference.attachment = num_attachments; + input_reference.layout = VK_IMAGE_LAYOUT_GENERAL; + input_reference_ptr = &input_reference; + + subpass_dependency.srcSubpass = 0; + subpass_dependency.dstSubpass = 0; + subpass_dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; + subpass_dependency.dstStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; + subpass_dependency.srcAccessMask = + VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + subpass_dependency.dstAccessMask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT; + subpass_dependency.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT; + subpass_dependency_ptr = &subpass_dependency; + } + + num_attachments++; + } + if (key.depth_format != VK_FORMAT_UNDEFINED) + { + attachments[num_attachments] = {0, static_cast(key.depth_format), VK_SAMPLE_COUNT_1_BIT, + static_cast(key.depth_load_op), + static_cast(key.depth_store_op), + static_cast(key.stencil_load_op), + static_cast(key.stencil_store_op), + VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL}; + depth_reference.attachment = num_attachments; + depth_reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; + depth_reference_ptr = &depth_reference; + num_attachments++; + } + + const VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, input_reference_ptr ? 1u : 0u, + input_reference_ptr ? input_reference_ptr : nullptr, color_reference_ptr ? 1u : 0u, + color_reference_ptr ? color_reference_ptr : nullptr, nullptr, depth_reference_ptr, 0, nullptr}; + const VkRenderPassCreateInfo pass_info = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0u, + num_attachments, attachments.data(), 1u, &subpass, subpass_dependency_ptr ? 1u : 0u, + subpass_dependency_ptr}; + + VkRenderPass pass; + VkResult res = vkCreateRenderPass(m_device, &pass_info, nullptr, &pass); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateRenderPass failed: "); + return VK_NULL_HANDLE; + } + + m_render_pass_cache.emplace(key.key, pass); + return pass; + } + + void Context::DestroyRenderPassCache() + { + for (auto& it : m_render_pass_cache) + vkDestroyRenderPass(m_device, it.second, nullptr); + + m_render_pass_cache.clear(); + } +} // namespace Vulkan diff --git a/common/Vulkan/Context.h b/common/Vulkan/Context.h new file mode 100644 index 0000000000..7b126f666e --- /dev/null +++ b/common/Vulkan/Context.h @@ -0,0 +1,329 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . 
+ */ + +#pragma once + +#include "common/Pcsx2Defs.h" + +#include "common/Vulkan/Loader.h" +#include "common/Vulkan/StreamBuffer.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +struct WindowInfo; + +namespace Vulkan +{ + class SwapChain; + + class Context + { + public: + enum : u32 + { + NUM_COMMAND_BUFFERS = 3, + TEXTURE_BUFFER_SIZE = 64 * 1024 * 1024, + }; + + ~Context(); + + // Determines if the Vulkan validation layer is available on the system. + static bool CheckValidationLayerAvailablility(); + + // Helper method to create a Vulkan instance. + static VkInstance CreateVulkanInstance( + const WindowInfo* wi, bool enable_debug_utils, bool enable_validation_layer); + + // Returns a list of Vulkan-compatible GPUs. + using GPUList = std::vector; + using GPUNameList = std::vector; + static GPUList EnumerateGPUs(VkInstance instance); + static GPUNameList EnumerateGPUNames(VkInstance instance); + + // Creates a new context and sets it up as global. + static bool Create(std::string_view gpu_name, const WindowInfo* wi, std::unique_ptr* out_swap_chain, + bool threaded_presentation, bool enable_debug_utils, bool enable_validation_layer); + + // Destroys context. + static void Destroy(); + + // Enable/disable debug message runtime. + bool EnableDebugUtils(); + void DisableDebugUtils(); + + // Global state accessors + __fi VkInstance GetVulkanInstance() const { return m_instance; } + __fi VkPhysicalDevice GetPhysicalDevice() const { return m_physical_device; } + __fi VkDevice GetDevice() const { return m_device; } + __fi VmaAllocator GetAllocator() const { return m_allocator; } + __fi VkQueue GetGraphicsQueue() const { return m_graphics_queue; } + __fi u32 GetGraphicsQueueFamilyIndex() const { return m_graphics_queue_family_index; } + __fi VkQueue GetPresentQueue() const { return m_present_queue; } + __fi u32 GetPresentQueueFamilyIndex() const { return m_present_queue_family_index; } + __fi const VkQueueFamilyProperties& GetGraphicsQueueProperties() const { return m_graphics_queue_properties; } + __fi const VkPhysicalDeviceMemoryProperties& GetDeviceMemoryProperties() const + { + return m_device_memory_properties; + } + __fi const VkPhysicalDeviceProperties& GetDeviceProperties() const { return m_device_properties; } + __fi const VkPhysicalDeviceFeatures& GetDeviceFeatures() const { return m_device_features; } + __fi const VkPhysicalDeviceLimits& GetDeviceLimits() const { return m_device_properties.limits; } + + // Helpers for getting constants + __fi VkDeviceSize GetUniformBufferAlignment() const + { + return m_device_properties.limits.minUniformBufferOffsetAlignment; + } + __fi VkDeviceSize GetTexelBufferAlignment() const + { + return m_device_properties.limits.minTexelBufferOffsetAlignment; + } + __fi VkDeviceSize GetStorageBufferAlignment() const + { + return m_device_properties.limits.minStorageBufferOffsetAlignment; + } + __fi VkDeviceSize GetBufferImageGranularity() const + { + return m_device_properties.limits.bufferImageGranularity; + } + __fi VkDeviceSize GetMaxImageDimension2D() const { return m_device_properties.limits.maxImageDimension2D; } + + // Finds a memory type index for the specified memory properties and the bits returned by + // vkGetImageMemoryRequirements + bool GetMemoryType(u32 bits, VkMemoryPropertyFlags properties, u32* out_type_index); + u32 GetMemoryType(u32 bits, VkMemoryPropertyFlags properties); + + // Finds a memory type for upload or readback buffers. 
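+ // Uploads prefer host-visible + coherent types; readbacks prefer cached types (coherency
+ // is dropped first), with both falling back progressively when a type is unavailable.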
+ u32 GetUploadMemoryType(u32 bits, bool* is_coherent = nullptr); + u32 GetReadbackMemoryType(u32 bits, bool* is_coherent = nullptr, bool* is_cached = nullptr); + + // Creates a simple render pass. + __ri VkRenderPass GetRenderPass(VkFormat color_format, VkFormat depth_format, + VkAttachmentLoadOp color_load_op = VK_ATTACHMENT_LOAD_OP_LOAD, + VkAttachmentStoreOp color_store_op = VK_ATTACHMENT_STORE_OP_STORE, + VkAttachmentLoadOp depth_load_op = VK_ATTACHMENT_LOAD_OP_LOAD, + VkAttachmentStoreOp depth_store_op = VK_ATTACHMENT_STORE_OP_STORE, + VkAttachmentLoadOp stencil_load_op = VK_ATTACHMENT_LOAD_OP_DONT_CARE, + VkAttachmentStoreOp stencil_store_op = VK_ATTACHMENT_STORE_OP_DONT_CARE, bool color_feedback_loop = false) + { + RenderPassCacheKey key = {}; + key.color_format = color_format; + key.depth_format = depth_format; + key.color_load_op = color_load_op; + key.color_store_op = color_store_op; + key.depth_load_op = depth_load_op; + key.depth_store_op = depth_store_op; + key.stencil_load_op = stencil_load_op; + key.stencil_store_op = stencil_store_op; + key.color_feedback_loop = color_feedback_loop; + + auto it = m_render_pass_cache.find(key.key); + if (it != m_render_pass_cache.end()) + return it->second; + + return CreateCachedRenderPass(key); + } + + // These command buffers are allocated per-frame. They are valid until the command buffer + // is submitted, after that you should call these functions again. + __fi u32 GetCurrentCommandBufferIndex() const { return m_current_frame; } + __fi VkDescriptorPool GetGlobalDescriptorPool() const { return m_global_descriptor_pool; } + __fi VkCommandBuffer GetCurrentCommandBuffer() const { return m_current_command_buffer; } + __fi StreamBuffer& GetTextureUploadBuffer() { return m_texture_upload_buffer; } + __fi VkDescriptorPool GetCurrentDescriptorPool() const + { + return m_frame_resources[m_current_frame].descriptor_pool; + } + VkCommandBuffer GetCurrentInitCommandBuffer(); + + /// Allocates a descriptor set from the pool reserved for the current frame. + VkDescriptorSet AllocateDescriptorSet(VkDescriptorSetLayout set_layout); + + /// Allocates a descriptor set from the pool reserved for the current frame. + VkDescriptorSet AllocatePersistentDescriptorSet(VkDescriptorSetLayout set_layout); + + /// Frees a descriptor set allocated from the global pool. + void FreeGlobalDescriptorSet(VkDescriptorSet set); + + // Gets the fence that will be signaled when the currently executing command buffer is + // queued and executed. Do not wait for this fence before the buffer is executed. + __fi VkFence GetCurrentCommandBufferFence() const { return m_frame_resources[m_current_frame].fence; } + + // Fence "counters" are used to track which commands have been completed by the GPU. + // If the last completed fence counter is greater or equal to N, it means that the work + // associated counter N has been completed by the GPU. The value of N to associate with + // commands can be retreived by calling GetCurrentFenceCounter(). + u64 GetCompletedFenceCounter() const { return m_completed_fence_counter; } + + // Gets the fence that will be signaled when the currently executing command buffer is + // queued and executed. Do not wait for this fence before the buffer is executed. 
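+ // In counter terms: this value becomes the completed fence counter once the current
+ // command buffer's fence has been waited on.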
+ u64 GetCurrentFenceCounter() const { return m_frame_resources[m_current_frame].fence_counter; } + + void SubmitCommandBuffer(VkSemaphore wait_semaphore = VK_NULL_HANDLE, + VkSemaphore signal_semaphore = VK_NULL_HANDLE, VkSwapchainKHR present_swap_chain = VK_NULL_HANDLE, + uint32_t present_image_index = 0xFFFFFFFF, bool submit_on_thread = false); + void MoveToNextCommandBuffer(); + + void ExecuteCommandBuffer(bool wait_for_completion); + void WaitForPresentComplete(); + + // Was the last present submitted to the queue a failure? If so, we must recreate our swapchain. + bool CheckLastPresentFail(); + + // Schedule a vulkan resource for destruction later on. This will occur when the command buffer + // is next re-used, and the GPU has finished working with the specified resource. + void DeferBufferDestruction(VkBuffer object); + void DeferBufferViewDestruction(VkBufferView object); + void DeferDeviceMemoryDestruction(VkDeviceMemory object); + void DeferFramebufferDestruction(VkFramebuffer object); + void DeferImageDestruction(VkImage object); + void DeferImageDestruction(VkImage object, VmaAllocation allocation); + void DeferImageViewDestruction(VkImageView object); + void DeferPipelineDestruction(VkPipeline pipeline); + void DeferSamplerDestruction(VkSampler sampler); + + // Wait for a fence to be completed. + // Also invokes callbacks for completion. + void WaitForFenceCounter(u64 fence_counter); + + void WaitForGPUIdle(); + + private: + Context(VkInstance instance, VkPhysicalDevice physical_device); + + union RenderPassCacheKey + { + struct + { + u32 color_format : 8; + u32 depth_format : 8; + u32 color_load_op : 2; + u32 color_store_op : 1; + u32 depth_load_op : 2; + u32 depth_store_op : 1; + u32 stencil_load_op : 2; + u32 stencil_store_op : 1; + u32 color_feedback_loop : 1; + }; + + u32 key; + }; + + using ExtensionList = std::vector; + static bool SelectInstanceExtensions( + ExtensionList* extension_list, const WindowInfo* wi, bool enable_debug_utils); + bool SelectDeviceExtensions(ExtensionList* extension_list, bool enable_surface); + bool SelectDeviceFeatures(const VkPhysicalDeviceFeatures* required_features); + bool CreateDevice(VkSurfaceKHR surface, bool enable_validation_layer, const char** required_device_extensions, + u32 num_required_device_extensions, const char** required_device_layers, u32 num_required_device_layers, + const VkPhysicalDeviceFeatures* required_features); + + bool CreateAllocator(); + void DestroyAllocator(); + bool CreateCommandBuffers(); + void DestroyCommandBuffers(); + bool CreateGlobalDescriptorPool(); + void DestroyGlobalDescriptorPool(); + bool CreateTextureStreamBuffer(); + + VkRenderPass CreateCachedRenderPass(RenderPassCacheKey key); + void DestroyRenderPassCache(); + + void ActivateCommandBuffer(u32 index); + void WaitForCommandBufferCompletion(u32 index); + + void DoSubmitCommandBuffer(u32 index, VkSemaphore wait_semaphore, VkSemaphore signal_semaphore); + void DoPresent(VkSemaphore wait_semaphore, VkSwapchainKHR present_swap_chain, uint32_t present_image_index); + void WaitForPresentComplete(std::unique_lock& lock); + void PresentThread(); + void StartPresentThread(); + void StopPresentThread(); + + struct FrameResources + { + // [0] - Init (upload) command buffer, [1] - draw command buffer + VkCommandPool command_pool = VK_NULL_HANDLE; + std::array command_buffers{VK_NULL_HANDLE, VK_NULL_HANDLE}; + VkDescriptorPool descriptor_pool = VK_NULL_HANDLE; + VkFence fence = VK_NULL_HANDLE; + u64 fence_counter = 0; + bool init_buffer_used = false; + bool 
needs_fence_wait = false; + + std::vector<std::function<void()>> cleanup_resources; + }; + + VkInstance m_instance = VK_NULL_HANDLE; + VkPhysicalDevice m_physical_device = VK_NULL_HANDLE; + VkDevice m_device = VK_NULL_HANDLE; + VmaAllocator m_allocator = VK_NULL_HANDLE; + + VkCommandBuffer m_current_command_buffer = VK_NULL_HANDLE; + + VkDescriptorPool m_global_descriptor_pool = VK_NULL_HANDLE; + + VkQueue m_graphics_queue = VK_NULL_HANDLE; + u32 m_graphics_queue_family_index = 0; + VkQueue m_present_queue = VK_NULL_HANDLE; + u32 m_present_queue_family_index = 0; + + std::array<FrameResources, NUM_COMMAND_BUFFERS> m_frame_resources; + u64 m_next_fence_counter = 1; + u64 m_completed_fence_counter = 0; + u32 m_current_frame; + + StreamBuffer m_texture_upload_buffer; + + std::atomic_bool m_last_present_failed{false}; + std::atomic_bool m_present_done{true}; + std::mutex m_present_mutex; + std::condition_variable m_present_queued_cv; + std::condition_variable m_present_done_cv; + std::thread m_present_thread; + std::atomic_bool m_present_thread_done{false}; + + struct QueuedPresent + { + VkSemaphore wait_semaphore; + VkSemaphore signal_semaphore; + VkSwapchainKHR present_swap_chain; + u32 command_buffer_index; + u32 present_image_index; + }; + + QueuedPresent m_queued_present = {}; + + std::map<u32, VkRenderPass> m_render_pass_cache; + + VkDebugUtilsMessengerEXT m_debug_messenger_callback = VK_NULL_HANDLE; + + VkQueueFamilyProperties m_graphics_queue_properties = {}; + VkPhysicalDeviceFeatures m_device_features = {}; + VkPhysicalDeviceProperties m_device_properties = {}; + VkPhysicalDeviceMemoryProperties m_device_memory_properties = {}; + }; + +} // namespace Vulkan + +extern std::unique_ptr<Vulkan::Context> g_vulkan_context; diff --git a/common/Vulkan/EntryPoints.h b/common/Vulkan/EntryPoints.h new file mode 100644 index 0000000000..4be8ed877c --- /dev/null +++ b/common/Vulkan/EntryPoints.h @@ -0,0 +1,216 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see <http://www.gnu.org/licenses/>. + */ + +#pragma once + +// We abuse the preprocessor here to only need to specify function names once. +// Function names are prefixed so as not to conflict with system symbols at runtime.
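+// For example, including EntryPoints.inl below declares
+//     extern PFN_vkCreateInstance pcsx2_vkCreateInstance;
+// and the #define block further down maps vkCreateInstance to pcsx2_vkCreateInstance, so
+// ordinary-looking call sites resolve to the dynamically loaded function pointer.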
+#define VULKAN_MODULE_ENTRY_POINT(name, required) extern PFN_##name pcsx2_##name; +#define VULKAN_INSTANCE_ENTRY_POINT(name, required) extern PFN_##name pcsx2_##name; +#define VULKAN_DEVICE_ENTRY_POINT(name, required) extern PFN_##name pcsx2_##name; +#define VULKAN_DEFINE_NAME_PREFIX pcsx2_ +#include "EntryPoints.inl" +#undef VULKAN_DEFINE_NAME_PREFIX +#undef VULKAN_DEVICE_ENTRY_POINT +#undef VULKAN_INSTANCE_ENTRY_POINT +#undef VULKAN_MODULE_ENTRY_POINT + +#define vkCreateInstance pcsx2_vkCreateInstance +#define vkGetInstanceProcAddr pcsx2_vkGetInstanceProcAddr +#define vkEnumerateInstanceExtensionProperties pcsx2_vkEnumerateInstanceExtensionProperties +#define vkEnumerateInstanceLayerProperties pcsx2_vkEnumerateInstanceLayerProperties +#define vkEnumerateInstanceVersion pcsx2_vkEnumerateInstanceVersion + +#define vkGetDeviceProcAddr pcsx2_vkGetDeviceProcAddr +#define vkDestroyInstance pcsx2_vkDestroyInstance +#define vkEnumeratePhysicalDevices pcsx2_vkEnumeratePhysicalDevices +#define vkGetPhysicalDeviceFeatures pcsx2_vkGetPhysicalDeviceFeatures +#define vkGetPhysicalDeviceFormatProperties pcsx2_vkGetPhysicalDeviceFormatProperties +#define vkGetPhysicalDeviceImageFormatProperties pcsx2_vkGetPhysicalDeviceImageFormatProperties +#define vkGetPhysicalDeviceProperties pcsx2_vkGetPhysicalDeviceProperties +#define vkGetPhysicalDeviceQueueFamilyProperties pcsx2_vkGetPhysicalDeviceQueueFamilyProperties +#define vkGetPhysicalDeviceMemoryProperties pcsx2_vkGetPhysicalDeviceMemoryProperties +#define vkGetPhysicalDeviceFeatures2 pcsx2_vkGetPhysicalDeviceFeatures2 +#define vkCreateDevice pcsx2_vkCreateDevice +#define vkEnumerateDeviceExtensionProperties pcsx2_vkEnumerateDeviceExtensionProperties +#define vkEnumerateDeviceLayerProperties pcsx2_vkEnumerateDeviceLayerProperties +#define vkGetPhysicalDeviceSparseImageFormatProperties pcsx2_vkGetPhysicalDeviceSparseImageFormatProperties +#define vkDestroySurfaceKHR pcsx2_vkDestroySurfaceKHR +#define vkGetPhysicalDeviceSurfaceSupportKHR pcsx2_vkGetPhysicalDeviceSurfaceSupportKHR +#define vkGetPhysicalDeviceSurfaceCapabilitiesKHR pcsx2_vkGetPhysicalDeviceSurfaceCapabilitiesKHR +#define vkGetPhysicalDeviceSurfaceFormatsKHR pcsx2_vkGetPhysicalDeviceSurfaceFormatsKHR +#define vkGetPhysicalDeviceSurfacePresentModesKHR pcsx2_vkGetPhysicalDeviceSurfacePresentModesKHR +#define vkCreateWin32SurfaceKHR pcsx2_vkCreateWin32SurfaceKHR +#define vkGetPhysicalDeviceWin32PresentationSupportKHR pcsx2_vkGetPhysicalDeviceWin32PresentationSupportKHR +#define vkCreateXlibSurfaceKHR pcsx2_vkCreateXlibSurfaceKHR +#define vkGetPhysicalDeviceXlibPresentationSupportKHR pcsx2_vkGetPhysicalDeviceXlibPresentationSupportKHR +#define vkCreateWaylandSurfaceKHR pcsx2_vkCreateWaylandSurfaceKHR +#define vkCreateAndroidSurfaceKHR pcsx2_vkCreateAndroidSurfaceKHR +#define vkCreateMacOSSurfaceMVK pcsx2_vkCreateMacOSSurfaceMVK +#define vkCreateMetalSurfaceEXT pcsx2_vkCreateMetalSurfaceEXT + +// VK_EXT_debug_utils +#define vkCmdBeginDebugUtilsLabelEXT pcsx2_vkCmdBeginDebugUtilsLabelEXT +#define vkCmdEndDebugUtilsLabelEXT pcsx2_vkCmdEndDebugUtilsLabelEXT +#define vkCmdInsertDebugUtilsLabelEXT pcsx2_vkCmdInsertDebugUtilsLabelEXT +#define vkCreateDebugUtilsMessengerEXT pcsx2_vkCreateDebugUtilsMessengerEXT +#define vkDestroyDebugUtilsMessengerEXT pcsx2_vkDestroyDebugUtilsMessengerEXT +#define vkQueueBeginDebugUtilsLabelEXT pcsx2_vkQueueBeginDebugUtilsLabelEXT +#define vkQueueEndDebugUtilsLabelEXT pcsx2_vkQueueEndDebugUtilsLabelEXT +#define vkQueueInsertDebugUtilsLabelEXT 
pcsx2_vkQueueInsertDebugUtilsLabelEXT +#define vkSetDebugUtilsObjectNameEXT pcsx2_vkSetDebugUtilsObjectNameEXT +#define vkSetDebugUtilsObjectTagEXT pcsx2_vkSetDebugUtilsObjectTagEXT +#define vkSubmitDebugUtilsMessageEXT pcsx2_vkSubmitDebugUtilsMessageEXT + +#define vkGetPhysicalDeviceProperties2 pcsx2_vkGetPhysicalDeviceProperties2 +#define vkGetPhysicalDeviceSurfaceCapabilities2KHR pcsx2_vkGetPhysicalDeviceSurfaceCapabilities2KHR +#define vkGetPhysicalDeviceDisplayPropertiesKHR pcsx2_vkGetPhysicalDeviceDisplayPropertiesKHR +#define vkGetPhysicalDeviceDisplayPlanePropertiesKHR pcsx2_vkGetPhysicalDeviceDisplayPlanePropertiesKHR +#define vkGetDisplayPlaneSupportedDisplaysKHR pcsx2_vkGetDisplayPlaneSupportedDisplaysKHR +#define vkGetDisplayModePropertiesKHR pcsx2_vkGetDisplayModePropertiesKHR +#define vkCreateDisplayModeKHR pcsx2_vkCreateDisplayModeKHR +#define vkGetDisplayPlaneCapabilitiesKHR pcsx2_vkGetDisplayPlaneCapabilitiesKHR +#define vkCreateDisplayPlaneSurfaceKHR pcsx2_vkCreateDisplayPlaneSurfaceKHR + +#define vkDestroyDevice pcsx2_vkDestroyDevice +#define vkGetDeviceQueue pcsx2_vkGetDeviceQueue +#define vkQueueSubmit pcsx2_vkQueueSubmit +#define vkQueueWaitIdle pcsx2_vkQueueWaitIdle +#define vkDeviceWaitIdle pcsx2_vkDeviceWaitIdle +#define vkAllocateMemory pcsx2_vkAllocateMemory +#define vkFreeMemory pcsx2_vkFreeMemory +#define vkMapMemory pcsx2_vkMapMemory +#define vkUnmapMemory pcsx2_vkUnmapMemory +#define vkFlushMappedMemoryRanges pcsx2_vkFlushMappedMemoryRanges +#define vkInvalidateMappedMemoryRanges pcsx2_vkInvalidateMappedMemoryRanges +#define vkGetDeviceMemoryCommitment pcsx2_vkGetDeviceMemoryCommitment +#define vkBindBufferMemory pcsx2_vkBindBufferMemory +#define vkBindImageMemory pcsx2_vkBindImageMemory +#define vkGetBufferMemoryRequirements pcsx2_vkGetBufferMemoryRequirements +#define vkGetImageMemoryRequirements pcsx2_vkGetImageMemoryRequirements +#define vkGetImageSparseMemoryRequirements pcsx2_vkGetImageSparseMemoryRequirements +#define vkQueueBindSparse pcsx2_vkQueueBindSparse +#define vkCreateFence pcsx2_vkCreateFence +#define vkDestroyFence pcsx2_vkDestroyFence +#define vkResetFences pcsx2_vkResetFences +#define vkGetFenceStatus pcsx2_vkGetFenceStatus +#define vkWaitForFences pcsx2_vkWaitForFences +#define vkCreateSemaphore pcsx2_vkCreateSemaphore +#define vkDestroySemaphore pcsx2_vkDestroySemaphore +#define vkCreateEvent pcsx2_vkCreateEvent +#define vkDestroyEvent pcsx2_vkDestroyEvent +#define vkGetEventStatus pcsx2_vkGetEventStatus +#define vkSetEvent pcsx2_vkSetEvent +#define vkResetEvent pcsx2_vkResetEvent +#define vkCreateQueryPool pcsx2_vkCreateQueryPool +#define vkDestroyQueryPool pcsx2_vkDestroyQueryPool +#define vkGetQueryPoolResults pcsx2_vkGetQueryPoolResults +#define vkCreateBuffer pcsx2_vkCreateBuffer +#define vkDestroyBuffer pcsx2_vkDestroyBuffer +#define vkCreateBufferView pcsx2_vkCreateBufferView +#define vkDestroyBufferView pcsx2_vkDestroyBufferView +#define vkCreateImage pcsx2_vkCreateImage +#define vkDestroyImage pcsx2_vkDestroyImage +#define vkGetImageSubresourceLayout pcsx2_vkGetImageSubresourceLayout +#define vkCreateImageView pcsx2_vkCreateImageView +#define vkDestroyImageView pcsx2_vkDestroyImageView +#define vkCreateShaderModule pcsx2_vkCreateShaderModule +#define vkDestroyShaderModule pcsx2_vkDestroyShaderModule +#define vkCreatePipelineCache pcsx2_vkCreatePipelineCache +#define vkDestroyPipelineCache pcsx2_vkDestroyPipelineCache +#define vkGetPipelineCacheData pcsx2_vkGetPipelineCacheData +#define vkMergePipelineCaches 
pcsx2_vkMergePipelineCaches +#define vkCreateGraphicsPipelines pcsx2_vkCreateGraphicsPipelines +#define vkCreateComputePipelines pcsx2_vkCreateComputePipelines +#define vkDestroyPipeline pcsx2_vkDestroyPipeline +#define vkCreatePipelineLayout pcsx2_vkCreatePipelineLayout +#define vkDestroyPipelineLayout pcsx2_vkDestroyPipelineLayout +#define vkCreateSampler pcsx2_vkCreateSampler +#define vkDestroySampler pcsx2_vkDestroySampler +#define vkCreateDescriptorSetLayout pcsx2_vkCreateDescriptorSetLayout +#define vkDestroyDescriptorSetLayout pcsx2_vkDestroyDescriptorSetLayout +#define vkCreateDescriptorPool pcsx2_vkCreateDescriptorPool +#define vkDestroyDescriptorPool pcsx2_vkDestroyDescriptorPool +#define vkResetDescriptorPool pcsx2_vkResetDescriptorPool +#define vkAllocateDescriptorSets pcsx2_vkAllocateDescriptorSets +#define vkFreeDescriptorSets pcsx2_vkFreeDescriptorSets +#define vkUpdateDescriptorSets pcsx2_vkUpdateDescriptorSets +#define vkCreateFramebuffer pcsx2_vkCreateFramebuffer +#define vkDestroyFramebuffer pcsx2_vkDestroyFramebuffer +#define vkCreateRenderPass pcsx2_vkCreateRenderPass +#define vkDestroyRenderPass pcsx2_vkDestroyRenderPass +#define vkGetRenderAreaGranularity pcsx2_vkGetRenderAreaGranularity +#define vkCreateCommandPool pcsx2_vkCreateCommandPool +#define vkDestroyCommandPool pcsx2_vkDestroyCommandPool +#define vkResetCommandPool pcsx2_vkResetCommandPool +#define vkAllocateCommandBuffers pcsx2_vkAllocateCommandBuffers +#define vkFreeCommandBuffers pcsx2_vkFreeCommandBuffers +#define vkBeginCommandBuffer pcsx2_vkBeginCommandBuffer +#define vkEndCommandBuffer pcsx2_vkEndCommandBuffer +#define vkResetCommandBuffer pcsx2_vkResetCommandBuffer +#define vkCmdBindPipeline pcsx2_vkCmdBindPipeline +#define vkCmdSetViewport pcsx2_vkCmdSetViewport +#define vkCmdSetScissor pcsx2_vkCmdSetScissor +#define vkCmdSetLineWidth pcsx2_vkCmdSetLineWidth +#define vkCmdSetDepthBias pcsx2_vkCmdSetDepthBias +#define vkCmdSetBlendConstants pcsx2_vkCmdSetBlendConstants +#define vkCmdSetDepthBounds pcsx2_vkCmdSetDepthBounds +#define vkCmdSetStencilCompareMask pcsx2_vkCmdSetStencilCompareMask +#define vkCmdSetStencilWriteMask pcsx2_vkCmdSetStencilWriteMask +#define vkCmdSetStencilReference pcsx2_vkCmdSetStencilReference +#define vkCmdBindDescriptorSets pcsx2_vkCmdBindDescriptorSets +#define vkCmdBindIndexBuffer pcsx2_vkCmdBindIndexBuffer +#define vkCmdBindVertexBuffers pcsx2_vkCmdBindVertexBuffers +#define vkCmdDraw pcsx2_vkCmdDraw +#define vkCmdDrawIndexed pcsx2_vkCmdDrawIndexed +#define vkCmdDrawIndirect pcsx2_vkCmdDrawIndirect +#define vkCmdDrawIndexedIndirect pcsx2_vkCmdDrawIndexedIndirect +#define vkCmdDispatch pcsx2_vkCmdDispatch +#define vkCmdDispatchIndirect pcsx2_vkCmdDispatchIndirect +#define vkCmdCopyBuffer pcsx2_vkCmdCopyBuffer +#define vkCmdCopyImage pcsx2_vkCmdCopyImage +#define vkCmdBlitImage pcsx2_vkCmdBlitImage +#define vkCmdCopyBufferToImage pcsx2_vkCmdCopyBufferToImage +#define vkCmdCopyImageToBuffer pcsx2_vkCmdCopyImageToBuffer +#define vkCmdUpdateBuffer pcsx2_vkCmdUpdateBuffer +#define vkCmdFillBuffer pcsx2_vkCmdFillBuffer +#define vkCmdClearColorImage pcsx2_vkCmdClearColorImage +#define vkCmdClearDepthStencilImage pcsx2_vkCmdClearDepthStencilImage +#define vkCmdClearAttachments pcsx2_vkCmdClearAttachments +#define vkCmdResolveImage pcsx2_vkCmdResolveImage +#define vkCmdSetEvent pcsx2_vkCmdSetEvent +#define vkCmdResetEvent pcsx2_vkCmdResetEvent +#define vkCmdWaitEvents pcsx2_vkCmdWaitEvents +#define vkCmdPipelineBarrier pcsx2_vkCmdPipelineBarrier +#define vkCmdBeginQuery 
pcsx2_vkCmdBeginQuery +#define vkCmdEndQuery pcsx2_vkCmdEndQuery +#define vkCmdResetQueryPool pcsx2_vkCmdResetQueryPool +#define vkCmdWriteTimestamp pcsx2_vkCmdWriteTimestamp +#define vkCmdCopyQueryPoolResults pcsx2_vkCmdCopyQueryPoolResults +#define vkCmdPushConstants pcsx2_vkCmdPushConstants +#define vkCmdBeginRenderPass pcsx2_vkCmdBeginRenderPass +#define vkCmdNextSubpass pcsx2_vkCmdNextSubpass +#define vkCmdEndRenderPass pcsx2_vkCmdEndRenderPass +#define vkCmdExecuteCommands pcsx2_vkCmdExecuteCommands +#define vkCreateSwapchainKHR pcsx2_vkCreateSwapchainKHR +#define vkDestroySwapchainKHR pcsx2_vkDestroySwapchainKHR +#define vkGetSwapchainImagesKHR pcsx2_vkGetSwapchainImagesKHR +#define vkAcquireNextImageKHR pcsx2_vkAcquireNextImageKHR +#define vkQueuePresentKHR pcsx2_vkQueuePresentKHR + +#ifdef SUPPORTS_VULKAN_EXCLUSIVE_FULLSCREEN +#define vkAcquireFullScreenExclusiveModeEXT pcsx2_vkAcquireFullScreenExclusiveModeEXT +#define vkReleaseFullScreenExclusiveModeEXT pcsx2_vkReleaseFullScreenExclusiveModeEXT +#endif diff --git a/common/Vulkan/EntryPoints.inl b/common/Vulkan/EntryPoints.inl new file mode 100644 index 0000000000..815477ede3 --- /dev/null +++ b/common/Vulkan/EntryPoints.inl @@ -0,0 +1,239 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . + */ + +// Expands the VULKAN_ENTRY_POINT macro for each function when this file is included. +// Parameters: Function name, is required +// VULKAN_MODULE_ENTRY_POINT is for functions in vulkan-1.dll +// VULKAN_INSTANCE_ENTRY_POINT is for instance-specific functions. +// VULKAN_DEVICE_ENTRY_POINT is for device-specific functions. 
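The header comment above describes a classic X-macro: a consumer defines one or more of the VULKAN_*_ENTRY_POINT macros to emit whatever it needs per function, includes this file, then undefines the macro. A minimal sketch of a declaration-side consumer (purely illustrative; Loader.cpp further down uses the same expansion without `extern` to define the pointers):

```cpp
// X-macro expansion sketch: declare one pcsx2_-prefixed function pointer per
// device-level entry point listed in EntryPoints.inl.
#define VULKAN_DEVICE_ENTRY_POINT(name, required) extern PFN_##name pcsx2_##name;
#include "common/Vulkan/EntryPoints.inl"
#undef VULKAN_DEVICE_ENTRY_POINT
```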
+ +#ifdef VULKAN_MODULE_ENTRY_POINT + +VULKAN_MODULE_ENTRY_POINT(vkCreateInstance, true) +VULKAN_MODULE_ENTRY_POINT(vkGetInstanceProcAddr, true) +VULKAN_MODULE_ENTRY_POINT(vkEnumerateInstanceExtensionProperties, true) +VULKAN_MODULE_ENTRY_POINT(vkEnumerateInstanceLayerProperties, true) +VULKAN_MODULE_ENTRY_POINT(vkEnumerateInstanceVersion, false) + +#endif // VULKAN_MODULE_ENTRY_POINT + +#ifdef VULKAN_INSTANCE_ENTRY_POINT + +VULKAN_INSTANCE_ENTRY_POINT(vkGetDeviceProcAddr, true) +VULKAN_INSTANCE_ENTRY_POINT(vkDestroyInstance, true) +VULKAN_INSTANCE_ENTRY_POINT(vkEnumeratePhysicalDevices, true) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceFeatures, true) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceFormatProperties, true) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceImageFormatProperties, true) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceProperties, true) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceQueueFamilyProperties, true) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceMemoryProperties, true) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceFeatures2, false) +VULKAN_INSTANCE_ENTRY_POINT(vkCreateDevice, true) +VULKAN_INSTANCE_ENTRY_POINT(vkEnumerateDeviceExtensionProperties, true) +VULKAN_INSTANCE_ENTRY_POINT(vkEnumerateDeviceLayerProperties, true) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceSparseImageFormatProperties, true) +VULKAN_INSTANCE_ENTRY_POINT(vkDestroySurfaceKHR, false) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceSurfaceSupportKHR, false) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceSurfaceCapabilitiesKHR, false) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceSurfaceFormatsKHR, false) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceSurfacePresentModesKHR, false) + +#if defined(VK_USE_PLATFORM_WIN32_KHR) +VULKAN_INSTANCE_ENTRY_POINT(vkCreateWin32SurfaceKHR, false) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceWin32PresentationSupportKHR, false) +#endif + +#if defined(VK_USE_PLATFORM_XLIB_KHR) +VULKAN_INSTANCE_ENTRY_POINT(vkCreateXlibSurfaceKHR, false) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceXlibPresentationSupportKHR, false) +#endif + +#if defined(VK_USE_PLATFORM_WAYLAND_KHR) +VULKAN_INSTANCE_ENTRY_POINT(vkCreateWaylandSurfaceKHR, false) +#endif + +#if defined(VK_USE_PLATFORM_ANDROID_KHR) +VULKAN_INSTANCE_ENTRY_POINT(vkCreateAndroidSurfaceKHR, false) +#endif + +#if defined(VK_USE_PLATFORM_MACOS_MVK) +VULKAN_INSTANCE_ENTRY_POINT(vkCreateMacOSSurfaceMVK, false) +#endif + +#if defined(VK_USE_PLATFORM_METAL_EXT) +VULKAN_INSTANCE_ENTRY_POINT(vkCreateMetalSurfaceEXT, false) +#endif + +// VK_EXT_debug_utils +VULKAN_INSTANCE_ENTRY_POINT(vkCmdBeginDebugUtilsLabelEXT, false) +VULKAN_INSTANCE_ENTRY_POINT(vkCmdEndDebugUtilsLabelEXT, false) +VULKAN_INSTANCE_ENTRY_POINT(vkCmdInsertDebugUtilsLabelEXT, false) +VULKAN_INSTANCE_ENTRY_POINT(vkCreateDebugUtilsMessengerEXT, false) +VULKAN_INSTANCE_ENTRY_POINT(vkDestroyDebugUtilsMessengerEXT, false) +VULKAN_INSTANCE_ENTRY_POINT(vkQueueBeginDebugUtilsLabelEXT, false) +VULKAN_INSTANCE_ENTRY_POINT(vkQueueEndDebugUtilsLabelEXT, false) +VULKAN_INSTANCE_ENTRY_POINT(vkQueueInsertDebugUtilsLabelEXT, false) +VULKAN_INSTANCE_ENTRY_POINT(vkSetDebugUtilsObjectNameEXT, false) +VULKAN_INSTANCE_ENTRY_POINT(vkSetDebugUtilsObjectTagEXT, false) +VULKAN_INSTANCE_ENTRY_POINT(vkSubmitDebugUtilsMessageEXT, false) + +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceProperties2, false) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceSurfaceCapabilities2KHR, false) + +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceDisplayPropertiesKHR, 
false) +VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceDisplayPlanePropertiesKHR, false) +VULKAN_INSTANCE_ENTRY_POINT(vkGetDisplayPlaneSupportedDisplaysKHR, false) +VULKAN_INSTANCE_ENTRY_POINT(vkGetDisplayModePropertiesKHR, false) +VULKAN_INSTANCE_ENTRY_POINT(vkCreateDisplayModeKHR, false) +VULKAN_INSTANCE_ENTRY_POINT(vkGetDisplayPlaneCapabilitiesKHR, false) +VULKAN_INSTANCE_ENTRY_POINT(vkCreateDisplayPlaneSurfaceKHR, false) + +#endif // VULKAN_INSTANCE_ENTRY_POINT + +#ifdef VULKAN_DEVICE_ENTRY_POINT + +VULKAN_DEVICE_ENTRY_POINT(vkDestroyDevice, true) +VULKAN_DEVICE_ENTRY_POINT(vkGetDeviceQueue, true) +VULKAN_DEVICE_ENTRY_POINT(vkQueueSubmit, true) +VULKAN_DEVICE_ENTRY_POINT(vkQueueWaitIdle, true) +VULKAN_DEVICE_ENTRY_POINT(vkDeviceWaitIdle, true) +VULKAN_DEVICE_ENTRY_POINT(vkAllocateMemory, true) +VULKAN_DEVICE_ENTRY_POINT(vkFreeMemory, true) +VULKAN_DEVICE_ENTRY_POINT(vkMapMemory, true) +VULKAN_DEVICE_ENTRY_POINT(vkUnmapMemory, true) +VULKAN_DEVICE_ENTRY_POINT(vkFlushMappedMemoryRanges, true) +VULKAN_DEVICE_ENTRY_POINT(vkInvalidateMappedMemoryRanges, true) +VULKAN_DEVICE_ENTRY_POINT(vkGetDeviceMemoryCommitment, true) +VULKAN_DEVICE_ENTRY_POINT(vkBindBufferMemory, true) +VULKAN_DEVICE_ENTRY_POINT(vkBindImageMemory, true) +VULKAN_DEVICE_ENTRY_POINT(vkGetBufferMemoryRequirements, true) +VULKAN_DEVICE_ENTRY_POINT(vkGetImageMemoryRequirements, true) +VULKAN_DEVICE_ENTRY_POINT(vkGetImageSparseMemoryRequirements, true) +VULKAN_DEVICE_ENTRY_POINT(vkQueueBindSparse, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateFence, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyFence, true) +VULKAN_DEVICE_ENTRY_POINT(vkResetFences, true) +VULKAN_DEVICE_ENTRY_POINT(vkGetFenceStatus, true) +VULKAN_DEVICE_ENTRY_POINT(vkWaitForFences, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateSemaphore, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroySemaphore, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateEvent, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyEvent, true) +VULKAN_DEVICE_ENTRY_POINT(vkGetEventStatus, true) +VULKAN_DEVICE_ENTRY_POINT(vkSetEvent, true) +VULKAN_DEVICE_ENTRY_POINT(vkResetEvent, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateQueryPool, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyQueryPool, true) +VULKAN_DEVICE_ENTRY_POINT(vkGetQueryPoolResults, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateBuffer, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyBuffer, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateBufferView, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyBufferView, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateImage, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyImage, true) +VULKAN_DEVICE_ENTRY_POINT(vkGetImageSubresourceLayout, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateImageView, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyImageView, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateShaderModule, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyShaderModule, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreatePipelineCache, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyPipelineCache, true) +VULKAN_DEVICE_ENTRY_POINT(vkGetPipelineCacheData, true) +VULKAN_DEVICE_ENTRY_POINT(vkMergePipelineCaches, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateGraphicsPipelines, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateComputePipelines, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyPipeline, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreatePipelineLayout, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyPipelineLayout, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateSampler, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroySampler, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateDescriptorSetLayout, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyDescriptorSetLayout, 
true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateDescriptorPool, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyDescriptorPool, true) +VULKAN_DEVICE_ENTRY_POINT(vkResetDescriptorPool, true) +VULKAN_DEVICE_ENTRY_POINT(vkAllocateDescriptorSets, true) +VULKAN_DEVICE_ENTRY_POINT(vkFreeDescriptorSets, true) +VULKAN_DEVICE_ENTRY_POINT(vkUpdateDescriptorSets, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateFramebuffer, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyFramebuffer, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateRenderPass, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyRenderPass, true) +VULKAN_DEVICE_ENTRY_POINT(vkGetRenderAreaGranularity, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateCommandPool, true) +VULKAN_DEVICE_ENTRY_POINT(vkDestroyCommandPool, true) +VULKAN_DEVICE_ENTRY_POINT(vkResetCommandPool, true) +VULKAN_DEVICE_ENTRY_POINT(vkAllocateCommandBuffers, true) +VULKAN_DEVICE_ENTRY_POINT(vkFreeCommandBuffers, true) +VULKAN_DEVICE_ENTRY_POINT(vkBeginCommandBuffer, true) +VULKAN_DEVICE_ENTRY_POINT(vkEndCommandBuffer, true) +VULKAN_DEVICE_ENTRY_POINT(vkResetCommandBuffer, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdBindPipeline, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdSetViewport, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdSetScissor, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdSetLineWidth, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdSetDepthBias, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdSetBlendConstants, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdSetDepthBounds, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdSetStencilCompareMask, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdSetStencilWriteMask, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdSetStencilReference, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdBindDescriptorSets, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdBindIndexBuffer, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdBindVertexBuffers, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdDraw, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdDrawIndexed, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdDrawIndirect, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdDrawIndexedIndirect, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdDispatch, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdDispatchIndirect, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdCopyBuffer, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdCopyImage, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdBlitImage, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdCopyBufferToImage, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdCopyImageToBuffer, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdUpdateBuffer, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdFillBuffer, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdClearColorImage, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdClearDepthStencilImage, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdClearAttachments, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdResolveImage, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdSetEvent, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdResetEvent, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdWaitEvents, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdPipelineBarrier, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdBeginQuery, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdEndQuery, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdResetQueryPool, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdWriteTimestamp, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdCopyQueryPoolResults, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdPushConstants, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdBeginRenderPass, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdNextSubpass, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdEndRenderPass, true) +VULKAN_DEVICE_ENTRY_POINT(vkCmdExecuteCommands, true) +VULKAN_DEVICE_ENTRY_POINT(vkCreateSwapchainKHR, false) +VULKAN_DEVICE_ENTRY_POINT(vkDestroySwapchainKHR, false) 
+VULKAN_DEVICE_ENTRY_POINT(vkGetSwapchainImagesKHR, false) +VULKAN_DEVICE_ENTRY_POINT(vkAcquireNextImageKHR, false) +VULKAN_DEVICE_ENTRY_POINT(vkQueuePresentKHR, false) + +#ifdef SUPPORTS_VULKAN_EXCLUSIVE_FULLSCREEN +VULKAN_DEVICE_ENTRY_POINT(vkAcquireFullScreenExclusiveModeEXT, false) +VULKAN_DEVICE_ENTRY_POINT(vkReleaseFullScreenExclusiveModeEXT, false) +#endif + +#endif // VULKAN_DEVICE_ENTRY_POINT diff --git a/common/Vulkan/Loader.cpp b/common/Vulkan/Loader.cpp new file mode 100644 index 0000000000..91930f628a --- /dev/null +++ b/common/Vulkan/Loader.cpp @@ -0,0 +1,260 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . + */ + +#include +#include +#include +#include +#include +#include + +#include "Loader.h" + +#ifndef _WIN32 +#include +#endif + +#ifdef __APPLE__ +#include +#endif + +#define VULKAN_MODULE_ENTRY_POINT(name, required) PFN_##name pcsx2_##name; +#define VULKAN_INSTANCE_ENTRY_POINT(name, required) PFN_##name pcsx2_##name; +#define VULKAN_DEVICE_ENTRY_POINT(name, required) PFN_##name pcsx2_##name; +#include "EntryPoints.inl" +#undef VULKAN_DEVICE_ENTRY_POINT +#undef VULKAN_INSTANCE_ENTRY_POINT +#undef VULKAN_MODULE_ENTRY_POINT + +namespace Vulkan +{ + void ResetVulkanLibraryFunctionPointers() + { +#define VULKAN_MODULE_ENTRY_POINT(name, required) pcsx2_##name = nullptr; +#define VULKAN_INSTANCE_ENTRY_POINT(name, required) pcsx2_##name = nullptr; +#define VULKAN_DEVICE_ENTRY_POINT(name, required) pcsx2_##name = nullptr; +#include "EntryPoints.inl" +#undef VULKAN_DEVICE_ENTRY_POINT +#undef VULKAN_INSTANCE_ENTRY_POINT +#undef VULKAN_MODULE_ENTRY_POINT + } + +#if defined(_WIN32) + + static HMODULE vulkan_module; + static std::atomic_int vulkan_module_ref_count = {0}; + + bool LoadVulkanLibrary() + { + // Not thread safe if a second thread calls the loader whilst the first is still in-progress. 
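The comment above flags that LoadVulkanLibrary()/UnloadVulkanLibrary() are not safe to call concurrently: the atomic reference count does not protect the module-handle check against a racing LoadLibraryA/dlopen. If callers could ever overlap, a guard along these lines would serialize them; the wrapper below is hypothetical and not part of this patch:

```cpp
#include <mutex>

// Hypothetical call-site guard (not part of this patch): serialize loader
// calls so a second thread cannot race the module-handle check.
static std::mutex s_vulkan_loader_mutex;

bool SafeLoadVulkanLibrary()
{
	std::lock_guard<std::mutex> lock(s_vulkan_loader_mutex);
	return Vulkan::LoadVulkanLibrary();
}

void SafeUnloadVulkanLibrary()
{
	std::lock_guard<std::mutex> lock(s_vulkan_loader_mutex);
	Vulkan::UnloadVulkanLibrary();
}
```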
+ if (vulkan_module) + { + vulkan_module_ref_count++; + return true; + } + +#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP | WINAPI_PARTITION_SYSTEM | WINAPI_PARTITION_GAMES) + vulkan_module = LoadLibraryA("vulkan-1.dll"); +#else + vulkan_module = NULL; +#endif + if (!vulkan_module) + { + std::fprintf(stderr, "Failed to load vulkan-1.dll\n"); + return false; + } + + bool required_functions_missing = false; + auto LoadFunction = [&](FARPROC* func_ptr, const char* name, bool is_required) { + *func_ptr = GetProcAddress(vulkan_module, name); + if (!(*func_ptr) && is_required) + { + std::fprintf(stderr, "Vulkan: Failed to load required module function %s\n", name); + required_functions_missing = true; + } + }; + +#define VULKAN_MODULE_ENTRY_POINT(name, required) LoadFunction(reinterpret_cast(&name), #name, required); +#include "EntryPoints.inl" +#undef VULKAN_MODULE_ENTRY_POINT + + if (required_functions_missing) + { + ResetVulkanLibraryFunctionPointers(); + FreeLibrary(vulkan_module); + vulkan_module = nullptr; + return false; + } + + vulkan_module_ref_count++; + return true; + } + + void UnloadVulkanLibrary() + { + if ((--vulkan_module_ref_count) > 0) + return; + + ResetVulkanLibraryFunctionPointers(); + FreeLibrary(vulkan_module); + vulkan_module = nullptr; + } + +#else + + static void* vulkan_module; + static std::atomic_int vulkan_module_ref_count = {0}; + + bool LoadVulkanLibrary() + { + // Not thread safe if a second thread calls the loader whilst the first is still in-progress. + if (vulkan_module) + { + vulkan_module_ref_count++; + return true; + } + +#if defined(__APPLE__) + // Check if a path to a specific Vulkan library has been specified. + char* libvulkan_env = getenv("LIBVULKAN_PATH"); + if (libvulkan_env) + vulkan_module = dlopen(libvulkan_env, RTLD_NOW); + if (!vulkan_module) + { + unsigned path_size = 0; + _NSGetExecutablePath(nullptr, &path_size); + std::string path; + path.resize(path_size); + if (_NSGetExecutablePath(path.data(), &path_size) == 0) + { + path[path_size] = 0; + + size_t pos = path.rfind('/'); + if (pos != std::string::npos) + { + path.erase(pos); + path += "/../Frameworks/libvulkan.dylib"; + vulkan_module = dlopen(path.c_str(), RTLD_NOW); + if (!vulkan_module) + { + path.erase(pos); + path += "/../Frameworks/libMoltenVK.dylib"; + vulkan_module = dlopen(path.c_str(), RTLD_NOW); + } + } + } + } + if (!vulkan_module) + { + vulkan_module = dlopen("libvulkan.dylib", RTLD_NOW); + if (!vulkan_module) + vulkan_module = dlopen("libMoltenVK.dylib", RTLD_NOW); + } +#else + // Names of libraries to search. Desktop should use libvulkan.so.1 or libvulkan.so. 
+ static const char* search_lib_names[] = {"libvulkan.so.1", "libvulkan.so"}; + for (size_t i = 0; i < sizeof(search_lib_names) / sizeof(search_lib_names[0]); i++) + { + vulkan_module = dlopen(search_lib_names[i], RTLD_NOW); + if (vulkan_module) + break; + } +#endif + + if (!vulkan_module) + { + std::fprintf(stderr, "Failed to load or locate libvulkan.so\n"); + return false; + } + + bool required_functions_missing = false; + auto LoadFunction = [&](void** func_ptr, const char* name, bool is_required) { + *func_ptr = dlsym(vulkan_module, name); + if (!(*func_ptr) && is_required) + { + std::fprintf(stderr, "Vulkan: Failed to load required module function %s\n", name); + required_functions_missing = true; + } + }; + +#define VULKAN_MODULE_ENTRY_POINT(name, required) LoadFunction(reinterpret_cast(&name), #name, required); +#include "EntryPoints.inl" +#undef VULKAN_MODULE_ENTRY_POINT + + if (required_functions_missing) + { + ResetVulkanLibraryFunctionPointers(); + dlclose(vulkan_module); + vulkan_module = nullptr; + return false; + } + + vulkan_module_ref_count++; + return true; + } + + void UnloadVulkanLibrary() + { + if ((--vulkan_module_ref_count) > 0) + return; + + ResetVulkanLibraryFunctionPointers(); + dlclose(vulkan_module); + vulkan_module = nullptr; + } + +#endif + + bool LoadVulkanInstanceFunctions(VkInstance instance) + { + bool required_functions_missing = false; + auto LoadFunction = [&](PFN_vkVoidFunction* func_ptr, const char* name, bool is_required) { + *func_ptr = vkGetInstanceProcAddr(instance, name); + if (!(*func_ptr) && is_required) + { + std::fprintf(stderr, "Vulkan: Failed to load required instance function %s\n", name); + required_functions_missing = true; + } + }; + +#define VULKAN_INSTANCE_ENTRY_POINT(name, required) \ + LoadFunction(reinterpret_cast(&name), #name, required); +#include "EntryPoints.inl" +#undef VULKAN_INSTANCE_ENTRY_POINT + + return !required_functions_missing; + } + + bool LoadVulkanDeviceFunctions(VkDevice device) + { + bool required_functions_missing = false; + auto LoadFunction = [&](PFN_vkVoidFunction* func_ptr, const char* name, bool is_required) { + *func_ptr = vkGetDeviceProcAddr(device, name); + if (!(*func_ptr) && is_required) + { + std::fprintf(stderr, "Vulkan: Failed to load required device function %s\n", name); + required_functions_missing = true; + } + }; + +#define VULKAN_DEVICE_ENTRY_POINT(name, required) \ + LoadFunction(reinterpret_cast(&name), #name, required); +#include "EntryPoints.inl" +#undef VULKAN_DEVICE_ENTRY_POINT + + return !required_functions_missing; + } + +} // namespace Vulkan diff --git a/common/Vulkan/Loader.h b/common/Vulkan/Loader.h new file mode 100644 index 0000000000..9e65277ac9 --- /dev/null +++ b/common/Vulkan/Loader.h @@ -0,0 +1,117 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . 
+ */ + +#pragma once + +#define VK_NO_PROTOTYPES + +#if defined(WIN32) + +#define VK_USE_PLATFORM_WIN32_KHR + +#ifndef NOMINMAX +#define NOMINMAX +#endif + +// vulkan.h pulls in windows.h on Windows, so we need to include our replacement header first +#include "common/RedtapeWindows.h" + +#endif + +#if defined(VULKAN_USE_X11) +#define VK_USE_PLATFORM_XLIB_KHR +#endif + +#if defined(VULKAN_USE_WAYLAND) +#define VK_USE_PLATFORM_WAYLAND_KHR +#endif + +#if defined(__ANDROID__) +#define VK_USE_PLATFORM_ANDROID_KHR +#endif + +#if defined(__APPLE__) +// #define VK_USE_PLATFORM_MACOS_MVK +#define VK_USE_PLATFORM_METAL_EXT +#endif + +#include "vulkan/vulkan.h" + +// Currently, exclusive fullscreen is only supported on Windows. +#if defined(WIN32) +#define SUPPORTS_VULKAN_EXCLUSIVE_FULLSCREEN 1 +#endif + +#if defined(VULKAN_USE_X11) + +// This breaks a bunch of our code. They shouldn't be #defines in the first place. +#ifdef None +#undef None +#endif +#ifdef Status +#undef Status +#endif +#ifdef CursorShape +#undef CursorShape +#endif +#ifdef KeyPress +#undef KeyPress +#endif +#ifdef KeyRelease +#undef KeyRelease +#endif +#ifdef FocusIn +#undef FocusIn +#endif +#ifdef FocusOut +#undef FocusOut +#endif +#ifdef FontChange +#undef FontChange +#endif +#ifdef Expose +#undef Expose +#endif +#ifdef Unsorted +#undef Unsorted +#endif +#ifdef Bool +#undef Bool +#endif + +#endif + +#include "EntryPoints.h" + +// We include vk_mem_alloc globally, so we don't accidentally include it before the vulkan header somewhere. +#ifdef __clang__ +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wnullability-completeness" +#endif + +#include "vk_mem_alloc.h" + +#ifdef __clang__ +#pragma clang diagnostic pop +#endif + +namespace Vulkan +{ + bool LoadVulkanLibrary(); + bool LoadVulkanInstanceFunctions(VkInstance instance); + bool LoadVulkanDeviceFunctions(VkDevice device); + void UnloadVulkanLibrary(); + void ResetVulkanLibraryFunctionPointers(); +} // namespace Vulkan diff --git a/common/Vulkan/ShaderCache.cpp b/common/Vulkan/ShaderCache.cpp new file mode 100644 index 0000000000..ffa32ae461 --- /dev/null +++ b/common/Vulkan/ShaderCache.cpp @@ -0,0 +1,534 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . 
+ */ + +#include "common/Vulkan/ShaderCache.h" +#include "common/Vulkan/ShaderCompiler.h" +#include "common/Vulkan/Context.h" +#include "common/Vulkan/Util.h" +#include "common/Assertions.h" +#include "common/Console.h" +#include "common/FileSystem.h" +#include "common/MD5Digest.h" + +// TODO: store the driver version and stuff in the shader header + +std::unique_ptr g_vulkan_shader_cache; + +namespace Vulkan +{ + using ShaderCompiler::SPIRVCodeType; + using ShaderCompiler::SPIRVCodeVector; + +#pragma pack(push, 4) + struct VK_PIPELINE_CACHE_HEADER + { + u32 header_length; + u32 header_version; + u32 vendor_id; + u32 device_id; + u8 uuid[VK_UUID_SIZE]; + }; + + struct CacheIndexEntry + { + u64 source_hash_low; + u64 source_hash_high; + u32 source_length; + u32 shader_type; + u32 file_offset; + u32 blob_size; + }; +#pragma pack(pop) + + static bool ValidatePipelineCacheHeader(const VK_PIPELINE_CACHE_HEADER& header) + { + if (header.header_length < sizeof(VK_PIPELINE_CACHE_HEADER)) + { + Console.Error("Pipeline cache failed validation: Invalid header length"); + return false; + } + + if (header.header_version != VK_PIPELINE_CACHE_HEADER_VERSION_ONE) + { + Console.Error("Pipeline cache failed validation: Invalid header version"); + return false; + } + + if (header.vendor_id != g_vulkan_context->GetDeviceProperties().vendorID) + { + Console.Error("Pipeline cache failed validation: Incorrect vendor ID (file: 0x%X, device: 0x%X)", + header.vendor_id, g_vulkan_context->GetDeviceProperties().vendorID); + return false; + } + + if (header.device_id != g_vulkan_context->GetDeviceProperties().deviceID) + { + Console.Error("Pipeline cache failed validation: Incorrect device ID (file: 0x%X, device: 0x%X)", + header.device_id, g_vulkan_context->GetDeviceProperties().deviceID); + return false; + } + + if (std::memcmp(header.uuid, g_vulkan_context->GetDeviceProperties().pipelineCacheUUID, VK_UUID_SIZE) != 0) + { + Console.Error("Pipeline cache failed validation: Incorrect UUID"); + return false; + } + + return true; + } + + static void FillPipelineCacheHeader(VK_PIPELINE_CACHE_HEADER* header) + { + header->header_length = sizeof(VK_PIPELINE_CACHE_HEADER); + header->header_version = VK_PIPELINE_CACHE_HEADER_VERSION_ONE; + header->vendor_id = g_vulkan_context->GetDeviceProperties().vendorID; + header->device_id = g_vulkan_context->GetDeviceProperties().deviceID; + std::memcpy(header->uuid, g_vulkan_context->GetDeviceProperties().pipelineCacheUUID, VK_UUID_SIZE); + } + + ShaderCache::ShaderCache() = default; + + ShaderCache::~ShaderCache() + { + CloseShaderCache(); + FlushPipelineCache(); + ClosePipelineCache(); + } + + bool ShaderCache::CacheIndexKey::operator==(const CacheIndexKey& key) const + { + return (source_hash_low == key.source_hash_low && source_hash_high == key.source_hash_high && + source_length == key.source_length && shader_type == key.shader_type); + } + + bool ShaderCache::CacheIndexKey::operator!=(const CacheIndexKey& key) const + { + return (source_hash_low != key.source_hash_low || source_hash_high != key.source_hash_high || + source_length != key.source_length || shader_type != key.shader_type); + } + + void ShaderCache::Create(std::string_view base_path, u32 version, bool debug) + { + pxAssert(!g_vulkan_shader_cache); + g_vulkan_shader_cache.reset(new ShaderCache()); + g_vulkan_shader_cache->Open(base_path, version, debug); + } + + void ShaderCache::Destroy() { g_vulkan_shader_cache.reset(); } + + void ShaderCache::Open(std::string_view directory, u32 version, bool debug) + { + m_version 
= version; + m_debug = debug; + + if (!directory.empty()) + { + m_pipeline_cache_filename = GetPipelineCacheBaseFileName(directory, debug); + + const std::string base_filename = GetShaderCacheBaseFileName(directory, debug); + const std::string index_filename = base_filename + ".idx"; + const std::string blob_filename = base_filename + ".bin"; + + if (!ReadExistingShaderCache(index_filename, blob_filename)) + CreateNewShaderCache(index_filename, blob_filename); + + if (!ReadExistingPipelineCache()) + CreateNewPipelineCache(); + } + else + { + CreateNewPipelineCache(); + } + } + + VkPipelineCache ShaderCache::GetPipelineCache(bool set_dirty /*= true*/) + { + if (m_pipeline_cache == VK_NULL_HANDLE) + return VK_NULL_HANDLE; + + m_pipeline_cache_dirty |= set_dirty; + return m_pipeline_cache; + } + + bool ShaderCache::CreateNewShaderCache(const std::string& index_filename, const std::string& blob_filename) + { + if (FileSystem::FileExists(index_filename.c_str())) + { + Console.Warning("Removing existing index file '%s'", index_filename.c_str()); + FileSystem::DeleteFilePath(index_filename.c_str()); + } + if (FileSystem::FileExists(blob_filename.c_str())) + { + Console.Warning("Removing existing blob file '%s'", blob_filename.c_str()); + FileSystem::DeleteFilePath(blob_filename.c_str()); + } + + m_index_file = FileSystem::OpenCFile(index_filename.c_str(), "wb"); + if (!m_index_file) + { + Console.Error("Failed to open index file '%s' for writing", index_filename.c_str()); + return false; + } + + const u32 index_version = FILE_VERSION; + VK_PIPELINE_CACHE_HEADER header; + FillPipelineCacheHeader(&header); + + if (std::fwrite(&index_version, sizeof(index_version), 1, m_index_file) != 1 || + std::fwrite(&m_version, sizeof(m_version), 1, m_index_file) != 1 || + std::fwrite(&header, sizeof(header), 1, m_index_file) != 1) + { + Console.Error("Failed to write header to index file '%s'", index_filename.c_str()); + std::fclose(m_index_file); + m_index_file = nullptr; + FileSystem::DeleteFilePath(index_filename.c_str()); + return false; + } + + m_blob_file = FileSystem::OpenCFile(blob_filename.c_str(), "w+b"); + if (!m_blob_file) + { + Console.Error("Failed to open blob file '%s' for writing", blob_filename.c_str()); + std::fclose(m_index_file); + m_index_file = nullptr; + FileSystem::DeleteFilePath(index_filename.c_str()); + return false; + } + + return true; + } + + bool ShaderCache::ReadExistingShaderCache(const std::string& index_filename, const std::string& blob_filename) + { + m_index_file = FileSystem::OpenCFile(index_filename.c_str(), "r+b"); + if (!m_index_file) + return false; + + u32 file_version = 0; + u32 data_version = 0; + if (std::fread(&file_version, sizeof(file_version), 1, m_index_file) != 1 || file_version != FILE_VERSION || + std::fread(&data_version, sizeof(data_version), 1, m_index_file) != 1 || data_version != m_version) + { + Console.Error("Bad file/data version in '%s'", index_filename.c_str()); + std::fclose(m_index_file); + m_index_file = nullptr; + return false; + } + + VK_PIPELINE_CACHE_HEADER header; + if (std::fread(&header, sizeof(header), 1, m_index_file) != 1 || !ValidatePipelineCacheHeader(header)) + { + Console.Error("Mismatched pipeline cache header in '%s' (GPU/driver changed?)", index_filename.c_str()); + std::fclose(m_index_file); + m_index_file = nullptr; + return false; + } + + m_blob_file = FileSystem::OpenCFile(blob_filename.c_str(), "a+b"); + if (!m_blob_file) + { + Console.Error("Blob file '%s' is missing", blob_filename.c_str()); + std::fclose(m_index_file); 
+ m_index_file = nullptr; + return false; + } + + std::fseek(m_blob_file, 0, SEEK_END); + const u32 blob_file_size = static_cast(std::ftell(m_blob_file)); + + for (;;) + { + CacheIndexEntry entry; + if (std::fread(&entry, sizeof(entry), 1, m_index_file) != 1 || + (entry.file_offset + entry.blob_size) > blob_file_size) + { + if (std::feof(m_index_file)) + break; + + Console.Error("Failed to read entry from '%s', corrupt file?", index_filename.c_str()); + m_index.clear(); + std::fclose(m_blob_file); + m_blob_file = nullptr; + std::fclose(m_index_file); + m_index_file = nullptr; + return false; + } + + const CacheIndexKey key{entry.source_hash_low, entry.source_hash_high, entry.source_length, + static_cast(entry.shader_type)}; + const CacheIndexData data{entry.file_offset, entry.blob_size}; + m_index.emplace(key, data); + } + + // ensure we don't write before seeking + std::fseek(m_index_file, 0, SEEK_END); + + Console.WriteLn("Read %zu entries from '%s'", m_index.size(), index_filename.c_str()); + return true; + } + + void ShaderCache::CloseShaderCache() + { + if (m_index_file) + { + std::fclose(m_index_file); + m_index_file = nullptr; + } + if (m_blob_file) + { + std::fclose(m_blob_file); + m_blob_file = nullptr; + } + } + + bool ShaderCache::CreateNewPipelineCache() + { + if (!m_pipeline_cache_filename.empty() && FileSystem::FileExists(m_pipeline_cache_filename.c_str())) + { + Console.Warning("Removing existing pipeline cache '%s'", m_pipeline_cache_filename.c_str()); + FileSystem::DeleteFilePath(m_pipeline_cache_filename.c_str()); + } + + const VkPipelineCacheCreateInfo ci{VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, nullptr, 0, 0, nullptr}; + VkResult res = vkCreatePipelineCache(g_vulkan_context->GetDevice(), &ci, nullptr, &m_pipeline_cache); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreatePipelineCache() failed: "); + return false; + } + + m_pipeline_cache_dirty = true; + return true; + } + + bool ShaderCache::ReadExistingPipelineCache() + { + std::optional> data = FileSystem::ReadBinaryFile(m_pipeline_cache_filename.c_str()); + if (!data.has_value()) + return false; + + if (data->size() < sizeof(VK_PIPELINE_CACHE_HEADER)) + { + Console.Error("Pipeline cache at '%s' is too small", m_pipeline_cache_filename.c_str()); + return false; + } + + VK_PIPELINE_CACHE_HEADER header; + std::memcpy(&header, data->data(), sizeof(header)); + if (!ValidatePipelineCacheHeader(header)) + return false; + + const VkPipelineCacheCreateInfo ci{ + VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, nullptr, 0, data->size(), data->data()}; + VkResult res = vkCreatePipelineCache(g_vulkan_context->GetDevice(), &ci, nullptr, &m_pipeline_cache); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreatePipelineCache() failed: "); + return false; + } + + return true; + } + + bool ShaderCache::FlushPipelineCache() + { + if (m_pipeline_cache == VK_NULL_HANDLE || !m_pipeline_cache_dirty || m_pipeline_cache_filename.empty()) + return false; + + size_t data_size; + VkResult res = vkGetPipelineCacheData(g_vulkan_context->GetDevice(), m_pipeline_cache, &data_size, nullptr); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkGetPipelineCacheData() failed: "); + return false; + } + + std::vector data(data_size); + res = vkGetPipelineCacheData(g_vulkan_context->GetDevice(), m_pipeline_cache, &data_size, data.data()); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkGetPipelineCacheData() (2) failed: "); + return false; + } + + data.resize(data_size); + + // Save disk writes if it hasn't changed, 
think of the poor SSDs. + FILESYSTEM_STAT_DATA sd; + if (!FileSystem::StatFile(m_pipeline_cache_filename.c_str(), &sd) || sd.Size != static_cast(data_size)) + { + Console.WriteLn("Writing %zu bytes to '%s'", data_size, m_pipeline_cache_filename.c_str()); + if (!FileSystem::WriteBinaryFile(m_pipeline_cache_filename.c_str(), data.data(), data.size())) + { + Console.Error("Failed to write pipeline cache to '%s'", m_pipeline_cache_filename.c_str()); + return false; + } + } + else + { + Console.WriteLn( + "Skipping updating pipeline cache '%s' due to no changes.", m_pipeline_cache_filename.c_str()); + } + + m_pipeline_cache_dirty = false; + return true; + } + + void ShaderCache::ClosePipelineCache() + { + if (m_pipeline_cache == VK_NULL_HANDLE) + return; + + vkDestroyPipelineCache(g_vulkan_context->GetDevice(), m_pipeline_cache, nullptr); + m_pipeline_cache = VK_NULL_HANDLE; + } + + std::string ShaderCache::GetShaderCacheBaseFileName(const std::string_view& base_path, bool debug) + { + std::string base_filename(base_path); + base_filename += FS_OSPATH_SEPARATOR_STR "vulkan_shaders"; + + if (debug) + base_filename += "_debug"; + + return base_filename; + } + + std::string ShaderCache::GetPipelineCacheBaseFileName(const std::string_view& base_path, bool debug) + { + std::string base_filename(base_path); + base_filename += FS_OSPATH_SEPARATOR_STR "vulkan_pipelines"; + + if (debug) + base_filename += "_debug"; + + base_filename += ".bin"; + return base_filename; + } + + ShaderCache::CacheIndexKey ShaderCache::GetCacheKey(ShaderCompiler::Type type, const std::string_view& shader_code) + { + union HashParts + { + struct + { + u64 hash_low; + u64 hash_high; + }; + u8 hash[16]; + }; + HashParts h; + + MD5Digest digest; + digest.Update(shader_code.data(), static_cast(shader_code.length())); + digest.Final(h.hash); + + return CacheIndexKey{h.hash_low, h.hash_high, static_cast(shader_code.length()), type}; + } + + std::optional ShaderCache::GetShaderSPV( + ShaderCompiler::Type type, std::string_view shader_code) + { + const auto key = GetCacheKey(type, shader_code); + auto iter = m_index.find(key); + if (iter == m_index.end()) + return CompileAndAddShaderSPV(key, shader_code); + + SPIRVCodeVector spv(iter->second.blob_size); + if (std::fseek(m_blob_file, iter->second.file_offset, SEEK_SET) != 0 || + std::fread(spv.data(), sizeof(SPIRVCodeType), iter->second.blob_size, m_blob_file) != + iter->second.blob_size) + { + Console.Error("Read blob from file failed, recompiling"); + return ShaderCompiler::CompileShader(type, shader_code, m_debug); + } + + return spv; + } + + VkShaderModule ShaderCache::GetShaderModule(ShaderCompiler::Type type, std::string_view shader_code) + { + std::optional spv = GetShaderSPV(type, shader_code); + if (!spv.has_value()) + return VK_NULL_HANDLE; + + const VkShaderModuleCreateInfo ci{ + VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, nullptr, 0, spv->size() * sizeof(SPIRVCodeType), spv->data()}; + + VkShaderModule mod; + VkResult res = vkCreateShaderModule(g_vulkan_context->GetDevice(), &ci, nullptr, &mod); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateShaderModule() failed: "); + return VK_NULL_HANDLE; + } + + return mod; + } + + VkShaderModule ShaderCache::GetVertexShader(std::string_view shader_code) + { + return GetShaderModule(ShaderCompiler::Type::Vertex, std::move(shader_code)); + } + + VkShaderModule ShaderCache::GetGeometryShader(std::string_view shader_code) + { + return GetShaderModule(ShaderCompiler::Type::Geometry, std::move(shader_code)); + } + + 
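For context on how these thin wrappers are consumed: callers go through the global cache created by ShaderCache::Create() and own the returned module, since each call produces a fresh VkShaderModule. A minimal usage sketch follows; the GLSL source is illustrative only and not part of this patch:

```cpp
// Minimal usage sketch: fetch a cached-or-compiled vertex shader module from
// the global cache, then destroy it once the pipeline has been created.
const char* vs_source =
	"#version 450 core\n"
	"void main() { gl_Position = vec4(0.0, 0.0, 0.0, 1.0); }\n";

VkShaderModule vs = g_vulkan_shader_cache->GetVertexShader(vs_source);
if (vs == VK_NULL_HANDLE)
{
	Console.Error("Vertex shader compilation failed");
	return;
}

// ... build the pipeline with 'vs', then release the module; the caller owns it.
vkDestroyShaderModule(g_vulkan_context->GetDevice(), vs, nullptr);
```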
VkShaderModule ShaderCache::GetFragmentShader(std::string_view shader_code) + { + return GetShaderModule(ShaderCompiler::Type::Fragment, std::move(shader_code)); + } + + VkShaderModule ShaderCache::GetComputeShader(std::string_view shader_code) + { + return GetShaderModule(ShaderCompiler::Type::Compute, std::move(shader_code)); + } + + std::optional ShaderCache::CompileAndAddShaderSPV( + const CacheIndexKey& key, std::string_view shader_code) + { + std::optional spv = ShaderCompiler::CompileShader(key.shader_type, shader_code, m_debug); + if (!spv.has_value()) + return {}; + + if (!m_blob_file || std::fseek(m_blob_file, 0, SEEK_END) != 0) + return spv; + + CacheIndexData data; + data.file_offset = static_cast(std::ftell(m_blob_file)); + data.blob_size = static_cast(spv->size()); + + CacheIndexEntry entry = {}; + entry.source_hash_low = key.source_hash_low; + entry.source_hash_high = key.source_hash_high; + entry.source_length = key.source_length; + entry.shader_type = static_cast(key.shader_type); + entry.blob_size = data.blob_size; + entry.file_offset = data.file_offset; + + if (std::fwrite(spv->data(), sizeof(SPIRVCodeType), entry.blob_size, m_blob_file) != entry.blob_size || + std::fflush(m_blob_file) != 0 || std::fwrite(&entry, sizeof(entry), 1, m_index_file) != 1 || + std::fflush(m_index_file) != 0) + { + Console.Error("Failed to write shader blob to file"); + return spv; + } + + m_index.emplace(key, data); + return spv; + } +} // namespace Vulkan \ No newline at end of file diff --git a/common/Vulkan/ShaderCache.h b/common/Vulkan/ShaderCache.h new file mode 100644 index 0000000000..06a0c3969a --- /dev/null +++ b/common/Vulkan/ShaderCache.h @@ -0,0 +1,118 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . + */ + +#pragma once +#include "common/Vulkan/ShaderCompiler.h" +#include "common/Pcsx2Defs.h" +#include "common/HashCombine.h" +#include "common/Vulkan/Loader.h" +#include +#include +#include +#include +#include +#include +#include + +namespace Vulkan +{ + class ShaderCache + { + public: + ~ShaderCache(); + + static void Create(std::string_view directory, u32 version, bool debug); + static void Destroy(); + + /// Returns a handle to the pipeline cache. Set set_dirty to true if you are planning on writing to it externally. + VkPipelineCache GetPipelineCache(bool set_dirty = true); + + /// Writes pipeline cache to file, saving all newly compiled pipelines. 
+ bool FlushPipelineCache(); + + std::optional GetShaderSPV( + ShaderCompiler::Type type, std::string_view shader_code); + VkShaderModule GetShaderModule(ShaderCompiler::Type type, std::string_view shader_code); + + VkShaderModule GetVertexShader(std::string_view shader_code); + VkShaderModule GetGeometryShader(std::string_view shader_code); + VkShaderModule GetFragmentShader(std::string_view shader_code); + VkShaderModule GetComputeShader(std::string_view shader_code); + + private: + static constexpr u32 FILE_VERSION = 2; + + struct CacheIndexKey + { + u64 source_hash_low; + u64 source_hash_high; + u32 source_length; + ShaderCompiler::Type shader_type; + + bool operator==(const CacheIndexKey& key) const; + bool operator!=(const CacheIndexKey& key) const; + }; + + struct CacheIndexEntryHasher + { + std::size_t operator()(const CacheIndexKey& e) const noexcept + { + std::size_t h = 0; + HashCombine(h, e.source_hash_low, e.source_hash_high, e.source_length, e.shader_type); + return h; + } + }; + + struct CacheIndexData + { + u32 file_offset; + u32 blob_size; + }; + + using CacheIndex = std::unordered_map; + + ShaderCache(); + + static std::string GetShaderCacheBaseFileName(const std::string_view& base_path, bool debug); + static std::string GetPipelineCacheBaseFileName(const std::string_view& base_path, bool debug); + static CacheIndexKey GetCacheKey(ShaderCompiler::Type type, const std::string_view& shader_code); + + void Open(std::string_view base_path, u32 version, bool debug); + + bool CreateNewShaderCache(const std::string& index_filename, const std::string& blob_filename); + bool ReadExistingShaderCache(const std::string& index_filename, const std::string& blob_filename); + void CloseShaderCache(); + + bool CreateNewPipelineCache(); + bool ReadExistingPipelineCache(); + void ClosePipelineCache(); + + std::optional CompileAndAddShaderSPV( + const CacheIndexKey& key, std::string_view shader_code); + + std::FILE* m_index_file = nullptr; + std::FILE* m_blob_file = nullptr; + std::string m_pipeline_cache_filename; + + CacheIndex m_index; + + VkPipelineCache m_pipeline_cache = VK_NULL_HANDLE; + u32 m_version = 0; + bool m_debug = false; + bool m_pipeline_cache_dirty = false; + }; +} // namespace Vulkan + +extern std::unique_ptr g_vulkan_shader_cache; diff --git a/common/Vulkan/ShaderCompiler.cpp b/common/Vulkan/ShaderCompiler.cpp new file mode 100644 index 0000000000..4d03a1ce0b --- /dev/null +++ b/common/Vulkan/ShaderCompiler.cpp @@ -0,0 +1,192 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . 
+ */ + +#include "common/Vulkan/ShaderCompiler.h" +#include "common/Vulkan/Util.h" +#include "common/Assertions.h" +#include "common/Console.h" +#include "common/StringUtil.h" +#include +#include +#include + +// glslang includes +#include "SPIRV/GlslangToSpv.h" +#include "StandAlone/ResourceLimits.h" +#include "glslang/Public/ShaderLang.h" + +namespace Vulkan::ShaderCompiler +{ + // Registers itself for cleanup via atexit + bool InitializeGlslang(); + + static unsigned s_next_bad_shader_id = 1; + + static bool glslang_initialized = false; + + static std::optional CompileShaderToSPV( + EShLanguage stage, const char* stage_filename, std::string_view source) + { + if (!InitializeGlslang()) + return std::nullopt; + + std::unique_ptr shader = std::make_unique(stage); + std::unique_ptr program; + glslang::TShader::ForbidIncluder includer; + EProfile profile = ECoreProfile; + EShMessages messages = static_cast(EShMsgDefault | EShMsgSpvRules | EShMsgVulkanRules); + int default_version = 450; + + std::string full_source_code; + const char* pass_source_code = source.data(); + int pass_source_code_length = static_cast(source.size()); + shader->setStringsWithLengths(&pass_source_code, &pass_source_code_length, 1); + + auto DumpBadShader = [&](const char* msg) { + std::string filename = StringUtil::StdStringFromFormat("bad_shader_%u.txt", s_next_bad_shader_id++); + Console.Error("CompileShaderToSPV: %s, writing to %s", msg, filename.c_str()); + + std::ofstream ofs(filename.c_str(), std::ofstream::out | std::ofstream::binary); + if (ofs.is_open()) + { + ofs << source; + ofs << "\n"; + + ofs << msg << std::endl; + ofs << "Shader Info Log:" << std::endl; + ofs << shader->getInfoLog() << std::endl; + ofs << shader->getInfoDebugLog() << std::endl; + if (program) + { + ofs << "Program Info Log:" << std::endl; + ofs << program->getInfoLog() << std::endl; + ofs << program->getInfoDebugLog() << std::endl; + } + + ofs.close(); + } + }; + + if (!shader->parse( + &glslang::DefaultTBuiltInResource, default_version, profile, false, true, messages, includer)) + { + DumpBadShader("Failed to parse shader"); + return std::nullopt; + } + + // Even though there's only a single shader, we still need to link it to generate SPV + program = std::make_unique(); + program->addShader(shader.get()); + if (!program->link(messages)) + { + DumpBadShader("Failed to link program"); + return std::nullopt; + } + + glslang::TIntermediate* intermediate = program->getIntermediate(stage); + if (!intermediate) + { + DumpBadShader("Failed to generate SPIR-V"); + return std::nullopt; + } + + SPIRVCodeVector out_code; + spv::SpvBuildLogger logger; + glslang::GlslangToSpv(*intermediate, out_code, &logger); + + // Write out messages + // Temporary: skip if it contains "Warning, version 450 is not yet complete; most version-specific + // features are present, but some are missing." 
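The magic lengths in the checks that follow (108 and 25 characters) exist only to swallow the stock glslang notice quoted in the comment above. A less brittle alternative, shown here as a hypothetical helper rather than what the patch does, would match the known message explicitly:

```cpp
#include <cstring>

// Hypothetical alternative to the length thresholds below: forward every log
// except the known "version 450 is not yet complete" glslang notice.
static void LogShaderInfo(const char* prefix, const char* log)
{
	if (!log || *log == '\0' || std::strstr(log, "version 450 is not yet complete"))
		return;

	Console.Warning("%s: %s", prefix, log);
}
```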
+ if (std::strlen(shader->getInfoLog()) > 108) + Console.Warning("Shader info log: %s", shader->getInfoLog()); + if (std::strlen(shader->getInfoDebugLog()) > 0) + Console.Warning("Shader debug info log: %s", shader->getInfoDebugLog()); + if (std::strlen(program->getInfoLog()) > 25) + Console.Warning("Program info log: %s", program->getInfoLog()); + if (std::strlen(program->getInfoDebugLog()) > 0) + Console.Warning("Program debug info log: %s", program->getInfoDebugLog()); + std::string spv_messages = logger.getAllMessages(); + if (!spv_messages.empty()) + Console.Warning("SPIR-V conversion messages: %s", spv_messages.c_str()); + + return out_code; + } + + bool InitializeGlslang() + { + if (glslang_initialized) + return true; + + if (!glslang::InitializeProcess()) + { + pxFailRel("Failed to initialize glslang shader compiler"); + return false; + } + + std::atexit(DeinitializeGlslang); + glslang_initialized = true; + return true; + } + + void DeinitializeGlslang() + { + if (!glslang_initialized) + return; + + glslang::FinalizeProcess(); + glslang_initialized = false; + } + + std::optional CompileVertexShader(std::string_view source_code) + { + return CompileShaderToSPV(EShLangVertex, "vs", source_code); + } + + std::optional CompileGeometryShader(std::string_view source_code) + { + return CompileShaderToSPV(EShLangGeometry, "gs", source_code); + } + + std::optional CompileFragmentShader(std::string_view source_code) + { + return CompileShaderToSPV(EShLangFragment, "ps", source_code); + } + + std::optional CompileComputeShader(std::string_view source_code) + { + return CompileShaderToSPV(EShLangCompute, "cs", source_code); + } + + std::optional CompileShader(Type type, std::string_view source_code, bool debug) + { + switch (type) + { + case Type::Vertex: + return CompileShaderToSPV(EShLangVertex, "vs", source_code); + + case Type::Geometry: + return CompileShaderToSPV(EShLangGeometry, "gs", source_code); + + case Type::Fragment: + return CompileShaderToSPV(EShLangFragment, "ps", source_code); + + case Type::Compute: + return CompileShaderToSPV(EShLangCompute, "cs", source_code); + + default: + return std::nullopt; + } + } +} // namespace Vulkan::ShaderCompiler diff --git a/common/Vulkan/ShaderCompiler.h b/common/Vulkan/ShaderCompiler.h new file mode 100644 index 0000000000..52b663833b --- /dev/null +++ b/common/Vulkan/ShaderCompiler.h @@ -0,0 +1,53 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . + */ + +#pragma once + +#include "common/Pcsx2Defs.h" +#include +#include +#include + +namespace Vulkan::ShaderCompiler +{ + // Shader types + enum class Type + { + Vertex, + Geometry, + Fragment, + Compute + }; + + void DeinitializeGlslang(); + + // SPIR-V compiled code type + using SPIRVCodeType = u32; + using SPIRVCodeVector = std::vector; + + // Compile a vertex shader to SPIR-V. 
+ std::optional CompileVertexShader(std::string_view source_code); + + // Compile a geometry shader to SPIR-V. + std::optional CompileGeometryShader(std::string_view source_code); + + // Compile a fragment shader to SPIR-V. + std::optional CompileFragmentShader(std::string_view source_code); + + // Compile a compute shader to SPIR-V. + std::optional CompileComputeShader(std::string_view source_code); + + std::optional CompileShader(Type type, std::string_view source_code, bool debug); +} // namespace Vulkan::ShaderCompiler diff --git a/common/Vulkan/StreamBuffer.cpp b/common/Vulkan/StreamBuffer.cpp new file mode 100644 index 0000000000..174448fc51 --- /dev/null +++ b/common/Vulkan/StreamBuffer.cpp @@ -0,0 +1,379 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . + */ + +#include "common/Vulkan/StreamBuffer.h" +#include "common/Vulkan/Context.h" +#include "common/Vulkan/Util.h" +#include "common/Align.h" +#include "common/Assertions.h" +#include "common/Console.h" + +namespace Vulkan +{ + StreamBuffer::StreamBuffer() = default; + + StreamBuffer::StreamBuffer(StreamBuffer&& move) + : m_usage(move.m_usage) + , m_size(move.m_size) + , m_current_offset(move.m_current_offset) + , m_current_space(move.m_current_space) + , m_current_gpu_position(move.m_current_gpu_position) + , m_buffer(move.m_buffer) + , m_memory(move.m_memory) + , m_host_pointer(move.m_host_pointer) + , m_tracked_fences(std::move(move.m_tracked_fences)) + , m_coherent_mapping(move.m_coherent_mapping) + { + } + + StreamBuffer::~StreamBuffer() + { + if (IsValid()) + Destroy(true); + } + + StreamBuffer& StreamBuffer::operator=(StreamBuffer&& move) + { + if (IsValid()) + Destroy(true); + + std::swap(m_usage, move.m_usage); + std::swap(m_size, move.m_size); + std::swap(m_current_offset, move.m_current_offset); + std::swap(m_current_space, move.m_current_space); + std::swap(m_current_gpu_position, move.m_current_gpu_position); + std::swap(m_buffer, move.m_buffer); + std::swap(m_memory, move.m_memory); + std::swap(m_host_pointer, move.m_host_pointer); + std::swap(m_tracked_fences, move.m_tracked_fences); + std::swap(m_coherent_mapping, move.m_coherent_mapping); + + return *this; + } + + bool StreamBuffer::Create(VkBufferUsageFlags usage, u32 size) + { + // TODO: Move this over to vk_mem_alloc. 
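The TODO above points at replacing this hand-rolled vkCreateBuffer / vkAllocateMemory / vkMapMemory sequence with Vulkan Memory Allocator, which Loader.h already pulls in through vk_mem_alloc.h. A rough, hypothetical sketch of the equivalent persistently mapped allocation, reusing this function's usage/size parameters and assuming a VmaAllocator handle ('allocator') created and owned elsewhere:

```cpp
// Hypothetical VMA replacement for the manual allocation below (not part of
// this patch); 'allocator' is assumed to be created and owned by the context.
const VkBufferCreateInfo bci = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, nullptr, 0,
	static_cast<VkDeviceSize>(size), usage, VK_SHARING_MODE_EXCLUSIVE, 0, nullptr};

VmaAllocationCreateInfo aci = {};
aci.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT; // keep the buffer persistently mapped
aci.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;      // host-visible upload memory

VkBuffer buffer = VK_NULL_HANDLE;
VmaAllocation allocation = nullptr;
VmaAllocationInfo ai = {};
VkResult res = vmaCreateBuffer(allocator, &bci, &aci, &buffer, &allocation, &ai);
if (res == VK_SUCCESS)
{
	// ai.pMappedData replaces the explicit vkMapMemory() call, and
	// vmaDestroyBuffer(allocator, buffer, allocation) later frees both.
}
```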
+ + // Create the buffer descriptor + const VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, nullptr, 0, + static_cast(size), usage, VK_SHARING_MODE_EXCLUSIVE, 0, nullptr}; + + VkBuffer buffer = VK_NULL_HANDLE; + VkResult res = vkCreateBuffer(g_vulkan_context->GetDevice(), &buffer_create_info, nullptr, &buffer); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateBuffer failed: "); + return false; + } + + // Get memory requirements (types etc) for this buffer + VkMemoryRequirements memory_requirements; + vkGetBufferMemoryRequirements(g_vulkan_context->GetDevice(), buffer, &memory_requirements); + + // Aim for a coherent mapping if possible. + u32 memory_type_index = + g_vulkan_context->GetUploadMemoryType(memory_requirements.memoryTypeBits, &m_coherent_mapping); + + // Allocate memory for backing this buffer + VkMemoryAllocateInfo memory_allocate_info = { + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // VkStructureType sType + nullptr, // const void* pNext + memory_requirements.size, // VkDeviceSize allocationSize + memory_type_index // uint32_t memoryTypeIndex + }; + VkDeviceMemory memory = VK_NULL_HANDLE; + res = vkAllocateMemory(g_vulkan_context->GetDevice(), &memory_allocate_info, nullptr, &memory); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkAllocateMemory failed: "); + vkDestroyBuffer(g_vulkan_context->GetDevice(), buffer, nullptr); + return false; + } + + // Bind memory to buffer + res = vkBindBufferMemory(g_vulkan_context->GetDevice(), buffer, memory, 0); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkBindBufferMemory failed: "); + vkDestroyBuffer(g_vulkan_context->GetDevice(), buffer, nullptr); + vkFreeMemory(g_vulkan_context->GetDevice(), memory, nullptr); + return false; + } + + // Map this buffer into user-space + void* mapped_ptr = nullptr; + res = vkMapMemory(g_vulkan_context->GetDevice(), memory, 0, size, 0, &mapped_ptr); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkMapMemory failed: "); + vkDestroyBuffer(g_vulkan_context->GetDevice(), buffer, nullptr); + vkFreeMemory(g_vulkan_context->GetDevice(), memory, nullptr); + return false; + } + + // Unmap current host pointer (if there was a previous buffer) + if (m_host_pointer) + vkUnmapMemory(g_vulkan_context->GetDevice(), m_memory); + + if (IsValid()) + Destroy(true); + + // Replace with the new buffer + m_usage = usage; + m_size = size; + m_buffer = buffer; + m_memory = memory; + m_host_pointer = reinterpret_cast(mapped_ptr); + m_current_offset = 0; + m_current_gpu_position = 0; + m_tracked_fences.clear(); + return true; + } + + void StreamBuffer::Destroy(bool defer) + { + if (m_host_pointer) + { + vkUnmapMemory(g_vulkan_context->GetDevice(), m_memory); + m_host_pointer = nullptr; + } + + if (m_buffer != VK_NULL_HANDLE) + { + if (defer) + g_vulkan_context->DeferBufferDestruction(m_buffer); + else + vkDestroyBuffer(g_vulkan_context->GetDevice(), m_buffer, nullptr); + m_buffer = VK_NULL_HANDLE; + } + if (m_memory != VK_NULL_HANDLE) + { + if (defer) + g_vulkan_context->DeferDeviceMemoryDestruction(m_memory); + else + vkFreeMemory(g_vulkan_context->GetDevice(), m_memory, nullptr); + m_memory = VK_NULL_HANDLE; + } + } + + bool StreamBuffer::ReserveMemory(u32 num_bytes, u32 alignment) + { + const u32 required_bytes = num_bytes + alignment; + + // Check for sane allocations + if (required_bytes > m_size) + { + Console.Error("Attempting to allocate %u bytes from a %u byte stream buffer", static_cast(num_bytes), + static_cast(m_size)); + pxFailRel("Stream buffer 
overflow"); + return false; + } + + UpdateGPUPosition(); + + // Is the GPU behind or up to date with our current offset? + if (m_current_offset >= m_current_gpu_position) + { + const u32 remaining_bytes = m_size - m_current_offset; + if (required_bytes <= remaining_bytes) + { + // Place at the current position, after the GPU position. + m_current_offset = Common::AlignUp(m_current_offset, alignment); + m_current_space = m_size - m_current_offset; + return true; + } + + // Check for space at the start of the buffer + // We use < here because we don't want to have the case of m_current_offset == + // m_current_gpu_position. That would mean the code above would assume the + // GPU has caught up to us, which it hasn't. + if (required_bytes < m_current_gpu_position) + { + // Reset offset to zero, since we're allocating behind the gpu now + m_current_offset = 0; + m_current_space = m_current_gpu_position - 1; + return true; + } + } + + // Is the GPU ahead of our current offset? + if (m_current_offset < m_current_gpu_position) + { + // We have from m_current_offset..m_current_gpu_position space to use. + const u32 remaining_bytes = m_current_gpu_position - m_current_offset; + if (required_bytes < remaining_bytes) + { + // Place at the current position, since this is still behind the GPU. + m_current_offset = Common::AlignUp(m_current_offset, alignment); + m_current_space = m_current_gpu_position - m_current_offset - 1; + return true; + } + } + + // Can we find a fence to wait on that will give us enough memory? + if (WaitForClearSpace(required_bytes)) + { + const u32 align_diff = Common::AlignUp(m_current_offset, alignment) - m_current_offset; + m_current_offset += align_diff; + m_current_space -= align_diff; + return true; + } + + // We tried everything we could, and still couldn't get anything. This means that too much space + // in the buffer is being used by the command buffer currently being recorded. Therefore, the + // only option is to execute it, and wait until it's done. + return false; + } + + void StreamBuffer::CommitMemory(u32 final_num_bytes) + { + pxAssert((m_current_offset + final_num_bytes) <= m_size); + pxAssert(final_num_bytes <= m_current_space); + + // For non-coherent mappings, flush the memory range + if (!m_coherent_mapping) + { + VkMappedMemoryRange range = { + VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, nullptr, m_memory, m_current_offset, final_num_bytes}; + vkFlushMappedMemoryRanges(g_vulkan_context->GetDevice(), 1, &range); + } + + m_current_offset += final_num_bytes; + m_current_space -= final_num_bytes; + UpdateCurrentFencePosition(); + } + + void StreamBuffer::UpdateCurrentFencePosition() + { + // Has the offset changed since the last fence? + const u64 counter = g_vulkan_context->GetCurrentFenceCounter(); + if (!m_tracked_fences.empty() && m_tracked_fences.back().first == counter) + { + // Still haven't executed a command buffer, so just update the offset. + m_tracked_fences.back().second = m_current_offset; + return; + } + + // New buffer, so update the GPU position while we're at it. 
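+        // Each tracked entry pairs a fence counter with the last offset written while that command
+        // buffer was being recorded; once the counter is signalled, UpdateGPUPosition() knows the GPU
+        // has consumed the buffer up to that offset and the space can be reclaimed.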
+ m_tracked_fences.emplace_back(counter, m_current_offset); + } + + void StreamBuffer::UpdateGPUPosition() + { + auto start = m_tracked_fences.begin(); + auto end = start; + + const u64 completed_counter = g_vulkan_context->GetCompletedFenceCounter(); + while (end != m_tracked_fences.end() && completed_counter >= end->first) + { + m_current_gpu_position = end->second; + ++end; + } + + if (start != end) + { + m_tracked_fences.erase(start, end); + if (m_current_offset == m_current_gpu_position) + { + // GPU is all caught up now. + m_current_offset = 0; + m_current_gpu_position = 0; + m_current_space = m_size; + } + } + } + + bool StreamBuffer::WaitForClearSpace(u32 num_bytes) + { + u32 new_offset = 0; + u32 new_space = 0; + u32 new_gpu_position = 0; + + auto iter = m_tracked_fences.begin(); + for (; iter != m_tracked_fences.end(); ++iter) + { + // Would this fence bring us in line with the GPU? + // This is the "last resort" case, where a command buffer execution has been forced + // after no additional data has been written to it, so we can assume that after the + // fence has been signaled the entire buffer is now consumed. + u32 gpu_position = iter->second; + if (m_current_offset == gpu_position) + { + new_offset = 0; + new_space = m_size; + new_gpu_position = 0; + break; + } + + // Assuming that we wait for this fence, are we allocating in front of the GPU? + if (m_current_offset > gpu_position) + { + // This would suggest the GPU has now followed us and wrapped around, so we have from + // m_current_position..m_size free, as well as and 0..gpu_position. + const u32 remaining_space_after_offset = m_size - m_current_offset; + if (remaining_space_after_offset >= num_bytes) + { + // Switch to allocating in front of the GPU, using the remainder of the buffer. + new_offset = m_current_offset; + new_space = m_size - m_current_offset; + new_gpu_position = gpu_position; + break; + } + + // We can wrap around to the start, behind the GPU, if there is enough space. + // We use > here because otherwise we'd end up lining up with the GPU, and then the + // allocator would assume that the GPU has consumed what we just wrote. + if (gpu_position > num_bytes) + { + new_offset = 0; + new_space = gpu_position - 1; + new_gpu_position = gpu_position; + break; + } + } + else + { + // We're currently allocating behind the GPU. This would give us between the current + // offset and the GPU position worth of space to work with. Again, > because we can't + // align the GPU position with the buffer offset. + u32 available_space_inbetween = gpu_position - m_current_offset; + if (available_space_inbetween > num_bytes) + { + // Leave the offset as-is, but update the GPU position. + new_offset = m_current_offset; + new_space = available_space_inbetween - 1; + new_gpu_position = gpu_position; + break; + } + } + } + + // Did any fences satisfy this condition? + // Has the command buffer been executed yet? If not, the caller should execute it. + if (iter == m_tracked_fences.end() || iter->first == g_vulkan_context->GetCurrentFenceCounter()) + return false; + + // Wait until this fence is signaled. This will fire the callback, updating the GPU position. + g_vulkan_context->WaitForFenceCounter(iter->first); + m_tracked_fences.erase( + m_tracked_fences.begin(), m_current_offset == iter->second ? 
m_tracked_fences.end() : ++iter); + m_current_offset = new_offset; + m_current_space = new_space; + m_current_gpu_position = new_gpu_position; + return true; + } + +} // namespace Vulkan diff --git a/common/Vulkan/StreamBuffer.h b/common/Vulkan/StreamBuffer.h new file mode 100644 index 0000000000..4b26b720a6 --- /dev/null +++ b/common/Vulkan/StreamBuffer.h @@ -0,0 +1,75 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . + */ + +#pragma once + +#include "common/Pcsx2Defs.h" +#include "common/Vulkan/Loader.h" +#include +#include + +namespace Vulkan +{ + class StreamBuffer + { + public: + StreamBuffer(); + StreamBuffer(StreamBuffer&& move); + StreamBuffer(const StreamBuffer&) = delete; + ~StreamBuffer(); + + StreamBuffer& operator=(StreamBuffer&& move); + StreamBuffer& operator=(const StreamBuffer&) = delete; + + __fi bool IsValid() const { return (m_buffer != VK_NULL_HANDLE); } + __fi VkBuffer GetBuffer() const { return m_buffer; } + __fi const VkBuffer* GetBufferPointer() const { return &m_buffer; } + __fi VkDeviceMemory GetDeviceMemory() const { return m_memory; } + __fi void* GetHostPointer() const { return m_host_pointer; } + __fi void* GetCurrentHostPointer() const { return m_host_pointer + m_current_offset; } + __fi u32 GetCurrentSize() const { return m_size; } + __fi u32 GetCurrentSpace() const { return m_current_space; } + __fi u32 GetCurrentOffset() const { return m_current_offset; } + + bool Create(VkBufferUsageFlags usage, u32 size); + void Destroy(bool defer); + + bool ReserveMemory(u32 num_bytes, u32 alignment); + void CommitMemory(u32 final_num_bytes); + + private: + bool AllocateBuffer(VkBufferUsageFlags usage, u32 size); + void UpdateCurrentFencePosition(); + void UpdateGPUPosition(); + + // Waits for as many fences as needed to allocate num_bytes bytes from the buffer. + bool WaitForClearSpace(u32 num_bytes); + + VkBufferUsageFlags m_usage = 0; + u32 m_size = 0; + u32 m_current_offset = 0; + u32 m_current_space = 0; + u32 m_current_gpu_position = 0; + + VkBuffer m_buffer = VK_NULL_HANDLE; + VkDeviceMemory m_memory = VK_NULL_HANDLE; + u8* m_host_pointer = nullptr; + + // List of fences and the corresponding positions in the buffer + std::deque> m_tracked_fences; + + bool m_coherent_mapping = false; + }; +} // namespace Vulkan diff --git a/common/Vulkan/SwapChain.cpp b/common/Vulkan/SwapChain.cpp new file mode 100644 index 0000000000..17bd847eba --- /dev/null +++ b/common/Vulkan/SwapChain.cpp @@ -0,0 +1,866 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. 
+ * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . + */ + +#include "common/Vulkan/SwapChain.h" +#include "common/Assertions.h" +#include "common/Console.h" +#include "common/Vulkan/Context.h" +#include "common/Vulkan/Util.h" +#include +#include +#include + +#if defined(VK_USE_PLATFORM_XLIB_KHR) +#include +#endif + +#if defined(__APPLE__) +#include + +static bool CreateMetalLayer(WindowInfo* wi) +{ + id view = reinterpret_cast(wi->window_handle); + + Class clsCAMetalLayer = objc_getClass("CAMetalLayer"); + if (!clsCAMetalLayer) + { + Console.Error("Failed to get CAMetalLayer class."); + return false; + } + + // [CAMetalLayer layer] + id layer = reinterpret_cast(objc_msgSend)(objc_getClass("CAMetalLayer"), sel_getUid("layer")); + if (!layer) + { + Console.Error("Failed to create Metal layer."); + return false; + } + + // [view setWantsLayer:YES] + reinterpret_cast(objc_msgSend)(view, sel_getUid("setWantsLayer:"), YES); + + // [view setLayer:layer] + reinterpret_cast(objc_msgSend)(view, sel_getUid("setLayer:"), layer); + + // NSScreen* screen = [NSScreen mainScreen] + id screen = reinterpret_cast(objc_msgSend)(objc_getClass("NSScreen"), sel_getUid("mainScreen")); + + // CGFloat factor = [screen backingScaleFactor] + double factor = reinterpret_cast(objc_msgSend)(screen, sel_getUid("backingScaleFactor")); + + // layer.contentsScale = factor + reinterpret_cast(objc_msgSend)(layer, sel_getUid("setContentsScale:"), factor); + + // Store the layer pointer, that way MoltenVK doesn't call [NSView layer] outside the main thread. 
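+    // CreateVulkanSurface() below then wraps this layer in a VkMetalSurfaceCreateInfoEXT.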
+ wi->surface_handle = layer; + return true; +} + +static void DestroyMetalLayer(WindowInfo* wi) +{ + id view = reinterpret_cast(wi->window_handle); + id layer = reinterpret_cast(wi->surface_handle); + if (layer == nil) + return; + + reinterpret_cast(objc_msgSend)(view, sel_getUid("setLayer:"), nil); + reinterpret_cast(objc_msgSend)(view, sel_getUid("setWantsLayer:"), NO); + reinterpret_cast(objc_msgSend)(layer, sel_getUid("release")); + wi->surface_handle = nullptr; +} + +#endif + +namespace Vulkan +{ + SwapChain::SwapChain(const WindowInfo& wi, VkSurfaceKHR surface, bool vsync) + : m_window_info(wi) + , m_surface(surface) + , m_vsync_enabled(vsync) + { + } + + SwapChain::~SwapChain() + { + DestroySemaphores(); + DestroySwapChainImages(); + DestroySwapChain(); + DestroySurface(); + } + + static VkSurfaceKHR CreateDisplaySurface(VkInstance instance, VkPhysicalDevice physical_device, WindowInfo* wi) + { + Console.WriteLn("Trying to create a VK_KHR_display surface of %ux%u", wi->surface_width, wi->surface_height); + + u32 num_displays; + VkResult res = vkGetPhysicalDeviceDisplayPropertiesKHR(physical_device, &num_displays, nullptr); + if (res != VK_SUCCESS || num_displays == 0) + { + LOG_VULKAN_ERROR(res, "vkGetPhysicalDeviceDisplayPropertiesKHR() failed:"); + return {}; + } + + std::vector displays(num_displays); + res = vkGetPhysicalDeviceDisplayPropertiesKHR(physical_device, &num_displays, displays.data()); + if (res != VK_SUCCESS || num_displays != displays.size()) + { + LOG_VULKAN_ERROR(res, "vkGetPhysicalDeviceDisplayPropertiesKHR() failed:"); + return {}; + } + + for (u32 display_index = 0; display_index < num_displays; display_index++) + { + const VkDisplayPropertiesKHR& props = displays[display_index]; + DevCon.WriteLn("Testing display '%s'", props.displayName); + + u32 num_modes; + res = vkGetDisplayModePropertiesKHR(physical_device, props.display, &num_modes, nullptr); + if (res != VK_SUCCESS || num_modes == 0) + { + LOG_VULKAN_ERROR(res, "vkGetDisplayModePropertiesKHR() failed:"); + continue; + } + + std::vector modes(num_modes); + res = vkGetDisplayModePropertiesKHR(physical_device, props.display, &num_modes, modes.data()); + if (res != VK_SUCCESS || num_modes != modes.size()) + { + LOG_VULKAN_ERROR(res, "vkGetDisplayModePropertiesKHR() failed:"); + continue; + } + + const VkDisplayModePropertiesKHR* matched_mode = nullptr; + for (const VkDisplayModePropertiesKHR& mode : modes) + { + const float refresh_rate = static_cast(mode.parameters.refreshRate) / 1000.0f; + DevCon.WriteLn(" Mode %ux%u @ %f", mode.parameters.visibleRegion.width, + mode.parameters.visibleRegion.height, refresh_rate); + + if (!matched_mode && ((wi->surface_width == 0 && wi->surface_height == 0) || + (mode.parameters.visibleRegion.width == wi->surface_width && + mode.parameters.visibleRegion.height == wi->surface_height && + (wi->surface_refresh_rate == 0.0f || + std::abs(refresh_rate - wi->surface_refresh_rate) < 0.1f)))) + { + matched_mode = &mode; + } + } + + if (!matched_mode) + { + DevCon.WriteLn("No modes matched on '%s'", props.displayName); + continue; + } + + u32 num_planes; + res = vkGetPhysicalDeviceDisplayPlanePropertiesKHR(physical_device, &num_planes, nullptr); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR() failed:"); + continue; + } + if (num_planes == 0) + continue; + + std::vector planes(num_planes); + res = vkGetPhysicalDeviceDisplayPlanePropertiesKHR(physical_device, &num_planes, planes.data()); + if (res != VK_SUCCESS || num_planes != 
planes.size()) + { + LOG_VULKAN_ERROR(res, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR() failed:"); + continue; + } + + u32 plane_index = 0; + for (; plane_index < num_planes; plane_index++) + { + u32 supported_display_count; + res = vkGetDisplayPlaneSupportedDisplaysKHR( + physical_device, plane_index, &supported_display_count, nullptr); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkGetDisplayPlaneSupportedDisplaysKHR() failed:"); + continue; + } + if (supported_display_count == 0) + continue; + + std::vector supported_displays(supported_display_count); + res = vkGetDisplayPlaneSupportedDisplaysKHR( + physical_device, plane_index, &supported_display_count, supported_displays.data()); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkGetDisplayPlaneSupportedDisplaysKHR() failed:"); + continue; + } + + const bool is_supported = std::find(supported_displays.begin(), supported_displays.end(), + props.display) != supported_displays.end(); + if (!is_supported) + continue; + + break; + } + + if (plane_index == num_planes) + { + DevCon.WriteLn("No planes matched on '%s'", props.displayName); + continue; + } + + VkDisplaySurfaceCreateInfoKHR info = {}; + info.sType = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR; + info.displayMode = matched_mode->displayMode; + info.planeIndex = plane_index; + info.planeStackIndex = planes[plane_index].currentStackIndex; + info.transform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR; + info.globalAlpha = 1.0f; + info.alphaMode = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR; + info.imageExtent = matched_mode->parameters.visibleRegion; + + VkSurfaceKHR surface; + res = vkCreateDisplayPlaneSurfaceKHR(instance, &info, nullptr, &surface); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateDisplayPlaneSurfaceKHR() failed: "); + continue; + } + + wi->surface_refresh_rate = static_cast(matched_mode->parameters.refreshRate) / 1000.0f; + return surface; + } + + return VK_NULL_HANDLE; + } + + static std::vector GetDisplayModes( + VkInstance instance, VkPhysicalDevice physical_device, const WindowInfo& wi) + { + u32 num_displays; + VkResult res = vkGetPhysicalDeviceDisplayPropertiesKHR(physical_device, &num_displays, nullptr); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkGetPhysicalDeviceDisplayPropertiesKHR() failed:"); + return {}; + } + if (num_displays == 0) + { + Console.Error("No displays were returned"); + return {}; + } + + std::vector displays(num_displays); + res = vkGetPhysicalDeviceDisplayPropertiesKHR(physical_device, &num_displays, displays.data()); + if (res != VK_SUCCESS || num_displays != displays.size()) + { + LOG_VULKAN_ERROR(res, "vkGetPhysicalDeviceDisplayPropertiesKHR() failed:"); + return {}; + } + + std::vector result; + for (u32 display_index = 0; display_index < num_displays; display_index++) + { + const VkDisplayPropertiesKHR& props = displays[display_index]; + + u32 num_modes; + res = vkGetDisplayModePropertiesKHR(physical_device, props.display, &num_modes, nullptr); + if (res != VK_SUCCESS || num_modes == 0) + { + LOG_VULKAN_ERROR(res, "vkGetDisplayModePropertiesKHR() failed:"); + continue; + } + + std::vector modes(num_modes); + res = vkGetDisplayModePropertiesKHR(physical_device, props.display, &num_modes, modes.data()); + if (res != VK_SUCCESS || num_modes != modes.size()) + { + LOG_VULKAN_ERROR(res, "vkGetDisplayModePropertiesKHR() failed:"); + continue; + } + + for (const VkDisplayModePropertiesKHR& mode : modes) + { + const float refresh_rate = static_cast(mode.parameters.refreshRate) / 1000.0f; + if 
(std::find_if( + result.begin(), result.end(), [&mode, refresh_rate](const SwapChain::FullscreenModeInfo& mi) { + return (mi.width == mode.parameters.visibleRegion.width && + mi.height == mode.parameters.visibleRegion.height && + mode.parameters.refreshRate == refresh_rate); + }) != result.end()) + { + continue; + } + + result.push_back(SwapChain::FullscreenModeInfo{static_cast(mode.parameters.visibleRegion.width), + static_cast(mode.parameters.visibleRegion.height), refresh_rate}); + } + } + + return result; + } + + VkSurfaceKHR SwapChain::CreateVulkanSurface(VkInstance instance, VkPhysicalDevice physical_device, WindowInfo* wi) + { +#if defined(VK_USE_PLATFORM_WIN32_KHR) + if (wi->type == WindowInfo::Type::Win32) + { + VkWin32SurfaceCreateInfoKHR surface_create_info = { + VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, // VkStructureType sType + nullptr, // const void* pNext + 0, // VkWin32SurfaceCreateFlagsKHR flags + nullptr, // HINSTANCE hinstance + reinterpret_cast(wi->window_handle) // HWND hwnd + }; + + VkSurfaceKHR surface; + VkResult res = vkCreateWin32SurfaceKHR(instance, &surface_create_info, nullptr, &surface); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateWin32SurfaceKHR failed: "); + return VK_NULL_HANDLE; + } + + return surface; + } +#endif + +#if defined(VK_USE_PLATFORM_XLIB_KHR) + if (wi->type == WindowInfo::Type::X11) + { + VkXlibSurfaceCreateInfoKHR surface_create_info = { + VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, // VkStructureType sType + nullptr, // const void* pNext + 0, // VkXlibSurfaceCreateFlagsKHR flags + static_cast(wi->display_connection), // Display* dpy + reinterpret_cast(wi->window_handle) // Window window + }; + + VkSurfaceKHR surface; + VkResult res = vkCreateXlibSurfaceKHR(instance, &surface_create_info, nullptr, &surface); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateXlibSurfaceKHR failed: "); + return VK_NULL_HANDLE; + } + + return surface; + } +#endif + +#if defined(VK_USE_PLATFORM_WAYLAND_KHR) + if (wi->type == WindowInfo::Type::Wayland) + { + VkWaylandSurfaceCreateInfoKHR surface_create_info = {VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, + nullptr, 0, static_cast(wi->display_connection), + static_cast(wi->window_handle)}; + + VkSurfaceKHR surface; + VkResult res = vkCreateWaylandSurfaceKHR(instance, &surface_create_info, nullptr, &surface); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateWaylandSurfaceEXT failed: "); + return VK_NULL_HANDLE; + } + + return surface; + } +#endif + +#if defined(VK_USE_PLATFORM_ANDROID_KHR) + if (wi->type == WindowInfo::Type::Android) + { + VkAndroidSurfaceCreateInfoKHR surface_create_info = { + VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR, // VkStructureType sType + nullptr, // const void* pNext + 0, // VkAndroidSurfaceCreateFlagsKHR flags + reinterpret_cast(wi->window_handle) // ANativeWindow* window + }; + + VkSurfaceKHR surface; + VkResult res = vkCreateAndroidSurfaceKHR(instance, &surface_create_info, nullptr, &surface); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateAndroidSurfaceKHR failed: "); + return VK_NULL_HANDLE; + } + + return surface; + } +#endif + +#if defined(VK_USE_PLATFORM_METAL_EXT) + if (wi->type == WindowInfo::Type::MacOS) + { + if (!wi->surface_handle && !CreateMetalLayer(wi)) + return VK_NULL_HANDLE; + + VkMetalSurfaceCreateInfoEXT surface_create_info = {VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT, nullptr, + 0, static_cast(wi->surface_handle)}; + + VkSurfaceKHR surface; + VkResult res = 
vkCreateMetalSurfaceEXT(instance, &surface_create_info, nullptr, &surface); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateMetalSurfaceEXT failed: "); + return VK_NULL_HANDLE; + } + + return surface; + } +#elif defined(VK_USE_PLATFORM_MACOS_MVK) + if (wi->type == WindowInfo::Type::MacOS) + { + VkMacOSSurfaceCreateInfoMVK surface_create_info = { + VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK, nullptr, 0, wi->window_handle}; + + VkSurfaceKHR surface; + VkResult res = vkCreateMacOSSurfaceMVK(instance, &surface_create_info, nullptr, &surface); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateMacOSSurfaceMVK failed: "); + return VK_NULL_HANDLE; + } + + return surface; + } +#endif + +#if 0 + if (wi->type == WindowInfo::Type::Display) + return CreateDisplaySurface(instance, physical_device, wi); +#endif + + return VK_NULL_HANDLE; + } + + void SwapChain::DestroyVulkanSurface(VkInstance instance, WindowInfo* wi, VkSurfaceKHR surface) + { + vkDestroySurfaceKHR(g_vulkan_context->GetVulkanInstance(), surface, nullptr); + +#if defined(__APPLE__) + if (wi->type == WindowInfo::Type::MacOS && wi->surface_handle) + DestroyMetalLayer(wi); +#endif + } + + std::vector SwapChain::GetSurfaceFullscreenModes( + VkInstance instance, VkPhysicalDevice physical_device, const WindowInfo& wi) + { +#if 0 + if (wi.type == WindowInfo::Type::Display) + return GetDisplayModes(instance, physical_device, wi); +#endif + + return {}; + } + + std::unique_ptr SwapChain::Create(const WindowInfo& wi, VkSurfaceKHR surface, bool vsync) + { + std::unique_ptr swap_chain = std::make_unique(wi, surface, vsync); + if (!swap_chain->CreateSwapChain() || !swap_chain->SetupSwapChainImages() || !swap_chain->CreateSemaphores()) + return nullptr; + + return swap_chain; + } + + bool SwapChain::SelectSurfaceFormat() + { + u32 format_count; + VkResult res = vkGetPhysicalDeviceSurfaceFormatsKHR( + g_vulkan_context->GetPhysicalDevice(), m_surface, &format_count, nullptr); + if (res != VK_SUCCESS || format_count == 0) + { + LOG_VULKAN_ERROR(res, "vkGetPhysicalDeviceSurfaceFormatsKHR failed: "); + return false; + } + + std::vector surface_formats(format_count); + res = vkGetPhysicalDeviceSurfaceFormatsKHR( + g_vulkan_context->GetPhysicalDevice(), m_surface, &format_count, surface_formats.data()); + pxAssert(res == VK_SUCCESS); + + // If there is a single undefined surface format, the device doesn't care, so we'll just use RGBA + if (surface_formats[0].format == VK_FORMAT_UNDEFINED) + { + m_surface_format.format = VK_FORMAT_R8G8B8A8_UNORM; + m_surface_format.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR; + return true; + } + + // Try to find a suitable format. + for (const VkSurfaceFormatKHR& surface_format : surface_formats) + { + // Some drivers seem to return a SRGB format here (Intel Mesa). + // This results in gamma correction when presenting to the screen, which we don't want. + // Use a linear format instead, if this is the case. 
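+            // Util::GetLinearFormat() maps the *_SRGB formats to their *_UNORM equivalents and
+            // returns any other format unchanged, so non-sRGB formats pass through untouched.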
+ m_surface_format.format = Util::GetLinearFormat(surface_format.format); + m_surface_format.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR; + return true; + } + + pxFailRel("Failed to find a suitable format for swap chain buffers."); + return false; + } + + bool SwapChain::SelectPresentMode() + { + VkResult res; + u32 mode_count; + res = vkGetPhysicalDeviceSurfacePresentModesKHR( + g_vulkan_context->GetPhysicalDevice(), m_surface, &mode_count, nullptr); + if (res != VK_SUCCESS || mode_count == 0) + { + LOG_VULKAN_ERROR(res, "vkGetPhysicalDeviceSurfaceFormatsKHR failed: "); + return false; + } + + std::vector present_modes(mode_count); + res = vkGetPhysicalDeviceSurfacePresentModesKHR( + g_vulkan_context->GetPhysicalDevice(), m_surface, &mode_count, present_modes.data()); + pxAssert(res == VK_SUCCESS); + + // Checks if a particular mode is supported, if it is, returns that mode. + auto CheckForMode = [&present_modes](VkPresentModeKHR check_mode) { + auto it = std::find_if(present_modes.begin(), present_modes.end(), + [check_mode](VkPresentModeKHR mode) { return check_mode == mode; }); + return it != present_modes.end(); + }; + + // If vsync is enabled, use VK_PRESENT_MODE_FIFO_KHR. + // This check should not fail with conforming drivers, as the FIFO present mode is mandated by + // the specification (VK_KHR_swapchain). In case it isn't though, fall through to any other mode. + if (m_vsync_enabled && CheckForMode(VK_PRESENT_MODE_FIFO_KHR)) + { + m_present_mode = VK_PRESENT_MODE_FIFO_KHR; + return true; + } + + // Prefer screen-tearing, if possible, for lowest latency. + if (CheckForMode(VK_PRESENT_MODE_IMMEDIATE_KHR)) + { + m_present_mode = VK_PRESENT_MODE_IMMEDIATE_KHR; + return true; + } + + // Use optimized-vsync above vsync. + if (CheckForMode(VK_PRESENT_MODE_MAILBOX_KHR)) + { + m_present_mode = VK_PRESENT_MODE_MAILBOX_KHR; + return true; + } + + // Fall back to whatever is available. + m_present_mode = present_modes[0]; + return true; + } + + bool SwapChain::CreateSwapChain() + { + // Look up surface properties to determine image count and dimensions + VkSurfaceCapabilitiesKHR surface_capabilities; + VkResult res = vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + g_vulkan_context->GetPhysicalDevice(), m_surface, &surface_capabilities); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR failed: "); + return false; + } + + // Select swap chain format and present mode + if (!SelectSurfaceFormat() || !SelectPresentMode()) + return false; + + // Select number of images in swap chain, we prefer one buffer in the background to work on + u32 image_count = std::max(surface_capabilities.minImageCount + 1u, 2u); + + // maxImageCount can be zero, in which case there isn't an upper limit on the number of buffers. + if (surface_capabilities.maxImageCount > 0) + image_count = std::min(image_count, surface_capabilities.maxImageCount); + + // Determine the dimensions of the swap chain. Values of -1 indicate the size we specify here + // determines window size? 
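+        // (A currentExtent of 0xFFFFFFFF means the surface size is determined by the swap chain
+        // extent, so we fall back to the window dimensions and clamp to min/maxImageExtent below.)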
+ VkExtent2D size = surface_capabilities.currentExtent; +#ifndef ANDROID + if (size.width == UINT32_MAX) +#endif + { + size.width = m_window_info.surface_width; + size.height = m_window_info.surface_height; + } + size.width = std::clamp( + size.width, surface_capabilities.minImageExtent.width, surface_capabilities.maxImageExtent.width); + size.height = std::clamp( + size.height, surface_capabilities.minImageExtent.height, surface_capabilities.maxImageExtent.height); + + // Prefer identity transform if possible + VkSurfaceTransformFlagBitsKHR transform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR; + if (!(surface_capabilities.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR)) + transform = surface_capabilities.currentTransform; + + // Select swap chain flags, we only need a colour attachment + VkImageUsageFlags image_usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; + if (!(surface_capabilities.supportedUsageFlags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)) + { + Console.Error("Vulkan: Swap chain does not support usage as color attachment"); + return false; + } + + // Store the old/current swap chain when recreating for resize + VkSwapchainKHR old_swap_chain = m_swap_chain; + m_swap_chain = VK_NULL_HANDLE; + + // Now we can actually create the swap chain + VkSwapchainCreateInfoKHR swap_chain_info = {VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, nullptr, 0, m_surface, + image_count, m_surface_format.format, m_surface_format.colorSpace, size, 1u, image_usage, + VK_SHARING_MODE_EXCLUSIVE, 0, nullptr, transform, VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, m_present_mode, + VK_TRUE, old_swap_chain}; + std::array indices = {{ + g_vulkan_context->GetGraphicsQueueFamilyIndex(), + g_vulkan_context->GetPresentQueueFamilyIndex(), + }}; + if (g_vulkan_context->GetGraphicsQueueFamilyIndex() != g_vulkan_context->GetPresentQueueFamilyIndex()) + { + swap_chain_info.imageSharingMode = VK_SHARING_MODE_CONCURRENT; + swap_chain_info.queueFamilyIndexCount = 2; + swap_chain_info.pQueueFamilyIndices = indices.data(); + } + + if (m_swap_chain == VK_NULL_HANDLE) + { + res = vkCreateSwapchainKHR(g_vulkan_context->GetDevice(), &swap_chain_info, nullptr, &m_swap_chain); + } + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateSwapchainKHR failed: "); + return false; + } + + // Now destroy the old swap chain, since it's been recreated. + // We can do this immediately since all work should have been completed before calling resize. 
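+        // (The old handle was also passed as oldSwapchain in the create info above, which allows the
+        // driver to reuse its resources for the replacement swap chain.)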
+ if (old_swap_chain != VK_NULL_HANDLE) + vkDestroySwapchainKHR(g_vulkan_context->GetDevice(), old_swap_chain, nullptr); + + m_window_info.surface_width = std::max(1u, size.width); + m_window_info.surface_height = std::max(1u, size.height); + return true; + } + + bool SwapChain::SetupSwapChainImages() + { + pxAssert(m_images.empty()); + + u32 image_count; + VkResult res = vkGetSwapchainImagesKHR(g_vulkan_context->GetDevice(), m_swap_chain, &image_count, nullptr); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkGetSwapchainImagesKHR failed: "); + return false; + } + + std::vector images(image_count); + res = vkGetSwapchainImagesKHR(g_vulkan_context->GetDevice(), m_swap_chain, &image_count, images.data()); + pxAssert(res == VK_SUCCESS); + + m_load_render_pass = + g_vulkan_context->GetRenderPass(m_surface_format.format, VK_FORMAT_UNDEFINED, VK_ATTACHMENT_LOAD_OP_LOAD); + m_clear_render_pass = + g_vulkan_context->GetRenderPass(m_surface_format.format, VK_FORMAT_UNDEFINED, VK_ATTACHMENT_LOAD_OP_CLEAR); + if (m_load_render_pass == VK_NULL_HANDLE || m_clear_render_pass == VK_NULL_HANDLE) + { + pxFailRel("Failed to get swap chain render passes."); + return false; + } + + m_images.reserve(image_count); + for (u32 i = 0; i < image_count; i++) + { + SwapChainImage image; + image.image = images[i]; + + // Create texture object, which creates a view of the backbuffer + if (!image.texture.Adopt(image.image, VK_IMAGE_VIEW_TYPE_2D, m_window_info.surface_width, + m_window_info.surface_height, 1, 1, m_surface_format.format, VK_SAMPLE_COUNT_1_BIT)) + { + return false; + } + + image.framebuffer = image.texture.CreateFramebuffer(m_load_render_pass); + if (image.framebuffer == VK_NULL_HANDLE) + return false; + + m_images.emplace_back(std::move(image)); + } + + return true; + } + + void SwapChain::DestroySwapChainImages() + { + for (auto& it : m_images) + { + // Images themselves are cleaned up by the swap chain object + vkDestroyFramebuffer(g_vulkan_context->GetDevice(), it.framebuffer, nullptr); + } + m_images.clear(); + } + + void SwapChain::DestroySwapChain() + { + if (m_swap_chain == VK_NULL_HANDLE) + return; + + vkDestroySwapchainKHR(g_vulkan_context->GetDevice(), m_swap_chain, nullptr); + m_swap_chain = VK_NULL_HANDLE; + } + + VkResult SwapChain::AcquireNextImage() + { + if (!m_swap_chain) + return VK_ERROR_SURFACE_LOST_KHR; + + return vkAcquireNextImageKHR(g_vulkan_context->GetDevice(), m_swap_chain, UINT64_MAX, + m_image_available_semaphore, VK_NULL_HANDLE, &m_current_image); + } + + bool SwapChain::ResizeSwapChain(u32 new_width /* = 0 */, u32 new_height /* = 0 */) + { + DestroySwapChainImages(); + DestroySemaphores(); + + if (new_width != 0 && new_height != 0) + { + m_window_info.surface_width = new_width; + m_window_info.surface_height = new_height; + } + + if (!CreateSwapChain() || !SetupSwapChainImages() || !CreateSemaphores()) + { + DestroySemaphores(); + DestroySwapChainImages(); + DestroySwapChain(); + return false; + } + + return true; + } + + bool SwapChain::RecreateSwapChain() + { + DestroySwapChainImages(); + DestroySemaphores(); + + if (!CreateSwapChain() || !SetupSwapChainImages() || !CreateSemaphores()) + { + DestroySemaphores(); + DestroySwapChainImages(); + DestroySwapChain(); + return false; + } + + return true; + } + + bool SwapChain::SetVSync(bool enabled) + { + if (m_vsync_enabled == enabled) + return true; + + // Recreate the swap chain with the new present mode. 
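+        // RecreateSwapChain() re-runs SelectPresentMode(), which picks FIFO when vsync is enabled and
+        // prefers IMMEDIATE, then MAILBOX, when it is not.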
+ m_vsync_enabled = enabled; + return RecreateSwapChain(); + } + + bool SwapChain::RecreateSurface(const WindowInfo& new_wi) + { + // Destroy the old swap chain, images, and surface. + DestroySwapChainImages(); + DestroySwapChain(); + DestroySurface(); + + // Re-create the surface with the new native handle + m_window_info = new_wi; + m_surface = CreateVulkanSurface( + g_vulkan_context->GetVulkanInstance(), g_vulkan_context->GetPhysicalDevice(), &m_window_info); + if (m_surface == VK_NULL_HANDLE) + return false; + + // The validation layers get angry at us if we don't call this before creating the swapchain. + VkBool32 present_supported = VK_TRUE; + VkResult res = vkGetPhysicalDeviceSurfaceSupportKHR(g_vulkan_context->GetPhysicalDevice(), + g_vulkan_context->GetPresentQueueFamilyIndex(), m_surface, &present_supported); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkGetPhysicalDeviceSurfaceSupportKHR failed: "); + return false; + } + if (!present_supported) + { + pxFailRel("Recreated surface does not support presenting."); + return false; + } + + // Finally re-create the swap chain + if (!CreateSwapChain() || !SetupSwapChainImages()) + return false; + + return true; + } + + void SwapChain::DestroySurface() + { + if (m_surface == VK_NULL_HANDLE) + return; + + DestroyVulkanSurface(g_vulkan_context->GetVulkanInstance(), &m_window_info, m_surface); + m_surface = VK_NULL_HANDLE; + } + + bool SwapChain::CreateSemaphores() + { + // Create two semaphores, one that is triggered when the swapchain buffer is ready, another after + // submit and before present + VkSemaphoreCreateInfo semaphore_info = { + VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, // VkStructureType sType + nullptr, // const void* pNext + 0 // VkSemaphoreCreateFlags flags + }; + + VkResult res; + if ((res = vkCreateSemaphore(g_vulkan_context->GetDevice(), &semaphore_info, nullptr, + &m_image_available_semaphore)) != VK_SUCCESS || + (res = vkCreateSemaphore(g_vulkan_context->GetDevice(), &semaphore_info, nullptr, + &m_rendering_finished_semaphore)) != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateSemaphore failed: "); + return false; + } + + return true; + } + + void SwapChain::DestroySemaphores() + { + if (m_image_available_semaphore != VK_NULL_HANDLE) + { + vkDestroySemaphore(g_vulkan_context->GetDevice(), m_image_available_semaphore, nullptr); + m_image_available_semaphore = VK_NULL_HANDLE; + } + + if (m_rendering_finished_semaphore != VK_NULL_HANDLE) + { + vkDestroySemaphore(g_vulkan_context->GetDevice(), m_rendering_finished_semaphore, nullptr); + m_rendering_finished_semaphore = VK_NULL_HANDLE; + } + } +} // namespace Vulkan diff --git a/common/Vulkan/SwapChain.h b/common/Vulkan/SwapChain.h new file mode 100644 index 0000000000..951cf708ba --- /dev/null +++ b/common/Vulkan/SwapChain.h @@ -0,0 +1,118 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . 
+ */ + +#pragma once + +#include "common/Pcsx2Defs.h" +#include "common/WindowInfo.h" +#include "common/Vulkan/Texture.h" +#include "common/Vulkan/Loader.h" +#include +#include + +namespace Vulkan +{ + class SwapChain + { + public: + SwapChain(const WindowInfo& wi, VkSurfaceKHR surface, bool vsync); + ~SwapChain(); + + // Creates a vulkan-renderable surface for the specified window handle. + static VkSurfaceKHR CreateVulkanSurface(VkInstance instance, VkPhysicalDevice physical_device, WindowInfo* wi); + + // Destroys a previously-created surface. + static void DestroyVulkanSurface(VkInstance instance, WindowInfo* wi, VkSurfaceKHR surface); + + // Enumerates fullscreen modes for window info. + struct FullscreenModeInfo + { + u32 width; + u32 height; + float refresh_rate; + }; + static std::vector GetSurfaceFullscreenModes( + VkInstance instance, VkPhysicalDevice physical_device, const WindowInfo& wi); + + // Create a new swap chain from a pre-existing surface. + static std::unique_ptr Create(const WindowInfo& wi, VkSurfaceKHR surface, bool vsync); + + __fi VkSurfaceKHR GetSurface() const { return m_surface; } + __fi VkSurfaceFormatKHR GetSurfaceFormat() const { return m_surface_format; } + __fi VkFormat GetTextureFormat() const { return m_surface_format.format; } + __fi bool IsVSyncEnabled() const { return m_vsync_enabled; } + __fi VkSwapchainKHR GetSwapChain() const { return m_swap_chain; } + __fi const WindowInfo& GetWindowInfo() const { return m_window_info; } + __fi u32 GetWidth() const { return m_window_info.surface_width; } + __fi u32 GetHeight() const { return m_window_info.surface_height; } + __fi u32 GetCurrentImageIndex() const { return m_current_image; } + __fi u32 GetImageCount() const { return static_cast(m_images.size()); } + __fi VkImage GetCurrentImage() const { return m_images[m_current_image].image; } + __fi const Texture& GetCurrentTexture() const { return m_images[m_current_image].texture; } + __fi Texture& GetCurrentTexture() { return m_images[m_current_image].texture; } + __fi VkFramebuffer GetCurrentFramebuffer() const { return m_images[m_current_image].framebuffer; } + __fi VkRenderPass GetLoadRenderPass() const { return m_load_render_pass; } + __fi VkRenderPass GetClearRenderPass() const { return m_clear_render_pass; } + __fi VkSemaphore GetImageAvailableSemaphore() const { return m_image_available_semaphore; } + __fi VkSemaphore GetRenderingFinishedSemaphore() const { return m_rendering_finished_semaphore; } + VkResult AcquireNextImage(); + + bool RecreateSurface(const WindowInfo& new_wi); + bool ResizeSwapChain(u32 new_width = 0, u32 new_height = 0); + bool RecreateSwapChain(); + + // Change vsync enabled state. This may fail as it causes a swapchain recreation. 
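+        // Returns true if the requested state was already set, or if the swap chain was recreated
+        // successfully with the new present mode.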
+ bool SetVSync(bool enabled); + + private: + bool SelectSurfaceFormat(); + bool SelectPresentMode(); + + bool CreateSwapChain(); + void DestroySwapChain(); + + bool SetupSwapChainImages(); + void DestroySwapChainImages(); + + void DestroySurface(); + + bool CreateSemaphores(); + void DestroySemaphores(); + + struct SwapChainImage + { + VkImage image; + Texture texture; + VkFramebuffer framebuffer; + }; + + WindowInfo m_window_info; + + VkSurfaceKHR m_surface = VK_NULL_HANDLE; + VkSurfaceFormatKHR m_surface_format = {}; + VkPresentModeKHR m_present_mode = VK_PRESENT_MODE_IMMEDIATE_KHR; + + VkRenderPass m_load_render_pass = VK_NULL_HANDLE; + VkRenderPass m_clear_render_pass = VK_NULL_HANDLE; + + VkSemaphore m_image_available_semaphore = VK_NULL_HANDLE; + VkSemaphore m_rendering_finished_semaphore = VK_NULL_HANDLE; + + VkSwapchainKHR m_swap_chain = VK_NULL_HANDLE; + std::vector m_images; + u32 m_current_image = 0; + bool m_vsync_enabled = false; + }; +} // namespace Vulkan diff --git a/common/Vulkan/Texture.cpp b/common/Vulkan/Texture.cpp new file mode 100644 index 0000000000..1dc871299c --- /dev/null +++ b/common/Vulkan/Texture.cpp @@ -0,0 +1,381 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . 
+ */ + +#include "common/Vulkan/Texture.h" +#include "common/Vulkan/Context.h" +#include "common/Vulkan/Util.h" +#include "common/Assertions.h" +#include + +static constexpr VkComponentMapping s_identity_swizzle{VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, + VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY}; + +namespace Vulkan +{ + Texture::Texture() = default; + + Texture::Texture(Texture&& move) + : m_width(move.m_width) + , m_height(move.m_height) + , m_levels(move.m_levels) + , m_layers(move.m_layers) + , m_format(move.m_format) + , m_samples(move.m_samples) + , m_view_type(move.m_view_type) + , m_layout(move.m_layout) + , m_image(move.m_image) + , m_allocation(move.m_allocation) + , m_view(move.m_view) + { + move.m_width = 0; + move.m_height = 0; + move.m_levels = 0; + move.m_layers = 0; + move.m_format = VK_FORMAT_UNDEFINED; + move.m_samples = VK_SAMPLE_COUNT_1_BIT; + move.m_view_type = VK_IMAGE_VIEW_TYPE_2D; + move.m_layout = VK_IMAGE_LAYOUT_UNDEFINED; + move.m_image = VK_NULL_HANDLE; + move.m_allocation = VK_NULL_HANDLE; + move.m_view = VK_NULL_HANDLE; + } + + Texture::~Texture() + { + if (IsValid()) + Destroy(true); + } + + Vulkan::Texture& Texture::operator=(Texture&& move) + { + if (IsValid()) + Destroy(true); + + std::swap(m_width, move.m_width); + std::swap(m_height, move.m_height); + std::swap(m_levels, move.m_levels); + std::swap(m_layers, move.m_layers); + std::swap(m_format, move.m_format); + std::swap(m_samples, move.m_samples); + std::swap(m_view_type, move.m_view_type); + std::swap(m_layout, move.m_layout); + std::swap(m_image, move.m_image); + std::swap(m_allocation, move.m_allocation); + std::swap(m_view, move.m_view); + + return *this; + } + + bool Texture::Create(u32 width, u32 height, u32 levels, u32 layers, VkFormat format, VkSampleCountFlagBits samples, + VkImageViewType view_type, VkImageTiling tiling, VkImageUsageFlags usage, + const VkComponentMapping* swizzle /* = nullptr*/) + { + const VkImageCreateInfo image_info = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, nullptr, 0, VK_IMAGE_TYPE_2D, format, + {width, height, 1}, levels, layers, samples, tiling, usage, VK_SHARING_MODE_EXCLUSIVE, 0, nullptr, + VK_IMAGE_LAYOUT_UNDEFINED}; + + VmaAllocationCreateInfo aci = {}; + aci.usage = VMA_MEMORY_USAGE_GPU_ONLY; + + VkImage image = VK_NULL_HANDLE; + VmaAllocation allocation = VK_NULL_HANDLE; + VkResult res = + vmaCreateImage(g_vulkan_context->GetAllocator(), &image_info, &aci, &image, &allocation, nullptr); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vmaCreateImage failed: "); + return false; + } + + const VkImageViewCreateInfo view_info = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, nullptr, 0, image, view_type, + format, swizzle ? *swizzle : s_identity_swizzle, + {Util::IsDepthFormat(format) ? 
static_cast(VK_IMAGE_ASPECT_DEPTH_BIT) : + static_cast(VK_IMAGE_ASPECT_COLOR_BIT), + 0, levels, 0, layers}}; + + VkImageView view = VK_NULL_HANDLE; + res = vkCreateImageView(g_vulkan_context->GetDevice(), &view_info, nullptr, &view); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateImageView failed: "); + vmaDestroyImage(g_vulkan_context->GetAllocator(), image, allocation); + return false; + } + + if (IsValid()) + Destroy(true); + + m_width = width; + m_height = height; + m_levels = levels; + m_layers = layers; + m_format = format; + m_samples = samples; + m_view_type = view_type; + m_image = image; + m_allocation = allocation; + m_view = view; + return true; + } + + bool Texture::Adopt(VkImage existing_image, VkImageViewType view_type, u32 width, u32 height, u32 levels, + u32 layers, VkFormat format, VkSampleCountFlagBits samples, const VkComponentMapping* swizzle /* = nullptr*/) + { + // Only need to create the image view, this is mainly for swap chains. + const VkImageViewCreateInfo view_info = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, nullptr, 0, existing_image, + view_type, format, swizzle ? *swizzle : s_identity_swizzle, + {Util::IsDepthFormat(format) ? static_cast(VK_IMAGE_ASPECT_DEPTH_BIT) : + static_cast(VK_IMAGE_ASPECT_COLOR_BIT), + 0, levels, 0, layers}}; + + // Memory is managed by the owner of the image. + VkImageView view = VK_NULL_HANDLE; + VkResult res = vkCreateImageView(g_vulkan_context->GetDevice(), &view_info, nullptr, &view); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateImageView failed: "); + return false; + } + + if (IsValid()) + Destroy(true); + + m_width = width; + m_height = height; + m_levels = levels; + m_layers = layers; + m_format = format; + m_samples = samples; + m_view_type = view_type; + m_image = existing_image; + m_view = view; + return true; + } + + void Texture::Destroy(bool defer /* = true */) + { + if (m_view != VK_NULL_HANDLE) + { + if (defer) + g_vulkan_context->DeferImageViewDestruction(m_view); + else + vkDestroyImageView(g_vulkan_context->GetDevice(), m_view, nullptr); + m_view = VK_NULL_HANDLE; + } + + // If we don't have device memory allocated, the image is not owned by us (e.g. 
swapchain) + if (m_allocation != VK_NULL_HANDLE) + { + pxAssert(m_image != VK_NULL_HANDLE); + if (defer) + g_vulkan_context->DeferImageDestruction(m_image, m_allocation); + else + vmaDestroyImage(g_vulkan_context->GetAllocator(), m_image, m_allocation); + m_image = VK_NULL_HANDLE; + m_allocation = VK_NULL_HANDLE; + } + + m_width = 0; + m_height = 0; + m_levels = 0; + m_layers = 0; + m_format = VK_FORMAT_UNDEFINED; + m_samples = VK_SAMPLE_COUNT_1_BIT; + m_view_type = VK_IMAGE_VIEW_TYPE_2D; + m_layout = VK_IMAGE_LAYOUT_UNDEFINED; + } + + void Texture::OverrideImageLayout(VkImageLayout new_layout) { m_layout = new_layout; } + + void Texture::TransitionToLayout(VkCommandBuffer command_buffer, VkImageLayout new_layout) + { + if (m_layout == new_layout) + return; + + TransitionSubresourcesToLayout(command_buffer, 0, m_levels, 0, m_layers, m_layout, new_layout); + + m_layout = new_layout; + } + + void Texture::TransitionSubresourcesToLayout(VkCommandBuffer command_buffer, u32 start_level, u32 num_levels, + u32 start_layer, u32 num_layers, VkImageLayout old_layout, VkImageLayout new_layout) + { + VkImageAspectFlags aspect; + if (Util::IsDepthStencilFormat(m_format)) + aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT; + else if (Util::IsDepthFormat(m_format)) + aspect = VK_IMAGE_ASPECT_DEPTH_BIT; + else + aspect = VK_IMAGE_ASPECT_COLOR_BIT; + + VkImageMemoryBarrier barrier = { + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType + nullptr, // const void* pNext + 0, // VkAccessFlags srcAccessMask + 0, // VkAccessFlags dstAccessMask + old_layout, // VkImageLayout oldLayout + new_layout, // VkImageLayout newLayout + VK_QUEUE_FAMILY_IGNORED, // uint32_t srcQueueFamilyIndex + VK_QUEUE_FAMILY_IGNORED, // uint32_t dstQueueFamilyIndex + m_image, // VkImage image + {aspect, start_level, num_levels, start_layer, num_layers} // VkImageSubresourceRange subresourceRange + }; + + // srcStageMask -> Stages that must complete before the barrier + // dstStageMask -> Stages that must wait for after the barrier before beginning + VkPipelineStageFlags srcStageMask, dstStageMask; + switch (old_layout) + { + case VK_IMAGE_LAYOUT_UNDEFINED: + // Layout undefined therefore contents undefined, and we don't care what happens to it. + barrier.srcAccessMask = 0; + srcStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + break; + + case VK_IMAGE_LAYOUT_PREINITIALIZED: + // Image has been pre-initialized by the host, so ensure all writes have completed. + barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT; + srcStageMask = VK_PIPELINE_STAGE_HOST_BIT; + break; + + case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: + // Image was being used as a color attachment, so ensure all writes have completed. + barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; + break; + + case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: + // Image was being used as a depthstencil attachment, so ensure all writes have completed. + barrier.srcAccessMask = + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; + srcStageMask = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; + break; + + case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: + // Image was being used as a shader resource, make sure all reads have finished. 
+ barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT; + srcStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; + break; + + case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: + // Image was being used as a copy source, ensure all reads have finished. + barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT; + srcStageMask = VK_PIPELINE_STAGE_TRANSFER_BIT; + break; + + case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: + // Image was being used as a copy destination, ensure all writes have finished. + barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; + srcStageMask = VK_PIPELINE_STAGE_TRANSFER_BIT; + break; + + case VK_IMAGE_LAYOUT_GENERAL: + // General is used for feedback loops. + barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | + VK_ACCESS_INPUT_ATTACHMENT_READ_BIT; + srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; + break; + + default: + srcStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + break; + } + + switch (new_layout) + { + case VK_IMAGE_LAYOUT_UNDEFINED: + barrier.dstAccessMask = 0; + dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + break; + + case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: + barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; + break; + + case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: + barrier.dstAccessMask = + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; + dstStageMask = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; + break; + + case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: + barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT; + dstStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; + break; + + case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: + barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT; + dstStageMask = VK_PIPELINE_STAGE_TRANSFER_BIT; + break; + + case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: + barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; + dstStageMask = VK_PIPELINE_STAGE_TRANSFER_BIT; + break; + + case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: + srcStageMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; + dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + break; + + case VK_IMAGE_LAYOUT_GENERAL: + // General is used for feedback loops. 
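+                // (The image is bound both as a color attachment and as an input attachment, hence
+                // the combined attachment read/write and input-attachment read masks below.)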
+ barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | + VK_ACCESS_INPUT_ATTACHMENT_READ_BIT; + dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; + break; + + default: + dstStageMask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; + break; + } + vkCmdPipelineBarrier(command_buffer, srcStageMask, dstStageMask, 0, 0, nullptr, 0, nullptr, 1, &barrier); + } + + VkFramebuffer Texture::CreateFramebuffer(VkRenderPass render_pass) + { + const VkFramebufferCreateInfo ci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0u, render_pass, 1, + &m_view, m_width, m_height, m_layers}; + VkFramebuffer fb = VK_NULL_HANDLE; + VkResult res = vkCreateFramebuffer(g_vulkan_context->GetDevice(), &ci, nullptr, &fb); + if (res != VK_SUCCESS) + { + LOG_VULKAN_ERROR(res, "vkCreateFramebuffer() failed: "); + return VK_NULL_HANDLE; + } + + return fb; + } + + void Texture::UpdateFromBuffer(VkCommandBuffer cmdbuf, u32 level, u32 layer, u32 x, u32 y, u32 width, u32 height, + u32 row_length, VkBuffer buffer, u32 buffer_offset) + { + const VkImageLayout old_layout = m_layout; + if (old_layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) + TransitionSubresourcesToLayout( + cmdbuf, level, 1, layer, 1, old_layout, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL); + + const VkBufferImageCopy bic = {static_cast(buffer_offset), row_length, height, + {VK_IMAGE_ASPECT_COLOR_BIT, level, layer, 1u}, {static_cast(x), static_cast(y), 0}, + {width, height, 1u}}; + + vkCmdCopyBufferToImage(cmdbuf, buffer, m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &bic); + + if (old_layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) + TransitionSubresourcesToLayout( + cmdbuf, level, 1, layer, 1, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, old_layout); + } +} // namespace Vulkan diff --git a/common/Vulkan/Texture.h b/common/Vulkan/Texture.h new file mode 100644 index 0000000000..fa4c480d85 --- /dev/null +++ b/common/Vulkan/Texture.h @@ -0,0 +1,92 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . + */ + +#pragma once +#include "common/Pcsx2Defs.h" +#include "common/Vulkan/Loader.h" +#include +#include + +namespace Vulkan +{ + class Texture + { + public: + Texture(); + Texture(Texture&& move); + Texture(const Texture&) = delete; + ~Texture(); + + Texture& operator=(Texture&& move); + Texture& operator=(const Texture&) = delete; + + __fi bool IsValid() const { return (m_image != VK_NULL_HANDLE); } + + /// An image is considered owned/managed if we control the memory. 
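+        /// Adopted images (e.g. swap chain backbuffers) are not owned: only the view is created and
+        /// destroyed here, while the image memory belongs to whoever created the image.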
+ __fi bool IsOwned() const { return (m_allocation != VK_NULL_HANDLE); } + + __fi u32 GetWidth() const { return m_width; } + __fi u32 GetHeight() const { return m_height; } + __fi u32 GetLevels() const { return m_levels; } + __fi u32 GetLayers() const { return m_layers; } + __fi u32 GetMipWidth(u32 level) const { return std::max(m_width >> level, 1u); } + __fi u32 GetMipHeight(u32 level) const { return std::max(m_height >> level, 1u); } + __fi VkFormat GetFormat() const { return m_format; } + __fi VkSampleCountFlagBits GetSamples() const { return m_samples; } + __fi VkImageLayout GetLayout() const { return m_layout; } + __fi VkImageViewType GetViewType() const { return m_view_type; } + __fi VkImage GetImage() const { return m_image; } + __fi VmaAllocation GetAllocation() const { return m_allocation; } + __fi VkImageView GetView() const { return m_view; } + + bool Create(u32 width, u32 height, u32 levels, u32 layers, VkFormat format, VkSampleCountFlagBits samples, + VkImageViewType view_type, VkImageTiling tiling, VkImageUsageFlags usage, + const VkComponentMapping* swizzle = nullptr); + + bool Adopt(VkImage existing_image, VkImageViewType view_type, u32 width, u32 height, u32 levels, u32 layers, + VkFormat format, VkSampleCountFlagBits samples, const VkComponentMapping* swizzle = nullptr); + + void Destroy(bool defer = true); + + // Used when the render pass is changing the image layout, or to force it to + // VK_IMAGE_LAYOUT_UNDEFINED, if the existing contents of the image is + // irrelevant and will not be loaded. + void OverrideImageLayout(VkImageLayout new_layout); + + void TransitionToLayout(VkCommandBuffer command_buffer, VkImageLayout new_layout); + void TransitionSubresourcesToLayout(VkCommandBuffer command_buffer, u32 start_level, u32 num_levels, + u32 start_layer, u32 num_layers, VkImageLayout old_layout, VkImageLayout new_layout); + + VkFramebuffer CreateFramebuffer(VkRenderPass render_pass); + + void UpdateFromBuffer(VkCommandBuffer cmdbuf, u32 level, u32 layer, u32 x, u32 y, u32 width, u32 height, + u32 row_length, VkBuffer buffer, u32 buffer_offset); + + private: + u32 m_width = 0; + u32 m_height = 0; + u32 m_levels = 0; + u32 m_layers = 0; + VkFormat m_format = VK_FORMAT_UNDEFINED; + VkSampleCountFlagBits m_samples = VK_SAMPLE_COUNT_1_BIT; + VkImageViewType m_view_type = VK_IMAGE_VIEW_TYPE_2D; + VkImageLayout m_layout = VK_IMAGE_LAYOUT_UNDEFINED; + + VkImage m_image = VK_NULL_HANDLE; + VmaAllocation m_allocation = VK_NULL_HANDLE; + VkImageView m_view = VK_NULL_HANDLE; + }; + +} // namespace Vulkan diff --git a/common/Vulkan/Util.cpp b/common/Vulkan/Util.cpp new file mode 100644 index 0000000000..a48ce213b5 --- /dev/null +++ b/common/Vulkan/Util.cpp @@ -0,0 +1,352 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . 
+ */
+
+#include "common/Vulkan/Util.h"
+#include "common/Vulkan/Context.h"
+#include "common/Vulkan/ShaderCompiler.h"
+#include "common/Assertions.h"
+#include "common/Console.h"
+#include "common/StringUtil.h"
+
+#include <cstdarg>
+
+namespace Vulkan
+{
+	namespace Util
+	{
+		bool IsDepthFormat(VkFormat format)
+		{
+			switch (format)
+			{
+				case VK_FORMAT_D16_UNORM:
+				case VK_FORMAT_D16_UNORM_S8_UINT:
+				case VK_FORMAT_D24_UNORM_S8_UINT:
+				case VK_FORMAT_D32_SFLOAT:
+				case VK_FORMAT_D32_SFLOAT_S8_UINT:
+					return true;
+				default:
+					return false;
+			}
+		}
+
+		bool IsDepthStencilFormat(VkFormat format)
+		{
+			switch (format)
+			{
+				case VK_FORMAT_D16_UNORM_S8_UINT:
+				case VK_FORMAT_D24_UNORM_S8_UINT:
+				case VK_FORMAT_D32_SFLOAT_S8_UINT:
+					return true;
+				default:
+					return false;
+			}
+		}
+
+		VkFormat GetLinearFormat(VkFormat format)
+		{
+			switch (format)
+			{
+				case VK_FORMAT_R8_SRGB:
+					return VK_FORMAT_R8_UNORM;
+				case VK_FORMAT_R8G8_SRGB:
+					return VK_FORMAT_R8G8_UNORM;
+				case VK_FORMAT_R8G8B8_SRGB:
+					return VK_FORMAT_R8G8B8_UNORM;
+				case VK_FORMAT_R8G8B8A8_SRGB:
+					return VK_FORMAT_R8G8B8A8_UNORM;
+				case VK_FORMAT_B8G8R8_SRGB:
+					return VK_FORMAT_B8G8R8_UNORM;
+				case VK_FORMAT_B8G8R8A8_SRGB:
+					return VK_FORMAT_B8G8R8A8_UNORM;
+				default:
+					return format;
+			}
+		}
+
+		u32 GetTexelSize(VkFormat format)
+		{
+			// Only contains pixel formats we use.
+			switch (format)
+			{
+				case VK_FORMAT_R8_UNORM:
+					return 1;
+
+				case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
+				case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
+				case VK_FORMAT_R5G6B5_UNORM_PACK16:
+				case VK_FORMAT_B5G6R5_UNORM_PACK16:
+				case VK_FORMAT_R16_UINT:
+					return 2;
+
+				case VK_FORMAT_R8G8B8A8_UNORM:
+				case VK_FORMAT_B8G8R8A8_UNORM:
+				case VK_FORMAT_R32_UINT:
+				case VK_FORMAT_R32_SFLOAT:
+				case VK_FORMAT_D32_SFLOAT:
+					return 4;
+
+				case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
+					return 8;
+
+				case VK_FORMAT_BC2_UNORM_BLOCK:
+				case VK_FORMAT_BC3_UNORM_BLOCK:
+				case VK_FORMAT_BC7_UNORM_BLOCK:
+					return 16;
+
+				default:
+					pxFailRel("Unhandled pixel format");
+					return 1;
+			}
+		}
+
+		VkBlendFactor GetAlphaBlendFactor(VkBlendFactor factor)
+		{
+			switch (factor)
+			{
+				case VK_BLEND_FACTOR_SRC_COLOR:
+					return VK_BLEND_FACTOR_SRC_ALPHA;
+				case VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR:
+					return VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
+				case VK_BLEND_FACTOR_DST_COLOR:
+					return VK_BLEND_FACTOR_DST_ALPHA;
+				case VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR:
+					return VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA;
+				default:
+					return factor;
+			}
+		}
+
+		void SetViewport(VkCommandBuffer command_buffer, int x, int y, int width, int height,
+			float min_depth /*= 0.0f*/, float max_depth /*= 1.0f*/)
+		{
+			const VkViewport vp{static_cast<float>(x), static_cast<float>(y), static_cast<float>(width),
+				static_cast<float>(height), min_depth, max_depth};
+			vkCmdSetViewport(command_buffer, 0, 1, &vp);
+		}
+
+		void SetScissor(VkCommandBuffer command_buffer, int x, int y, int width, int height)
+		{
+			const VkRect2D scissor{{x, y}, {static_cast<u32>(width), static_cast<u32>(height)}};
+			vkCmdSetScissor(command_buffer, 0, 1, &scissor);
+		}
+
+		void SetViewportAndScissor(VkCommandBuffer command_buffer, int x, int y, int width, int height,
+			float min_depth /* = 0.0f */, float max_depth /* = 1.0f */)
+		{
+			SetViewport(command_buffer, x, y, width, height, min_depth, max_depth);
+			SetScissor(command_buffer, x, y, width, height);
+		}
+
+		void SafeDestroyFramebuffer(VkFramebuffer& fb)
+		{
+			if (fb != VK_NULL_HANDLE)
+			{
+				vkDestroyFramebuffer(g_vulkan_context->GetDevice(), fb, nullptr);
+				fb = VK_NULL_HANDLE;
+			}
+		}
+
+		void SafeDestroyShaderModule(VkShaderModule& sm)
+		{
+			if (sm != VK_NULL_HANDLE)
+			{
+				vkDestroyShaderModule(g_vulkan_context->GetDevice(), sm, nullptr);
+				sm = VK_NULL_HANDLE;
+			}
+		}
+
+		void 
SafeDestroyPipeline(VkPipeline& p) + { + if (p != VK_NULL_HANDLE) + { + vkDestroyPipeline(g_vulkan_context->GetDevice(), p, nullptr); + p = VK_NULL_HANDLE; + } + } + + void SafeDestroyPipelineLayout(VkPipelineLayout& pl) + { + if (pl != VK_NULL_HANDLE) + { + vkDestroyPipelineLayout(g_vulkan_context->GetDevice(), pl, nullptr); + pl = VK_NULL_HANDLE; + } + } + + void SafeDestroyDescriptorSetLayout(VkDescriptorSetLayout& dsl) + { + if (dsl != VK_NULL_HANDLE) + { + vkDestroyDescriptorSetLayout(g_vulkan_context->GetDevice(), dsl, nullptr); + dsl = VK_NULL_HANDLE; + } + } + + void SafeDestroyBufferView(VkBufferView& bv) + { + if (bv != VK_NULL_HANDLE) + { + vkDestroyBufferView(g_vulkan_context->GetDevice(), bv, nullptr); + bv = VK_NULL_HANDLE; + } + } + + void SafeDestroyImageView(VkImageView& iv) + { + if (iv != VK_NULL_HANDLE) + { + vkDestroyImageView(g_vulkan_context->GetDevice(), iv, nullptr); + iv = VK_NULL_HANDLE; + } + } + + void SafeDestroySampler(VkSampler& samp) + { + if (samp != VK_NULL_HANDLE) + { + vkDestroySampler(g_vulkan_context->GetDevice(), samp, nullptr); + samp = VK_NULL_HANDLE; + } + } + + void SafeDestroySemaphore(VkSemaphore& sem) + { + if (sem != VK_NULL_HANDLE) + { + vkDestroySemaphore(g_vulkan_context->GetDevice(), sem, nullptr); + sem = VK_NULL_HANDLE; + } + } + + void SafeFreeGlobalDescriptorSet(VkDescriptorSet& ds) + { + if (ds != VK_NULL_HANDLE) + { + g_vulkan_context->FreeGlobalDescriptorSet(ds); + ds = VK_NULL_HANDLE; + } + } + + void BufferMemoryBarrier(VkCommandBuffer command_buffer, VkBuffer buffer, VkAccessFlags src_access_mask, + VkAccessFlags dst_access_mask, VkDeviceSize offset, VkDeviceSize size, VkPipelineStageFlags src_stage_mask, + VkPipelineStageFlags dst_stage_mask) + { + VkBufferMemoryBarrier buffer_info = { + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // VkStructureType sType + nullptr, // const void* pNext + src_access_mask, // VkAccessFlags srcAccessMask + dst_access_mask, // VkAccessFlags dstAccessMask + VK_QUEUE_FAMILY_IGNORED, // uint32_t srcQueueFamilyIndex + VK_QUEUE_FAMILY_IGNORED, // uint32_t dstQueueFamilyIndex + buffer, // VkBuffer buffer + offset, // VkDeviceSize offset + size // VkDeviceSize size + }; + + vkCmdPipelineBarrier( + command_buffer, src_stage_mask, dst_stage_mask, 0, 0, nullptr, 1, &buffer_info, 0, nullptr); + } + + const char* VkResultToString(VkResult res) + { + switch (res) + { + case VK_SUCCESS: + return "VK_SUCCESS"; + + case VK_NOT_READY: + return "VK_NOT_READY"; + + case VK_TIMEOUT: + return "VK_TIMEOUT"; + + case VK_EVENT_SET: + return "VK_EVENT_SET"; + + case VK_EVENT_RESET: + return "VK_EVENT_RESET"; + + case VK_INCOMPLETE: + return "VK_INCOMPLETE"; + + case VK_ERROR_OUT_OF_HOST_MEMORY: + return "VK_ERROR_OUT_OF_HOST_MEMORY"; + + case VK_ERROR_OUT_OF_DEVICE_MEMORY: + return "VK_ERROR_OUT_OF_DEVICE_MEMORY"; + + case VK_ERROR_INITIALIZATION_FAILED: + return "VK_ERROR_INITIALIZATION_FAILED"; + + case VK_ERROR_DEVICE_LOST: + return "VK_ERROR_DEVICE_LOST"; + + case VK_ERROR_MEMORY_MAP_FAILED: + return "VK_ERROR_MEMORY_MAP_FAILED"; + + case VK_ERROR_LAYER_NOT_PRESENT: + return "VK_ERROR_LAYER_NOT_PRESENT"; + + case VK_ERROR_EXTENSION_NOT_PRESENT: + return "VK_ERROR_EXTENSION_NOT_PRESENT"; + + case VK_ERROR_FEATURE_NOT_PRESENT: + return "VK_ERROR_FEATURE_NOT_PRESENT"; + + case VK_ERROR_INCOMPATIBLE_DRIVER: + return "VK_ERROR_INCOMPATIBLE_DRIVER"; + + case VK_ERROR_TOO_MANY_OBJECTS: + return "VK_ERROR_TOO_MANY_OBJECTS"; + + case VK_ERROR_FORMAT_NOT_SUPPORTED: + return "VK_ERROR_FORMAT_NOT_SUPPORTED"; + + case 
VK_ERROR_SURFACE_LOST_KHR: + return "VK_ERROR_SURFACE_LOST_KHR"; + + case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: + return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR"; + + case VK_SUBOPTIMAL_KHR: + return "VK_SUBOPTIMAL_KHR"; + + case VK_ERROR_OUT_OF_DATE_KHR: + return "VK_ERROR_OUT_OF_DATE_KHR"; + + case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: + return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR"; + + case VK_ERROR_VALIDATION_FAILED_EXT: + return "VK_ERROR_VALIDATION_FAILED_EXT"; + + case VK_ERROR_INVALID_SHADER_NV: + return "VK_ERROR_INVALID_SHADER_NV"; + + default: + return "UNKNOWN_VK_RESULT"; + } + } + + void LogVulkanResult(const char* func_name, VkResult res, const char* msg, ...) + { + std::va_list ap; + va_start(ap, msg); + std::string real_msg = StringUtil::StdStringFromFormatV(msg, ap); + va_end(ap); + + Console.Error( + "(%s) %s (%d: %s)", func_name, real_msg.c_str(), static_cast(res), VkResultToString(res)); + } + } // namespace Util +} // namespace Vulkan diff --git a/common/Vulkan/Util.h b/common/Vulkan/Util.h new file mode 100644 index 0000000000..e1863d2064 --- /dev/null +++ b/common/Vulkan/Util.h @@ -0,0 +1,135 @@ +/* PCSX2 - PS2 Emulator for PCs + * Copyright (C) 2002-2021 PCSX2 Dev Team + * + * PCSX2 is free software: you can redistribute it and/or modify it under the terms + * of the GNU Lesser General Public License as published by the Free Software Found- + * ation, either version 3 of the License, or (at your option) any later version. + * + * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; + * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + * PURPOSE. See the GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with PCSX2. + * If not, see . + */ + +#pragma once + +#include "common/Pcsx2Defs.h" +#include "common/StringUtil.h" +#include "common/Vulkan/Loader.h" +#include +#include +#include +#include + +namespace Vulkan +{ + namespace Util + { + bool IsDepthFormat(VkFormat format); + bool IsDepthStencilFormat(VkFormat format); + VkFormat GetLinearFormat(VkFormat format); + u32 GetTexelSize(VkFormat format); + + // Safe destroy helpers + void SafeDestroyFramebuffer(VkFramebuffer& fb); + void SafeDestroyShaderModule(VkShaderModule& sm); + void SafeDestroyPipeline(VkPipeline& p); + void SafeDestroyPipelineLayout(VkPipelineLayout& pl); + void SafeDestroyDescriptorSetLayout(VkDescriptorSetLayout& dsl); + void SafeDestroyBufferView(VkBufferView& bv); + void SafeDestroyImageView(VkImageView& iv); + void SafeDestroySampler(VkSampler& samp); + void SafeDestroySemaphore(VkSemaphore& sem); + void SafeFreeGlobalDescriptorSet(VkDescriptorSet& ds); + + // Wrapper for creating an barrier on a buffer + void BufferMemoryBarrier(VkCommandBuffer command_buffer, VkBuffer buffer, VkAccessFlags src_access_mask, + VkAccessFlags dst_access_mask, VkDeviceSize offset, VkDeviceSize size, VkPipelineStageFlags src_stage_mask, + VkPipelineStageFlags dst_stage_mask); + + const char* VkResultToString(VkResult res); + void LogVulkanResult(const char* func_name, VkResult res, const char* msg, ...) /*printflike(4, 5)*/; + +#define LOG_VULKAN_ERROR(res, ...) ::Vulkan::Util::LogVulkanResult(__func__, res, __VA_ARGS__) + +#if defined(_DEBUG) + +// We can't use the templates below because they're all the same type on 32-bit. 
+#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || \
+	defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+#define ENABLE_VULKAN_DEBUG_OBJECTS 1
+#endif
+
+#endif
+
+#ifdef ENABLE_VULKAN_DEBUG_OBJECTS
+
+		// Provides a compile-time mapping from a Vulkan handle type to its matching VkObjectType value.
+		template <typename T>
+		struct VkObjectTypeMap;
+
+		// clang-format off
+template<> struct VkObjectTypeMap<VkInstance> { using type = VkInstance; static constexpr VkObjectType value = VK_OBJECT_TYPE_INSTANCE; };
+template<> struct VkObjectTypeMap<VkPhysicalDevice> { using type = VkPhysicalDevice; static constexpr VkObjectType value = VK_OBJECT_TYPE_PHYSICAL_DEVICE; };
+template<> struct VkObjectTypeMap<VkDevice> { using type = VkDevice; static constexpr VkObjectType value = VK_OBJECT_TYPE_DEVICE; };
+template<> struct VkObjectTypeMap<VkQueue> { using type = VkQueue; static constexpr VkObjectType value = VK_OBJECT_TYPE_QUEUE; };
+template<> struct VkObjectTypeMap<VkSemaphore> { using type = VkSemaphore; static constexpr VkObjectType value = VK_OBJECT_TYPE_SEMAPHORE; };
+template<> struct VkObjectTypeMap<VkCommandBuffer> { using type = VkCommandBuffer; static constexpr VkObjectType value = VK_OBJECT_TYPE_COMMAND_BUFFER; };
+template<> struct VkObjectTypeMap<VkFence> { using type = VkFence; static constexpr VkObjectType value = VK_OBJECT_TYPE_FENCE; };
+template<> struct VkObjectTypeMap<VkDeviceMemory> { using type = VkDeviceMemory; static constexpr VkObjectType value = VK_OBJECT_TYPE_DEVICE_MEMORY; };
+template<> struct VkObjectTypeMap<VkBuffer> { using type = VkBuffer; static constexpr VkObjectType value = VK_OBJECT_TYPE_BUFFER; };
+template<> struct VkObjectTypeMap<VkImage> { using type = VkImage; static constexpr VkObjectType value = VK_OBJECT_TYPE_IMAGE; };
+template<> struct VkObjectTypeMap<VkEvent> { using type = VkEvent; static constexpr VkObjectType value = VK_OBJECT_TYPE_EVENT; };
+template<> struct VkObjectTypeMap<VkQueryPool> { using type = VkQueryPool; static constexpr VkObjectType value = VK_OBJECT_TYPE_QUERY_POOL; };
+template<> struct VkObjectTypeMap<VkBufferView> { using type = VkBufferView; static constexpr VkObjectType value = VK_OBJECT_TYPE_BUFFER_VIEW; };
+template<> struct VkObjectTypeMap<VkImageView> { using type = VkImageView; static constexpr VkObjectType value = VK_OBJECT_TYPE_IMAGE_VIEW; };
+template<> struct VkObjectTypeMap<VkShaderModule> { using type = VkShaderModule; static constexpr VkObjectType value = VK_OBJECT_TYPE_SHADER_MODULE; };
+template<> struct VkObjectTypeMap<VkPipelineCache> { using type = VkPipelineCache; static constexpr VkObjectType value = VK_OBJECT_TYPE_PIPELINE_CACHE; };
+template<> struct VkObjectTypeMap<VkPipelineLayout> { using type = VkPipelineLayout; static constexpr VkObjectType value = VK_OBJECT_TYPE_PIPELINE_LAYOUT; };
+template<> struct VkObjectTypeMap<VkRenderPass> { using type = VkRenderPass; static constexpr VkObjectType value = VK_OBJECT_TYPE_RENDER_PASS; };
+template<> struct VkObjectTypeMap<VkPipeline> { using type = VkPipeline; static constexpr VkObjectType value = VK_OBJECT_TYPE_PIPELINE; };
+template<> struct VkObjectTypeMap<VkDescriptorSetLayout> { using type = VkDescriptorSetLayout; static constexpr VkObjectType value = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT; };
+template<> struct VkObjectTypeMap<VkSampler> { using type = VkSampler; static constexpr VkObjectType value = VK_OBJECT_TYPE_SAMPLER; };
+template<> struct VkObjectTypeMap<VkDescriptorPool> { using type = VkDescriptorPool; static constexpr VkObjectType value = VK_OBJECT_TYPE_DESCRIPTOR_POOL; };
+template<> struct VkObjectTypeMap<VkDescriptorSet> { using type = VkDescriptorSet; static constexpr VkObjectType value = VK_OBJECT_TYPE_DESCRIPTOR_SET; };
+template<> struct VkObjectTypeMap<VkFramebuffer> { 
using type = VkFramebuffer; static constexpr VkObjectType value = VK_OBJECT_TYPE_FRAMEBUFFER; };
+template<> struct VkObjectTypeMap<VkCommandPool> { using type = VkCommandPool; static constexpr VkObjectType value = VK_OBJECT_TYPE_COMMAND_POOL; };
+template<> struct VkObjectTypeMap<VkDescriptorUpdateTemplate> { using type = VkDescriptorUpdateTemplate; static constexpr VkObjectType value = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE; };
+template<> struct VkObjectTypeMap<VkSurfaceKHR> { using type = VkSurfaceKHR; static constexpr VkObjectType value = VK_OBJECT_TYPE_SURFACE_KHR; };
+template<> struct VkObjectTypeMap<VkSwapchainKHR> { using type = VkSwapchainKHR; static constexpr VkObjectType value = VK_OBJECT_TYPE_SWAPCHAIN_KHR; };
+template<> struct VkObjectTypeMap<VkDebugUtilsMessengerEXT> { using type = VkDebugUtilsMessengerEXT; static constexpr VkObjectType value = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT; };
+		// clang-format on
+
+#endif
+
+		static inline void SetObjectName(
+			VkDevice device, void* object_handle, VkObjectType object_type, const char* format, va_list ap)
+		{
+#ifdef ENABLE_VULKAN_DEBUG_OBJECTS
+			if (!vkSetDebugUtilsObjectNameEXT)
+			{
+				return;
+			}
+
+			const std::string str(StringUtil::StdStringFromFormatV(format, ap));
+			const VkDebugUtilsObjectNameInfoEXT nameInfo{VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, nullptr,
+				object_type, reinterpret_cast<uint64_t>(object_handle), str.c_str()};
+			vkSetDebugUtilsObjectNameEXT(device, &nameInfo);
+#endif
+		}
+
+		template <typename T>
+		static inline void SetObjectName(VkDevice device, T object_handle, const char* format, ...)
+		{
+#ifdef ENABLE_VULKAN_DEBUG_OBJECTS
+			std::va_list ap;
+			va_start(ap, format);
+			SetObjectName(device, reinterpret_cast<void*>((typename VkObjectTypeMap<T>::type)object_handle),
+				VkObjectTypeMap<T>::value, format, ap);
+			va_end(ap);
+#endif
+		}
+	} // namespace Util
+} // namespace Vulkan
diff --git a/common/Vulkan/vk_mem_alloc.cpp b/common/Vulkan/vk_mem_alloc.cpp
new file mode 100644
index 0000000000..2515850139
--- /dev/null
+++ b/common/Vulkan/vk_mem_alloc.cpp
@@ -0,0 +1,20 @@
+/* PCSX2 - PS2 Emulator for PCs
+ * Copyright (C) 2002-2021 PCSX2 Dev Team
+ *
+ * PCSX2 is free software: you can redistribute it and/or modify it under the terms
+ * of the GNU Lesser General Public License as published by the Free Software Found-
+ * ation, either version 3 of the License, or (at your option) any later version.
+ *
+ * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+ * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ * PURPOSE. See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along with PCSX2.
+ * If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "common/PrecompiledHeader.h"
+
+#define VMA_IMPLEMENTATION 1
+
+#include "common/Vulkan/Loader.h"
diff --git a/common/WindowInfo.h b/common/WindowInfo.h
index 0d1e80d268..55136dfa9f 100644
--- a/common/WindowInfo.h
+++ b/common/WindowInfo.h
@@ -37,6 +37,9 @@ struct WindowInfo
 	/// Abstract handle to the window. This depends on the surface type.
 	void* window_handle = nullptr;
 
+	/// For platforms where a separate surface/layer handle is needed, it is stored here (e.g. macOS).
+	void* surface_handle = nullptr;
+
 	/// Width of the surface in pixels.
u32 surface_width = 0; diff --git a/common/common.vcxproj b/common/common.vcxproj index 34d6b6e48d..738da06a8a 100644 --- a/common/common.vcxproj +++ b/common/common.vcxproj @@ -33,12 +33,13 @@ - $(SolutionDir)3rdparty\glad\include;%(AdditionalIncludeDirectories) + $(SolutionDir)3rdparty\glad\include;$(SolutionDir)3rdparty\glslang\glslang;%(AdditionalIncludeDirectories) Async Use PrecompiledHeader.h PrecompiledHeader.h WIN32_LEAN_AND_MEAN;NOMINMAX;%(PreprocessorDefinitions) + $(IntDir)%(RelativeDir) @@ -64,6 +65,16 @@ + + + + + + + + + + @@ -103,6 +114,7 @@ _M_X86_32;%(PreprocessorDefinitions) _M_X86_64;%(PreprocessorDefinitions) + @@ -139,6 +151,16 @@ + + + + + + + + + + @@ -174,6 +196,9 @@ {c0293b32-5acf-40f0-aa6c-e6da6f3bf33a} + + {ef6834a9-11f3-4331-bc34-21b325abb180} + {0fae817d-9a32-4830-857e-81da57246e16} @@ -188,4 +213,4 @@ - + \ No newline at end of file diff --git a/common/common.vcxproj.filters b/common/common.vcxproj.filters index f2b229c59e..8ccf0416f3 100644 --- a/common/common.vcxproj.filters +++ b/common/common.vcxproj.filters @@ -142,6 +142,33 @@ Source Files + + Source Files\Vulkan + + + Source Files\Vulkan + + + Source Files\Vulkan + + + Source Files\Vulkan + + + Source Files\Vulkan + + + Source Files\Vulkan + + + Source Files\Vulkan + + + Source Files\Vulkan + + + Source Files\Vulkan + Source Files @@ -151,6 +178,9 @@ Source Files\GL + + Source Files\Vulkan + @@ -342,6 +372,36 @@ Header Files + + Header Files\Vulkan + + + Header Files\Vulkan + + + Header Files\Vulkan + + + Header Files\Vulkan + + + Header Files\Vulkan + + + Header Files\Vulkan + + + Header Files\Vulkan + + + Header Files\Vulkan + + + Header Files\Vulkan + + + Header Files\Vulkan + Header Files @@ -365,15 +425,24 @@ {5e76b340-cb0e-4946-83ec-7d72e397cac8} + + {94154238-8b02-44f8-a7b8-3612e7bfa33c} + + + {46f36c68-0e0e-4acd-a621-3365e3167c4f} + Source Files + + Source Files\Vulkan + Source Files - + \ No newline at end of file diff --git a/common/vsprops/3rdpartyDeps.props b/common/vsprops/3rdpartyDeps.props index 26a85bbd37..5364a9864b 100644 --- a/common/vsprops/3rdpartyDeps.props +++ b/common/vsprops/3rdpartyDeps.props @@ -6,7 +6,7 @@ - $(SolutionDir)3rdparty\;$(SolutionDir)3rdparty\soundtouch\soundtouch\;$(SolutionDir)3rdparty\rapidyaml\rapidyaml\src;$(SolutionDir)3rdparty\rapidyaml\rapidyaml\ext\c4core\src;$(SolutionDir)3rdparty\fmt\fmt\include\;$(SolutionDir)3rdparty\libchdr\libchdr\include;$(SolutionDir)3rdparty\wil\include;$(SolutionDir)3rdparty\include\;%(AdditionalIncludeDirectories) + $(SolutionDir)3rdparty\;$(SolutionDir)3rdparty\soundtouch\soundtouch\;$(SolutionDir)3rdparty\rapidyaml\rapidyaml\src;$(SolutionDir)3rdparty\rapidyaml\rapidyaml\ext\c4core\src;$(SolutionDir)3rdparty\fmt\fmt\include\;$(SolutionDir)3rdparty\libchdr\libchdr\include;$(SolutionDir)3rdparty\wil\include;$(SolutionDir)3rdparty\Vulkan-Headers\include\;$(SolutionDir)3rdparty\include\;%(AdditionalIncludeDirectories) WIL_SUPPRESS_EXCEPTIONS;%(PreprocessorDefinitions)
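
Reviewer note (not part of the patch): the sketch below shows how the helpers added in this commit are intended to fit together -- Vulkan::Texture for image creation and uploads, and Vulkan::Util::SetObjectName for debug labels. It is an illustrative sketch only; the hypothetical function UploadExampleTexture and its inputs (an initialized g_vulkan_context, a command buffer `cmdbuf` currently being recorded, and a staging VkBuffer `staging` holding width*height RGBA8 texels at offset 0) are assumptions for the example, not something this commit provides.

// Illustrative sketch only -- assumes g_vulkan_context is initialized, `cmdbuf` is being
// recorded, and `staging` already contains the pixel data (hypothetical setup).
#include "common/Vulkan/Context.h"
#include "common/Vulkan/Texture.h"
#include "common/Vulkan/Util.h"

static bool UploadExampleTexture(VkCommandBuffer cmdbuf, VkBuffer staging, u32 width, u32 height)
{
	Vulkan::Texture tex;
	if (!tex.Create(width, height, 1, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT,
			VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
			VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT))
		return false;

	// Attach a debug label; this compiles to nothing unless ENABLE_VULKAN_DEBUG_OBJECTS is defined.
	Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), tex.GetImage(), "Example texture %ux%u", width, height);

	// Move the whole image to TRANSFER_DST first so UpdateFromBuffer() does not have to
	// bounce the layout, then copy from the staging buffer and make it shader-readable.
	tex.TransitionToLayout(cmdbuf, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
	tex.UpdateFromBuffer(cmdbuf, 0, 0, 0, 0, width, height, width, staging, 0);
	tex.TransitionToLayout(cmdbuf, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);

	// Destroy(true) defers the vkDestroy*/VMA free until the GPU has finished with the image.
	tex.Destroy(true);
	return true;
}

In real code the texture would outlive the upload; the trailing Destroy(true) is only there to show that destruction is deferred rather than immediate, which is what lets the helpers be used mid-frame.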