Vulkan: Hook up VK_KHR_driver_properties

Connor McLaughlin 2022-10-03 13:11:38 +10:00
parent dc008d3cdc
commit 4dc3014fcd
4 changed files with 153 additions and 124 deletions

View File

@ -12,14 +12,12 @@ Log_SetChannel(Vulkan::Context);
std::unique_ptr<Vulkan::Context> g_vulkan_context;
namespace Vulkan {
enum : u32
{
TEXTURE_BUFFER_SIZE = 32 * 1024 * 1024,
};
Context::Context(VkInstance instance, VkPhysicalDevice physical_device, bool owns_device)
Vulkan::Context::Context(VkInstance instance, VkPhysicalDevice physical_device, bool owns_device)
: m_instance(instance), m_physical_device(physical_device)
{
// Read device physical memory properties, we need it for allocating buffers
@ -37,9 +35,9 @@ Context::Context(VkInstance instance, VkPhysicalDevice physical_device, bool own
std::max(m_device_properties.limits.optimalBufferCopyRowPitchAlignment, static_cast<VkDeviceSize>(1));
}
Context::~Context() = default;
Vulkan::Context::~Context() = default;
bool Context::CheckValidationLayerAvailablility()
bool Vulkan::Context::CheckValidationLayerAvailablility()
{
u32 extension_count = 0;
VkResult res = vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, nullptr);
@ -75,7 +73,8 @@ bool Context::CheckValidationLayerAvailablility()
}) != layer_list.end());
}
VkInstance Context::CreateVulkanInstance(const WindowInfo* wi, bool enable_debug_utils, bool enable_validation_layer)
VkInstance Vulkan::Context::CreateVulkanInstance(const WindowInfo* wi, bool enable_debug_utils,
bool enable_validation_layer)
{
ExtensionList enabled_extensions;
if (!SelectInstanceExtensions(&enabled_extensions, wi, enable_debug_utils))
@ -119,7 +118,8 @@ VkInstance Context::CreateVulkanInstance(const WindowInfo* wi, bool enable_debug
return instance;
}
bool Context::SelectInstanceExtensions(ExtensionList* extension_list, const WindowInfo* wi, bool enable_debug_utils)
bool Vulkan::Context::SelectInstanceExtensions(ExtensionList* extension_list, const WindowInfo* wi,
bool enable_debug_utils)
{
u32 extension_count = 0;
VkResult res = vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, nullptr);
@ -194,7 +194,7 @@ bool Context::SelectInstanceExtensions(ExtensionList* extension_list, const Wind
return true;
}
Context::GPUList Context::EnumerateGPUs(VkInstance instance)
Vulkan::Context::GPUList Vulkan::Context::EnumerateGPUs(VkInstance instance)
{
u32 gpu_count = 0;
VkResult res = vkEnumeratePhysicalDevices(instance, &gpu_count, nullptr);
@ -217,7 +217,7 @@ Context::GPUList Context::EnumerateGPUs(VkInstance instance)
return gpus;
}
Context::GPUNameList Context::EnumerateGPUNames(VkInstance instance)
Vulkan::Context::GPUNameList Vulkan::Context::EnumerateGPUNames(VkInstance instance)
{
u32 gpu_count = 0;
VkResult res = vkEnumeratePhysicalDevices(instance, &gpu_count, nullptr);
@ -267,8 +267,9 @@ Context::GPUNameList Context::EnumerateGPUNames(VkInstance instance)
return gpu_names;
}
bool Context::Create(std::string_view gpu_name, const WindowInfo* wi, std::unique_ptr<SwapChain>* out_swap_chain,
bool threaded_presentation, bool enable_debug_utils, bool enable_validation_layer)
bool Vulkan::Context::Create(std::string_view gpu_name, const WindowInfo* wi,
std::unique_ptr<SwapChain>* out_swap_chain, bool threaded_presentation,
bool enable_debug_utils, bool enable_validation_layer)
{
AssertMsg(!g_vulkan_context, "Has no current context");
@ -364,13 +365,11 @@ bool Context::Create(std::string_view gpu_name, const WindowInfo* wi, std::uniqu
return true;
}
bool Context::CreateFromExistingInstance(VkInstance instance, VkPhysicalDevice gpu, VkSurfaceKHR surface,
bool take_ownership, bool enable_validation_layer, bool enable_debug_utils,
const char** required_device_extensions /* = nullptr */,
u32 num_required_device_extensions /* = 0 */,
const char** required_device_layers /* = nullptr */,
u32 num_required_device_layers /* = 0 */,
const VkPhysicalDeviceFeatures* required_features /* = nullptr */)
bool Vulkan::Context::CreateFromExistingInstance(
VkInstance instance, VkPhysicalDevice gpu, VkSurfaceKHR surface, bool take_ownership, bool enable_validation_layer,
bool enable_debug_utils, const char** required_device_extensions /* = nullptr */,
u32 num_required_device_extensions /* = 0 */, const char** required_device_layers /* = nullptr */,
u32 num_required_device_layers /* = 0 */, const VkPhysicalDeviceFeatures* required_features /* = nullptr */)
{
g_vulkan_context.reset(new Context(instance, gpu, take_ownership));
@ -391,7 +390,7 @@ bool Context::CreateFromExistingInstance(VkInstance instance, VkPhysicalDevice g
return true;
}
void Context::Destroy()
void Vulkan::Context::Destroy()
{
AssertMsg(g_vulkan_context, "Has context");
@ -421,7 +420,7 @@ void Context::Destroy()
g_vulkan_context.reset();
}
bool Context::SelectDeviceExtensions(ExtensionList* extension_list, bool enable_surface)
bool Vulkan::Context::SelectDeviceExtensions(ExtensionList* extension_list, bool enable_surface)
{
u32 extension_count = 0;
VkResult res = vkEnumerateDeviceExtensionProperties(m_physical_device, nullptr, &extension_count, nullptr);
@ -476,7 +475,7 @@ bool Context::SelectDeviceExtensions(ExtensionList* extension_list, bool enable_
return true;
}
bool Context::SelectDeviceFeatures(const VkPhysicalDeviceFeatures* required_features)
bool Vulkan::Context::SelectDeviceFeatures(const VkPhysicalDeviceFeatures* required_features)
{
VkPhysicalDeviceFeatures available_features;
vkGetPhysicalDeviceFeatures(m_physical_device, &available_features);
@ -490,9 +489,10 @@ bool Context::SelectDeviceFeatures(const VkPhysicalDeviceFeatures* required_feat
return true;
}
bool Context::CreateDevice(VkSurfaceKHR surface, bool enable_validation_layer, const char** required_device_extensions,
u32 num_required_device_extensions, const char** required_device_layers,
u32 num_required_device_layers, const VkPhysicalDeviceFeatures* required_features)
bool Vulkan::Context::CreateDevice(VkSurfaceKHR surface, bool enable_validation_layer,
const char** required_device_extensions, u32 num_required_device_extensions,
const char** required_device_layers, u32 num_required_device_layers,
const VkPhysicalDeviceFeatures* required_features)
{
u32 queue_family_count;
vkGetPhysicalDeviceQueueFamilyProperties(m_physical_device, &queue_family_count, nullptr);
@ -639,10 +639,24 @@ bool Context::CreateDevice(VkSurfaceKHR surface, bool enable_validation_layer, c
queue_family_properties[m_graphics_queue_family_index].timestampValidBits,
m_device_properties.limits.timestampPeriod);
ProcessDeviceExtensions();
return true;
}
bool Context::CreateAllocator()
void Vulkan::Context::ProcessDeviceExtensions()
{
VkPhysicalDeviceProperties2 properties2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2};
if (m_optional_extensions.vk_khr_driver_properties)
{
m_device_driver_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES;
Util::AddPointerToChain(&properties2, &m_device_driver_properties);
}
// Query the device properties, filling in any extension structures chained above.
vkGetPhysicalDeviceProperties2(m_physical_device, &properties2);
}
bool Vulkan::Context::CreateAllocator()
{
VmaAllocatorCreateInfo ci = {};
ci.vulkanApiVersion = VK_API_VERSION_1_1;
@ -664,7 +678,7 @@ bool Context::CreateAllocator()
return true;
}
void Context::DestroyAllocator()
void Vulkan::Context::DestroyAllocator()
{
if (m_allocator == VK_NULL_HANDLE)
return;
@ -673,7 +687,7 @@ void Context::DestroyAllocator()
m_allocator = VK_NULL_HANDLE;
}
bool Context::CreateCommandBuffers()
bool Vulkan::Context::CreateCommandBuffers()
{
VkResult res;
@ -743,7 +757,7 @@ bool Context::CreateCommandBuffers()
return true;
}
void Context::DestroyCommandBuffers()
void Vulkan::Context::DestroyCommandBuffers()
{
for (FrameResources& resources : m_frame_resources)
{
@ -774,7 +788,7 @@ void Context::DestroyCommandBuffers()
}
}
bool Context::CreateGlobalDescriptorPool()
bool Vulkan::Context::CreateGlobalDescriptorPool()
{
// TODO: A better way to choose the number of descriptors.
VkDescriptorPoolSize pool_sizes[] = {{VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1024},
@ -813,7 +827,7 @@ bool Context::CreateGlobalDescriptorPool()
return true;
}
void Context::DestroyGlobalDescriptorPool()
void Vulkan::Context::DestroyGlobalDescriptorPool()
{
if (m_global_descriptor_pool != VK_NULL_HANDLE)
{
@ -822,7 +836,7 @@ void Context::DestroyGlobalDescriptorPool()
}
}
bool Context::CreateTextureStreamBuffer()
bool Vulkan::Context::CreateTextureStreamBuffer()
{
if (!m_texture_upload_buffer.Create(VK_BUFFER_USAGE_TRANSFER_SRC_BIT, TEXTURE_BUFFER_SIZE))
{
@ -833,7 +847,7 @@ bool Context::CreateTextureStreamBuffer()
return true;
}
void Context::DestroyRenderPassCache()
void Vulkan::Context::DestroyRenderPassCache()
{
for (auto& it : m_render_pass_cache)
vkDestroyRenderPass(m_device, it.second, nullptr);
@ -841,7 +855,7 @@ void Context::DestroyRenderPassCache()
m_render_pass_cache.clear();
}
VkDescriptorSet Context::AllocateDescriptorSet(VkDescriptorSetLayout set_layout)
VkDescriptorSet Vulkan::Context::AllocateDescriptorSet(VkDescriptorSetLayout set_layout)
{
VkDescriptorSetAllocateInfo allocate_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, nullptr,
m_frame_resources[m_current_frame].descriptor_pool, 1, &set_layout};
@ -858,7 +872,7 @@ VkDescriptorSet Context::AllocateDescriptorSet(VkDescriptorSetLayout set_layout)
return descriptor_set;
}
VkDescriptorSet Context::AllocateGlobalDescriptorSet(VkDescriptorSetLayout set_layout)
VkDescriptorSet Vulkan::Context::AllocateGlobalDescriptorSet(VkDescriptorSetLayout set_layout)
{
VkDescriptorSetAllocateInfo allocate_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, nullptr,
m_global_descriptor_pool, 1, &set_layout};
@ -871,12 +885,12 @@ VkDescriptorSet Context::AllocateGlobalDescriptorSet(VkDescriptorSetLayout set_l
return descriptor_set;
}
void Context::FreeGlobalDescriptorSet(VkDescriptorSet set)
void Vulkan::Context::FreeGlobalDescriptorSet(VkDescriptorSet set)
{
vkFreeDescriptorSets(m_device, m_global_descriptor_pool, 1, &set);
}
void Context::WaitForFenceCounter(u64 fence_counter)
void Vulkan::Context::WaitForFenceCounter(u64 fence_counter)
{
if (m_completed_fence_counter >= fence_counter)
return;
@ -895,26 +909,26 @@ void Context::WaitForFenceCounter(u64 fence_counter)
WaitForCommandBufferCompletion(index);
}
void Context::WaitForGPUIdle()
void Vulkan::Context::WaitForGPUIdle()
{
WaitForPresentComplete();
vkDeviceWaitIdle(m_device);
}
float Context::GetAndResetAccumulatedGPUTime()
float Vulkan::Context::GetAndResetAccumulatedGPUTime()
{
const float time = m_accumulated_gpu_time;
m_accumulated_gpu_time = 0.0f;
return time;
}
bool Context::SetEnableGPUTiming(bool enabled)
bool Vulkan::Context::SetEnableGPUTiming(bool enabled)
{
m_gpu_timing_enabled = enabled && m_gpu_timing_supported;
return (enabled == m_gpu_timing_enabled);
}
void Context::WaitForCommandBufferCompletion(u32 index)
void Vulkan::Context::WaitForCommandBufferCompletion(u32 index)
{
// Wait for this command buffer to be completed.
VkResult res = vkWaitForFences(m_device, 1, &m_frame_resources[index].fence, VK_TRUE, UINT64_MAX);
@ -944,10 +958,11 @@ void Context::WaitForCommandBufferCompletion(u32 index)
m_completed_fence_counter = now_completed_counter;
}
void Context::SubmitCommandBuffer(VkSemaphore wait_semaphore /* = VK_NULL_HANDLE */,
void Vulkan::Context::SubmitCommandBuffer(VkSemaphore wait_semaphore /* = VK_NULL_HANDLE */,
VkSemaphore signal_semaphore /* = VK_NULL_HANDLE */,
VkSwapchainKHR present_swap_chain /* = VK_NULL_HANDLE */,
uint32_t present_image_index /* = 0xFFFFFFFF */, bool submit_on_thread /* = false */)
uint32_t present_image_index /* = 0xFFFFFFFF */,
bool submit_on_thread /* = false */)
{
FrameResources& resources = m_frame_resources[m_current_frame];
@ -988,7 +1003,7 @@ void Context::SubmitCommandBuffer(VkSemaphore wait_semaphore /* = VK_NULL_HANDLE
m_present_queued_cv.notify_one();
}
void Context::DoSubmitCommandBuffer(u32 index, VkSemaphore wait_semaphore, VkSemaphore signal_semaphore)
void Vulkan::Context::DoSubmitCommandBuffer(u32 index, VkSemaphore wait_semaphore, VkSemaphore signal_semaphore)
{
FrameResources& resources = m_frame_resources[index];
@ -1017,7 +1032,8 @@ void Context::DoSubmitCommandBuffer(u32 index, VkSemaphore wait_semaphore, VkSem
}
}
void Context::DoPresent(VkSemaphore wait_semaphore, VkSwapchainKHR present_swap_chain, uint32_t present_image_index)
void Vulkan::Context::DoPresent(VkSemaphore wait_semaphore, VkSwapchainKHR present_swap_chain,
uint32_t present_image_index)
{
// Should have a signal semaphore.
Assert(wait_semaphore != VK_NULL_HANDLE);
@ -1041,7 +1057,7 @@ void Context::DoPresent(VkSemaphore wait_semaphore, VkSwapchainKHR present_swap_
}
}
void Context::WaitForPresentComplete()
void Vulkan::Context::WaitForPresentComplete()
{
if (m_present_done.load())
return;
@ -1050,7 +1066,7 @@ void Context::WaitForPresentComplete()
WaitForPresentComplete(lock);
}
void Context::WaitForPresentComplete(std::unique_lock<std::mutex>& lock)
void Vulkan::Context::WaitForPresentComplete(std::unique_lock<std::mutex>& lock)
{
if (m_present_done.load())
return;
@ -1058,7 +1074,7 @@ void Context::WaitForPresentComplete(std::unique_lock<std::mutex>& lock)
m_present_done_cv.wait(lock, [this]() { return m_present_done.load(); });
}
void Context::PresentThread()
void Vulkan::Context::PresentThread()
{
std::unique_lock<std::mutex> lock(m_present_mutex);
while (!m_present_thread_done.load())
@ -1077,14 +1093,14 @@ void Context::PresentThread()
}
}
void Context::StartPresentThread()
void Vulkan::Context::StartPresentThread()
{
Assert(!m_present_thread.joinable());
m_present_thread_done.store(false);
m_present_thread = std::thread(&Context::PresentThread, this);
}
void Context::StopPresentThread()
void Vulkan::Context::StopPresentThread()
{
if (!m_present_thread.joinable())
return;
@ -1099,12 +1115,12 @@ void Context::StopPresentThread()
m_present_thread.join();
}
void Context::MoveToNextCommandBuffer()
void Vulkan::Context::MoveToNextCommandBuffer()
{
ActivateCommandBuffer((m_current_frame + 1) % NUM_COMMAND_BUFFERS);
}
void Context::ActivateCommandBuffer(u32 index)
void Vulkan::Context::ActivateCommandBuffer(u32 index)
{
FrameResources& resources = m_frame_resources[index];
@ -1178,7 +1194,7 @@ void Context::ActivateCommandBuffer(u32 index)
vmaSetCurrentFrameIndex(m_allocator, static_cast<u32>(m_next_fence_counter));
}
void Context::ExecuteCommandBuffer(bool wait_for_completion)
void Vulkan::Context::ExecuteCommandBuffer(bool wait_for_completion)
{
// If we're waiting for completion, don't bother waking the worker thread.
const u32 current_frame = m_current_frame;
@ -1189,70 +1205,70 @@ void Context::ExecuteCommandBuffer(bool wait_for_completion)
WaitForCommandBufferCompletion(current_frame);
}
bool Context::CheckLastPresentFail()
bool Vulkan::Context::CheckLastPresentFail()
{
bool res = m_last_present_failed;
m_last_present_failed = false;
return res;
}
void Context::DeferBufferDestruction(VkBuffer object)
void Vulkan::Context::DeferBufferDestruction(VkBuffer object)
{
FrameResources& resources = m_frame_resources[m_current_frame];
resources.cleanup_resources.push_back([this, object]() { vkDestroyBuffer(m_device, object, nullptr); });
}
void Context::DeferBufferDestruction(VkBuffer object, VmaAllocation allocation)
void Vulkan::Context::DeferBufferDestruction(VkBuffer object, VmaAllocation allocation)
{
FrameResources& resources = m_frame_resources[m_current_frame];
resources.cleanup_resources.push_back(
[this, object, allocation]() { vmaDestroyBuffer(m_allocator, object, allocation); });
}
void Context::DeferBufferViewDestruction(VkBufferView object)
void Vulkan::Context::DeferBufferViewDestruction(VkBufferView object)
{
FrameResources& resources = m_frame_resources[m_current_frame];
resources.cleanup_resources.push_back([this, object]() { vkDestroyBufferView(m_device, object, nullptr); });
}
void Context::DeferDeviceMemoryDestruction(VkDeviceMemory object)
void Vulkan::Context::DeferDeviceMemoryDestruction(VkDeviceMemory object)
{
FrameResources& resources = m_frame_resources[m_current_frame];
resources.cleanup_resources.push_back([this, object]() { vkFreeMemory(m_device, object, nullptr); });
}
void Context::DeferFramebufferDestruction(VkFramebuffer object)
void Vulkan::Context::DeferFramebufferDestruction(VkFramebuffer object)
{
FrameResources& resources = m_frame_resources[m_current_frame];
resources.cleanup_resources.push_back([this, object]() { vkDestroyFramebuffer(m_device, object, nullptr); });
}
void Context::DeferImageDestruction(VkImage object)
void Vulkan::Context::DeferImageDestruction(VkImage object)
{
FrameResources& resources = m_frame_resources[m_current_frame];
resources.cleanup_resources.push_back([this, object]() { vkDestroyImage(m_device, object, nullptr); });
}
void Context::DeferImageDestruction(VkImage object, VmaAllocation allocation)
void Vulkan::Context::DeferImageDestruction(VkImage object, VmaAllocation allocation)
{
FrameResources& resources = m_frame_resources[m_current_frame];
resources.cleanup_resources.push_back(
[this, object, allocation]() { vmaDestroyImage(m_allocator, object, allocation); });
}
void Context::DeferImageViewDestruction(VkImageView object)
void Vulkan::Context::DeferImageViewDestruction(VkImageView object)
{
FrameResources& resources = m_frame_resources[m_current_frame];
resources.cleanup_resources.push_back([this, object]() { vkDestroyImageView(m_device, object, nullptr); });
}
void Context::DeferPipelineDestruction(VkPipeline pipeline)
void Vulkan::Context::DeferPipelineDestruction(VkPipeline pipeline)
{
FrameResources& resources = m_frame_resources[m_current_frame];
resources.cleanup_resources.push_back([this, pipeline]() { vkDestroyPipeline(m_device, pipeline, nullptr); });
}
VKAPI_ATTR VkBool32 VKAPI_CALL DebugMessengerCallback(VkDebugUtilsMessageSeverityFlagBitsEXT severity,
static VKAPI_ATTR VkBool32 VKAPI_CALL DebugMessengerCallback(VkDebugUtilsMessageSeverityFlagBitsEXT severity,
VkDebugUtilsMessageTypeFlagsEXT messageType,
const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData,
void* pUserData)
@ -1272,7 +1288,7 @@ VKAPI_ATTR VkBool32 VKAPI_CALL DebugMessengerCallback(VkDebugUtilsMessageSeverit
return VK_FALSE;
}
bool Context::EnableDebugUtils()
bool Vulkan::Context::EnableDebugUtils()
{
// Already enabled?
if (m_debug_messenger_callback != VK_NULL_HANDLE)
@ -1305,7 +1321,7 @@ bool Context::EnableDebugUtils()
return true;
}
void Context::DisableDebugUtils()
void Vulkan::Context::DisableDebugUtils()
{
if (m_debug_messenger_callback != VK_NULL_HANDLE)
{
@ -1314,7 +1330,7 @@ void Context::DisableDebugUtils()
}
}
VkRenderPass Context::GetRenderPass(VkFormat color_format, VkFormat depth_format, VkSampleCountFlagBits samples,
VkRenderPass Vulkan::Context::GetRenderPass(VkFormat color_format, VkFormat depth_format, VkSampleCountFlagBits samples,
VkAttachmentLoadOp load_op)
{
auto key = std::tie(color_format, depth_format, samples, load_op);
@ -1392,5 +1408,3 @@ VkRenderPass Context::GetRenderPass(VkFormat color_format, VkFormat depth_format
m_render_pass_cache.emplace(key, pass);
return pass;
}
} // namespace Vulkan
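
The new ProcessDeviceExtensions() above is the core of this change: when VK_KHR_driver_properties was selected as a device extension, a VkPhysicalDeviceDriverProperties struct is linked into the pNext chain of a VkPhysicalDeviceProperties2 query and filled by vkGetPhysicalDeviceProperties2(). The standalone sketch below is illustrative only and not part of the commit; the DumpDriverProperties helper and its logging are assumptions, but it shows the same query pattern and what the returned data contains.

#include <cstdio>
#include <vulkan/vulkan.h>

void DumpDriverProperties(VkPhysicalDevice physical_device)
{
  // Chain the driver-properties struct into the properties2 query, the same
  // thing ProcessDeviceExtensions() does via Util::AddPointerToChain().
  VkPhysicalDeviceDriverProperties driver_props = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES};
  VkPhysicalDeviceProperties2 props2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2};
  props2.pNext = &driver_props;

  // Requires Vulkan 1.1 (or VK_KHR_get_physical_device_properties2); driver_props
  // is only written if the device exposes VK_KHR_driver_properties.
  vkGetPhysicalDeviceProperties2(physical_device, &props2);

  std::printf("driver %u: %s (%s)\n", static_cast<unsigned>(driver_props.driverID),
              driver_props.driverName, driver_props.driverInfo);
}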

View File

@ -88,6 +88,10 @@ public:
ALWAYS_INLINE const VkPhysicalDeviceProperties& GetDeviceProperties() const { return m_device_properties; }
ALWAYS_INLINE const VkPhysicalDeviceFeatures& GetDeviceFeatures() const { return m_device_features; }
ALWAYS_INLINE const VkPhysicalDeviceLimits& GetDeviceLimits() const { return m_device_properties.limits; }
ALWAYS_INLINE const VkPhysicalDeviceDriverProperties& GetDeviceDriverProperties() const
{
return m_device_driver_properties;
}
// Support bits
ALWAYS_INLINE bool SupportsGeometryShaders() const { return m_device_features.geometryShader == VK_TRUE; }
@ -199,6 +203,7 @@ private:
bool CreateDevice(VkSurfaceKHR surface, bool enable_validation_layer, const char** required_device_extensions,
u32 num_required_device_extensions, const char** required_device_layers,
u32 num_required_device_layers, const VkPhysicalDeviceFeatures* required_features);
void ProcessDeviceExtensions();
bool CreateAllocator();
void DestroyAllocator();
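
The header exposes the queried struct through GetDeviceDriverProperties(). A hypothetical caller (not part of this commit) might key driver-specific behaviour off the stable driver ID instead of parsing the device name string; note that if the extension was absent, ProcessDeviceExtensions() never fills the struct in.

// Hypothetical caller; assumes g_vulkan_context has been created.
const VkPhysicalDeviceDriverProperties& dp = g_vulkan_context->GetDeviceDriverProperties();
if (dp.driverID == VK_DRIVER_ID_MESA_RADV || dp.driverID == VK_DRIVER_ID_AMD_PROPRIETARY)
{
  // Example: toggle a driver-specific workaround here.
}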

View File

@ -7,9 +7,7 @@
#include <cmath>
namespace Vulkan {
namespace Util {
bool IsDepthFormat(VkFormat format)
bool Vulkan::Util::IsDepthFormat(VkFormat format)
{
switch (format)
{
@ -24,7 +22,7 @@ bool IsDepthFormat(VkFormat format)
}
}
bool IsCompressedFormat(VkFormat format)
bool Vulkan::Util::IsCompressedFormat(VkFormat format)
{
switch (format)
{
@ -39,7 +37,7 @@ bool IsCompressedFormat(VkFormat format)
}
}
VkFormat GetLinearFormat(VkFormat format)
VkFormat Vulkan::Util::GetLinearFormat(VkFormat format)
{
switch (format)
{
@ -60,7 +58,7 @@ VkFormat GetLinearFormat(VkFormat format)
}
}
u32 GetTexelSize(VkFormat format)
u32 Vulkan::Util::GetTexelSize(VkFormat format)
{
// Only contains pixel formats we use.
switch (format)
@ -97,7 +95,7 @@ u32 GetTexelSize(VkFormat format)
}
}
u32 GetBlockSize(VkFormat format)
u32 Vulkan::Util::GetBlockSize(VkFormat format)
{
switch (format)
{
@ -112,7 +110,7 @@ u32 GetBlockSize(VkFormat format)
}
}
VkRect2D ClampRect2D(const VkRect2D& rect, u32 width, u32 height)
VkRect2D Vulkan::Util::ClampRect2D(const VkRect2D& rect, u32 width, u32 height)
{
VkRect2D out;
out.offset.x = std::clamp(rect.offset.x, 0, static_cast<int>(width - 1));
@ -122,7 +120,7 @@ VkRect2D ClampRect2D(const VkRect2D& rect, u32 width, u32 height)
return out;
}
VkBlendFactor GetAlphaBlendFactor(VkBlendFactor factor)
VkBlendFactor Vulkan::Util::GetAlphaBlendFactor(VkBlendFactor factor)
{
switch (factor)
{
@ -139,8 +137,8 @@ VkBlendFactor GetAlphaBlendFactor(VkBlendFactor factor)
}
}
void SetViewport(VkCommandBuffer command_buffer, int x, int y, int width, int height, float min_depth /*= 0.0f*/,
float max_depth /*= 1.0f*/)
void Vulkan::Util::SetViewport(VkCommandBuffer command_buffer, int x, int y, int width, int height,
float min_depth /*= 0.0f*/, float max_depth /*= 1.0f*/)
{
const VkViewport vp{static_cast<float>(x),
static_cast<float>(y),
@ -151,13 +149,13 @@ void SetViewport(VkCommandBuffer command_buffer, int x, int y, int width, int he
vkCmdSetViewport(command_buffer, 0, 1, &vp);
}
void SetScissor(VkCommandBuffer command_buffer, int x, int y, int width, int height)
void Vulkan::Util::SetScissor(VkCommandBuffer command_buffer, int x, int y, int width, int height)
{
const VkRect2D scissor{{x, y}, {static_cast<u32>(width), static_cast<u32>(height)}};
vkCmdSetScissor(command_buffer, 0, 1, &scissor);
}
void SetViewportAndScissor(VkCommandBuffer command_buffer, int x, int y, int width, int height,
void Vulkan::Util::SetViewportAndScissor(VkCommandBuffer command_buffer, int x, int y, int width, int height,
float min_depth /* = 0.0f */, float max_depth /* = 1.0f */)
{
const VkViewport vp{static_cast<float>(x),
@ -171,7 +169,7 @@ void SetViewportAndScissor(VkCommandBuffer command_buffer, int x, int y, int wid
vkCmdSetScissor(command_buffer, 0, 1, &scissor);
}
void SafeDestroyFramebuffer(VkFramebuffer& fb)
void Vulkan::Util::SafeDestroyFramebuffer(VkFramebuffer& fb)
{
if (fb != VK_NULL_HANDLE)
{
@ -180,7 +178,7 @@ void SafeDestroyFramebuffer(VkFramebuffer& fb)
}
}
void SafeDestroyShaderModule(VkShaderModule& sm)
void Vulkan::Util::SafeDestroyShaderModule(VkShaderModule& sm)
{
if (sm != VK_NULL_HANDLE)
{
@ -189,7 +187,7 @@ void SafeDestroyShaderModule(VkShaderModule& sm)
}
}
void SafeDestroyPipeline(VkPipeline& p)
void Vulkan::Util::SafeDestroyPipeline(VkPipeline& p)
{
if (p != VK_NULL_HANDLE)
{
@ -198,7 +196,7 @@ void SafeDestroyPipeline(VkPipeline& p)
}
}
void SafeDestroyPipelineLayout(VkPipelineLayout& pl)
void Vulkan::Util::SafeDestroyPipelineLayout(VkPipelineLayout& pl)
{
if (pl != VK_NULL_HANDLE)
{
@ -207,7 +205,7 @@ void SafeDestroyPipelineLayout(VkPipelineLayout& pl)
}
}
void SafeDestroyDescriptorSetLayout(VkDescriptorSetLayout& dsl)
void Vulkan::Util::SafeDestroyDescriptorSetLayout(VkDescriptorSetLayout& dsl)
{
if (dsl != VK_NULL_HANDLE)
{
@ -216,7 +214,7 @@ void SafeDestroyDescriptorSetLayout(VkDescriptorSetLayout& dsl)
}
}
void SafeDestroyBufferView(VkBufferView& bv)
void Vulkan::Util::SafeDestroyBufferView(VkBufferView& bv)
{
if (bv != VK_NULL_HANDLE)
{
@ -225,7 +223,7 @@ void SafeDestroyBufferView(VkBufferView& bv)
}
}
void SafeDestroyImageView(VkImageView& iv)
void Vulkan::Util::SafeDestroyImageView(VkImageView& iv)
{
if (iv != VK_NULL_HANDLE)
{
@ -234,7 +232,7 @@ void SafeDestroyImageView(VkImageView& iv)
}
}
void SafeDestroySampler(VkSampler& samp)
void Vulkan::Util::SafeDestroySampler(VkSampler& samp)
{
if (samp != VK_NULL_HANDLE)
{
@ -243,7 +241,7 @@ void SafeDestroySampler(VkSampler& samp)
}
}
void SafeDestroySemaphore(VkSemaphore& sem)
void Vulkan::Util::SafeDestroySemaphore(VkSemaphore& sem)
{
if (sem != VK_NULL_HANDLE)
{
@ -252,7 +250,7 @@ void SafeDestroySemaphore(VkSemaphore& sem)
}
}
void SafeFreeGlobalDescriptorSet(VkDescriptorSet& ds)
void Vulkan::Util::SafeFreeGlobalDescriptorSet(VkDescriptorSet& ds)
{
if (ds != VK_NULL_HANDLE)
{
@ -261,7 +259,7 @@ void SafeFreeGlobalDescriptorSet(VkDescriptorSet& ds)
}
}
void BufferMemoryBarrier(VkCommandBuffer command_buffer, VkBuffer buffer, VkAccessFlags src_access_mask,
void Vulkan::Util::BufferMemoryBarrier(VkCommandBuffer command_buffer, VkBuffer buffer, VkAccessFlags src_access_mask,
VkAccessFlags dst_access_mask, VkDeviceSize offset, VkDeviceSize size,
VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask)
{
@ -280,7 +278,21 @@ void BufferMemoryBarrier(VkCommandBuffer command_buffer, VkBuffer buffer, VkAcce
vkCmdPipelineBarrier(command_buffer, src_stage_mask, dst_stage_mask, 0, 0, nullptr, 1, &buffer_info, 0, nullptr);
}
VkShaderModule CreateShaderModule(const u32* spv, size_t spv_word_count)
void Vulkan::Util::AddPointerToChain(void* head, const void* ptr)
{
VkBaseInStructure* last_st = static_cast<VkBaseInStructure*>(head);
while (last_st->pNext)
{
if (last_st->pNext == ptr)
return;
last_st = const_cast<VkBaseInStructure*>(last_st->pNext);
}
last_st->pNext = static_cast<const VkBaseInStructure*>(ptr);
}
VkShaderModule Vulkan::Util::CreateShaderModule(const u32* spv, size_t spv_word_count)
{
VkShaderModuleCreateInfo info = {};
info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
@ -298,7 +310,7 @@ VkShaderModule CreateShaderModule(const u32* spv, size_t spv_word_count)
return module;
}
VkShaderModule CompileAndCreateVertexShader(std::string_view source_code)
VkShaderModule Vulkan::Util::CompileAndCreateVertexShader(std::string_view source_code)
{
std::optional<ShaderCompiler::SPIRVCodeVector> code = ShaderCompiler::CompileVertexShader(source_code);
if (!code)
@ -307,7 +319,7 @@ VkShaderModule CompileAndCreateVertexShader(std::string_view source_code)
return CreateShaderModule(code->data(), code->size());
}
VkShaderModule CompileAndCreateGeometryShader(std::string_view source_code)
VkShaderModule Vulkan::Util::CompileAndCreateGeometryShader(std::string_view source_code)
{
std::optional<ShaderCompiler::SPIRVCodeVector> code = ShaderCompiler::CompileGeometryShader(source_code);
if (!code)
@ -316,7 +328,7 @@ VkShaderModule CompileAndCreateGeometryShader(std::string_view source_code)
return CreateShaderModule(code->data(), code->size());
}
VkShaderModule CompileAndCreateFragmentShader(std::string_view source_code)
VkShaderModule Vulkan::Util::CompileAndCreateFragmentShader(std::string_view source_code)
{
std::optional<ShaderCompiler::SPIRVCodeVector> code = ShaderCompiler::CompileFragmentShader(source_code);
if (!code)
@ -325,7 +337,7 @@ VkShaderModule CompileAndCreateFragmentShader(std::string_view source_code)
return CreateShaderModule(code->data(), code->size());
}
VkShaderModule CompileAndCreateComputeShader(std::string_view source_code)
VkShaderModule Vulkan::Util::CompileAndCreateComputeShader(std::string_view source_code)
{
std::optional<ShaderCompiler::SPIRVCodeVector> code = ShaderCompiler::CompileComputeShader(source_code);
if (!code)
@ -334,7 +346,7 @@ VkShaderModule CompileAndCreateComputeShader(std::string_view source_code)
return CreateShaderModule(code->data(), code->size());
}
const char* VkResultToString(VkResult res)
const char* Vulkan::Util::VkResultToString(VkResult res)
{
switch (res)
{
@ -414,7 +426,8 @@ const char* VkResultToString(VkResult res)
return "UNKNOWN_VK_RESULT";
}
}
const char* VkImageLayoutToString(VkImageLayout layout)
const char* Vulkan::Util::VkImageLayoutToString(VkImageLayout layout)
{
switch (layout)
{
@ -480,7 +493,7 @@ const char* VkImageLayoutToString(VkImageLayout layout)
}
}
void LogVulkanResult(int level, const char* func_name, VkResult res, const char* msg, ...)
void Vulkan::Util::LogVulkanResult(int level, const char* func_name, VkResult res, const char* msg, ...)
{
std::va_list ap;
va_start(ap, msg);
@ -493,8 +506,8 @@ void LogVulkanResult(int level, const char* func_name, VkResult res, const char*
#ifdef ENABLE_VULKAN_DEBUG_OBJECTS
u8 DebugScope<VkCommandBuffer>::depth = 0;
u8 DebugScope<VkQueue>::depth = 0;
u8 Vulkan::Util::DebugScope<VkCommandBuffer>::depth = 0;
u8 Vulkan::Util::DebugScope<VkQueue>::depth = 0;
static std::array<float, 4> Palette(float phase, const std::array<float, 3>& a, const std::array<float, 3>& b,
const std::array<float, 3>& c, const std::array<float, 3>& d)
@ -507,7 +520,8 @@ static std::array<float, 4> Palette(float phase, const std::array<float, 3>& a,
return result;
}
DebugScope<VkCommandBuffer>::DebugScope(VkCommandBuffer context, const char* format, ...) : command_buffer(context)
Vulkan::Util::DebugScope<VkCommandBuffer>::DebugScope(VkCommandBuffer context, const char* format, ...)
: command_buffer(context)
{
if (command_buffer)
{
@ -526,7 +540,7 @@ DebugScope<VkCommandBuffer>::DebugScope(VkCommandBuffer context, const char* for
}
}
DebugScope<VkCommandBuffer>::~DebugScope()
Vulkan::Util::DebugScope<VkCommandBuffer>::~DebugScope()
{
if (command_buffer)
{
@ -535,7 +549,7 @@ DebugScope<VkCommandBuffer>::~DebugScope()
}
}
DebugScope<VkQueue>::DebugScope(VkQueue context, const char* format, ...) : queue(context)
Vulkan::Util::DebugScope<VkQueue>::DebugScope(VkQueue context, const char* format, ...) : queue(context)
{
if (queue)
{
@ -554,7 +568,7 @@ DebugScope<VkQueue>::DebugScope(VkQueue context, const char* format, ...) : queu
}
}
DebugScope<VkQueue>::~DebugScope()
Vulkan::Util::DebugScope<VkQueue>::~DebugScope()
{
if (queue)
{
@ -564,7 +578,3 @@ DebugScope<VkQueue>::~DebugScope()
}
#endif
} // namespace Util
} // namespace Vulkan
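
Util::AddPointerToChain() is the small generic helper behind the property query: it walks the pNext chain starting at head, returns early if ptr is already linked, and otherwise appends ptr at the tail. The usage sketch below is illustrative only; the chained structs and the physical_device handle are assumptions, not code from the commit, and it assumes this codebase's util header is included.

void QueryChainedProperties(VkPhysicalDevice physical_device)
{
  VkPhysicalDeviceProperties2 props2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2};
  VkPhysicalDeviceDriverProperties driver_props = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES};
  VkPhysicalDeviceSubgroupProperties subgroup_props = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES};

  Vulkan::Util::AddPointerToChain(&props2, &driver_props);
  Vulkan::Util::AddPointerToChain(&props2, &subgroup_props);
  Vulkan::Util::AddPointerToChain(&props2, &driver_props); // already in the chain -> no-op

  // Chain is now props2 -> driver_props -> subgroup_props; one call fills all of them.
  vkGetPhysicalDeviceProperties2(physical_device, &props2);
}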

View File

@ -8,8 +8,7 @@
#include <array>
#include <cstdarg>
#include <string_view>
namespace Vulkan {
namespace Util {
namespace Vulkan::Util {
inline constexpr u32 MakeRGBA8Color(float r, float g, float b, float a)
{
@ -56,6 +55,9 @@ void BufferMemoryBarrier(VkCommandBuffer command_buffer, VkBuffer buffer, VkAcce
VkAccessFlags dst_access_mask, VkDeviceSize offset, VkDeviceSize size,
VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask);
// Adds a structure to a pNext chain, skipping it if the pointer is already linked.
void AddPointerToChain(void* head, const void* ptr);
// Create a shader module from the specified SPIR-V.
VkShaderModule CreateShaderModule(const u32* spv, size_t spv_word_count);
@ -296,6 +298,4 @@ private:
};
#endif
} // namespace Util
} // namespace Vulkan
} // namespace Vulkan::Util
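
The remaining churn in this commit is mechanical: the headers switch to C++17 nested-namespace definitions, and the .cpp files define members with fully qualified names instead of wrapping everything in namespace blocks. A generic sketch of the equivalence (hypothetical function, not from the repository):

// Before: nested blocks in both the header and the .cpp.
namespace Vulkan {
namespace Util {
int Example();
} // namespace Util
} // namespace Vulkan

// After: C++17 nested-namespace definition in the header...
namespace Vulkan::Util {
int Example();
} // namespace Vulkan::Util

// ...and fully qualified definitions in the .cpp, with no enclosing block.
int Vulkan::Util::Example()
{
  return 0;
}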