VideoBackends:Vulkan: Use VMA for staging buffers
parent 3ffbf94b2a
commit 0532f4a05a
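Note on the change below: the commit drops the hand-rolled vkCreateBuffer / vkAllocateMemory / vkMapMemory path for staging buffers and lets VMA (Vulkan Memory Allocator) pick the memory type and keep the allocation persistently mapped. A minimal sketch of that pattern, separate from Dolphin's code and assuming a VmaAllocator has already been created (the CreateMappedStagingBuffer helper below is illustrative, not part of the diff):

```cpp
#include <vk_mem_alloc.h>

// Illustrative helper, not Dolphin code: create a host-visible staging buffer that
// stays persistently mapped for its whole lifetime.
bool CreateMappedStagingBuffer(VmaAllocator allocator, VkDeviceSize size, VkBufferUsageFlags usage,
                               VkBuffer* out_buffer, VmaAllocation* out_alloc, void** out_map_ptr)
{
  VkBufferCreateInfo buffer_info = {};
  buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
  buffer_info.size = size;
  buffer_info.usage = usage;
  buffer_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

  VmaAllocationCreateInfo alloc_info = {};
  // MAPPED_BIT keeps the allocation mapped; the HOST_ACCESS flag tells VMA the CPU
  // will write the buffer when the usage is VMA_MEMORY_USAGE_AUTO.
  alloc_info.flags =
      VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;
  alloc_info.usage = VMA_MEMORY_USAGE_AUTO;

  VmaAllocationInfo result_info = {};
  VkResult res = vmaCreateBuffer(allocator, &buffer_info, &alloc_info, out_buffer, out_alloc,
                                 &result_info);
  if (res != VK_SUCCESS)
    return false;

  // With MAPPED_BIT set, pMappedData stays valid for the allocation's lifetime;
  // no vkMapMemory/vkUnmapMemory calls are needed.
  *out_map_ptr = result_info.pMappedData;
  return true;
}
```

With VMA_ALLOCATION_CREATE_MAPPED_BIT the pointer in VmaAllocationInfo::pMappedData remains valid until vmaDestroyBuffer, which is what lets the Map()/Unmap() methods later in the diff become no-ops.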
@@ -546,6 +546,14 @@ void CommandBufferManager::DeferDeviceMemoryDestruction(VkDeviceMemory object)
       [object]() { vkFreeMemory(g_vulkan_context->GetDevice(), object, nullptr); });
 }
 
+void CommandBufferManager::DeferBufferDestruction(VkBuffer buffer, VmaAllocation alloc)
+{
+  CmdBufferResources& cmd_buffer_resources = GetCurrentCmdBufferResources();
+  cmd_buffer_resources.cleanup_resources.push_back([buffer, alloc]() {
+    vmaDestroyBuffer(g_vulkan_context->GetMemoryAllocator(), buffer, alloc);
+  });
+}
+
 void CommandBufferManager::DeferFramebufferDestruction(VkFramebuffer object)
 {
   CmdBufferResources& cmd_buffer_resources = GetCurrentCmdBufferResources();
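The new DeferBufferDestruction overload above releases the buffer and its VMA allocation together with a single vmaDestroyBuffer call, deferred until the command buffer that last used the buffer has completed. A simplified sketch of that deferral idea, using a hypothetical cleanup queue instead of Dolphin's CmdBufferResources:

```cpp
#include <functional>
#include <vector>
#include <vk_mem_alloc.h>

// Hypothetical per-command-buffer cleanup queue (stand-in for CmdBufferResources).
struct PendingCleanups
{
  std::vector<std::function<void()>> cleanup_resources;
};

// Queue the destruction; the callback runs only after the command buffer's fence
// has been waited on, so the GPU can no longer be reading the buffer.
void DeferBufferDestruction(PendingCleanups& cleanups, VmaAllocator allocator, VkBuffer buffer,
                            VmaAllocation alloc)
{
  cleanups.cleanup_resources.push_back(
      [allocator, buffer, alloc]() { vmaDestroyBuffer(allocator, buffer, alloc); });
}

// Called once the associated fence has signalled.
void RunDeferredCleanups(PendingCleanups& cleanups)
{
  for (auto& fn : cleanups.cleanup_resources)
    fn();
  cleanups.cleanup_resources.clear();
}
```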
@@ -89,6 +89,7 @@ public:
   void DeferBufferDestruction(VkBuffer object);
   void DeferBufferViewDestruction(VkBufferView object);
   void DeferDeviceMemoryDestruction(VkDeviceMemory object);
+  void DeferBufferDestruction(VkBuffer buffer, VmaAllocation alloc);
   void DeferFramebufferDestruction(VkFramebuffer object);
   void DeferImageDestruction(VkImage object);
   void DeferImageViewDestruction(VkImageView object);
@@ -10,12 +10,13 @@
 
 #include "VideoBackends/Vulkan/CommandBufferManager.h"
 #include "VideoBackends/Vulkan/VulkanContext.h"
+#include "VideoCommon/DriverDetails.h"
 
 namespace Vulkan
 {
-StagingBuffer::StagingBuffer(STAGING_BUFFER_TYPE type, VkBuffer buffer, VkDeviceMemory memory,
-                             VkDeviceSize size, bool coherent)
-    : m_type(type), m_buffer(buffer), m_memory(memory), m_size(size), m_coherent(coherent)
+StagingBuffer::StagingBuffer(STAGING_BUFFER_TYPE type, VkBuffer buffer, VmaAllocation alloc,
+                             VkDeviceSize size, char* map_ptr)
+    : m_type(type), m_buffer(buffer), m_alloc(alloc), m_size(size), m_map_pointer(map_ptr)
 {
 }
 
@@ -25,8 +26,7 @@ StagingBuffer::~StagingBuffer()
   if (m_map_pointer)
     Unmap();
 
-  g_command_buffer_mgr->DeferDeviceMemoryDestruction(m_memory);
-  g_command_buffer_mgr->DeferBufferDestruction(m_buffer);
+  g_command_buffer_mgr->DeferBufferDestruction(m_buffer, m_alloc);
 }
 
 void StagingBuffer::BufferMemoryBarrier(VkCommandBuffer command_buffer, VkBuffer buffer,
@@ -51,49 +51,21 @@ void StagingBuffer::BufferMemoryBarrier(VkCommandBuffer command_buffer, VkBuffer
                          &buffer_info, 0, nullptr);
 }
 
-bool StagingBuffer::Map(VkDeviceSize offset, VkDeviceSize size)
+bool StagingBuffer::Map()
 {
-  m_map_offset = offset;
-  if (size == VK_WHOLE_SIZE)
-    m_map_size = m_size - offset;
-  else
-    m_map_size = size;
-
-  ASSERT(!m_map_pointer);
-  ASSERT(m_map_offset + m_map_size <= m_size);
-
-  void* map_pointer;
-  VkResult res = vkMapMemory(g_vulkan_context->GetDevice(), m_memory, m_map_offset, m_map_size, 0,
-                             &map_pointer);
-  if (res != VK_SUCCESS)
-  {
-    LOG_VULKAN_ERROR(res, "vkMapMemory failed: ");
-    return false;
-  }
-
-  m_map_pointer = reinterpret_cast<char*>(map_pointer);
+  // The staging buffer is permanently mapped and VMA handles the mapping for us
   return true;
 }
 
 void StagingBuffer::Unmap()
 {
-  ASSERT(m_map_pointer);
-
-  vkUnmapMemory(g_vulkan_context->GetDevice(), m_memory);
-  m_map_pointer = nullptr;
-  m_map_offset = 0;
-  m_map_size = 0;
+  // The staging buffer is permanently mapped and VMA handles the unmapping for us
 }
 
 void StagingBuffer::FlushCPUCache(VkDeviceSize offset, VkDeviceSize size)
 {
-  ASSERT(offset >= m_map_offset);
-  if (m_coherent)
-    return;
-
-  VkMappedMemoryRange range = {VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, nullptr, m_memory,
-                               offset - m_map_offset, size};
-  vkFlushMappedMemoryRanges(g_vulkan_context->GetDevice(), 1, &range);
+  // vmaFlushAllocation checks whether the allocation uses a coherent memory type internally
+  vmaFlushAllocation(g_vulkan_context->GetMemoryAllocator(), m_alloc, offset, size);
 }
 
 void StagingBuffer::InvalidateGPUCache(VkCommandBuffer command_buffer,
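With the allocation created as persistently mapped, Map() and Unmap() above reduce to no-ops and the manual VkMappedMemoryRange bookkeeping disappears; CPU cache maintenance now goes through vmaFlushAllocation / vmaInvalidateAllocation, which skip the work for HOST_COHERENT memory types. A rough sketch of the resulting upload and readback paths, assuming an existing allocator and illustrative helper names:

```cpp
#include <cstring>
#include <vk_mem_alloc.h>

// Illustrative only: copy CPU data into a persistently mapped staging allocation
// and flush it so the GPU sees the writes on non-coherent memory types.
void WriteToStaging(VmaAllocator allocator, VmaAllocation alloc, char* map_ptr,
                    VkDeviceSize offset, const void* data, size_t size)
{
  std::memcpy(map_ptr + offset, data, size);

  // vmaFlushAllocation is a no-op when the allocation lives in HOST_COHERENT
  // memory, so it is safe to call unconditionally.
  vmaFlushAllocation(allocator, alloc, offset, size);
}

// Readback direction: invalidate before reading data the GPU wrote.
void ReadFromStaging(VmaAllocator allocator, VmaAllocation alloc, const char* map_ptr,
                     VkDeviceSize offset, void* data, size_t size)
{
  vmaInvalidateAllocation(allocator, alloc, offset, size);
  std::memcpy(data, map_ptr + offset, size);
}
```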
@@ -101,7 +73,9 @@ void StagingBuffer::InvalidateGPUCache(VkCommandBuffer command_buffer,
                                        VkPipelineStageFlagBits dest_pipeline_stage,
                                        VkDeviceSize offset, VkDeviceSize size)
 {
-  if (m_coherent)
+  VkMemoryPropertyFlags flags = 0;
+  vmaGetAllocationMemoryProperties(g_vulkan_context->GetMemoryAllocator(), m_alloc, &flags);
+  if (flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) [[likely]]
     return;
 
   ASSERT((offset + size) <= m_size || (offset < m_size && size == VK_WHOLE_SIZE));
@@ -114,7 +88,9 @@ void StagingBuffer::PrepareForGPUWrite(VkCommandBuffer command_buffer,
                                        VkPipelineStageFlagBits dst_pipeline_stage,
                                        VkDeviceSize offset, VkDeviceSize size)
 {
-  if (m_coherent)
+  VkMemoryPropertyFlags flags = 0;
+  vmaGetAllocationMemoryProperties(g_vulkan_context->GetMemoryAllocator(), m_alloc, &flags);
+  if (flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) [[likely]]
     return;
 
   ASSERT((offset + size) <= m_size || (offset < m_size && size == VK_WHOLE_SIZE));
@@ -126,7 +102,9 @@ void StagingBuffer::FlushGPUCache(VkCommandBuffer command_buffer, VkAccessFlagBi
                                   VkPipelineStageFlagBits src_pipeline_stage, VkDeviceSize offset,
                                   VkDeviceSize size)
 {
-  if (m_coherent)
+  VkMemoryPropertyFlags flags = 0;
+  vmaGetAllocationMemoryProperties(g_vulkan_context->GetMemoryAllocator(), m_alloc, &flags);
+  if (flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) [[likely]]
     return;
 
   ASSERT((offset + size) <= m_size || (offset < m_size && size == VK_WHOLE_SIZE));
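The three hunks above drop the cached m_coherent flag and instead ask VMA for the allocation's actual memory properties before emitting a GPU-side barrier. A condensed sketch of that early-out, with a hardcoded host-write to transfer-read barrier standing in for Dolphin's parameterized BufferMemoryBarrier helper:

```cpp
#include <vk_mem_alloc.h>

// Illustrative early-out: only record a buffer memory barrier when the staging
// allocation ended up in a non-coherent memory type.
void PrepareUploadForGpuRead(VmaAllocator allocator, VmaAllocation alloc,
                             VkCommandBuffer command_buffer, VkBuffer buffer)
{
  VkMemoryPropertyFlags flags = 0;
  vmaGetAllocationMemoryProperties(allocator, alloc, &flags);
  if (flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
    return;  // coherent memory type: no cache-maintenance barrier needed here

  VkBufferMemoryBarrier barrier = {};
  barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
  barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
  barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.buffer = buffer;
  barrier.offset = 0;
  barrier.size = VK_WHOLE_SIZE;
  vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_HOST_BIT,
                       VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &barrier, 0, nullptr);
}
```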
@@ -136,39 +114,32 @@ void StagingBuffer::FlushGPUCache(VkCommandBuffer command_buffer, VkAccessFlagBi
 
 void StagingBuffer::InvalidateCPUCache(VkDeviceSize offset, VkDeviceSize size)
 {
-  ASSERT(offset >= m_map_offset);
-  if (m_coherent)
-    return;
-
-  VkMappedMemoryRange range = {VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, nullptr, m_memory,
-                               offset - m_map_offset, size};
-  vkInvalidateMappedMemoryRanges(g_vulkan_context->GetDevice(), 1, &range);
+  // vmaInvalidateAllocation checks whether the allocation uses a coherent memory type internally
+  vmaInvalidateAllocation(g_vulkan_context->GetMemoryAllocator(), m_alloc, offset, size);
 }
 
 void StagingBuffer::Read(VkDeviceSize offset, void* data, size_t size, bool invalidate_caches)
 {
   ASSERT((offset + size) <= m_size);
-  ASSERT(offset >= m_map_offset && size <= (m_map_size + (offset - m_map_offset)));
   if (invalidate_caches)
     InvalidateCPUCache(offset, size);
 
-  memcpy(data, m_map_pointer + (offset - m_map_offset), size);
+  memcpy(data, m_map_pointer + offset, size);
 }
 
 void StagingBuffer::Write(VkDeviceSize offset, const void* data, size_t size,
                           bool invalidate_caches)
 {
   ASSERT((offset + size) <= m_size);
-  ASSERT(offset >= m_map_offset && size <= (m_map_size + (offset - m_map_offset)));
 
-  memcpy(m_map_pointer + (offset - m_map_offset), data, size);
+  memcpy(m_map_pointer + offset, data, size);
   if (invalidate_caches)
     FlushCPUCache(offset, size);
 }
 
 bool StagingBuffer::AllocateBuffer(STAGING_BUFFER_TYPE type, VkDeviceSize size,
                                    VkBufferUsageFlags usage, VkBuffer* out_buffer,
-                                   VkDeviceMemory* out_memory, bool* out_coherent)
+                                   VmaAllocation* out_alloc, char** out_map_ptr)
 {
   VkBufferCreateInfo buffer_create_info = {
       VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,  // VkStructureType        sType
@@ -180,46 +151,60 @@ bool StagingBuffer::AllocateBuffer(STAGING_BUFFER_TYPE type, VkDeviceSize size,
       0,                                     // uint32_t               queueFamilyIndexCount
       nullptr                                // const uint32_t*        pQueueFamilyIndices
   };
-  VkResult res =
-      vkCreateBuffer(g_vulkan_context->GetDevice(), &buffer_create_info, nullptr, out_buffer);
-  if (res != VK_SUCCESS)
+
+  VmaAllocationCreateInfo alloc_create_info = {};
+  alloc_create_info.flags =
+      VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT;
+  alloc_create_info.usage = VMA_MEMORY_USAGE_AUTO;
+  alloc_create_info.pool = VK_NULL_HANDLE;
+  alloc_create_info.pUserData = nullptr;
+  alloc_create_info.priority = 0.0;
+  alloc_create_info.preferredFlags = 0;
+  alloc_create_info.requiredFlags = 0;
+
+  if (DriverDetails::HasBug(DriverDetails::BUG_SLOW_CACHED_READBACK_MEMORY)) [[unlikely]]
   {
-    LOG_VULKAN_ERROR(res, "vkCreateBuffer failed: ");
-    return false;
+    // If there is no memory type that is both CACHED and COHERENT,
+    // pick the one that is COHERENT
+    alloc_create_info.usage = VMA_MEMORY_USAGE_UNKNOWN;
+    alloc_create_info.requiredFlags =
+        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+    alloc_create_info.preferredFlags = VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
   }
 
-  VkMemoryRequirements requirements;
-  vkGetBufferMemoryRequirements(g_vulkan_context->GetDevice(), *out_buffer, &requirements);
-
-  u32 type_index;
-  if (type == STAGING_BUFFER_TYPE_UPLOAD)
-    type_index = g_vulkan_context->GetUploadMemoryType(requirements.memoryTypeBits, out_coherent);
-  else
-    type_index = g_vulkan_context->GetReadbackMemoryType(requirements.memoryTypeBits, out_coherent);
+  {
+    if (type == STAGING_BUFFER_TYPE_UPLOAD)
+      alloc_create_info.flags |= VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;
+    else
+      alloc_create_info.flags |= VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT;
+  }
+
+  VmaAllocationInfo alloc_info;
+  VkResult res = vmaCreateBuffer(g_vulkan_context->GetMemoryAllocator(), &buffer_create_info,
+                                 &alloc_create_info, out_buffer, out_alloc, &alloc_info);
+
+  if (type == STAGING_BUFFER_TYPE_UPLOAD)
+  {
+    VkMemoryPropertyFlags flags = 0;
+    vmaGetMemoryTypeProperties(g_vulkan_context->GetMemoryAllocator(), alloc_info.memoryType,
+                               &flags);
+    if (!(flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT))
+    {
+      WARN_LOG_FMT(VIDEO, "Vulkan: Failed to find a coherent memory type for uploads, this will "
+                          "affect performance.");
+    }
+  }
+
+  *out_map_ptr = reinterpret_cast<char*>(alloc_info.pMappedData);
 
-  VkMemoryAllocateInfo memory_allocate_info = {
-      VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,  // VkStructureType    sType
-      nullptr,                                 // const void*        pNext
-      requirements.size,                       // VkDeviceSize       allocationSize
-      type_index                               // uint32_t           memoryTypeIndex
-  };
-  res = vkAllocateMemory(g_vulkan_context->GetDevice(), &memory_allocate_info, nullptr, out_memory);
-  if (res != VK_SUCCESS)
-  {
-    LOG_VULKAN_ERROR(res, "vkAllocateMemory failed: ");
-    vkDestroyBuffer(g_vulkan_context->GetDevice(), *out_buffer, nullptr);
-    return false;
-  }
-
-  res = vkBindBufferMemory(g_vulkan_context->GetDevice(), *out_buffer, *out_memory, 0);
   if (res != VK_SUCCESS)
   {
-    LOG_VULKAN_ERROR(res, "vkBindBufferMemory failed: ");
-    vkDestroyBuffer(g_vulkan_context->GetDevice(), *out_buffer, nullptr);
-    vkFreeMemory(g_vulkan_context->GetDevice(), *out_memory, nullptr);
+    LOG_VULKAN_ERROR(res, "vmaCreateBuffer failed: ");
     return false;
   }
 
+  VkMemoryPropertyFlags flags = 0;
+  vmaGetAllocationMemoryProperties(g_vulkan_context->GetMemoryAllocator(), *out_alloc, &flags);
   return true;
 }
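The rewritten AllocateBuffer above hands memory-type selection to VMA: VMA_MEMORY_USAGE_AUTO plus a HOST_ACCESS flag covers the normal case, while the BUG_SLOW_CACHED_READBACK_MEMORY path pins requiredFlags to HOST_VISIBLE | HOST_COHERENT and only prefers HOST_CACHED. A stripped-down sketch of that selection logic; the MakeStagingAllocInfo helper and its boolean parameters are illustrative and not Dolphin's API:

```cpp
#include <vk_mem_alloc.h>

// Illustrative only: build a VmaAllocationCreateInfo for a persistently mapped
// staging buffer. 'avoid_cached_readback' stands in for a driver-bug workaround
// along the lines of BUG_SLOW_CACHED_READBACK_MEMORY.
VmaAllocationCreateInfo MakeStagingAllocInfo(bool readback, bool avoid_cached_readback)
{
  VmaAllocationCreateInfo info = {};
  info.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT;
  info.usage = VMA_MEMORY_USAGE_AUTO;

  if (avoid_cached_readback)
  {
    // Mirrors the diff's workaround: require a HOST_VISIBLE | COHERENT type and only
    // prefer CACHED, so a plain COHERENT type is used when no CACHED + COHERENT type exists.
    info.usage = VMA_MEMORY_USAGE_UNKNOWN;
    info.requiredFlags =
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    info.preferredFlags = VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
  }

  // Tell VMA how the CPU will access the memory; with VMA_MEMORY_USAGE_AUTO this is
  // what steers it toward an upload- or readback-friendly memory type.
  info.flags |= readback ? VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT :
                           VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;
  return info;
}
```

Because preferredFlags is only a hint, VMA can fall back to an uncached coherent type when no CACHED + COHERENT type exists, which matches the comment carried in the diff.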
@@ -227,12 +212,12 @@ std::unique_ptr<StagingBuffer> StagingBuffer::Create(STAGING_BUFFER_TYPE type, V
                                                      VkBufferUsageFlags usage)
 {
   VkBuffer buffer;
-  VkDeviceMemory memory;
-  bool coherent;
-  if (!AllocateBuffer(type, size, usage, &buffer, &memory, &coherent))
+  VmaAllocation alloc;
+  char* map_ptr;
+  if (!AllocateBuffer(type, size, usage, &buffer, &alloc, &map_ptr))
     return nullptr;
 
-  return std::make_unique<StagingBuffer>(type, buffer, memory, size, coherent);
+  return std::make_unique<StagingBuffer>(type, buffer, alloc, size, map_ptr);
 }
 
 }  // namespace Vulkan
@@ -13,8 +13,8 @@ namespace Vulkan
 class StagingBuffer
 {
 public:
-  StagingBuffer(STAGING_BUFFER_TYPE type, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize size,
-                bool coherent);
+  StagingBuffer(STAGING_BUFFER_TYPE type, VkBuffer buffer, VmaAllocation allocation,
+                VkDeviceSize size, char* map_ptr);
   virtual ~StagingBuffer();
 
   STAGING_BUFFER_TYPE GetType() const { return m_type; }
@@ -23,9 +23,7 @@ public:
   bool IsMapped() const { return m_map_pointer != nullptr; }
   const char* GetMapPointer() const { return m_map_pointer; }
   char* GetMapPointer() { return m_map_pointer; }
-  VkDeviceSize GetMapOffset() const { return m_map_offset; }
-  VkDeviceSize GetMapSize() const { return m_map_size; }
-  bool Map(VkDeviceSize offset = 0, VkDeviceSize size = VK_WHOLE_SIZE);
+  bool Map();
   void Unmap();
 
   // Upload part 1: Prepare from device read from the CPU side
@@ -60,7 +58,7 @@ public:
 
   // Allocates the resources needed to create a staging buffer.
   static bool AllocateBuffer(STAGING_BUFFER_TYPE type, VkDeviceSize size, VkBufferUsageFlags usage,
-                             VkBuffer* out_buffer, VkDeviceMemory* out_memory, bool* out_coherent);
+                             VkBuffer* out_buffer, VmaAllocation* out_alloc, char** out_map_ptr);
 
   // Wrapper for creating an barrier on a buffer
   static void BufferMemoryBarrier(VkCommandBuffer command_buffer, VkBuffer buffer,
@@ -72,12 +70,9 @@ public:
 protected:
   STAGING_BUFFER_TYPE m_type;
   VkBuffer m_buffer;
-  VkDeviceMemory m_memory;
+  VmaAllocation m_alloc;
   VkDeviceSize m_size;
-  bool m_coherent;
 
   char* m_map_pointer = nullptr;
-  VkDeviceSize m_map_offset = 0;
-  VkDeviceSize m_map_size = 0;
 };
 }  // namespace Vulkan
@@ -741,10 +741,10 @@ std::unique_ptr<VKStagingTexture> VKStagingTexture::Create(StagingTextureType ty
   }
 
   VkBuffer buffer;
-  VkDeviceMemory memory;
-  bool coherent;
-  if (!StagingBuffer::AllocateBuffer(buffer_type, buffer_size, buffer_usage, &buffer, &memory,
-                                     &coherent))
+  VmaAllocation alloc;
+  char* map_ptr;
+  if (!StagingBuffer::AllocateBuffer(buffer_type, buffer_size, buffer_usage, &buffer, &alloc,
+                                     &map_ptr))
   {
     return nullptr;
   }
@@ -759,7 +759,7 @@ std::unique_ptr<VKStagingTexture> VKStagingTexture::Create(StagingTextureType ty
   }
 
   std::unique_ptr<StagingBuffer> staging_buffer =
-      std::make_unique<StagingBuffer>(buffer_type, buffer, memory, buffer_size, coherent);
+      std::make_unique<StagingBuffer>(buffer_type, buffer, alloc, buffer_size, map_ptr);
   std::unique_ptr<VKStagingTexture> staging_tex =
       std::make_unique<VKStagingTexture>(PrivateTag{}, type, config, std::move(staging_buffer),
                                          linear_image, linear_image_device_memory);