GPU: Don't use host write bit for texture uploads.
parent b4ae5b9a01
commit febe46973f
@@ -1151,10 +1151,7 @@ bool TextureCache::UploadTexture(VkCommandBuffer command_buffer,
   barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
   barrier.pNext = nullptr;
   barrier.srcAccessMask = 0;
-  // TODO(gibbed): is this correct? 1D+cube had VK_ACCESS_HOST_WRITE_BIT, but
-  // not 2D.
-  barrier.dstAccessMask =
-      VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_HOST_WRITE_BIT;
+  barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
   barrier.oldLayout = dest->image_layout;
   barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
   barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
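
Context for the change: the barrier above transitions the destination image to VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL before a copy from a staging buffer, so the only write the barrier has to make visible on the image side is the transfer write itself. Host writes target the mapped staging buffer, not the image, and their visibility is handled separately (implicitly by vkQueueSubmit for host-coherent memory, or via vkFlushMappedMemoryRanges for non-coherent memory). The sketch below is a minimal, generic illustration of that pattern, not Xenia's actual upload path; the function name and parameters are hypothetical.

#include <vulkan/vulkan.h>

// Transition an image so a subsequent vkCmdCopyBufferToImage can write into
// it. dstAccessMask only needs VK_ACCESS_TRANSFER_WRITE_BIT, because the next
// access to the image is the transfer write; VK_ACCESS_HOST_WRITE_BIT belongs
// to host-visible memory (the staging buffer), not to this image barrier.
void TransitionForTransferDst(VkCommandBuffer command_buffer, VkImage image,
                              VkImageLayout old_layout,
                              const VkImageSubresourceRange& range) {
  VkImageMemoryBarrier barrier = {};
  barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  barrier.pNext = nullptr;
  barrier.srcAccessMask = 0;  // No prior GPU writes to wait on in this sketch.
  barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  barrier.oldLayout = old_layout;
  barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
  barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.image = image;
  barrier.subresourceRange = range;
  vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                       VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0,
                       nullptr, 1, &barrier);
}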