Vulkan: Remove unused code path for texture copies
In 2020 we switched to drawcalls for texture copies, replacing the copy-via-buffer path. It hasn't been used since, so let's remove it.
Exzap committed Mar 11, 2024
1 parent 1f9b891 commit a50e253
Showing 3 changed files with 1 addition and 146 deletions.
14 changes: 0 additions & 14 deletions src/Cafe/HW/Latte/Renderer/Vulkan/VulkanRenderer.cpp
@@ -577,20 +577,6 @@ VulkanRenderer::VulkanRenderer()
for (sint32 i = 0; i < OCCLUSION_QUERY_POOL_SIZE; i++)
m_occlusionQueries.list_availableQueryIndices.emplace_back(i);

// enable surface copies via buffer if we have plenty of memory available (otherwise use drawcalls)
size_t availableSurfaceCopyBufferMem = memoryManager->GetTotalMemoryForBufferType(VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
//m_featureControl.mode.useBufferSurfaceCopies = availableSurfaceCopyBufferMem >= 2000ull * 1024ull * 1024ull; // enable if at least 2000MB VRAM
m_featureControl.mode.useBufferSurfaceCopies = false;

if (m_featureControl.mode.useBufferSurfaceCopies)
{
//cemuLog_log(LogType::Force, "Enable surface copies via buffer");
}
else
{
//cemuLog_log(LogType::Force, "Disable surface copies via buffer (Requires 2GB. Has only {}MB available)", availableSurfaceCopyBufferMem / 1024ull / 1024ull);
}

// start compilation threads
RendererShaderVk::Init();
}
6 changes: 0 additions & 6 deletions src/Cafe/HW/Latte/Renderer/Vulkan/VulkanRenderer.h
@@ -311,7 +311,6 @@ class VulkanRenderer : public Renderer
void surfaceCopy_notifyTextureRelease(LatteTextureVk* hostTexture);

private:
void surfaceCopy_viaBuffer(LatteTextureVk* srcTextureVk, sint32 texSrcMip, sint32 texSrcLevel, LatteTextureVk* dstTextureVk, sint32 texDstMip, sint32 texDstLevel, sint32 effectiveCopyWidth, sint32 effectiveCopyHeight);
void surfaceCopy_viaDrawcall(LatteTextureVk* srcTextureVk, sint32 texSrcMip, sint32 texSrcSlice, LatteTextureVk* dstTextureVk, sint32 texDstMip, sint32 texDstSlice, sint32 effectiveCopyWidth, sint32 effectiveCopyHeight);

void surfaceCopy_cleanup();
@@ -328,10 +327,6 @@ class VulkanRenderer : public Renderer

std::unordered_map<uint64, struct CopySurfacePipelineInfo*> m_copySurfacePipelineCache;

VkBuffer m_surfaceCopyBuffer = VK_NULL_HANDLE;
VkDeviceMemory m_surfaceCopyBufferMemory = VK_NULL_HANDLE;
size_t m_surfaceCopyBufferSize{};

public:
// renderer interface
void bufferCache_init(const sint32 bufferSize) override;
@@ -470,7 +465,6 @@ class VulkanRenderer : public Renderer

struct
{
bool useBufferSurfaceCopies; // if GPU has enough VRAM to spare, allow to use a buffer to copy surfaces (instead of drawcalls)
bool useTFEmulationViaSSBO = true; // emulate transform feedback via shader writes to a storage buffer
}mode;

127 changes: 1 addition & 126 deletions src/Cafe/HW/Latte/Renderer/Vulkan/VulkanSurfaceCopy.cpp
@@ -763,110 +763,6 @@ bool vkIsBitCompatibleColorDepthFormat(VkFormat format1, VkFormat format2)
return false;
}

void VulkanRenderer::surfaceCopy_viaBuffer(LatteTextureVk* srcTextureVk, sint32 texSrcMip, sint32 texSrcSlice, LatteTextureVk* dstTextureVk, sint32 texDstMip, sint32 texDstSlice, sint32 effectiveCopyWidth, sint32 effectiveCopyHeight)
{
cemu_assert_debug(false); // not used currently

cemu_assert_debug(m_featureControl.mode.useBufferSurfaceCopies);

if (srcTextureVk->dim == Latte::E_DIM::DIM_3D)
{
cemu_assert_debug(false);
return;
}
if (dstTextureVk->dim == Latte::E_DIM::DIM_3D)
{
cemu_assert_debug(false);
return;
}

draw_endRenderPass();

// calculate buffer size required for copy
VkDeviceSize copySize = std::max(srcTextureVk->getAllocation()->getAllocationSize(), dstTextureVk->getAllocation()->getAllocationSize());

// make sure allocated buffer is large enough
if (m_surfaceCopyBuffer == VK_NULL_HANDLE || copySize > m_surfaceCopyBufferSize)
{
if (m_surfaceCopyBuffer != VK_NULL_HANDLE)
{
// free existing buffer
destroyDeviceMemory(m_surfaceCopyBufferMemory);
m_surfaceCopyBufferMemory = VK_NULL_HANDLE;
destroyBuffer(m_surfaceCopyBuffer);
m_surfaceCopyBuffer = VK_NULL_HANDLE;
}
VkDeviceSize allocSize = (copySize + 1024ull * 1024ull - 1ull) & ~(1024ull * 1024ull - 1ull); // align to whole MB
m_surfaceCopyBufferSize = allocSize;
memoryManager->CreateBuffer(m_surfaceCopyBufferSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, m_surfaceCopyBuffer, m_surfaceCopyBufferMemory);
if (m_surfaceCopyBuffer == VK_NULL_HANDLE)
{
cemuLog_log(LogType::Force, "Vulkan: Failed to allocate surface copy buffer with size {}", allocSize);
return;
}
}
if (m_surfaceCopyBuffer == VK_NULL_HANDLE)
return;

auto vkObjSrcTexture = srcTextureVk->GetImageObj();
auto vkObjDstTexture = dstTextureVk->GetImageObj();
vkObjSrcTexture->flagForCurrentCommandBuffer();
vkObjDstTexture->flagForCurrentCommandBuffer();

VkBufferImageCopy region{};
region.bufferOffset = 0;
region.bufferRowLength = effectiveCopyWidth;
region.bufferImageHeight = effectiveCopyHeight;

if (srcTextureVk->isDepth)
region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
else
region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
region.imageSubresource.baseArrayLayer = texSrcSlice;
region.imageSubresource.layerCount = 1;
region.imageSubresource.mipLevel = texSrcMip;

region.imageOffset = { 0,0,0 };
region.imageExtent = { (uint32)effectiveCopyWidth, (uint32)effectiveCopyHeight, 1 };

// make sure all write operations to the src image have finished
barrier_image<SYNC_OP::IMAGE_WRITE | SYNC_OP::ANY_TRANSFER, SYNC_OP::ANY_TRANSFER>(srcTextureVk, region.imageSubresource, VK_IMAGE_LAYOUT_GENERAL);

vkCmdCopyImageToBuffer(getCurrentCommandBuffer(), vkObjSrcTexture->m_image, VK_IMAGE_LAYOUT_GENERAL, m_surfaceCopyBuffer, 1, &region);

// copy buffer to image

VkBufferImageCopy imageRegion[2]{};
sint32 imageRegionCount = 0;

// color or depth only copy
imageRegion[0].bufferOffset = 0;
imageRegion[0].imageExtent.width = effectiveCopyWidth;
imageRegion[0].imageExtent.height = effectiveCopyHeight;
imageRegion[0].imageExtent.depth = 1;

imageRegion[0].imageSubresource.mipLevel = texDstMip;
if (dstTextureVk->isDepth)
imageRegion[0].imageSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
else
imageRegion[0].imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
imageRegion[0].imageSubresource.baseArrayLayer = texDstSlice;
imageRegion[0].imageSubresource.layerCount = 1;

imageRegionCount = 1;

// make sure the transfer to the buffer finished
barrier_bufferRange<SYNC_OP::ANY_TRANSFER, SYNC_OP::ANY_TRANSFER>(m_surfaceCopyBuffer, 0, VK_WHOLE_SIZE);

// make sure all read and write operations to the dst image have finished
barrier_image<SYNC_OP::IMAGE_READ | SYNC_OP::IMAGE_WRITE | SYNC_OP::ANY_TRANSFER, SYNC_OP::ANY_TRANSFER>(dstTextureVk, imageRegion[0].imageSubresource, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

vkCmdCopyBufferToImage(m_state.currentCommandBuffer, m_surfaceCopyBuffer, vkObjDstTexture->m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, imageRegionCount, imageRegion);

// make sure transfer has finished before any other operation
barrier_image<SYNC_OP::ANY_TRANSFER, SYNC_OP::ANY_TRANSFER | SYNC_OP::IMAGE_READ | SYNC_OP::IMAGE_WRITE>(dstTextureVk, imageRegion[0].imageSubresource, VK_IMAGE_LAYOUT_GENERAL);
}

void VulkanRenderer::surfaceCopy_copySurfaceWithFormatConversion(LatteTexture* sourceTexture, sint32 srcMip, sint32 srcSlice, LatteTexture* destinationTexture, sint32 dstMip, sint32 dstSlice, sint32 width, sint32 height)
{
// scale copy size to effective size
@@ -899,28 +795,7 @@ void VulkanRenderer::surfaceCopy_copySurfaceWithFormatConversion(LatteTexture* s
return;
}

VkFormat srcFormatVk = srcTextureVk->GetFormat();
VkFormat dstFormatVk = dstTextureVk->GetFormat();

if ((srcTextureVk->isDepth && !dstTextureVk->isDepth) ||
!srcTextureVk->isDepth && dstTextureVk->isDepth)
{
// depth to color or
// color to depth
if (m_featureControl.mode.useBufferSurfaceCopies && vkIsBitCompatibleColorDepthFormat(srcFormatVk, dstFormatVk))
surfaceCopy_viaBuffer(srcTextureVk, texSrcMip, texSrcSlice, dstTextureVk, texDstMip, texDstSlice, effectiveCopyWidth, effectiveCopyHeight);
else
surfaceCopy_viaDrawcall(srcTextureVk, texSrcMip, texSrcSlice, dstTextureVk, texDstMip, texDstSlice, effectiveCopyWidth, effectiveCopyHeight);
}
else
{
// depth to depth or
// color to color
if (m_featureControl.mode.useBufferSurfaceCopies && srcFormatVk == dstFormatVk)
surfaceCopy_viaBuffer(srcTextureVk, texSrcMip, texSrcSlice, dstTextureVk, texDstMip, texDstSlice, effectiveCopyWidth, effectiveCopyHeight);
else
surfaceCopy_viaDrawcall(srcTextureVk, texSrcMip, texSrcSlice, dstTextureVk, texDstMip, texDstSlice, effectiveCopyWidth, effectiveCopyHeight);
}
surfaceCopy_viaDrawcall(srcTextureVk, texSrcMip, texSrcSlice, dstTextureVk, texDstMip, texDstSlice, effectiveCopyWidth, effectiveCopyHeight);
}

// called whenever a texture is destroyed