git.s-ol.nu ~forks/DiligentCore / b764489
Fixed Vulkan command buffer leak (assiduous, 6 months ago)
9 changed file(s) with 145 addition(s) and 128 deletion(s).
  31   31      #include <atomic>
  32   32      #include "STDAllocator.hpp"
  33   33      #include "VulkanUtilities/VulkanObjectWrappers.hpp"
       34  +   #include "VulkanUtilities/VulkanLogicalDevice.hpp"
  34   35
  35   36      namespace Diligent
  36   37      {
  37   38
  38        -  class RenderDeviceVkImpl;
  39        -
  40   39      class CommandPoolManager
  41   40      {
  42   41      public:
  43        -      CommandPoolManager(RenderDeviceVkImpl&      DeviceVkImpl,
  44        -                         std::string              Name,
  45        -                         uint32_t                 queueFamilyIndex,
  46        -                         VkCommandPoolCreateFlags flags) noexcept;
       42  +      CommandPoolManager(const VulkanUtilities::VulkanLogicalDevice& LogicalDevice,
       43  +                         std::string                                 Name,
       44  +                         uint32_t                                    queueFamilyIndex,
       45  +                         VkCommandPoolCreateFlags                    flags) noexcept;
  47   46
  48   47          // clang-format off
  49   48          CommandPoolManager             (const CommandPoolManager&) = delete;
  57   56          // Allocates Vulkan command pool.
  58   57          VulkanUtilities::CommandPoolWrapper AllocateCommandPool(const char* DebugName = nullptr);
  59   58
  60        -      void SafeReleaseCommandPool(VulkanUtilities::CommandPoolWrapper&& CmdPool, Uint32 CmdQueueIndex, Uint64 FenceValue);
  61        -
  62   59          void DestroyPools();
  63   60
  64   61      #ifdef DILIGENT_DEVELOPMENT
  68   65          }
  69   66      #endif
  70   67
       68  +      // Returns command pool to the list of available pools. The GPU must have finished using the pool.
       69  +      void RecycleCommandPool(VulkanUtilities::CommandPoolWrapper&& CmdPool);
       70  +
  71   71      private:
  72        -      // Returns command pool to the list of available pools. The GPU must have finished using the pool.
  73        -      void FreeCommandPool(VulkanUtilities::CommandPoolWrapper&& CmdPool);
       72  +      const VulkanUtilities::VulkanLogicalDevice& m_LogicalDevice;
  74   73
  75        -      RenderDeviceVkImpl& m_DeviceVkImpl;
  76   74          const std::string              m_Name;
  77   75          const uint32_t                 m_QueueFamilyIndex;
  78   76          const VkCommandPoolCreateFlags m_CmdPoolFlags;
  81   79          std::deque<VulkanUtilities::CommandPoolWrapper, STDAllocatorRawMem<VulkanUtilities::CommandPoolWrapper>> m_CmdPools;
  82   80
  83   81      #ifdef DILIGENT_DEVELOPMENT
  84        -      std::atomic_int32_t m_AllocatedPoolCounter;
       82  +      std::atomic_int32_t m_AllocatedPoolCounter{0};
  85   83      #endif
  86   84      };
  87   85
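The net effect of this header change is that CommandPoolManager no longer knows about the render device or its release queue: it hands out pools and takes them back synchronously once the caller guarantees the GPU is done with them. A minimal standalone sketch of that allocate/recycle contract (generic C++, not the DiligentCore API; RecyclingPool and Handle are made-up names for illustration):

    #include <deque>
    #include <mutex>
    #include <utility>

    struct Handle { int id = -1; }; // stands in for CommandPoolWrapper

    class RecyclingPool
    {
    public:
        Handle Allocate()
        {
            std::lock_guard<std::mutex> Lock{m_Mutex};
            if (!m_Free.empty())
            {
                Handle h = std::move(m_Free.front()); // reuse, like ResetCommandPool + reuse
                m_Free.pop_front();
                return h;
            }
            return Handle{m_NextId++}; // create new, like CreateCommandPool
        }

        // Caller promises the GPU has finished with h (e.g. a fence was waited on).
        void Recycle(Handle&& h)
        {
            std::lock_guard<std::mutex> Lock{m_Mutex};
            m_Free.push_back(std::move(h));
        }

    private:
        std::mutex         m_Mutex;
        std::deque<Handle> m_Free;
        int                m_NextId = 0;
    };

    int main()
    {
        RecyclingPool Mgr;
        Handle h = Mgr.Allocate();  // fresh handle
        Mgr.Recycle(std::move(h));  // GPU known to be done
        Handle h2 = Mgr.Allocate(); // reuses the recycled handle
        (void)h2;
    }

The deferred-release logic that used to live behind SafeReleaseCommandPool moves to the caller, as the RenderDeviceVkImpl.cpp hunks below show.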
 425  425          m_State.NumCommands = m_State.NumCommands != 0 ? m_State.NumCommands : 1;
 426  426          if (m_CommandBuffer.GetVkCmdBuffer() == VK_NULL_HANDLE)
 427  427          {
 428        -          auto vkCmdBuff = m_CmdPool.GetCommandBuffer();
      428  +          auto vkCmdBuff = m_CmdPool->GetCommandBuffer();
 429  429              m_CommandBuffer.SetVkCmdBuffer(vkCmdBuff);
 430  430          }
 431  431      }
 599  599      };
 600  600      std::unordered_map<MappedTextureKey, MappedTexture, MappedTextureKey::Hasher> m_MappedTextures;
 601  601
 602        -  VulkanUtilities::VulkanCommandBufferPool m_CmdPool;
 603        -  VulkanUploadHeap                         m_UploadHeap;
 604        -  VulkanDynamicHeap                        m_DynamicHeap;
 605        -  DynamicDescriptorSetAllocator            m_DynamicDescrSetAllocator;
      602  +  std::unique_ptr<VulkanUtilities::VulkanCommandBufferPool> m_CmdPool;
      603  +
      604  +  VulkanUploadHeap              m_UploadHeap;
      605  +  VulkanDynamicHeap             m_DynamicHeap;
      606  +  DynamicDescriptorSetAllocator m_DynamicDescrSetAllocator;
 606  607
 607  608      std::shared_ptr<GenerateMipsVkHelper> m_GenerateMipsHelper;
 608  609      RefCntAutoPtr<IShaderResourceBinding> m_GenerateMipsSRB;
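The switch from a by-value m_CmdPool to a unique_ptr is what allows the pool to outlive the device context: a plain member dies with the context, but a unique_ptr can be moved into the release queue and destroyed later. A standalone sketch of that ownership transfer (illustrative only, not the library's SafeReleaseDeviceObject machinery):

    #include <cstdio>
    #include <memory>
    #include <vector>

    struct Pool { ~Pool() { std::puts("pool destroyed later"); } };

    int main()
    {
        std::vector<std::unique_ptr<Pool>> releaseQueue;

        auto cmdPool = std::make_unique<Pool>();       // like the new m_CmdPool member
        releaseQueue.emplace_back(std::move(cmdPool)); // like SafeReleaseDeviceObject(std::move(m_CmdPool), ...)

        std::puts("context destructor returns");
        releaseQueue.clear(); // later, once the fence completes
    }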
  55   55
  56   56          VkCommandBuffer GetCommandBuffer(const char* DebugName = "");
  57   57          // The GPU must have finished with the command buffer being returned to the pool
  58        -      void FreeCommandBuffer(VkCommandBuffer&& CmdBuffer);
  59        -
  60        -      CommandPoolWrapper&& Release();
       58  +      void RecycleCommandBuffer(VkCommandBuffer&& CmdBuffer);
  61   59
  62   60      #ifdef DILIGENT_DEVELOPMENT
  63   61          int32_t DvpGetBufferCounter() const
  69   67      private:
  70   68          // Shared pointer to the logical device must be declared before the command pool
  71   69          std::shared_ptr<const VulkanLogicalDevice> m_LogicalDevice;
  72        -      CommandPoolWrapper m_CmdPool;
       70  +
       71  +      CommandPoolWrapper m_CmdPool;
  73   72
  74   73          std::mutex                  m_Mutex;
  75   74          std::deque<VkCommandBuffer> m_CmdBuffers;
  76   75      #ifdef DILIGENT_DEVELOPMENT
  77        -      std::atomic_int32_t m_BuffCounter;
       76  +      std::atomic_int32_t m_BuffCounter{0};
  78   77      #endif
  79   78      };
  80   79
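The comment about declaring the shared pointer before the command pool is load-bearing: C++ destroys members in reverse declaration order, so the pool (declared second) is destroyed while the logical device it needs is still alive. A standalone illustration of that rule (not DiligentCore code):

    #include <cstdio>
    #include <memory>

    struct Device { ~Device() { std::puts("device destroyed"); } };
    struct Pool   { ~Pool()   { std::puts("pool destroyed (device must still exist)"); } };

    struct BufferPool
    {
        std::shared_ptr<Device> m_Device = std::make_shared<Device>(); // declared first
        Pool                    m_Pool;                                // declared second
    };

    int main()
    {
        BufferPool p;
    } // prints "pool destroyed ..." first, then "device destroyed"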
 170  170          void ReleaseVulkanObject(AccelStructWrapper&& AccelStruct) const;
 171  171
 172  172          void FreeDescriptorSet(VkDescriptorPool Pool, VkDescriptorSet Set) const;
 173        -
      173  +      void FreeCommandBuffer(VkCommandPool Pool, VkCommandBuffer CmdBuffer) const;
      174  +
 174  175          VkMemoryRequirements GetBufferMemoryRequirements(VkBuffer vkBuffer) const;
 175  176          VkMemoryRequirements GetImageMemoryRequirements (VkImage  vkImage ) const;
 176  177          VkDeviceAddress      GetAccelerationStructureDeviceAddress(VkAccelerationStructureKHR AS) const;
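The new FreeCommandBuffer entry point (implemented in the VulkanLogicalDevice.cpp hunk at the bottom of this diff) wraps vkFreeCommandBuffers for a single buffer. For reference, the raw Vulkan call can also free a batch in one go; a fragment only, assuming vkDevice, vkPool and the two buffers were created elsewhere, and noting that Vulkan requires the pool to be externally synchronized while freeing:

    // Not a complete program: vkDevice, vkPool, Buff0, Buff1 assumed valid.
    VkCommandBuffer Buffs[] = {Buff0, Buff1};
    vkFreeCommandBuffers(vkDevice, vkPool, 2, Buffs);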
  31   31      namespace Diligent
  32   32      {
  33   33
  34        -  CommandPoolManager::CommandPoolManager(RenderDeviceVkImpl&      DeviceVkImpl,
  35        -                                         std::string              Name,
  36        -                                         uint32_t                 queueFamilyIndex,
  37        -                                         VkCommandPoolCreateFlags flags) noexcept :
       34  +  CommandPoolManager::CommandPoolManager(const VulkanUtilities::VulkanLogicalDevice& LogicalDevice,
       35  +                                         std::string                                 Name,
       36  +                                         uint32_t                                    queueFamilyIndex,
       37  +                                         VkCommandPoolCreateFlags                    flags) noexcept :
  38   38      // clang-format off
  39        -      m_DeviceVkImpl    {DeviceVkImpl    },
       39  +      m_LogicalDevice   {LogicalDevice   },
  40   40          m_Name            {std::move(Name) },
  41   41          m_QueueFamilyIndex{queueFamilyIndex},
  42   42          m_CmdPoolFlags    {flags           },
  43   43          m_CmdPools        (STD_ALLOCATOR_RAW_MEM(VulkanUtilities::CommandPoolWrapper, GetRawAllocator(), "Allocator for deque<VulkanUtilities::CommandPoolWrapper>"))
  44   44      // clang-format on
  45   45      {
  46        -  #ifdef DILIGENT_DEVELOPMENT
  47        -      m_AllocatedPoolCounter = 0;
  48        -  #endif
  49   46      }
  50   47
  51   48      VulkanUtilities::CommandPoolWrapper CommandPoolManager::AllocateCommandPool(const char* DebugName)
  52   49      {
  53   50          std::lock_guard<std::mutex> LockGuard{m_Mutex};
  54        -
  55        -      const auto& LogicalDevice = m_DeviceVkImpl.GetLogicalDevice();
  56   51
  57   52          VulkanUtilities::CommandPoolWrapper CmdPool;
  58   53          if (!m_CmdPools.empty())
  60   55              CmdPool = std::move(m_CmdPools.front());
  61   56              m_CmdPools.pop_front();
  62   57
  63        -          LogicalDevice.ResetCommandPool(CmdPool);
       58  +          m_LogicalDevice.ResetCommandPool(CmdPool);
  64   59          }
  65   60
  66   61          if (CmdPool == VK_NULL_HANDLE)
  72   67              CmdPoolCI.queueFamilyIndex = m_QueueFamilyIndex;
  73   68              CmdPoolCI.flags            = m_CmdPoolFlags;
  74   69
  75        -          CmdPool = LogicalDevice.CreateCommandPool(CmdPoolCI);
       70  +          CmdPool = m_LogicalDevice.CreateCommandPool(CmdPoolCI);
  76   71              DEV_CHECK_ERR(CmdPool != VK_NULL_HANDLE, "Failed to create Vulkan command pool");
  77   72          }
  78   73
  82   77          return std::move(CmdPool);
  83   78      }
  84   79
  85        -  void CommandPoolManager::SafeReleaseCommandPool(VulkanUtilities::CommandPoolWrapper&& CmdPool, Uint32 CmdQueueIndex, Uint64 FenceValue)
       80  +  void CommandPoolManager::RecycleCommandPool(VulkanUtilities::CommandPoolWrapper&& CmdPool)
  86   81      {
  87        -      class CommandPoolDeleter
  88        -      {
  89        -      public:
  90        -          CommandPoolDeleter(CommandPoolManager& _CmdPoolMgr, VulkanUtilities::CommandPoolWrapper&& _Pool) :
  91        -              // clang-format off
  92        -              CmdPoolMgr{&_CmdPoolMgr    },
  93        -              Pool      {std::move(_Pool)}
  94        -          // clang-format on
  95        -          {
  96        -              VERIFY_EXPR(Pool != VK_NULL_HANDLE);
  97        -          }
  98        -
  99        -          // clang-format off
 100        -          CommandPoolDeleter             (const CommandPoolDeleter&) = delete;
 101        -          CommandPoolDeleter& operator = (const CommandPoolDeleter&) = delete;
 102        -          CommandPoolDeleter& operator = (      CommandPoolDeleter&&) = delete;
 103        -
 104        -          CommandPoolDeleter(CommandPoolDeleter&& rhs) :
 105        -              CmdPoolMgr{rhs.CmdPoolMgr    },
 106        -              Pool      {std::move(rhs.Pool)}
 107        -          {
 108        -              rhs.CmdPoolMgr = nullptr;
 109        -          }
 110        -          // clang-format on
 111        -
 112        -          ~CommandPoolDeleter()
 113        -          {
 114        -              if (CmdPoolMgr != nullptr)
 115        -              {
 116        -                  CmdPoolMgr->FreeCommandPool(std::move(Pool));
 117        -              }
 118        -          }
 119        -
 120        -      private:
 121        -          CommandPoolManager*                 CmdPoolMgr;
 122        -          VulkanUtilities::CommandPoolWrapper Pool;
 123        -      };
 124        -
 125        -      // Discard the command pool directly to the release queue since we know exactly which queue it was
 126        -      // submitted to as well as the associated FenceValue
 127        -      m_DeviceVkImpl.GetReleaseQueue(CmdQueueIndex).DiscardResource(CommandPoolDeleter{*this, std::move(CmdPool)}, FenceValue);
 128        -  }
 129        -
 130        -  void CommandPoolManager::FreeCommandPool(VulkanUtilities::CommandPoolWrapper&& CmdPool)
 131        -  {
 132        -      std::lock_guard<std::mutex> LockGuard(m_Mutex);
       82  +      std::lock_guard<std::mutex> LockGuard{m_Mutex};
 133   83      #ifdef DILIGENT_DEVELOPMENT
 134   84          --m_AllocatedPoolCounter;
 135   85      #endif
 138   88
 139   89      void CommandPoolManager::DestroyPools()
 140   90      {
 141        -      std::lock_guard<std::mutex> LockGuard(m_Mutex);
 142        -      DEV_CHECK_ERR(m_AllocatedPoolCounter == 0, m_AllocatedPoolCounter, " pool(s) have not been freed. This will cause a crash if the references to these pools are still in release queues when CommandPoolManager::FreeCommandPool() is called for a destroyed CommandPoolManager object.");
       91  +      std::lock_guard<std::mutex> LockGuard{m_Mutex};
       92  +      DEV_CHECK_ERR(m_AllocatedPoolCounter == 0, m_AllocatedPoolCounter, " pool(s) have not been recycled. This will cause a crash if the references to these pools are still in release queues when CommandPoolManager::RecycleCommandPool() is called for a destroyed CommandPoolManager object.");
 143   93          LOG_INFO_MESSAGE(m_Name, " allocated command pool count: ", m_CmdPools.size());
 144   94          m_CmdPools.clear();
 145   95      }
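A small but deliberate detail in this file and in VulkanCommandBufferPool: both development-build counters gain in-class initializers ({0}) and the assignments are dropped from the constructor bodies. This matters because, before C++20, a default-constructed std::atomic holds an indeterminate value. A standalone check (generic C++, illustrative names):

    #include <atomic>
    #include <cstdio>

    struct Dev
    {
        // In-class init: well-defined, starts at 0. Without the {0}, a
        // default-constructed std::atomic is uninitialized pre-C++20,
        // which is exactly what the diff fixes here.
        std::atomic_int32_t Counter{0};
    };

    int main()
    {
        Dev d;
        std::printf("%d\n", d.Counter.load()); // prints 0
    }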
  79   79          // potentially running in another thread
  80   80          m_CmdPool
  81   81          {
  82        -          pDeviceVkImpl->GetLogicalDevice().GetSharedPtr(),
  83        -          pDeviceVkImpl->GetCommandQueue(CommandQueueId).GetQueueFamilyIndex(),
  84        -          VK_COMMAND_POOL_CREATE_TRANSIENT_BIT | VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT
       82  +          new VulkanUtilities::VulkanCommandBufferPool
       83  +          {
       84  +              pDeviceVkImpl->GetLogicalDevice().GetSharedPtr(),
       85  +              pDeviceVkImpl->GetCommandQueue(CommandQueueId).GetQueueFamilyIndex(),
       86  +              VK_COMMAND_POOL_CREATE_TRANSIENT_BIT | VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT
       87  +          }
  85   88          },
  86   89          // Upload heap must always be thread-safe as Finish() may be called from another thread
  87   90          m_UploadHeap
 162  165          DEV_CHECK_ERR(m_DynamicDescrSetAllocator.GetAllocatedPoolCount() == 0, "All allocated dynamic descriptor set pools must have been released at this point");
 163  166      // clang-format on
 164  167
 165        -      auto VkCmdPool = m_CmdPool.Release();
 166        -      m_pDevice->SafeReleaseDeviceObject(std::move(VkCmdPool), ~Uint64{0});
 167        -
 168        -      // clang-format off
 169        -      m_pDevice->SafeReleaseDeviceObject(std::move(m_GenerateMipsHelper), ~Uint64{0});
 170        -      m_pDevice->SafeReleaseDeviceObject(std::move(m_GenerateMipsSRB),    ~Uint64{0});
 171        -      m_pDevice->SafeReleaseDeviceObject(std::move(m_DummyVB),            ~Uint64{0});
 172        -      // clang-format on
 173        -
 174        -      // The main reason we need to idle the GPU is to make sure that all command buffers are returned to the
 175        -      // pool. Upload heap, dynamic heap and dynamic descriptor manager return their resources to global managers and
 176        -      // do not really need to wait for the GPU to idle.
 177        -      m_pDevice->IdleGPU();
 178        -      DEV_CHECK_ERR(m_CmdPool.DvpGetBufferCounter() == 0, "All command buffers must have been returned to the pool");
      168  +      // NB: If there are any command buffers in the release queue, they will always be returned to the pool
      169  +      //     before the pool itself is released, because the pool will always end up later in the queue,
      170  +      //     so we do not need to idle the GPU.
      171  +      //     Also note that command buffers are disposed directly into the release queue, while
      172  +      //     the command pool goes into the stale objects queue and is moved into the release queue
      173  +      //     when the next command buffer is submitted.
      174  +      m_pDevice->SafeReleaseDeviceObject(std::move(m_CmdPool), ~Uint64{0});
      175  +
      176  +      // NB: Upload heap, dynamic heap and dynamic descriptor manager return their resources to
      177  +      //     global managers and do not need to wait for the GPU to idle.
 179  178      }
 180  179
 181  180      void DeviceContextVkImpl::DisposeVkCmdBuffer(Uint32 CmdQueue, VkCommandBuffer vkCmdBuff, Uint64 FenceValue)
 182  181      {
 183  182          VERIFY_EXPR(vkCmdBuff != VK_NULL_HANDLE);
 184        -      class CmdBufferDeleter
      183  +      class CmdBufferRecycler
 185  184          {
 186  185          public:
 187  186              // clang-format off
 188        -          CmdBufferDeleter(VkCommandBuffer                           _vkCmdBuff,
      187  +          CmdBufferRecycler(VkCommandBuffer                          _vkCmdBuff,
 189  188                                VulkanUtilities::VulkanCommandBufferPool& _Pool) noexcept :
 190  189                  vkCmdBuff {_vkCmdBuff},
 191  190                  Pool      {&_Pool    }
 193  192                  VERIFY_EXPR(vkCmdBuff != VK_NULL_HANDLE);
 194  193              }
 195  194
 196        -          CmdBufferDeleter             (const CmdBufferDeleter&) = delete;
 197        -          CmdBufferDeleter& operator = (const CmdBufferDeleter&) = delete;
 198        -          CmdBufferDeleter& operator = (      CmdBufferDeleter&&) = delete;
 199        -
 200        -          CmdBufferDeleter(CmdBufferDeleter&& rhs) noexcept :
      195  +          CmdBufferRecycler             (const CmdBufferRecycler&) = delete;
      196  +          CmdBufferRecycler& operator = (const CmdBufferRecycler&) = delete;
      197  +          CmdBufferRecycler& operator = (      CmdBufferRecycler&&) = delete;
      198  +
      199  +          CmdBufferRecycler(CmdBufferRecycler&& rhs) noexcept :
 201  200                  vkCmdBuff {rhs.vkCmdBuff},
 202  201                  Pool      {rhs.Pool     }
 203  202              {
 206  205              }
 207  206              // clang-format on
 208  207
 209        -          ~CmdBufferDeleter()
      208  +          ~CmdBufferRecycler()
 210  209              {
 211  210                  if (Pool != nullptr)
 212  211                  {
 213        -                  Pool->FreeCommandBuffer(std::move(vkCmdBuff));
      212  +                  Pool->RecycleCommandBuffer(std::move(vkCmdBuff));
 214  213                  }
 215  214              }
 216  215
 217  216          private:
 218        -          VkCommandBuffer                           vkCmdBuff;
 219        -          VulkanUtilities::VulkanCommandBufferPool* Pool;
      217  +          VkCommandBuffer                           vkCmdBuff = VK_NULL_HANDLE;
      218  +          VulkanUtilities::VulkanCommandBufferPool* Pool      = nullptr;
 220  219          };
 221  220
      221  +      // Discard the command buffer directly to the release queue since we know exactly which queue it was
      222  +      // submitted to as well as the associated FenceValue.
 222  223          auto& ReleaseQueue = m_pDevice->GetReleaseQueue(CmdQueue);
 223        -      ReleaseQueue.DiscardResource(CmdBufferDeleter{vkCmdBuff, m_CmdPool}, FenceValue);
      224  +      ReleaseQueue.DiscardResource(CmdBufferRecycler{vkCmdBuff, *m_CmdPool}, FenceValue);
 224  225      }
 225  226
 226  227      inline void DeviceContextVkImpl::DisposeCurrentCmdBuffer(Uint32 CmdQueue, Uint64 FenceValue)
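The destructor comment above carries the core argument of this commit: because the release queue runs its entries in the order they were enqueued, command buffers (discarded directly) are always recycled before the pool (which enters the queue later, via the stale objects queue), so no IdleGPU() is needed. A simplified standalone model of that FIFO guarantee (the real ReleaseQueueVk stores move-only recycler objects rather than std::function, so this is a sketch, not the library's implementation):

    #include <cstdint>
    #include <cstdio>
    #include <deque>
    #include <functional>
    #include <utility>

    class ReleaseQueue
    {
    public:
        void Discard(std::function<void()> Deleter, std::uint64_t Fence)
        {
            m_Queue.emplace_back(Fence, std::move(Deleter));
        }

        void Purge(std::uint64_t CompletedFence)
        {
            // Entries whose fence has completed run strictly in enqueue order.
            while (!m_Queue.empty() && m_Queue.front().first <= CompletedFence)
            {
                m_Queue.front().second();
                m_Queue.pop_front();
            }
        }

    private:
        std::deque<std::pair<std::uint64_t, std::function<void()>>> m_Queue;
    };

    int main()
    {
        ReleaseQueue q;
        q.Discard([] { std::puts("recycle command buffer"); }, /*Fence =*/1);
        q.Discard([] { std::puts("destroy command pool"); },   /*Fence =*/1);
        q.Purge(1); // the buffer is recycled first, then the pool is destroyed
    }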
 121  121          },
 122  122          m_TransientCmdPoolMgr
 123  123          {
 124        -          *this,
      124  +          GetLogicalDevice(),
 125  125              "Transient command buffer pool manager",
 126  126              CmdQueues[0]->GetQueueFamilyIndex(),
 127  127              VK_COMMAND_POOL_CREATE_TRANSIENT_BIT
 326  326      }
 327  327
 328  328
 329        -  void RenderDeviceVkImpl::ExecuteAndDisposeTransientCmdBuff(Uint32 QueueIndex, VkCommandBuffer vkCmdBuff, VulkanUtilities::CommandPoolWrapper&& CmdPool)
      329  +  void RenderDeviceVkImpl::ExecuteAndDisposeTransientCmdBuff(Uint32                                QueueIndex,
      330  +                                                             VkCommandBuffer                       vkCmdBuff,
      331  +                                                             VulkanUtilities::CommandPoolWrapper&& CmdPool)
 330  332      {
 331  333          VERIFY_EXPR(vkCmdBuff != VK_NULL_HANDLE);
 332  334
 372  374              FenceValue = pCmdQueueVk->Submit(SubmitInfo);
 373  375          } //
 374  376          );
 375        -      m_TransientCmdPoolMgr.SafeReleaseCommandPool(std::move(CmdPool), QueueIndex, FenceValue);
      377  +
      378  +      class TransientCmdPoolRecycler
      379  +      {
      380  +      public:
      381  +          TransientCmdPoolRecycler(const VulkanUtilities::VulkanLogicalDevice& _LogicalDevice,
      382  +                                   CommandPoolManager&                         _CmdPoolMgr,
      383  +                                   VulkanUtilities::CommandPoolWrapper&&       _Pool,
      384  +                                   VkCommandBuffer&&                           _vkCmdBuffer) :
      385  +              // clang-format off
      386  +              LogicalDevice{_LogicalDevice         },
      387  +              CmdPoolMgr   {&_CmdPoolMgr           },
      388  +              Pool         {std::move(_Pool)       },
      389  +              vkCmdBuffer  {std::move(_vkCmdBuffer)}
      390  +          // clang-format on
      391  +          {
      392  +              VERIFY_EXPR(Pool != VK_NULL_HANDLE && vkCmdBuffer != VK_NULL_HANDLE);
      393  +              _vkCmdBuffer = VK_NULL_HANDLE;
      394  +          }
      395  +
      396  +          // clang-format off
      397  +          TransientCmdPoolRecycler             (const TransientCmdPoolRecycler&) = delete;
      398  +          TransientCmdPoolRecycler& operator = (const TransientCmdPoolRecycler&) = delete;
      399  +          TransientCmdPoolRecycler& operator = (      TransientCmdPoolRecycler&&) = delete;
      400  +
      401  +          TransientCmdPoolRecycler(TransientCmdPoolRecycler&& rhs) :
      402  +              LogicalDevice{rhs.LogicalDevice          },
      403  +              CmdPoolMgr   {rhs.CmdPoolMgr            },
      404  +              Pool         {std::move(rhs.Pool)       },
      405  +              vkCmdBuffer  {std::move(rhs.vkCmdBuffer)}
      406  +          {
      407  +              rhs.CmdPoolMgr  = nullptr;
      408  +              rhs.vkCmdBuffer = VK_NULL_HANDLE;
      409  +          }
      410  +          // clang-format on
      411  +
      412  +          ~TransientCmdPoolRecycler()
      413  +          {
      414  +              if (CmdPoolMgr != nullptr)
      415  +              {
      416  +                  LogicalDevice.FreeCommandBuffer(Pool, vkCmdBuffer);
      417  +                  CmdPoolMgr->RecycleCommandPool(std::move(Pool));
      418  +              }
      419  +          }
      420  +
      421  +      private:
      422  +          const VulkanUtilities::VulkanLogicalDevice& LogicalDevice;
      423  +
      424  +          CommandPoolManager*                 CmdPoolMgr  = nullptr;
      425  +          VulkanUtilities::CommandPoolWrapper Pool;
      426  +          VkCommandBuffer                     vkCmdBuffer = VK_NULL_HANDLE;
      427  +      };
      428  +
      429  +      // Discard the command pool directly to the release queue since we know exactly which queue it was
      430  +      // submitted to as well as the associated FenceValue
      431  +      // clang-format off
      432  +      GetReleaseQueue(QueueIndex).DiscardResource(
      433  +          TransientCmdPoolRecycler
      434  +          {
      435  +              GetLogicalDevice(),
      436  +              m_TransientCmdPoolMgr,
      437  +              std::move(CmdPool),
      438  +              std::move(vkCmdBuff)
      439  +          },
      440  +          FenceValue);
      441  +      // clang-format on
 376  442      }
 377  443
 378  444      void RenderDeviceVkImpl::SubmitCommandBuffer(Uint32 QueueIndex,
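TransientCmdPoolRecycler relies on the classic move-only deleter idiom: the move constructor nulls the source's manager pointer so that, even though the recycler is moved into the release queue, exactly one destructor performs the free-and-recycle. A standalone demonstration of that idiom (illustrative names, not the DiligentCore API):

    #include <cstdio>
    #include <utility>

    struct Recycler
    {
        int* Mgr; // stands in for CmdPoolMgr

        explicit Recycler(int& m) : Mgr{&m} {}
        Recycler(const Recycler&)            = delete;
        Recycler& operator=(const Recycler&) = delete;
        Recycler(Recycler&& rhs) noexcept : Mgr{rhs.Mgr} { rhs.Mgr = nullptr; }

        ~Recycler()
        {
            if (Mgr != nullptr) // moved-from instances skip this
            {
                ++*Mgr;
                std::puts("recycled once");
            }
        }
    };

    int main()
    {
        int recycled = 0;
        {
            Recycler a{recycled};
            Recycler b{std::move(a)}; // like moving the recycler into the release queue
        }                             // only b recycles; a was emptied by the move
        std::printf("%d\n", recycled); // prints 1
    }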
  48   48
  49   49          m_CmdPool = m_LogicalDevice->CreateCommandPool(CmdPoolCI);
  50   50          DEV_CHECK_ERR(m_CmdPool != VK_NULL_HANDLE, "Failed to create Vulkan command pool");
  51        -  #ifdef DILIGENT_DEVELOPMENT
  52        -      m_BuffCounter = 0;
  53        -  #endif
  54   51      }
  55   52
  56   53      VulkanCommandBufferPool::~VulkanCommandBufferPool()
  57   54      {
       55  +      DEV_CHECK_ERR(m_BuffCounter == 0, m_BuffCounter,
       56  +                    " command buffer(s) have not been returned to the pool. If there are outstanding references to these "
       57  +                    "buffers in release queues, VulkanCommandBufferPool::RecycleCommandBuffer() will crash when attempting to "
       58  +                    "return the buffer to the pool.");
       59  +
       60  +      for (auto CmdBuff : m_CmdBuffers)
       61  +          m_LogicalDevice->FreeCommandBuffer(m_CmdPool, CmdBuff);
  58   62          m_CmdPool.Release();
  59        -      DEV_CHECK_ERR(m_BuffCounter == 0, m_BuffCounter, " command buffer(s) have not been returned to the pool. If there are outstanding references to these buffers in release queues, FreeCommandBuffer() will crash when attempting to return a buffer to the pool.");
  60   63      }
  61   64
  62   65      VkCommandBuffer VulkanCommandBufferPool::GetCommandBuffer(const char* DebugName)
 113  116          return CmdBuffer;
 114  117      }
 115  118
 116        -  void VulkanCommandBufferPool::FreeCommandBuffer(VkCommandBuffer&& CmdBuffer)
      119  +  void VulkanCommandBufferPool::RecycleCommandBuffer(VkCommandBuffer&& CmdBuffer)
 117  120      {
 118  121          std::lock_guard<std::mutex> Lock{m_Mutex};
 119  122          m_CmdBuffers.emplace_back(CmdBuffer);
 123  126      #endif
 124  127      }
 125  128
 126        -  CommandPoolWrapper&& VulkanCommandBufferPool::Release()
 127        -  {
 128        -      m_LogicalDevice.reset();
 129        -      m_CmdBuffers.clear();
 130        -      return std::move(m_CmdPool);
 131        -  }
 132        -
 133  129      } // namespace VulkanUtilities
 462  462      }
 463  463
 464  464
      465  +  void VulkanLogicalDevice::FreeCommandBuffer(VkCommandPool Pool, VkCommandBuffer CmdBuffer) const
      466  +  {
      467  +      VERIFY_EXPR(Pool != VK_NULL_HANDLE && CmdBuffer != VK_NULL_HANDLE);
      468  +      vkFreeCommandBuffers(m_VkDevice, Pool, 1, &CmdBuffer);
      469  +  }
 465  470
 466  471
 467  472      VkMemoryRequirements VulkanLogicalDevice::GetBufferMemoryRequirements(VkBuffer vkBuffer) const