Renamed _DEBUG and DEVELOPMENT macros to DILIGENT_DEBUG and DILIGENT_DEVELOPMENT
assiduous
2 years ago
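The engine-specific prefixes avoid clashing with macros that other code predefines: MSVC, in particular, defines _DEBUG automatically whenever the debug CRT (/MTd or /MDd) is selected, so keying the engine's own diagnostics off _DEBUG conflated them with the host application's runtime-library choice. A minimal sketch of the distinction (illustrative only; compile with -DDILIGENT_DEBUG, or let the CMake changes below define it):

    #include <iostream>

    int main()
    {
    #ifdef DILIGENT_DEBUG
        std::cout << "Diligent debug checks enabled\n"; // defined explicitly by the build system
    #endif
    #ifdef _DEBUG
        std::cout << "Debug CRT build\n"; // defined implicitly by MSVC's runtime-library choice
    #endif
    }

Several hunks below also wrap debug-only locals in an extra brace scope so they cannot collide with names in the enclosing function.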
228 | 228 | # target_compile_options(Diligent-BuildSettings INTERFACE "$<$<CONFIG:RELEASE>:/Ot>") |
229 | 229 | # does not work as expected |
230 | 230 | |
231 | set(DEBUG_MACROS DEVELOPMENT) | |
231 | set(DEBUG_MACROS DILIGENT_DEVELOPMENT DILIGENT_DEBUG) | |
232 | 232 | target_compile_definitions(Diligent-BuildSettings INTERFACE "$<$<CONFIG:DEBUG>:${DEBUG_MACROS}>") |
233 | 233 | else() |
234 | 234 | # Todo: use __attribute__((always_inline)), but it needs to be defined in a header file |
235 | 235 | target_compile_definitions(Diligent-BuildSettings INTERFACE __forceinline=inline) |
236 | 236 | |
237 | set(DEBUG_MACROS _DEBUG DEBUG DEVELOPMENT) | |
237 | set(DEBUG_MACROS _DEBUG DEBUG DILIGENT_DEVELOPMENT DILIGENT_DEBUG) | |
238 | 238 | set(RELEASE_MACROS NDEBUG) |
239 | 239 | |
240 | 240 | foreach(DBG_CONFIG ${DEBUG_CONFIGURATIONS}) |
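The $<$<CONFIG:DEBUG>:...> generator expression applies DEBUG_MACROS only to the Debug configuration, and on non-MSVC toolchains __forceinline is remapped to plain inline. The Todo above suggests __attribute__((always_inline)) as a stronger replacement once it can live in a header; a sketch of what such a portable macro could look like (the FORCE_INLINE name is hypothetical, not part of this commit):

    #if defined(_MSC_VER)
    #    define FORCE_INLINE __forceinline
    #elif defined(__GNUC__) || defined(__clang__)
    #    define FORCE_INLINE inline __attribute__((always_inline))
    #else
    #    define FORCE_INLINE inline
    #endif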
156 | 156 | return SampleInfo; |
157 | 157 | } |
158 | 158 | |
159 | #ifdef _DEBUG | |
159 | #ifdef DILIGENT_DEBUG | |
160 | 160 | template <TEXTURE_ADDRESS_MODE AddressMode> |
161 | 161 | void _DbgVerifyFilterInfo(const LinearTexFilterSampleInfo& FilterInfo, Uint32 Width, const char* Direction, float u) |
162 | 162 | { |
202 | 202 | auto UFilterInfo = GetLinearTexFilterSampleInfo<AddressModeU, IsNormalizedCoord>(Width, u); |
203 | 203 | auto VFilterInfo = GetLinearTexFilterSampleInfo<AddressModeV, IsNormalizedCoord>(Height, v); |
204 | 204 | |
205 | #ifdef _DEBUG | |
205 | #ifdef DILIGENT_DEBUG | |
206 | 206 | { |
207 | 207 | _DbgVerifyFilterInfo<AddressModeU>(UFilterInfo, Width, "horizontal", u); |
208 | 208 | _DbgVerifyFilterInfo&lt;AddressModeV&gt;(VFilterInfo, Height, "vertical", v); |
42 | 42 | namespace Diligent |
43 | 43 | { |
44 | 44 | |
45 | #ifdef _DEBUG | |
45 | #ifdef DILIGENT_DEBUG | |
46 | 46 | inline void FillWithDebugPattern(void* ptr, Uint8 Pattern, size_t NumBytes) |
47 | 47 | { |
48 | 48 | memset(ptr, Pattern, NumBytes); |
127 | 127 | return reinterpret_cast<Uint8*>(m_pPageStart) + BlockIndex * m_pOwnerAllocator->m_BlockSize; |
128 | 128 | } |
129 | 129 | |
130 | #ifdef _DEBUG | |
130 | #ifdef DILIGENT_DEBUG | |
131 | 131 | void dbgVerifyAddress(const void* pBlockAddr) const |
132 | 132 | { |
133 | 133 | size_t Delta = reinterpret_cast<const Uint8*>(pBlockAddr) - reinterpret_cast<Uint8*>(m_pPageStart); |
240 | 240 | public: |
241 | 241 | static void SetRawAllocator(IMemoryAllocator& Allocator) |
242 | 242 | { |
243 | #ifdef _DEBUG | |
243 | #ifdef DILIGENT_DEBUG | |
244 | 244 | if (m_bPoolInitialized && m_pRawAllocator != &Allocator) |
245 | 245 | { |
246 | 246 | LOG_WARNING_MESSAGE("Setting pool raw allocator after the pool has been initialized has no effect"); |
250 | 250 | } |
251 | 251 | static void SetPageSize(Uint32 NumAllocationsInPage) |
252 | 252 | { |
253 | #ifdef _DEBUG | |
253 | #ifdef DILIGENT_DEBUG | |
254 | 254 | if (m_bPoolInitialized && m_NumAllocationsInPage != NumAllocationsInPage) |
255 | 255 | { |
256 | 256 | LOG_WARNING_MESSAGE("Setting pool page size after the pool has been initialized has no effect"); |
261 | 261 | static ObjectPool& GetPool() |
262 | 262 | { |
263 | 263 | static ObjectPool ThePool; |
264 | #ifdef _DEBUG | |
264 | #ifdef DILIGENT_DEBUG | |
265 | 265 | m_bPoolInitialized = true; |
266 | 266 | #endif |
267 | 267 | return ThePool; |
298 | 298 | ObjectPool() : |
299 | 299 | m_FixedBlockAllocator(m_pRawAllocator ? *m_pRawAllocator : GetRawAllocator(), sizeof(ObjectType), m_NumAllocationsInPage) |
300 | 300 | {} |
301 | #ifdef _DEBUG | |
301 | #ifdef DILIGENT_DEBUG | |
302 | 302 | static bool m_bPoolInitialized; |
303 | 303 | #endif |
304 | 304 | FixedBlockMemoryAllocator m_FixedBlockAllocator; |
309 | 309 | template <typename ObjectType> |
310 | 310 | IMemoryAllocator* ObjectPool<ObjectType>::m_pRawAllocator = nullptr; |
311 | 311 | |
312 | #ifdef _DEBUG | |
312 | #ifdef DILIGENT_DEBUG | |
313 | 313 | template <typename ObjectType> |
314 | 314 | bool ObjectPool<ObjectType>::m_bPoolInitialized = false; |
315 | 315 | #endif |
219 | 219 | { |
220 | 220 | m_lNumStrongReferences = 0; |
221 | 221 | m_lNumWeakReferences = 0; |
222 | #ifdef _DEBUG | |
222 | #ifdef DILIGENT_DEBUG | |
223 | 223 | memset(m_ObjectWrapperBuffer, 0, sizeof(m_ObjectWrapperBuffer)); |
224 | 224 | #endif |
225 | 225 | } |
355 | 355 | // | - Increment m_lNumStrongReferences |
356 | 356 | // | 5. Decrement m_lNumStrongReferences |
357 | 357 | |
358 | #ifdef _DEBUG | |
359 | Atomics::Long NumStrongRefs = m_lNumStrongReferences; | |
360 | VERIFY(NumStrongRefs == 0 || NumStrongRefs == 1, "Num strong references (", NumStrongRefs, ") is expected to be 0 or 1"); | |
358 | #ifdef DILIGENT_DEBUG | |
359 | { | |
360 | Atomics::Long NumStrongRefs = m_lNumStrongReferences; | |
361 | VERIFY(NumStrongRefs == 0 || NumStrongRefs == 1, "Num strong references (", NumStrongRefs, ") is expected to be 0 or 1"); | |
362 | } | |
361 | 363 | #endif |
362 | 364 | |
363 | 365 | // Acquire the lock. |
391 | 393 | size_t ObjectWrapperBufferCopy[ObjectWrapperBufferSize]; |
392 | 394 | for (size_t i = 0; i < ObjectWrapperBufferSize; ++i) |
393 | 395 | ObjectWrapperBufferCopy[i] = m_ObjectWrapperBuffer[i]; |
394 | #ifdef _DEBUG | |
396 | #ifdef DILIGENT_DEBUG | |
395 | 397 | memset(m_ObjectWrapperBuffer, 0, sizeof(m_ObjectWrapperBuffer)); |
396 | 398 | #endif |
397 | 399 | auto* pWrapper = reinterpret_cast<ObjectWrapperBase*>(ObjectWrapperBufferCopy); |
613 | 615 | // clang-format off |
614 | 616 | m_pAllocator{&Allocator}, |
615 | 617 | m_pOwner{pOwner} |
616 | #ifdef DEVELOPMENT | |
618 | #ifdef DILIGENT_DEVELOPMENT | |
617 | 619 | , m_dvpDescription{Description} |
618 | 620 | , m_dvpFileName {FileName } |
619 | 621 | , m_dvpLineNumber {LineNumber } |
626 | 628 | // clang-format off |
627 | 629 | m_pAllocator {nullptr}, |
628 | 630 | m_pOwner {pOwner } |
629 | #ifdef DEVELOPMENT | |
631 | #ifdef DILIGENT_DEVELOPMENT | |
630 | 632 | , m_dvpDescription{nullptr} |
631 | 633 | , m_dvpFileName {nullptr} |
632 | 634 | , m_dvpLineNumber {0 } |
658 | 660 | ObjectType* pObj = nullptr; |
659 | 661 | try |
660 | 662 | { |
661 | #ifndef DEVELOPMENT | |
663 | #ifndef DILIGENT_DEVELOPMENT | |
662 | 664 | static constexpr const char* m_dvpDescription = "<Unavailable in release build>"; |
663 | 665 | static constexpr const char* m_dvpFileName = "<Unavailable in release build>"; |
664 | 666 | static constexpr Int32 m_dvpLineNumber = -1; |
685 | 687 | AllocatorType* const m_pAllocator; |
686 | 688 | IObject* const m_pOwner; |
687 | 689 | |
688 | #ifdef DEVELOPMENT | |
690 | #ifdef DILIGENT_DEVELOPMENT | |
689 | 691 | const Char* const m_dvpDescription; |
690 | 692 | const char* const m_dvpFileName; |
691 | 693 | Int32 const m_dvpLineNumber; |
62 | 62 | STDAllocator(AllocatorType& Allocator, const Char* Description, const Char* FileName, const Int32 LineNumber) noexcept : |
63 | 63 | // clang-format off |
64 | 64 | m_Allocator {Allocator} |
65 | #ifdef DEVELOPMENT | |
65 | #ifdef DILIGENT_DEVELOPMENT | |
66 | 66 | , m_dvpDescription{Description} |
67 | 67 | , m_dvpFileName {FileName } |
68 | 68 | , m_dvpLineNumber {LineNumber } |
75 | 75 | STDAllocator(const STDAllocator<U, AllocatorType>& other) noexcept : |
76 | 76 | // clang-format off |
77 | 77 | m_Allocator {other.m_Allocator} |
78 | #ifdef DEVELOPMENT | |
78 | #ifdef DILIGENT_DEVELOPMENT | |
79 | 79 | , m_dvpDescription{other.m_dvpDescription} |
80 | 80 | , m_dvpFileName {other.m_dvpFileName } |
81 | 81 | , m_dvpLineNumber {other.m_dvpLineNumber } |
88 | 88 | STDAllocator(STDAllocator<U, AllocatorType>&& other) noexcept : |
89 | 89 | // clang-format off |
90 | 90 | m_Allocator {other.m_Allocator} |
91 | #ifdef DEVELOPMENT | |
91 | #ifdef DILIGENT_DEVELOPMENT | |
92 | 92 | , m_dvpDescription{other.m_dvpDescription} |
93 | 93 | , m_dvpFileName {other.m_dvpFileName } |
94 | 94 | , m_dvpLineNumber {other.m_dvpLineNumber } |
104 | 104 | // There is no default constructor to create null allocator, so all fields must be |
105 | 105 | // initialized. |
106 | 106 | DEV_CHECK_ERR(&m_Allocator == &other.m_Allocator, "Inconsistent allocators"); |
107 | #ifdef DEVELOPMENT | |
107 | #ifdef DILIGENT_DEVELOPMENT | |
108 | 108 | DEV_CHECK_ERR(m_dvpDescription == other.m_dvpDescription, "Inconsistent allocator descriptions"); |
109 | 109 | DEV_CHECK_ERR(m_dvpFileName == other.m_dvpFileName, "Inconsistent allocator file names"); |
110 | 110 | DEV_CHECK_ERR(m_dvpLineNumber == other.m_dvpLineNumber, "Inconsistent allocator line numbers"); |
119 | 119 | |
120 | 120 | T* allocate(std::size_t count) |
121 | 121 | { |
122 | #ifndef DEVELOPMENT | |
122 | #ifndef DILIGENT_DEVELOPMENT | |
123 | 123 | static constexpr const char* m_dvpDescription = "<Unavailable in release build>"; |
124 | 124 | static constexpr const char* m_dvpFileName = "<Unavailable in release build>"; |
125 | 125 | static constexpr Int32 m_dvpLineNumber = -1; |
153 | 153 | } |
154 | 154 | |
155 | 155 | AllocatorType& m_Allocator; |
156 | #ifdef DEVELOPMENT | |
156 | #ifdef DILIGENT_DEVELOPMENT | |
157 | 157 | const Char* const m_dvpDescription; |
158 | 158 | const Char* const m_dvpFileName; |
159 | 159 | Int32 const m_dvpLineNumber; |
31 | 31 | template <typename DstType, typename SrcType> |
32 | 32 | DstType* ValidatedCast(SrcType* Ptr) |
33 | 33 | { |
34 | #ifdef _DEBUG | |
34 | #ifdef DILIGENT_DEBUG | |
35 | 35 | if (Ptr != nullptr) |
36 | 36 | { |
37 | 37 | CHECK_DYNAMIC_TYPE(DstType, Ptr); |
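ValidatedCast pairs a debug-only dynamic-type check with an unchecked static_cast, so it costs nothing in release builds but is only safe when the destination type is guaranteed. A self-contained sketch of the idiom (hypothetical CheckedCast name, assuming a polymorphic SrcType so dynamic_cast is available):

    #include <cassert>

    template <typename DstType, typename SrcType>
    DstType* CheckedCast(SrcType* Ptr)
    {
    #ifdef DILIGENT_DEBUG
        // Debug builds verify the dynamic type; release builds skip the check entirely
        assert(Ptr == nullptr || dynamic_cast<DstType*>(Ptr) != nullptr);
    #endif
        return static_cast<DstType*>(Ptr);
    }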
36 | 36 | |
37 | 37 | void* DefaultRawMemoryAllocator::Allocate(size_t Size, const Char* dbgDescription, const char* dbgFileName, const Int32 dbgLineNumber) |
38 | 38 | { |
39 | #ifdef _DEBUG | |
40 | return new Uint8[Size + 16] + 16; | |
41 | #else | |
42 | 39 | return new Uint8[Size]; |
43 | #endif | |
44 | 40 | } |
45 | 41 | |
46 | 42 | void DefaultRawMemoryAllocator::Free(void* Ptr) |
47 | 43 | { |
48 | #ifdef _DEBUG | |
49 | delete[](reinterpret_cast<Uint8*>(Ptr) - 16); | |
50 | #else | |
51 | 44 | delete[] reinterpret_cast<Uint8*>(Ptr); |
52 | #endif | |
53 | 45 | } |
54 | 46 | |
55 | 47 | DefaultRawMemoryAllocator& DefaultRawMemoryAllocator::GetAllocator() |
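The deleted debug path offset every allocation by 16 bytes, presumably so that a pointer allocated by this allocator but released elsewhere (or vice versa) would trip the debug heap instead of silently appearing to work. A standalone sketch of that offset trick, reconstructed from the removed lines (names hypothetical):

    #include <cstddef>
    #include <cstdint>

    void* DbgAllocate(std::size_t Size)
    {
        // Return a pointer shifted past the one new[] produced, so it cannot
        // be released correctly by anything that does not undo the shift
        return new std::uint8_t[Size + 16] + 16;
    }

    void DbgFree(void* Ptr)
    {
        // Undo the shift to hand delete[] the original pointer
        delete[] (static_cast<std::uint8_t*>(Ptr) - 16);
    }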
55 | 55 | |
56 | 56 | FixedBlockMemoryAllocator::~FixedBlockMemoryAllocator() |
57 | 57 | { |
58 | #ifdef _DEBUG | |
58 | #ifdef DILIGENT_DEBUG | |
59 | 59 | for (size_t p = 0; p < m_PagePool.size(); ++p) |
60 | 60 | { |
61 | 61 | VERIFY(!m_PagePool[p].HasAllocations(), "Memory leak detected: memory page has allocated block"); |
449 | 449 | SamplerInd); |
450 | 450 | if (ResType == SPIRVShaderResourceAttribs::ResourceType::SeparateImage && pNewSepImg->IsValidSepSamplerAssigned()) |
451 | 451 | { |
452 | #ifdef DEVELOPMENT | |
452 | #ifdef DILIGENT_DEVELOPMENT | |
453 | 453 | const auto& SepSmplr = GetSepSmplr(pNewSepImg->GetAssignedSepSamplerInd()); |
454 | 454 | DEV_CHECK_ERR(SepSmplr.ArraySize == 1 || SepSmplr.ArraySize == pNewSepImg->ArraySize, |
455 | 455 | "Array size (", SepSmplr.ArraySize, ") of separate sampler variable '", |
487 | 487 | |
488 | 488 | //LOG_INFO_MESSAGE(DumpResources()); |
489 | 489 | |
490 | #ifdef DEVELOPMENT | |
490 | #ifdef DILIGENT_DEVELOPMENT | |
491 | 491 | if (CombinedSamplerSuffix != nullptr) |
492 | 492 | { |
493 | 493 | for (Uint32 n = 0; n < GetNumSepSmplrs(); ++n) |
179 | 179 | // See http://diligentgraphics.com/diligent-engine/architecture/d3d12/managing-resource-lifetimes/ |
180 | 180 | void FinishCurrentFrame(Uint64 FenceValue) |
181 | 181 | { |
182 | #ifdef _DEBUG | |
182 | #ifdef DILIGENT_DEBUG | |
183 | 183 | if (!m_CompletedFrameHeads.empty()) |
184 | 184 | VERIFY(FenceValue >= m_CompletedFrameHeads.back().FenceValue, "Current frame fence value (", FenceValue, ") is lower than the fence value of the previous frame (", m_CompletedFrameHeads.back().FenceValue, ")"); |
185 | 185 | #endif |
207 | 207 | |
208 | 208 | if (IsEmpty()) |
209 | 209 | { |
210 | #ifdef _DEBUG | |
210 | #ifdef DILIGENT_DEBUG | |
211 | 211 | VERIFY(m_CompletedFrameHeads.empty(), "Zero-size heads are not added to the list, and since the buffer is empty, there must be no heads in the list"); |
212 | 212 | for (const auto& head : m_CompletedFrameHeads) |
213 | 213 | VERIFY(head.Size == 0, "Non-zero-size head found"); |
107 | 107 | AddNewBlock(0, m_MaxSize); |
108 | 108 | ResetCurrAlignment(); |
109 | 109 | |
110 | #ifdef _DEBUG | |
110 | #ifdef DILIGENT_DEBUG | |
111 | 111 | DbgVerifyList(); |
112 | 112 | #endif |
113 | 113 | } |
114 | 114 | |
115 | 115 | ~VariableSizeAllocationsManager() |
116 | 116 | { |
117 | #ifdef _DEBUG | |
117 | #ifdef DILIGENT_DEBUG | |
118 | 118 | if (!m_FreeBlocksByOffset.empty() || !m_FreeBlocksBySize.empty()) |
119 | 119 | { |
120 | 120 | VERIFY(m_FreeBlocksByOffset.size() == 1, "Single free block is expected"); |
235 | 235 | } |
236 | 236 | } |
237 | 237 | |
238 | #ifdef _DEBUG | |
238 | #ifdef DILIGENT_DEBUG | |
239 | 239 | DbgVerifyList(); |
240 | 240 | #endif |
241 | 241 | return Allocation{Offset, AdjustedSize}; |
255 | 255 | // upper_bound() returns an iterator pointing to the first element in the |
256 | 256 | // container whose key is considered to go after k. |
257 | 257 | auto NextBlockIt = m_FreeBlocksByOffset.upper_bound(Offset); |
258 | #ifdef _DEBUG | |
258 | #ifdef DILIGENT_DEBUG | |
259 | 259 | { |
260 | 260 | auto LowBnd = m_FreeBlocksByOffset.lower_bound(Offset); // First element whose offset is >= |
261 | 261 | // Since zero-size allocations are not allowed, lower bound must always be equal to the upper bound |
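The invariant behind this check holds for any ordered map: a block being freed never starts inside an existing free block, so its offset is never a key, and lower_bound therefore lands on the same element as upper_bound. A quick standalone illustration with toy values (not engine code):

    #include <cassert>
    #include <cstddef>
    #include <map>

    int main()
    {
        // Keys are free-block offsets, values are sizes: [0,32) and [64,80) are free
        std::map<std::size_t, std::size_t> FreeBlocksByOffset{{0, 32}, {64, 16}};

        std::size_t Offset = 40;                             // block being freed; not a key
        auto Upper = FreeBlocksByOffset.upper_bound(Offset); // first key >  40 -> 64
        auto Lower = FreeBlocksByOffset.lower_bound(Offset); // first key >= 40 -> 64
        assert(Lower == Upper);
    }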
338 | 338 | ResetCurrAlignment(); |
339 | 339 | } |
340 | 340 | |
341 | #ifdef _DEBUG | |
341 | #ifdef DILIGENT_DEBUG | |
342 | 342 | DbgVerifyList(); |
343 | 343 | #endif |
344 | 344 | } |
371 | 371 | {} |
372 | 372 | } |
373 | 373 | |
374 | #ifdef _DEBUG | |
374 | #ifdef DILIGENT_DEBUG | |
375 | 375 | void DbgVerifyList() |
376 | 376 | { |
377 | 377 | OffsetType TotalFreeSize = 0; |
381 | 381 | // clang-format on |
382 | 382 | static_assert(TEX_FORMAT_NUM_FORMATS == TEX_FORMAT_BC7_UNORM_SRGB + 1, "Not all texture formats initialized."); |
383 | 383 | |
384 | #ifdef _DEBUG | |
384 | #ifdef DILIGENT_DEBUG | |
385 | 385 | for (Uint32 Fmt = TEX_FORMAT_UNKNOWN; Fmt < TEX_FORMAT_NUM_FORMATS; ++Fmt) |
386 | 386 | VERIFY(FmtAttribs[Fmt].Format == static_cast<TEXTURE_FORMAT>(Fmt), "Uninitialized format"); |
387 | 387 | #endif |
70 | 70 | const BufferDesc& BuffDesc, |
71 | 71 | bool bIsDeviceInternal) : |
72 | 72 | TDeviceObjectBase{pRefCounters, pDevice, BuffDesc, bIsDeviceInternal}, |
73 | #ifdef _DEBUG | |
73 | #ifdef DILIGENT_DEBUG | |
74 | 74 | m_dbgBuffViewAllocator{BuffViewObjAllocator}, |
75 | 75 | #endif |
76 | 76 | m_pDefaultUAV{nullptr, STDDeleter<BufferViewImplType, TBuffViewObjAllocator>(BuffViewObjAllocator)}, |
163 | 163 | /// Corrects buffer view description and validates view parameters. |
164 | 164 | void CorrectBufferViewDesc(struct BufferViewDesc& ViewDesc); |
165 | 165 | |
166 | #ifdef _DEBUG | |
166 | #ifdef DILIGENT_DEBUG | |
167 | 167 | TBuffViewObjAllocator& m_dbgBuffViewAllocator; |
168 | 168 | #endif |
169 | 169 |
224 | 224 | |
225 | 225 | bool EndQuery(IQuery* pQuery, int); |
226 | 226 | |
227 | #ifdef DEVELOPMENT | |
227 | #ifdef DILIGENT_DEVELOPMENT | |
228 | 228 | // clang-format off |
229 | 229 | bool DvpVerifyDrawArguments (const DrawAttribs& Attribs)const; |
230 | 230 | bool DvpVerifyDrawIndexedArguments (const DrawIndexedAttribs& Attribs)const; |
310 | 310 | |
311 | 311 | const bool m_bIsDeferred = false; |
312 | 312 | |
313 | #ifdef _DEBUG | |
313 | #ifdef DILIGENT_DEBUG | |
314 | 314 | // std::unordered_map is unbelievably slow. Keeping track of mapped buffers |
315 | 315 | // in release builds is not feasible |
316 | 316 | struct DbgMappedBufferInfo |
331 | 331 | RESOURCE_STATE_TRANSITION_MODE StateTransitionMode, |
332 | 332 | SET_VERTEX_BUFFERS_FLAGS Flags) |
333 | 333 | { |
334 | #ifdef DEVELOPMENT | |
334 | #ifdef DILIGENT_DEVELOPMENT | |
335 | 335 | if (StartSlot >= MAX_BUFFER_SLOTS) |
336 | 336 | { |
337 | 337 | LOG_ERROR_MESSAGE("Start vertex buffer slot ", StartSlot, " is out of allowed range [0, ", MAX_BUFFER_SLOTS - 1, "]."); |
363 | 363 | auto& CurrStream = m_VertexStreams[StartSlot + Buff]; |
364 | 364 | CurrStream.pBuffer = ppBuffers ? ValidatedCast<BufferImplType>(ppBuffers[Buff]) : nullptr; |
365 | 365 | CurrStream.Offset = pOffsets ? pOffsets[Buff] : 0; |
366 | #ifdef DEVELOPMENT | |
366 | #ifdef DILIGENT_DEVELOPMENT | |
367 | 367 | if (CurrStream.pBuffer) |
368 | 368 | { |
369 | 369 | const auto& BuffDesc = CurrStream.pBuffer->GetDesc(); |
390 | 390 | inline bool DeviceContextBase<BaseInterface, ImplementationTraits>:: |
391 | 391 | CommitShaderResources(IShaderResourceBinding* pShaderResourceBinding, RESOURCE_STATE_TRANSITION_MODE StateTransitionMode, int) |
392 | 392 | { |
393 | #ifdef DEVELOPMENT | |
393 | #ifdef DILIGENT_DEVELOPMENT | |
394 | 394 | if (!m_pPipelineState) |
395 | 395 | { |
396 | 396 | LOG_ERROR_MESSAGE("No pipeline state is bound to the pipeline"); |
421 | 421 | { |
422 | 422 | m_pIndexBuffer = ValidatedCast<BufferImplType>(pIndexBuffer); |
423 | 423 | m_IndexDataStartOffset = ByteOffset; |
424 | #ifdef DEVELOPMENT | |
424 | #ifdef DILIGENT_DEVELOPMENT | |
425 | 425 | if (m_pIndexBuffer) |
426 | 426 | { |
427 | 427 | const auto& BuffDesc = m_pIndexBuffer->GetDesc(); |
568 | 568 | if (pRTView) |
569 | 569 | { |
570 | 570 | const auto& RTVDesc = pRTView->GetDesc(); |
571 | #ifdef DEVELOPMENT | |
571 | #ifdef DILIGENT_DEVELOPMENT | |
572 | 572 | if (RTVDesc.ViewType != TEXTURE_VIEW_RENDER_TARGET) |
573 | 573 | LOG_ERROR("Texture view object named '", RTVDesc.Name ? RTVDesc.Name : "", "' has incorrect view type (", GetTexViewTypeLiteralName(RTVDesc.ViewType), "). Render target view is expected"); |
574 | 574 | #endif |
583 | 583 | } |
584 | 584 | else |
585 | 585 | { |
586 | #ifdef DEVELOPMENT | |
586 | #ifdef DILIGENT_DEVELOPMENT | |
587 | 587 | const auto& TexDesc = pRTView->GetTexture()->GetDesc(); |
588 | 588 | if (m_FramebufferWidth != std::max(TexDesc.Width >> RTVDesc.MostDetailedMip, 1U)) |
589 | 589 | LOG_ERROR("Render target width (", std::max(TexDesc.Width >> RTVDesc.MostDetailedMip, 1U), ") specified by RTV '", RTVDesc.Name, "' is inconsistent with the width of previously bound render targets (", m_FramebufferWidth, ")"); |
608 | 608 | if (pDepthStencil != nullptr) |
609 | 609 | { |
610 | 610 | const auto& DSVDesc = pDepthStencil->GetDesc(); |
611 | #ifdef DEVELOPMENT | |
611 | #ifdef DILIGENT_DEVELOPMENT | |
612 | 612 | if (DSVDesc.ViewType != TEXTURE_VIEW_DEPTH_STENCIL) |
613 | 613 | LOG_ERROR("Texture view object named '", DSVDesc.Name ? DSVDesc.Name : "", "' has incorrect view type (", GetTexViewTypeLiteralName(DSVDesc.ViewType), "). Depth stencil view is expected"); |
614 | 614 | #endif |
624 | 624 | } |
625 | 625 | else |
626 | 626 | { |
627 | #ifdef DEVELOPMENT | |
627 | #ifdef DILIGENT_DEVELOPMENT | |
628 | 628 | const auto& TexDesc = pDepthStencil->GetTexture()->GetDesc(); |
629 | 629 | if (m_FramebufferWidth != std::max(TexDesc.Width >> DSVDesc.MostDetailedMip, 1U)) |
630 | 630 | LOG_ERROR("Depth-stencil target width (", std::max(TexDesc.Width >> DSVDesc.MostDetailedMip, 1U), ") specified by DSV '", DSVDesc.Name, "' is inconsistent with the width of previously bound render targets (", m_FramebufferWidth, ")"); |
687 | 687 | { |
688 | 688 | for (Uint32 stream = 0; stream < m_NumVertexStreams; ++stream) |
689 | 689 | m_VertexStreams[stream] = VertexStreamInfo<BufferImplType>{}; |
690 | #ifdef _DEBUG | |
690 | #ifdef DILIGENT_DEBUG | |
691 | 691 | for (Uint32 stream = m_NumVertexStreams; stream < _countof(m_VertexStreams); ++stream) |
692 | 692 | { |
693 | 693 | VERIFY(m_VertexStreams[stream].pBuffer == nullptr, "Unexpected non-null buffer"); |
800 | 800 | { |
801 | 801 | for (Uint32 rt = 0; rt < m_NumBoundRenderTargets; ++rt) |
802 | 802 | m_pBoundRenderTargets[rt].Release(); |
803 | #ifdef _DEBUG | |
803 | #ifdef DILIGENT_DEBUG | |
804 | 804 | for (Uint32 rt = m_NumBoundRenderTargets; rt < _countof(m_pBoundRenderTargets); ++rt) |
805 | 805 | { |
806 | 806 | VERIFY(m_pBoundRenderTargets[rt] == nullptr, "Non-null render target found"); |
824 | 824 | return false; |
825 | 825 | } |
826 | 826 | |
827 | #ifdef DEVELOPMENT | |
827 | #ifdef DILIGENT_DEVELOPMENT | |
828 | 828 | { |
829 | 829 | const auto& ViewDesc = pView->GetDesc(); |
830 | 830 | if (ViewDesc.ViewType != TEXTURE_VIEW_DEPTH_STENCIL) |
866 | 866 | return false; |
867 | 867 | } |
868 | 868 | |
869 | #ifdef DEVELOPMENT | |
869 | #ifdef DILIGENT_DEVELOPMENT | |
870 | 870 | { |
871 | 871 | const auto& ViewDesc = pView->GetDesc(); |
872 | 872 | if (ViewDesc.ViewType != TEXTURE_VIEW_RENDER_TARGET) |
956 | 956 | UpdateBuffer(IBuffer* pBuffer, Uint32 Offset, Uint32 Size, const void* pData, RESOURCE_STATE_TRANSITION_MODE StateTransitionMode) |
957 | 957 | { |
958 | 958 | VERIFY(pBuffer != nullptr, "Buffer must not be null"); |
959 | #ifdef DEVELOPMENT | |
960 | const auto& BuffDesc = ValidatedCast<BufferImplType>(pBuffer)->GetDesc(); | |
961 | DEV_CHECK_ERR(BuffDesc.Usage == USAGE_DEFAULT, "Unable to update buffer '", BuffDesc.Name, "': only USAGE_DEFAULT buffers can be updated with UpdateData()"); | |
962 | DEV_CHECK_ERR(Offset < BuffDesc.uiSizeInBytes, "Unable to update buffer '", BuffDesc.Name, "': offset (", Offset, ") exceeds the buffer size (", BuffDesc.uiSizeInBytes, ")"); | |
963 | DEV_CHECK_ERR(Size + Offset <= BuffDesc.uiSizeInBytes, "Unable to update buffer '", BuffDesc.Name, "': Update region [", Offset, ",", Size + Offset, ") is out of buffer bounds [0,", BuffDesc.uiSizeInBytes, ")"); | |
959 | #ifdef DILIGENT_DEVELOPMENT | |
960 | { | |
961 | const auto& BuffDesc = ValidatedCast<BufferImplType>(pBuffer)->GetDesc(); | |
962 | DEV_CHECK_ERR(BuffDesc.Usage == USAGE_DEFAULT, "Unable to update buffer '", BuffDesc.Name, "': only USAGE_DEFAULT buffers can be updated with UpdateData()"); | |
963 | DEV_CHECK_ERR(Offset < BuffDesc.uiSizeInBytes, "Unable to update buffer '", BuffDesc.Name, "': offset (", Offset, ") exceeds the buffer size (", BuffDesc.uiSizeInBytes, ")"); | |
964 | DEV_CHECK_ERR(Size + Offset <= BuffDesc.uiSizeInBytes, "Unable to update buffer '", BuffDesc.Name, "': Update region [", Offset, ",", Size + Offset, ") is out of buffer bounds [0,", BuffDesc.uiSizeInBytes, ")"); | |
965 | } | |
964 | 966 | #endif |
965 | 967 | } |
966 | 968 | |
976 | 978 | { |
977 | 979 | VERIFY(pSrcBuffer != nullptr, "Source buffer must not be null"); |
978 | 980 | VERIFY(pDstBuffer != nullptr, "Destination buffer must not be null"); |
979 | #ifdef DEVELOPMENT | |
980 | const auto& SrcBufferDesc = ValidatedCast<BufferImplType>(pSrcBuffer)->GetDesc(); | |
981 | const auto& DstBufferDesc = ValidatedCast<BufferImplType>(pDstBuffer)->GetDesc(); | |
982 | DEV_CHECK_ERR(DstOffset + Size <= DstBufferDesc.uiSizeInBytes, "Failed to copy buffer '", SrcBufferDesc.Name, "' to '", DstBufferDesc.Name, "': Destination range [", DstOffset, ",", DstOffset + Size, ") is out of buffer bounds [0,", DstBufferDesc.uiSizeInBytes, ")"); | |
983 | DEV_CHECK_ERR(SrcOffset + Size <= SrcBufferDesc.uiSizeInBytes, "Failed to copy buffer '", SrcBufferDesc.Name, "' to '", DstBufferDesc.Name, "': Source range [", SrcOffset, ",", SrcOffset + Size, ") is out of buffer bounds [0,", SrcBufferDesc.uiSizeInBytes, ")"); | |
981 | #ifdef DILIGENT_DEVELOPMENT | |
982 | { | |
983 | const auto& SrcBufferDesc = ValidatedCast<BufferImplType>(pSrcBuffer)->GetDesc(); | |
984 | const auto& DstBufferDesc = ValidatedCast<BufferImplType>(pDstBuffer)->GetDesc(); | |
985 | DEV_CHECK_ERR(DstOffset + Size <= DstBufferDesc.uiSizeInBytes, "Failed to copy buffer '", SrcBufferDesc.Name, "' to '", DstBufferDesc.Name, "': Destination range [", DstOffset, ",", DstOffset + Size, ") is out of buffer bounds [0,", DstBufferDesc.uiSizeInBytes, ")"); | |
986 | DEV_CHECK_ERR(SrcOffset + Size <= SrcBufferDesc.uiSizeInBytes, "Failed to copy buffer '", SrcBufferDesc.Name, "' to '", DstBufferDesc.Name, "': Source range [", SrcOffset, ",", SrcOffset + Size, ") is out of buffer bounds [0,", SrcBufferDesc.uiSizeInBytes, ")"); | |
987 | } | |
984 | 988 | #endif |
985 | 989 | } |
986 | 990 | |
992 | 996 | |
993 | 997 | const auto& BuffDesc = pBuffer->GetDesc(); |
994 | 998 | |
995 | #ifdef _DEBUG | |
996 | VERIFY(m_DbgMappedBuffers.find(pBuffer) == m_DbgMappedBuffers.end(), "Buffer '", BuffDesc.Name, "' has already been mapped"); | |
997 | m_DbgMappedBuffers[pBuffer] = DbgMappedBufferInfo{MapType}; | |
999 | #ifdef DILIGENT_DEBUG | |
1000 | { | |
1001 | VERIFY(m_DbgMappedBuffers.find(pBuffer) == m_DbgMappedBuffers.end(), "Buffer '", BuffDesc.Name, "' has already been mapped"); | |
1002 | m_DbgMappedBuffers[pBuffer] = DbgMappedBufferInfo{MapType}; | |
1003 | } | |
998 | 1004 | #endif |
999 | 1005 | |
1000 | 1006 | pMappedData = nullptr; |
1039 | 1045 | UnmapBuffer(IBuffer* pBuffer, MAP_TYPE MapType) |
1040 | 1046 | { |
1041 | 1047 | VERIFY(pBuffer, "pBuffer must not be null"); |
1042 | #ifdef _DEBUG | |
1043 | auto MappedBufferIt = m_DbgMappedBuffers.find(pBuffer); | |
1044 | VERIFY(MappedBufferIt != m_DbgMappedBuffers.end(), "Buffer '", pBuffer->GetDesc().Name, "' has not been mapped."); | |
1045 | VERIFY(MappedBufferIt->second.MapType == MapType, "MapType (", MapType, ") does not match the map type that was used to map the buffer ", MappedBufferIt->second.MapType); | |
1046 | m_DbgMappedBuffers.erase(MappedBufferIt); | |
1048 | #ifdef DILIGENT_DEBUG | |
1049 | { | |
1050 | auto MappedBufferIt = m_DbgMappedBuffers.find(pBuffer); | |
1051 | VERIFY(MappedBufferIt != m_DbgMappedBuffers.end(), "Buffer '", pBuffer->GetDesc().Name, "' has not been mapped."); | |
1052 | VERIFY(MappedBufferIt->second.MapType == MapType, "MapType (", MapType, ") does not match the map type that was used to map the buffer ", MappedBufferIt->second.MapType); | |
1053 | m_DbgMappedBuffers.erase(MappedBufferIt); | |
1054 | } | |
1047 | 1055 | #endif |
1048 | 1056 | } |
1049 | 1057 | |
1093 | 1101 | GenerateMips(ITextureView* pTexView) |
1094 | 1102 | { |
1095 | 1103 | VERIFY(pTexView != nullptr, "pTexView must not be null"); |
1096 | #ifdef DEVELOPMENT | |
1097 | const auto& ViewDesc = pTexView->GetDesc(); | |
1098 | DEV_CHECK_ERR(ViewDesc.ViewType == TEXTURE_VIEW_SHADER_RESOURCE, "Shader resource view '", ViewDesc.Name, | |
1099 | "' can't be used to generate mipmaps because its type is ", GetTexViewTypeLiteralName(ViewDesc.ViewType), ". Required view type: TEXTURE_VIEW_SHADER_RESOURCE."); | |
1100 | DEV_CHECK_ERR((ViewDesc.Flags & TEXTURE_VIEW_FLAG_ALLOW_MIP_MAP_GENERATION) != 0, "Shader resource view '", ViewDesc.Name, | |
1101 | "' was not created with TEXTURE_VIEW_FLAG_ALLOW_MIP_MAP_GENERATION flag and can't be used to generate mipmaps."); | |
1104 | #ifdef DILIGENT_DEVELOPMENT | |
1105 | { | |
1106 | const auto& ViewDesc = pTexView->GetDesc(); | |
1107 | DEV_CHECK_ERR(ViewDesc.ViewType == TEXTURE_VIEW_SHADER_RESOURCE, "Shader resource view '", ViewDesc.Name, | |
1108 | "' can't be used to generate mipmaps because its type is ", GetTexViewTypeLiteralName(ViewDesc.ViewType), ". Required view type: TEXTURE_VIEW_SHADER_RESOURCE."); | |
1109 | DEV_CHECK_ERR((ViewDesc.Flags & TEXTURE_VIEW_FLAG_ALLOW_MIP_MAP_GENERATION) != 0, "Shader resource view '", ViewDesc.Name, | |
1110 | "' was not created with TEXTURE_VIEW_FLAG_ALLOW_MIP_MAP_GENERATION flag and can't be used to generate mipmaps."); | |
1111 | } | |
1102 | 1112 | #endif |
1103 | 1113 | } |
1104 | 1114 | |
1109 | 1119 | ITexture* pDstTexture, |
1110 | 1120 | const ResolveTextureSubresourceAttribs& ResolveAttribs) |
1111 | 1121 | { |
1112 | #ifdef DEVELOPMENT | |
1122 | #ifdef DILIGENT_DEVELOPMENT | |
1113 | 1123 | VERIFY_EXPR(pSrcTexture != nullptr && pDstTexture != nullptr); |
1114 | 1124 | const auto& SrcTexDesc = pSrcTexture->GetDesc(); |
1115 | 1125 | const auto& DstTexDesc = pDstTexture->GetDesc(); |
1150 | 1160 | #endif |
1151 | 1161 | } |
1152 | 1162 | |
1153 | #ifdef DEVELOPMENT | |
1163 | #ifdef DILIGENT_DEVELOPMENT | |
1154 | 1164 | template <typename BaseInterface, typename ImplementationTraits> |
1155 | 1165 | inline bool DeviceContextBase<BaseInterface, ImplementationTraits>:: |
1156 | 1166 | DvpVerifyDrawArguments(const DrawAttribs& Attribs) const |
1521 | 1531 | return true; |
1522 | 1532 | } |
1523 | 1533 | |
1524 | #endif // DEVELOPMENT | |
1534 | #endif // DILIGENT_DEVELOPMENT | |
1525 | 1535 | |
1526 | 1536 | } // namespace Diligent |
121 | 121 | for (Uint32 i = 0; i < SrcLayout.NumStaticSamplers; ++i) |
122 | 122 | { |
123 | 123 | VERIFY(SrcLayout.StaticSamplers[i].SamplerOrTextureName != nullptr, "Static sampler or texture name can't be null"); |
124 | #ifdef DEVELOPMENT | |
125 | const auto& BorderColor = SrcLayout.StaticSamplers[i].Desc.BorderColor; | |
126 | if (!((BorderColor[0] == 0 && BorderColor[1] == 0 && BorderColor[2] == 0 && BorderColor[3] == 0) || | |
127 | (BorderColor[0] == 0 && BorderColor[1] == 0 && BorderColor[2] == 0 && BorderColor[3] == 1) || | |
128 | (BorderColor[0] == 1 && BorderColor[1] == 1 && BorderColor[2] == 1 && BorderColor[3] == 1))) | |
129 | { | |
130 | LOG_WARNING_MESSAGE("Static sampler for variable \"", SrcLayout.StaticSamplers[i].SamplerOrTextureName, "\" specifies border color (", | |
131 | BorderColor[0], ", ", BorderColor[1], ", ", BorderColor[2], ", ", BorderColor[3], | |
132 | "). D3D12 static samplers only allow transparent black (0,0,0,0), opaque black (0,0,0,1) or opaque white (1,1,1,1) as border colors"); | |
124 | #ifdef DILIGENT_DEVELOPMENT | |
125 | { | |
126 | const auto& BorderColor = SrcLayout.StaticSamplers[i].Desc.BorderColor; | |
127 | if (!((BorderColor[0] == 0 && BorderColor[1] == 0 && BorderColor[2] == 0 && BorderColor[3] == 0) || | |
128 | (BorderColor[0] == 0 && BorderColor[1] == 0 && BorderColor[2] == 0 && BorderColor[3] == 1) || | |
129 | (BorderColor[0] == 1 && BorderColor[1] == 1 && BorderColor[2] == 1 && BorderColor[3] == 1))) | |
130 | { | |
131 | LOG_WARNING_MESSAGE("Static sampler for variable \"", SrcLayout.StaticSamplers[i].SamplerOrTextureName, "\" specifies border color (", | |
132 | BorderColor[0], ", ", BorderColor[1], ", ", BorderColor[2], ", ", BorderColor[3], | |
133 | "). D3D12 static samplers only allow transparent black (0,0,0,0), opaque black (0,0,0,1) or opaque white (1,1,1,1) as border colors"); | |
134 | } | |
133 | 135 | } |
134 | 136 | #endif |
135 | 137 |
52 | 52 | |
53 | 53 | Int32 ShaderIndex = PlatformMisc::GetLSB(Type); |
54 | 54 | |
55 | #ifdef _DEBUG | |
55 | #ifdef DILIGENT_DEBUG | |
56 | 56 | switch (Type) |
57 | 57 | { |
58 | 58 | // clang-format off |
83 | 83 | const TextureDesc& Desc, |
84 | 84 | bool bIsDeviceInternal = false) : |
85 | 85 | TDeviceObjectBase(pRefCounters, pDevice, Desc, bIsDeviceInternal), |
86 | #ifdef _DEBUG | |
86 | #ifdef DILIGENT_DEBUG | |
87 | 87 | m_dbgTexViewObjAllocator(TexViewObjAllocator), |
88 | 88 | #endif |
89 | 89 | m_pDefaultSRV(nullptr, STDDeleter<TTextureViewImpl, TTexViewObjAllocator>(TexViewObjAllocator)), |
199 | 199 | /// Pure virtual function that creates texture view for the specific engine implementation. |
200 | 200 | virtual void CreateViewInternal(const struct TextureViewDesc& ViewDesc, ITextureView** ppView, bool bIsDefaultView) = 0; |
201 | 201 | |
202 | #ifdef _DEBUG | |
202 | #ifdef DILIGENT_DEBUG | |
203 | 203 | TTexViewObjAllocator& m_dbgTexViewObjAllocator; |
204 | 204 | #endif |
205 | 205 | // WARNING! We cannot use ITextureView here, because ITextureView has no virtual dtor! |
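The warning refers to a core C++ rule: deleting a derived object through a base-class pointer is undefined behavior unless the base declares a virtual destructor. A minimal standalone illustration with toy types (unrelated to the engine):

    struct Base
    {
        ~Base() {} // non-virtual, like ITextureView
    };

    struct Derived : Base
    {
        ~Derived() {} // never runs when deleted through a Base*
    };

    int main()
    {
        Base* p = new Derived{};
        delete p; // undefined behavior: Derived's destructor is not invoked
    }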
78 | 78 | /// Implementation of ITextureView::SetSampler() |
79 | 79 | virtual void DILIGENT_CALL_TYPE SetSampler(ISampler* pSampler) override final |
80 | 80 | { |
81 | #ifdef DEVELOPMENT | |
81 | #ifdef DILIGENT_DEVELOPMENT | |
82 | 82 | if (this->m_Desc.ViewType != TEXTURE_VIEW_SHADER_RESOURCE) |
83 | 83 | LOG_ERROR("Texture view \"", this->m_Desc.Name, "\": a sampler can be attached to a shader resource view only. The view type is ", GetTexViewTypeLiteralName(this->m_Desc.ViewType)); |
84 | 84 | #endif |
141 | 141 | } \ |
142 | 142 | } while (false) |
143 | 143 | |
144 | #ifdef DEVELOPMENT | |
144 | #ifdef DILIGENT_DEVELOPMENT | |
145 | 145 | VERIFY_TEX_PARAMS(MipLevel < TexDesc.MipLevels, "Mip level (", MipLevel, ") is out of allowed range [0, ", TexDesc.MipLevels - 1, "]"); |
146 | 146 | VERIFY_TEX_PARAMS(Box.MinX < Box.MaxX, "Invalid X range: ", Box.MinX, "..", Box.MaxX); |
147 | 147 | VERIFY_TEX_PARAMS(Box.MinY < Box.MaxY, "Invalid Y range: ", Box.MinY, "..", Box.MaxY); |
207 | 207 | VERIFY((SubresData.pData != nullptr) ^ (SubresData.pSrcBuffer != nullptr), "Either CPU data pointer (pData) or GPU buffer (pSrcBuffer) must not be null, but not both"); |
208 | 208 | ValidateTextureRegion(TexDesc, MipLevel, Slice, DstBox); |
209 | 209 | |
210 | #ifdef DEVELOPMENT | |
210 | #ifdef DILIGENT_DEVELOPMENT | |
211 | 211 | VERIFY_TEX_PARAMS(TexDesc.SampleCount == 1, "Only non-multisampled textures can be updated with UpdateData()"); |
212 | 212 | VERIFY_TEX_PARAMS((SubresData.Stride & 0x03) == 0, "Texture data stride (", SubresData.Stride, ") must be at least 32-bit aligned"); |
213 | 213 | VERIFY_TEX_PARAMS((SubresData.DepthStride & 0x03) == 0, "Texture data depth stride (", SubresData.DepthStride, ") must be at least 32-bit aligned"); |
38 | 38 | #include "QueryD3D11Impl.hpp" |
39 | 39 | #include "DisjointQueryPool.hpp" |
40 | 40 | |
41 | #ifdef _DEBUG | |
41 | #ifdef DILIGENT_DEBUG | |
42 | 42 | # define VERIFY_CONTEXT_BINDINGS |
43 | 43 | #endif |
44 | 44 |
26 | 26 | |
27 | 27 | #pragma once |
28 | 28 | |
29 | #ifdef _DEBUG | |
29 | #ifdef DILIGENT_DEBUG | |
30 | 30 | # define VERIFY_SHADER_BINDINGS |
31 | 31 | #endif |
326 | 326 | |
327 | 327 | Uint8* m_pResourceData = nullptr; |
328 | 328 | |
329 | #ifdef _DEBUG | |
329 | #ifdef DILIGENT_DEBUG | |
330 | 330 | IMemoryAllocator* m_pdbgMemoryAllocator = nullptr; |
331 | 331 | #endif |
332 | 332 | }; |
258 | 258 | // while Layout is alive |
259 | 259 | void BindResources(IResourceMapping* pResourceMapping, Uint32 Flags, const ShaderResourceCacheD3D11& dbgResourceCache); |
260 | 260 | |
261 | #ifdef DEVELOPMENT | |
261 | #ifdef DILIGENT_DEVELOPMENT | |
262 | 262 | bool dvpVerifyBindings() const; |
263 | 263 | #endif |
264 | 264 |
110 | 110 | __forceinline Int32 GetMaxUAVBindPoint() const { return m_MaxUAVBindPoint; } |
111 | 111 | // clang-format on |
112 | 112 | |
113 | #ifdef DEVELOPMENT | |
113 | #ifdef DILIGENT_DEVELOPMENT | |
114 | 114 | void dvpVerifyCommittedResources(ID3D11Buffer* CommittedD3D11CBs[], |
115 | 115 | ID3D11ShaderResourceView* CommittedD3D11SRVs[], |
116 | 116 | ID3D11Resource* CommittedD3D11SRVResources[], |
172 | 172 | |
173 | 173 | if (pShaderResourceBinding == nullptr) |
174 | 174 | { |
175 | #ifdef DEVELOPMENT | |
175 | #ifdef DILIGENT_DEVELOPMENT | |
176 | 176 | bool ResourcesPresent = false; |
177 | 177 | for (Uint32 s = 0; s < pPipelineStateD3D11->GetNumShaders(); ++s) |
178 | 178 | { |
192 | 192 | |
193 | 193 | |
194 | 194 | auto pShaderResBindingD3D11 = ValidatedCast<ShaderResourceBindingD3D11Impl>(pShaderResourceBinding); |
195 | #ifdef DEVELOPMENT | |
195 | #ifdef DILIGENT_DEVELOPMENT | |
196 | 196 | if (pPipelineStateD3D11->IsIncompatibleWith(pShaderResourceBinding->GetPipelineState())) |
197 | 197 | { |
198 | 198 | LOG_ERROR_MESSAGE("Shader resource binding does not match Pipeline State"); |
203 | 203 | auto NumShaders = pShaderResBindingD3D11->GetNumActiveShaders(); |
204 | 204 | VERIFY(NumShaders == pPipelineStateD3D11->GetNumShaders(), "Number of active shaders in shader resource binding is not consistent with the number of shaders in the pipeline state"); |
205 | 205 | |
206 | #ifdef DEVELOPMENT | |
207 | bool StaticResourcesPresent = false; | |
208 | for (Uint32 s = 0; s < NumShaders; ++s) | |
209 | { | |
210 | const auto& StaticResLayout = pPipelineStateD3D11->GetStaticResourceLayout(s); | |
211 | if (StaticResLayout.GetTotalResourceCount() > 0) | |
212 | StaticResourcesPresent = true; | |
213 | } | |
214 | // Static resource bindings are verified in BindStaticShaderResources() | |
215 | if (StaticResourcesPresent && !pShaderResBindingD3D11->IsStaticResourcesBound()) | |
216 | { | |
217 | LOG_ERROR_MESSAGE("Static resources have not been initialized in the shader resource binding object being committed for PSO '", pPSO->GetDesc().Name, "'. Please call IShaderResourceBinding::InitializeStaticResources()."); | |
206 | #ifdef DILIGENT_DEVELOPMENT | |
207 | { | |
208 | bool StaticResourcesPresent = false; | |
209 | for (Uint32 s = 0; s < NumShaders; ++s) | |
210 | { | |
211 | const auto& StaticResLayout = pPipelineStateD3D11->GetStaticResourceLayout(s); | |
212 | if (StaticResLayout.GetTotalResourceCount() > 0) | |
213 | StaticResourcesPresent = true; | |
214 | } | |
215 | // Static resource bindings are verified in BindStaticShaderResources() | |
216 | if (StaticResourcesPresent && !pShaderResBindingD3D11->IsStaticResourcesBound()) | |
217 | { | |
218 | LOG_ERROR_MESSAGE("Static resources have not been initialized in the shader resource binding object being committed for PSO '", pPSO->GetDesc().Name, "'. Please call IShaderResourceBinding::InitializeStaticResources()."); | |
219 | } | |
218 | 220 | } |
219 | 221 | #endif |
220 | 222 | |
224 | 226 | { |
225 | 227 | auto ShaderTypeInd = pShaderResBindingD3D11->GetActiveShaderTypeIndex(s); |
226 | 228 | |
227 | #ifdef DEVELOPMENT | |
229 | #ifdef DILIGENT_DEVELOPMENT | |
228 | 230 | auto* pShaderD3D11 = pPipelineStateD3D11->GetShader<ShaderD3D11Impl>(s); |
229 | 231 | VERIFY_EXPR(ShaderTypeInd == static_cast<Int32>(GetShaderTypeIndex(pShaderD3D11->GetDesc().ShaderType))); |
230 | 232 | #endif |
273 | 275 | } |
274 | 276 | } |
275 | 277 | } |
276 | #ifdef DEVELOPMENT | |
278 | #ifdef DILIGENT_DEVELOPMENT | |
277 | 279 | else if (VerifyStates) |
278 | 280 | { |
279 | 281 | if (const auto* pTexture = ValidatedCast<TextureBaseD3D11>(UAVRes.pTexture)) |
335 | 337 | } |
336 | 338 | } |
337 | 339 | |
338 | #ifdef DEVELOPMENT | |
340 | #ifdef DILIGENT_DEVELOPMENT | |
339 | 341 | if ((m_DebugFlags & D3D11_DEBUG_FLAG_VERIFY_COMMITTED_RESOURCE_RELEVANCE) != 0 && ShaderTypeInd == CSInd) |
340 | 342 | { |
341 | 343 | dbgVerifyCommittedUAVs(pShaderD3D11->GetDesc().ShaderType); |
370 | 372 | { |
371 | 373 | auto ShaderTypeInd = pShaderResBindingD3D11->GetActiveShaderTypeIndex(s); |
372 | 374 | |
373 | #ifdef DEVELOPMENT | |
375 | #ifdef DILIGENT_DEVELOPMENT | |
374 | 376 | auto* pShaderD3D11 = pPipelineStateD3D11->GetShader<ShaderD3D11Impl>(s); |
375 | 377 | VERIFY_EXPR(ShaderTypeInd == static_cast<Int32>(GetShaderTypeIndex(pShaderD3D11->GetDesc().ShaderType))); |
376 | 378 | #endif |
413 | 415 | } |
414 | 416 | } |
415 | 417 | } |
416 | #ifdef DEVELOPMENT | |
418 | #ifdef DILIGENT_DEVELOPMENT | |
417 | 419 | else if (VerifyStates) |
418 | 420 | { |
419 | 421 | VERIFY_EXPR(CommitResources); |
445 | 447 | (m_pd3d11DeviceContext->*SetCBMethod)(MinSlot, MaxSlot - MinSlot + 1, CommittedD3D11CBs + MinSlot); |
446 | 448 | m_NumCommittedCBs[ShaderTypeInd] = std::max(m_NumCommittedCBs[ShaderTypeInd], static_cast<Uint8>(NumCBs)); |
447 | 449 | } |
448 | #ifdef DEVELOPMENT | |
450 | #ifdef DILIGENT_DEVELOPMENT | |
449 | 451 | if (m_DebugFlags & D3D11_DEBUG_FLAG_VERIFY_COMMITTED_RESOURCE_RELEVANCE) |
450 | 452 | { |
451 | 453 | dbgVerifyCommittedCBs(pShaderD3D11->GetDesc().ShaderType); |
512 | 514 | } |
513 | 515 | } |
514 | 516 | } |
515 | #ifdef DEVELOPMENT | |
517 | #ifdef DILIGENT_DEVELOPMENT | |
516 | 518 | else if (VerifyStates) |
517 | 519 | { |
518 | 520 | VERIFY_EXPR(CommitResources); |
552 | 554 | (m_pd3d11DeviceContext->*SetSRVMethod)(MinSlot, MaxSlot - MinSlot + 1, CommittedD3D11SRVs + MinSlot); |
553 | 555 | m_NumCommittedSRVs[ShaderTypeInd] = std::max(m_NumCommittedSRVs[ShaderTypeInd], static_cast<Uint8>(NumSRVs)); |
554 | 556 | } |
555 | #ifdef DEVELOPMENT | |
557 | #ifdef DILIGENT_DEVELOPMENT | |
556 | 558 | if (m_DebugFlags & D3D11_DEBUG_FLAG_VERIFY_COMMITTED_RESOURCE_RELEVANCE) |
557 | 559 | { |
558 | 560 | dbgVerifyCommittedSRVs(pShaderD3D11->GetDesc().ShaderType); |
592 | 594 | (m_pd3d11DeviceContext->*SetSamplerMethod)(MinSlot, MaxSlot - MinSlot + 1, CommittedD3D11Samplers + MinSlot); |
593 | 595 | m_NumCommittedSamplers[ShaderTypeInd] = std::max(m_NumCommittedSamplers[ShaderTypeInd], static_cast<Uint8>(NumSamplers)); |
594 | 596 | } |
595 | #ifdef DEVELOPMENT | |
597 | #ifdef DILIGENT_DEVELOPMENT | |
596 | 598 | if (m_DebugFlags & D3D11_DEBUG_FLAG_VERIFY_COMMITTED_RESOURCE_RELEVANCE) |
597 | 599 | { |
598 | 600 | dbgVerifyCommittedSamplers(pShaderD3D11->GetDesc().ShaderType); |
603 | 605 | |
604 | 606 | |
605 | 607 | |
606 | #ifdef DEVELOPMENT | |
608 | #ifdef DILIGENT_DEVELOPMENT | |
607 | 609 | if (CommitResources && (m_DebugFlags & D3D11_DEBUG_FLAG_VERIFY_COMMITTED_SHADER_RESOURCES) != 0) |
608 | 610 | { |
609 | 611 | // Use full resource layout to verify that all required resources are committed |
750 | 752 | |
751 | 753 | void DeviceContextD3D11Impl::PrepareForDraw(DRAW_FLAGS Flags) |
752 | 754 | { |
753 | #ifdef DEVELOPMENT | |
755 | #ifdef DILIGENT_DEVELOPMENT | |
754 | 756 | if ((Flags & DRAW_FLAG_VERIFY_RENDER_TARGETS) != 0) |
755 | 757 | DvpVerifyRenderTargets(); |
756 | 758 | #endif |
762 | 764 | CommitD3D11VertexBuffers(m_pPipelineState); |
763 | 765 | } |
764 | 766 | |
765 | #ifdef DEVELOPMENT | |
767 | #ifdef DILIGENT_DEVELOPMENT | |
766 | 768 | if ((Flags & DRAW_FLAG_VERIFY_STATES) != 0) |
767 | 769 | { |
768 | 770 | for (UINT Slot = 0; Slot < m_NumVertexStreams; ++Slot) |
800 | 802 | { |
801 | 803 | CommitD3D11IndexBuffer(IndexType); |
802 | 804 | } |
803 | #ifdef DEVELOPMENT | |
805 | #ifdef DILIGENT_DEVELOPMENT | |
804 | 806 | if (Flags & DRAW_FLAG_VERIFY_STATES) |
805 | 807 | { |
806 | 808 | if (m_pIndexBuffer->IsInKnownState() && m_pIndexBuffer->CheckState(RESOURCE_STATE_UNORDERED_ACCESS)) |
872 | 874 | if (!DvpVerifyDispatchArguments(Attribs)) |
873 | 875 | return; |
874 | 876 | |
875 | #ifdef DEVELOPMENT | |
877 | #ifdef DILIGENT_DEVELOPMENT | |
876 | 878 | if (m_DebugFlags & D3D11_DEBUG_FLAG_VERIFY_COMMITTED_RESOURCE_RELEVANCE) |
877 | 879 | { |
878 | 880 | // Verify bindings |
892 | 894 | if (!DvpVerifyDispatchIndirectArguments(Attribs, pAttribsBuffer)) |
893 | 895 | return; |
894 | 896 | |
895 | #ifdef DEVELOPMENT | |
897 | #ifdef DILIGENT_DEVELOPMENT | |
896 | 898 | if (m_DebugFlags & D3D11_DEBUG_FLAG_VERIFY_COMMITTED_RESOURCE_RELEVANCE) |
897 | 899 | { |
898 | 900 | // Verify bindings |
1177 | 1179 | pBuffD3D11Impl->ClearState(RESOURCE_STATE_UNORDERED_ACCESS); |
1178 | 1180 | } |
1179 | 1181 | } |
1180 | #ifdef DEVELOPMENT | |
1182 | #ifdef DILIGENT_DEVELOPMENT | |
1181 | 1183 | else if (StateTransitionMode == RESOURCE_STATE_TRANSITION_MODE_VERIFY) |
1182 | 1184 | { |
1183 | 1185 | if (pBuffD3D11Impl->IsInKnownState() && pBuffD3D11Impl->CheckState(RESOURCE_STATE_UNORDERED_ACCESS)) |
1207 | 1209 | m_pIndexBuffer->ClearState(RESOURCE_STATE_UNORDERED_ACCESS); |
1208 | 1210 | } |
1209 | 1211 | } |
1210 | #ifdef DEVELOPMENT | |
1212 | #ifdef DILIGENT_DEVELOPMENT | |
1211 | 1213 | else if (StateTransitionMode == RESOURCE_STATE_TRANSITION_MODE_VERIFY) |
1212 | 1214 | { |
1213 | 1215 | if (m_pIndexBuffer->IsInKnownState() && m_pIndexBuffer->CheckState(RESOURCE_STATE_UNORDERED_ACCESS)) |
1443 | 1445 | m_bCommittedD3D11IBUpToDate = false; |
1444 | 1446 | m_pd3d11DeviceContext->IASetIndexBuffer(nullptr, DXGI_FORMAT_R32_UINT, m_CommittedD3D11IndexDataStartOffset); |
1445 | 1447 | } |
1446 | #ifdef DEVELOPMENT | |
1448 | #ifdef DILIGENT_DEVELOPMENT | |
1447 | 1449 | if (m_DebugFlags & D3D11_DEBUG_FLAG_VERIFY_COMMITTED_RESOURCE_RELEVANCE) |
1448 | 1450 | { |
1449 | 1451 | dbgVerifyCommittedIndexBuffer(); |
1471 | 1473 | m_pd3d11DeviceContext->IASetVertexBuffers(Slot, _countof(ppNullBuffer), ppNullBuffer, Zero, Zero); |
1472 | 1474 | } |
1473 | 1475 | } |
1474 | #ifdef DEVELOPMENT | |
1476 | #ifdef DILIGENT_DEVELOPMENT | |
1475 | 1477 | if (m_DebugFlags & D3D11_DEBUG_FLAG_VERIFY_COMMITTED_RESOURCE_RELEVANCE) |
1476 | 1478 | { |
1477 | 1479 | dbgVerifyCommittedVertexBuffers(); |
1497 | 1499 | } |
1498 | 1500 | } |
1499 | 1501 | } |
1500 | #ifdef DEVELOPMENT | |
1502 | #ifdef DILIGENT_DEVELOPMENT | |
1501 | 1503 | if (m_DebugFlags & D3D11_DEBUG_FLAG_VERIFY_COMMITTED_RESOURCE_RELEVANCE) |
1502 | 1504 | { |
1503 | 1505 | dbgVerifyCommittedCBs(); |
1581 | 1583 | if (pTex->IsInKnownState()) |
1582 | 1584 | pTex->SetState(RESOURCE_STATE_RENDER_TARGET); |
1583 | 1585 | } |
1584 | #ifdef DEVELOPMENT | |
1586 | #ifdef DILIGENT_DEVELOPMENT | |
1585 | 1587 | else if (StateTransitionMode == RESOURCE_STATE_TRANSITION_MODE_VERIFY) |
1586 | 1588 | { |
1587 | 1589 | DvpVerifyTextureState(*pTex, RESOURCE_STATE_RENDER_TARGET, "Setting render targets (DeviceContextD3D11Impl::SetRenderTargets)"); |
1599 | 1601 | if (pTex->IsInKnownState()) |
1600 | 1602 | pTex->SetState(RESOURCE_STATE_DEPTH_WRITE); |
1601 | 1603 | } |
1602 | #ifdef DEVELOPMENT | |
1604 | #ifdef DILIGENT_DEVELOPMENT | |
1603 | 1605 | else if (StateTransitionMode == RESOURCE_STATE_TRANSITION_MODE_VERIFY) |
1604 | 1606 | { |
1605 | 1607 | DvpVerifyTextureState(*pTex, RESOURCE_STATE_DEPTH_WRITE, "Setting depth-stencil buffer (DeviceContextD3D11Impl::SetRenderTargets)"); |
1683 | 1685 | m_NumCommittedUAVs[ShaderType] = 0; |
1684 | 1686 | } |
1685 | 1687 | |
1686 | #ifdef DEVELOPMENT | |
1688 | #ifdef DILIGENT_DEVELOPMENT | |
1687 | 1689 | if (m_DebugFlags & D3D11_DEBUG_FLAG_VERIFY_COMMITTED_RESOURCE_RELEVANCE) |
1688 | 1690 | { |
1689 | 1691 | dbgVerifyCommittedSRVs(); |
1716 | 1718 | // Device context is now in default state |
1717 | 1719 | InvalidateState(); |
1718 | 1720 | |
1719 | #ifdef DEVELOPMENT | |
1721 | #ifdef DILIGENT_DEVELOPMENT | |
1720 | 1722 | if (m_DebugFlags & D3D11_DEBUG_FLAG_VERIFY_COMMITTED_RESOURCE_RELEVANCE) |
1721 | 1723 | { |
1722 | 1724 | // Verify bindings |
1753 | 1755 | // Device context is now in default state |
1754 | 1756 | InvalidateState(); |
1755 | 1757 | |
1756 | #ifdef DEVELOPMENT | |
1758 | #ifdef DILIGENT_DEVELOPMENT | |
1757 | 1759 | if (m_DebugFlags & D3D11_DEBUG_FLAG_VERIFY_COMMITTED_RESOURCE_RELEVANCE) |
1758 | 1760 | { |
1759 | 1761 | // Verify bindings |
1944 | 1946 | for (Uint32 i = 0; i < BarrierCount; ++i) |
1945 | 1947 | { |
1946 | 1948 | const auto& Barrier = pResourceBarriers[i]; |
1947 | #ifdef DEVELOPMENT | |
1949 | #ifdef DILIGENT_DEVELOPMENT | |
1948 | 1950 | DvpVerifyStateTransitionDesc(Barrier); |
1949 | 1951 | #endif |
1950 | 1952 | DEV_CHECK_ERR((Barrier.pTexture != nullptr) ^ (Barrier.pBuffer != nullptr), "Exactly one of pTexture or pBuffer must not be null"); |
76 | 76 | }; |
77 | 77 | |
78 | 78 | |
79 | #if defined(_DEBUG) | |
79 | #if defined(DILIGENT_DEVELOPMENT) | |
80 | 80 | // Check for SDK Layer support. |
81 | 81 | inline bool SdkLayersAvailable() |
82 | 82 | { |
129 | 129 | // D3D11_CREATE_DEVICE_BGRA_SUPPORT; |
130 | 130 | UINT creationFlags = 0; |
131 | 131 | |
132 | #if defined(DEVELOPMENT) | |
132 | #if defined(DILIGENT_DEVELOPMENT) | |
133 | 133 | if ((EngineCI.DebugFlags & D3D11_DEBUG_FLAG_CREATE_DEBUG_DEVICE) != 0 && SdkLayersAvailable()) |
134 | 134 | { |
135 | 135 | // If the project is in a debug build, enable debugging via SDK Layers with this flag. |
130 | 130 | |
131 | 131 | const auto& ResourceLayout = m_Desc.ResourceLayout; |
132 | 132 | |
133 | #ifdef DEVELOPMENT | |
133 | #ifdef DILIGENT_DEVELOPMENT | |
134 | 134 | { |
135 | 135 | const ShaderResources* pResources[MAX_SHADERS_IN_PIPELINE] = {}; |
136 | 136 | for (Uint32 s = 0; s < m_NumShaders; ++s) |
153 | 153 | { |
154 | 154 | const auto& StaticResLayout = pPSOD3D11->GetStaticResourceLayout(shader); |
155 | 155 | auto* pShaderD3D11 = ValidatedCast<ShaderD3D11Impl>(ppShaders[shader]); |
156 | #ifdef DEVELOPMENT | |
156 | #ifdef DILIGENT_DEVELOPMENT | |
157 | 157 | if (!StaticResLayout.dvpVerifyBindings()) |
158 | 158 | { |
159 | 159 | LOG_ERROR_MESSAGE("Static resources in SRB of PSO '", pPSOD3D11->GetDesc().Name, |
164 | 164 | } |
165 | 165 | #endif |
166 | 166 | |
167 | #ifdef _DEBUG | |
168 | auto ShaderTypeInd = GetShaderTypeIndex(pShaderD3D11->GetDesc().ShaderType); | |
169 | auto ResourceLayoutInd = m_ResourceLayoutIndex[ShaderTypeInd]; | |
170 | VERIFY_EXPR(ResourceLayoutInd == static_cast<Int8>(shader)); | |
167 | #ifdef DILIGENT_DEBUG | |
168 | { | |
169 | auto ShaderTypeInd = GetShaderTypeIndex(pShaderD3D11->GetDesc().ShaderType); | |
170 | auto ResourceLayoutInd = m_ResourceLayoutIndex[ShaderTypeInd]; | |
171 | VERIFY_EXPR(ResourceLayoutInd == static_cast<Int8>(shader)); | |
172 | } | |
171 | 173 | #endif |
172 | 174 | StaticResLayout.CopyResources(m_pBoundResourceCaches[shader]); |
173 | 175 | pPSOD3D11->SetStaticSamplers(m_pBoundResourceCaches[shader], shader); |
98 | 98 | (sizeof(CachedResource) + sizeof(ID3D11UnorderedAccessView*)) * UAVCount ); |
99 | 99 | // clang-format on |
100 | 100 | |
101 | #ifdef _DEBUG | |
101 | #ifdef DILIGENT_DEBUG | |
102 | 102 | m_pdbgMemoryAllocator = &MemAllocator; |
103 | 103 | #endif |
104 | 104 | if (BufferSize > 0) |
238 | 238 | if (!SamplerFound) |
239 | 239 | { |
240 | 240 | AssignedSamplerIndex = TexSRVBindInfo::InvalidSamplerIndex; |
241 | #ifdef _DEBUG | |
241 | #ifdef DILIGENT_DEBUG | |
242 | 242 | // Shader error will be logged by the PipelineStateD3D11Impl |
243 | 243 | constexpr bool LogStaticSamplerArrayError = false; |
244 | 244 | if (m_pResources->FindStaticSampler(AssignedSamplerAttribs, ResourceLayout, LogStaticSamplerArrayError) < 0) |
249 | 249 | } |
250 | 250 | else |
251 | 251 | { |
252 | #ifdef _DEBUG | |
252 | #ifdef DILIGENT_DEBUG | |
253 | 253 | // Shader error will be logged by the PipelineStateD3D11Impl |
254 | 254 | constexpr bool LogStaticSamplerArrayError = false; |
255 | 255 | if (m_pResources->FindStaticSampler(AssignedSamplerAttribs, ResourceLayout, LogStaticSamplerArrayError) >= 0) |
429 | 429 | // We cannot use ValidatedCast<> here as the resource retrieved from the |
430 | 430 | // resource mapping can be of wrong type |
431 | 431 | RefCntAutoPtr<BufferD3D11Impl> pBuffD3D11Impl(pBuffer, IID_BufferD3D11); |
432 | #ifdef DEVELOPMENT | |
432 | #ifdef DILIGENT_DEVELOPMENT | |
433 | 433 | { |
434 | 434 | auto& CachedCB = m_ParentResLayout.m_ResourceCache.GetCB(m_Attribs.BindPoint + ArrayIndex); |
435 | 435 | VerifyConstantBufferBinding(m_Attribs, GetType(), ArrayIndex, pBuffer, pBuffD3D11Impl.RawPtr(), CachedCB.pBuff.RawPtr(), m_ParentResLayout.GetShaderName()); |
448 | 448 | // We cannot use ValidatedCast<> here as the resource retrieved from the |
449 | 449 | // resource mapping can be of wrong type |
450 | 450 | RefCntAutoPtr<TextureViewD3D11Impl> pViewD3D11(pView, IID_TextureViewD3D11); |
451 | #ifdef DEVELOPMENT | |
451 | #ifdef DILIGENT_DEVELOPMENT | |
452 | 452 | { |
453 | 453 | auto& CachedSRV = ResourceCache.GetSRV(m_Attribs.BindPoint + ArrayIndex); |
454 | 454 | VerifyResourceViewBinding(m_Attribs, GetType(), ArrayIndex, pView, pViewD3D11.RawPtr(), {TEXTURE_VIEW_SHADER_RESOURCE}, CachedSRV.pView.RawPtr(), m_ParentResLayout.GetShaderName()); |
466 | 466 | if (pViewD3D11) |
467 | 467 | { |
468 | 468 | pSamplerD3D11Impl = ValidatedCast<SamplerD3D11Impl>(pViewD3D11->GetSampler()); |
469 | #ifdef DEVELOPMENT | |
469 | #ifdef DILIGENT_DEVELOPMENT | |
470 | 470 | if (pSamplerD3D11Impl == nullptr) |
471 | 471 | { |
472 | 472 | if (Sampler.m_Attribs.BindCount > 1) |
476 | 476 | } |
477 | 477 | #endif |
478 | 478 | } |
479 | #ifdef DEVELOPMENT | |
479 | #ifdef DILIGENT_DEVELOPMENT | |
480 | 480 | if (Sampler.GetType() != SHADER_RESOURCE_VARIABLE_TYPE_DYNAMIC) |
481 | 481 | { |
482 | 482 | auto& CachedSampler = ResourceCache.GetSampler(SamplerBindPoint); |
506 | 506 | // resource mapping can be of wrong type |
507 | 507 | RefCntAutoPtr<SamplerD3D11Impl> pSamplerD3D11(pSampler, IID_SamplerD3D11); |
508 | 508 | |
509 | #ifdef DEVELOPMENT | |
509 | #ifdef DILIGENT_DEVELOPMENT | |
510 | 510 | if (pSampler && !pSamplerD3D11) |
511 | 511 | { |
512 | 512 | LOG_ERROR_MESSAGE("Failed to bind object '", pSampler->GetDesc().Name, "' to variable '", m_Attribs.GetPrintName(ArrayIndex), |
548 | 548 | // We cannot use ValidatedCast<> here as the resource retrieved from the |
549 | 549 | // resource mapping can be of wrong type |
550 | 550 | RefCntAutoPtr<BufferViewD3D11Impl> pViewD3D11(pView, IID_BufferViewD3D11); |
551 | #ifdef DEVELOPMENT | |
551 | #ifdef DILIGENT_DEVELOPMENT | |
552 | 552 | { |
553 | 553 | auto& CachedSRV = ResourceCache.GetSRV(m_Attribs.BindPoint + ArrayIndex); |
554 | 554 | VerifyResourceViewBinding(m_Attribs, GetType(), ArrayIndex, pView, pViewD3D11.RawPtr(), {BUFFER_VIEW_SHADER_RESOURCE}, CachedSRV.pView.RawPtr(), m_ParentResLayout.GetShaderName()); |
568 | 568 | // We cannot use ValidatedCast<> here as the resource retrieved from the |
569 | 569 | // resource mapping can be of wrong type |
570 | 570 | RefCntAutoPtr<TextureViewD3D11Impl> pViewD3D11(pView, IID_TextureViewD3D11); |
571 | #ifdef DEVELOPMENT | |
571 | #ifdef DILIGENT_DEVELOPMENT | |
572 | 572 | { |
573 | 573 | auto& CachedUAV = ResourceCache.GetUAV(m_Attribs.BindPoint + ArrayIndex); |
574 | 574 | VerifyResourceViewBinding(m_Attribs, GetType(), ArrayIndex, pView, pViewD3D11.RawPtr(), {TEXTURE_VIEW_UNORDERED_ACCESS}, CachedUAV.pView.RawPtr(), m_ParentResLayout.GetShaderName()); |
588 | 588 | // We cannot use ValidatedCast<> here as the resource retrieved from the |
589 | 589 | // resource mapping can be of wrong type |
590 | 590 | RefCntAutoPtr<BufferViewD3D11Impl> pViewD3D11(pView, IID_BufferViewD3D11); |
591 | #ifdef DEVELOPMENT | |
591 | #ifdef DILIGENT_DEVELOPMENT | |
592 | 592 | { |
593 | 593 | auto& CachedUAV = ResourceCache.GetUAV(m_Attribs.BindPoint + ArrayIndex); |
594 | 594 | VerifyResourceViewBinding(m_Attribs, GetType(), ArrayIndex, pView, pViewD3D11.RawPtr(), {BUFFER_VIEW_UNORDERED_ACCESS}, CachedUAV.pView.RawPtr(), m_ParentResLayout.GetShaderName()); |
751 | 751 | template <typename ResourceType> |
752 | 752 | bool TryResource(ShaderResourceLayoutD3D11::OffsetType NextResourceTypeOffset) |
753 | 753 | { |
754 | #ifdef _DEBUG | |
755 | VERIFY(Layout.GetResourceOffset<ResourceType>() >= dbgPreviousResourceOffset, "Resource types are processed out of order!"); | |
756 | dbgPreviousResourceOffset = Layout.GetResourceOffset<ResourceType>(); | |
757 | VERIFY_EXPR(NextResourceTypeOffset >= Layout.GetResourceOffset<ResourceType>()); | |
754 | #ifdef DILIGENT_DEBUG | |
755 | { | |
756 | VERIFY(Layout.GetResourceOffset<ResourceType>() >= dbgPreviousResourceOffset, "Resource types are processed out of order!"); | |
757 | dbgPreviousResourceOffset = Layout.GetResourceOffset<ResourceType>(); | |
758 | VERIFY_EXPR(NextResourceTypeOffset >= Layout.GetResourceOffset<ResourceType>()); | |
759 | } | |
758 | 760 | #endif |
759 | 761 | if (VarOffset < NextResourceTypeOffset) |
760 | 762 | { |
776 | 778 | const ShaderResourceLayoutD3D11& Layout; |
777 | 779 | const size_t VarOffset; |
778 | 780 | Uint32 Index = 0; |
779 | #ifdef _DEBUG | |
781 | #ifdef DILIGENT_DEBUG | |
780 | 782 | Uint32 dbgPreviousResourceOffset = 0; |
781 | 783 | #endif |
782 | 784 | }; |
829 | 831 | template <typename ResourceType> |
830 | 832 | IShaderResourceVariable* TryResource() |
831 | 833 | { |
832 | #ifdef _DEBUG | |
833 | VERIFY(Layout.GetResourceOffset<ResourceType>() >= dbgPreviousResourceOffset, "Resource types are processed out of order!"); | |
834 | dbgPreviousResourceOffset = Layout.GetResourceOffset<ResourceType>(); | |
834 | #ifdef DILIGENT_DEBUG | |
835 | { | |
836 | VERIFY(Layout.GetResourceOffset<ResourceType>() >= dbgPreviousResourceOffset, "Resource types are processed out of order!"); | |
837 | dbgPreviousResourceOffset = Layout.GetResourceOffset<ResourceType>(); | |
838 | } | |
835 | 839 | #endif |
836 | 840 | auto NumResources = Layout.GetNumResources<ResourceType>(); |
837 | 841 | if (Index < NumResources) |
846 | 850 | private: |
847 | 851 | ShaderResourceLayoutD3D11& Layout; |
848 | 852 | Uint32 Index = 0; |
849 | #ifdef _DEBUG | |
853 | #ifdef DILIGENT_DEBUG | |
850 | 854 | Uint32 dbgPreviousResourceOffset = 0; |
851 | 855 | #endif |
852 | 856 | }; |
882 | 886 | } |
883 | 887 | |
884 | 888 | |
885 | #ifdef DEVELOPMENT | |
889 | #ifdef DILIGENT_DEVELOPMENT | |
886 | 890 | bool ShaderResourceLayoutD3D11::dvpVerifyBindings() const |
887 | 891 | { |
888 | 892 |
117 | 117 | } |
118 | 118 | |
119 | 119 | |
120 | #ifdef DEVELOPMENT | |
120 | #ifdef DILIGENT_DEVELOPMENT | |
121 | 121 | static String DbgMakeResourceName(const D3DShaderResourceAttribs& Attr, Uint32 BindPoint) |
122 | 122 | { |
123 | 123 | VERIFY(BindPoint >= Uint32{Attr.BindPoint} && BindPoint < Uint32{Attr.BindPoint} + Attr.BindCount, "Bind point is out of allowed range"); |
64 | 64 | |
65 | 65 | virtual void DILIGENT_CALL_TYPE QueryInterface(const INTERFACE_ID& IID, IObject** ppInterface) override final; |
66 | 66 | |
67 | #ifdef DEVELOPMENT | |
67 | #ifdef DILIGENT_DEVELOPMENT | |
68 | 68 | void DvpVerifyDynamicAllocation(class DeviceContextD3D12Impl* pCtx) const; |
69 | 69 | #endif |
70 | 70 | |
91 | 91 | { |
92 | 92 | if (m_Desc.Usage == USAGE_DYNAMIC) |
93 | 93 | { |
94 | #ifdef DEVELOPMENT | |
94 | #ifdef DILIGENT_DEVELOPMENT | |
95 | 95 | DvpVerifyDynamicAllocation(pCtx); |
96 | 96 | #endif |
97 | 97 | return m_DynamicData[ContextId].GPUAddress; |
412 | 412 | |
413 | 413 | inline void CommandContext::SetDescriptorHeaps(ShaderDescriptorHeaps& Heaps) |
414 | 414 | { |
415 | #ifdef _DEBUG | |
415 | #ifdef DILIGENT_DEBUG | |
416 | 416 | VERIFY(Heaps.pSrvCbvUavHeap != nullptr || Heaps.pSamplerHeap != nullptr, "At least one heap is expected to be set"); |
417 | 417 | VERIFY(Heaps.pSrvCbvUavHeap == nullptr || Heaps.pSrvCbvUavHeap->GetDesc().Type == D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV, "Invalid heap type provided in pSrvCbvUavHeap"); |
418 | 418 | VERIFY(Heaps.pSamplerHeap == nullptr || Heaps.pSamplerHeap->GetDesc().Type == D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER, "Invalid heap type provided in pSamplerHeap"); |
57 | 57 | // allocator |
58 | 58 | void FreeAllocator(CComPtr<ID3D12CommandAllocator>&& Allocator); |
59 | 59 | |
60 | #ifdef DEVELOPMENT | |
60 | #ifdef DILIGENT_DEVELOPMENT | |
61 | 61 | Atomics::Long GetAllocatorCounter() const |
62 | 62 | { |
63 | 63 | return m_AllocatorCounter; |
72 | 72 | |
73 | 73 | Atomics::AtomicLong m_NumAllocators = 0; // For debug purposes only |
74 | 74 | |
75 | #ifdef DEVELOPMENT | |
75 | #ifdef DILIGENT_DEVELOPMENT | |
76 | 76 | Atomics::AtomicLong m_AllocatorCounter = 0; |
77 | 77 | #endif |
78 | 78 | }; |
44 | 44 | Uint64 _Size, |
45 | 45 | void* _CPUAddress, |
46 | 46 | D3D12_GPU_VIRTUAL_ADDRESS _GPUAddress |
47 | #ifdef DEVELOPMENT | |
47 | #ifdef DILIGENT_DEVELOPMENT | |
48 | 48 | , |
49 | 49 | Uint64 _DvpCtxFrameNumber |
50 | 50 | #endif |
55 | 55 | Size {_Size }, |
56 | 56 | CPUAddress {_CPUAddress }, |
57 | 57 | GPUAddress {_GPUAddress } |
58 | #ifdef DEVELOPMENT | |
58 | #ifdef DILIGENT_DEVELOPMENT | |
59 | 59 | , DvpCtxFrameNumber(_DvpCtxFrameNumber) |
60 | 60 | #endif |
61 | 61 | // clang-format on |
66 | 66 | Uint64 Size = 0; // Reserved size of this allocation |
67 | 67 | void* CPUAddress = nullptr; // The CPU-writeable address |
68 | 68 | D3D12_GPU_VIRTUAL_ADDRESS GPUAddress = 0; // The GPU-visible address |
69 | #ifdef DEVELOPMENT | |
69 | #ifdef DILIGENT_DEVELOPMENT | |
70 | 70 | Uint64 DvpCtxFrameNumber = static_cast<Uint64>(-1); |
71 | 71 | #endif |
72 | 72 | }; |
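
The allocation struct above is a trickier case of the rename: a constructor parameter and a member that exist only in development builds, spliced in with a leading comma so the release signature stays clean. A compilable sketch of the pattern with hypothetical names (AllocDemo, DvpFrameNumber):

    struct AllocDemo
    {
        AllocDemo(void*              _CPUAddress,
                  unsigned long long _Size
    #ifdef DILIGENT_DEVELOPMENT
                  ,
                  unsigned long long _DvpFrameNumber
    #endif
                  ) :
            CPUAddress{_CPUAddress},
            Size{_Size}
    #ifdef DILIGENT_DEVELOPMENT
            , DvpFrameNumber{_DvpFrameNumber}
    #endif
        {}

        void*              CPUAddress = nullptr;
        unsigned long long Size       = 0;
    #ifdef DILIGENT_DEVELOPMENT
        // Frame number recorded for validation; ~0 means "never assigned"
        unsigned long long DvpFrameNumber = ~0ull;
    #endif
    };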
140 | 140 | |
141 | 141 | D3D12DynamicPage AllocatePage(Uint64 SizeInBytes); |
142 | 142 | |
143 | #ifdef DEVELOPMENT | |
143 | #ifdef DILIGENT_DEVELOPMENT | |
144 | 144 | int32_t GetAllocatedPageCounter() const |
145 | 145 | { |
146 | 146 | return m_AllocatedPageCounter; |
154 | 154 | using AvailablePagesMapElemType = std::pair<const Uint64, D3D12DynamicPage>; |
155 | 155 | std::multimap<Uint64, D3D12DynamicPage, std::less<Uint64>, STDAllocatorRawMem<AvailablePagesMapElemType>> m_AvailablePages; |
156 | 156 | |
157 | #ifdef DEVELOPMENT | |
157 | #ifdef DILIGENT_DEVELOPMENT | |
158 | 158 | std::atomic_int32_t m_AllocatedPageCounter = 0; |
159 | 159 | #endif |
160 | 160 | }; |
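
m_AllocatedPageCounter above follows the counter idiom used throughout this change: an atomic counter compiled in only under DILIGENT_DEVELOPMENT, incremented on allocation, decremented on free, and expected to be zero at teardown. A minimal sketch of the idiom (hypothetical PageTrackerDemo type):

    #include <atomic>
    #include <cassert>

    class PageTrackerDemo
    {
    public:
        void OnPageAllocated()
        {
    #ifdef DILIGENT_DEVELOPMENT
            ++m_AllocatedPageCounter; // development-only bookkeeping
    #endif
        }

        void OnPageFreed()
        {
    #ifdef DILIGENT_DEVELOPMENT
            --m_AllocatedPageCounter;
    #endif
        }

        ~PageTrackerDemo()
        {
    #ifdef DILIGENT_DEVELOPMENT
            // Every allocated page must have been returned by now
            assert(m_AllocatedPageCounter == 0);
    #endif
        }

    private:
    #ifdef DILIGENT_DEVELOPMENT
        std::atomic_int32_t m_AllocatedPageCounter{0};
    #endif
    };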
272 | 272 | rhs.m_FirstCPUHandle.ptr = 0; |
273 | 273 | rhs.m_FirstGPUHandle.ptr = 0; |
274 | 274 | rhs.m_MaxAllocatedSize = 0; |
275 | #ifdef DEVELOPMENT | |
275 | #ifdef DILIGENT_DEVELOPMENT | |
276 | 276 | m_AllocationsCounter.store(rhs.m_AllocationsCounter.load()); |
277 | 277 | rhs.m_AllocationsCounter = 0; |
278 | 278 | #endif |
297 | 297 | size_t GetMaxAllocatedSize() const { return m_MaxAllocatedSize; } |
298 | 298 | // clang-format on |
299 | 299 | |
300 | #ifdef DEVELOPMENT | |
300 | #ifdef DILIGENT_DEVELOPMENT | |
301 | 301 | int32_t DvpGetAllocationsCounter() const |
302 | 302 | { |
303 | 303 | return m_AllocationsCounter; |
336 | 336 | |
337 | 337 | size_t m_MaxAllocatedSize = 0; |
338 | 338 | |
339 | #ifdef DEVELOPMENT | |
339 | #ifdef DILIGENT_DEVELOPMENT | |
340 | 340 | std::atomic_int32_t m_AllocationsCounter = 0; |
341 | 341 | #endif |
342 | 342 | |
383 | 383 | virtual void Free(DescriptorHeapAllocation&& Allocation, Uint64 CmdQueueMask) override final; |
384 | 384 | virtual Uint32 GetDescriptorSize() const override final { return m_DescriptorSize; } |
385 | 385 | |
386 | #ifdef DEVELOPMENT | |
386 | #ifdef DILIGENT_DEVELOPMENT | |
387 | 387 | int32_t DvpGetTotalAllocationCount(); |
388 | 388 | #endif |
389 | 389 | |
482 | 482 | Uint32 GetMaxStaticDescriptors() const { return m_HeapAllocationManager.GetMaxDescriptors(); } |
483 | 483 | Uint32 GetMaxDynamicDescriptors() const { return m_DynamicAllocationsManager.GetMaxDescriptors(); } |
484 | 484 | |
485 | #ifdef DEVELOPMENT | |
485 | #ifdef DILIGENT_DEVELOPMENT | |
486 | 486 | int32_t DvpGetTotalAllocationCount() const |
487 | 487 | { |
488 | 488 | return m_HeapAllocationManager.DvpGetAllocationsCounter() + |
160 | 160 | |
161 | 161 | std::mutex m_ContextPoolMutex; |
162 | 162 | std::vector<PooledCommandContext, STDAllocatorRawMem<PooledCommandContext>> m_ContextPool; |
163 | #ifdef DEVELOPMENT | |
163 | #ifdef DILIGENT_DEVELOPMENT | |
164 | 164 | Atomics::AtomicLong m_AllocatedCtxCounter = 0; |
165 | 165 | #endif |
166 | 166 |
98 | 98 | m_RootParam.ShaderVisibility = Visibility; |
99 | 99 | m_RootParam.DescriptorTable.NumDescriptorRanges = NumRanges; |
100 | 100 | m_RootParam.DescriptorTable.pDescriptorRanges = pRanges; |
101 | #ifdef _DEBUG | |
101 | #ifdef DILIGENT_DEBUG | |
102 | 102 | for (Uint32 r = 0; r < NumRanges; ++r) |
103 | 103 | pRanges[r].RangeType = static_cast<D3D12_DESCRIPTOR_RANGE_TYPE>(-1); |
104 | 104 | #endif |
132 | 132 | DstTbl.pDescriptorRanges = pRanges; |
133 | 133 | const auto& SrcTbl = RP.m_RootParam.DescriptorTable; |
134 | 134 | memcpy(pRanges, SrcTbl.pDescriptorRanges, SrcTbl.NumDescriptorRanges * sizeof(D3D12_DESCRIPTOR_RANGE)); |
135 | #ifdef _DEBUG | |
135 | #ifdef DILIGENT_DEBUG | |
136 | 136 | { |
137 | 137 | Uint32 dbgTableSize = 0; |
138 | 138 | for (Uint32 r = 0; r < SrcTbl.NumDescriptorRanges; ++r) |
374 | 374 | } |
375 | 375 | |
376 | 376 | private: |
377 | #ifdef _DEBUG | |
377 | #ifdef DILIGENT_DEBUG | |
378 | 378 | void dbgVerifyRootParameters() const; |
379 | 379 | #endif |
380 | 380 | |
381 | #ifdef DEVELOPMENT | |
381 | #ifdef DILIGENT_DEVELOPMENT | |
382 | 382 | static void DvpVerifyResourceState(const ShaderResourceCacheD3D12::Resource& Res, |
383 | 383 | D3D12_DESCRIPTOR_RANGE_TYPE RangeType); |
384 | 384 | #endif |
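
This hunk shows the two guard levels side by side: dbgVerifyRootParameters is compiled only under DILIGENT_DEBUG (debug builds), while DvpVerifyResourceState survives under DILIGENT_DEVELOPMENT (development builds, which may be optimized). A small sketch of how such a split typically looks (hypothetical type and method names):

    struct GuardLevelsDemo
    {
    #ifdef DILIGENT_DEBUG
        // Debug builds only: cheap internal-invariant checks
        void dbgVerifyInternalState() const {}
    #endif

    #ifdef DILIGENT_DEVELOPMENT
        // Development builds (including optimized ones): user-facing validation
        void DvpValidateResourceStates() const {}
    #endif
    };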
532 | 532 | auto RootInd = RootView.GetRootIndex(); |
533 | 533 | |
534 | 534 | SHADER_TYPE dbgShaderType = SHADER_TYPE_UNKNOWN; |
535 | #ifdef _DEBUG | |
535 | #ifdef DILIGENT_DEBUG | |
536 | 536 | { |
537 | 537 | auto& Param = static_cast<const D3D12_ROOT_PARAMETER&>(RootView); |
538 | 538 | VERIFY_EXPR(Param.ParameterType == D3D12_ROOT_PARAMETER_TYPE_CBV); |
548 | 548 | { |
549 | 549 | if (IsDynamic) |
550 | 550 | { |
551 | #ifdef _DEBUG | |
551 | #ifdef DILIGENT_DEBUG | |
552 | 552 | if (pBuffToTransition->IsInKnownState()) |
553 | 553 | { |
554 | 554 | VERIFY(pBuffToTransition->CheckState(RESOURCE_STATE_CONSTANT_BUFFER), |
565 | 565 | CmdCtx.TransitionResource(pBuffToTransition, RESOURCE_STATE_CONSTANT_BUFFER); |
566 | 566 | } |
567 | 567 | } |
568 | #ifdef DEVELOPMENT | |
568 | #ifdef DILIGENT_DEVELOPMENT | |
569 | 569 | else if (ValidateStates) |
570 | 570 | { |
571 | 571 |
66 | 66 | |
67 | 67 | ShaderResourceCacheD3D12& GetResourceCache() { return m_ShaderResourceCache; } |
68 | 68 | |
69 | #ifdef DEVELOPMENT | |
69 | #ifdef DILIGENT_DEVELOPMENT | |
70 | 70 | void dvpVerifyResourceBindings(const PipelineStateD3D12Impl* pPSO) const; |
71 | 71 | #endif |
72 | 72 |
111 | 111 | |
112 | 112 | ShaderResourceCacheD3D12(DbgCacheContentType dbgContentType) |
113 | 113 | // clang-format off |
114 | #ifdef _DEBUG | |
114 | #ifdef DILIGENT_DEBUG | |
115 | 115 | : m_DbgContentType |
116 | 116 | { |
117 | 117 | dbgContentType |
182 | 182 | // Offset from the start of the descriptor heap allocation to the start of the table |
183 | 183 | Uint32 m_TableStartOffset = InvalidDescriptorOffset; |
184 | 184 | |
185 | #ifdef _DEBUG | |
185 | #ifdef DILIGENT_DEBUG | |
186 | 186 | void SetDebugAttribs(Uint32 MaxOffset, |
187 | 187 | const D3D12_DESCRIPTOR_HEAP_TYPE dbgDescriptorHeapType, |
188 | 188 | const SHADER_TYPE dbgRefShaderType) |
198 | 198 | const Uint32 m_NumResources = 0; |
199 | 199 | |
200 | 200 | private: |
201 | #ifdef _DEBUG | |
201 | #ifdef DILIGENT_DEBUG | |
202 | 202 | D3D12_DESCRIPTOR_HEAP_TYPE m_dbgHeapType = D3D12_DESCRIPTOR_HEAP_TYPE_NUM_TYPES; |
203 | 203 | SHADER_TYPE m_dbgShaderType = SHADER_TYPE_UNKNOWN; |
204 | 204 | #endif |
222 | 222 | void SetDescriptorHeapSpace(DescriptorHeapAllocation&& CbcSrvUavHeapSpace, DescriptorHeapAllocation&& SamplerHeapSpace) |
223 | 223 | { |
224 | 224 | VERIFY(m_SamplerHeapSpace.GetCpuHandle().ptr == 0 && m_CbvSrvUavHeapSpace.GetCpuHandle().ptr == 0, "Space has already been allocated in GPU descriptor heaps"); |
225 | #ifdef _DEBUG | |
225 | #ifdef DILIGENT_DEBUG | |
226 | 226 | Uint32 NumSamplerDescriptors = 0, NumSrvCbvUavDescriptors = 0; |
227 | 227 | for (Uint32 rt = 0; rt < m_NumTables; ++rt) |
228 | 228 | { |
318 | 318 | // Returns the number of dynamic constant buffers bound in the cache regardless of their variable types |
319 | 319 | Uint32 GetNumDynamicCBsBound() const { return m_NumDynamicCBsBound; } |
320 | 320 | |
321 | #ifdef _DEBUG | |
321 | #ifdef DILIGENT_DEBUG | |
322 | 322 | // Only for debug purposes: indicates what types of resources are stored in the cache |
323 | 323 | DbgCacheContentType DbgGetContentType() const { return m_DbgContentType; } |
324 | 324 | void DbgVerifyBoundDynamicCBsCounter() const; |
344 | 344 | // The number of the dynamic buffers bound in the resource cache regardless of their variable type |
345 | 345 | Uint32 m_NumDynamicCBsBound = 0; |
346 | 346 | |
347 | #ifdef _DEBUG | |
347 | #ifdef DILIGENT_DEBUG | |
348 | 348 | // Only for debug purposes: indicates what types of resources are stored in the cache |
349 | 349 | const DbgCacheContentType m_DbgContentType; |
350 | 350 | #endif |
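
The cache stores its content-type tag only in debug builds, which forces the conditional member initializer seen in the constructor hunk above. A compilable sketch of the pattern (hypothetical CacheDemo type):

    enum class DbgCacheContentTypeDemo { StaticShaderResources, SRBResources };

    class CacheDemo
    {
    public:
        explicit CacheDemo(DbgCacheContentTypeDemo Type)
    #ifdef DILIGENT_DEBUG
            : m_DbgContentType{Type} // the tag is stored only in debug builds
    #endif
        {
            (void)Type; // silence the unused-parameter warning in release builds
        }

    #ifdef DILIGENT_DEBUG
        DbgCacheContentTypeDemo DbgGetContentType() const { return m_DbgContentType; }
    #endif

    private:
    #ifdef DILIGENT_DEBUG
        const DbgCacheContentTypeDemo m_DbgContentType;
    #endif
    };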
238 | 238 | const ShaderResourceLayoutD3D12& DstLayout, |
239 | 239 | ShaderResourceCacheD3D12& DstCache) const; |
240 | 240 | |
241 | #ifdef DEVELOPMENT | |
241 | #ifdef DILIGENT_DEVELOPMENT | |
242 | 242 | bool dvpVerifyBindings(const ShaderResourceCacheD3D12& ResourceCache) const; |
243 | 243 | #endif |
244 | 244 |
118 | 118 | ShaderVariableD3D12Impl* m_pVariables = nullptr; |
119 | 119 | Uint32 m_NumVariables = 0; |
120 | 120 | |
121 | #ifdef _DEBUG | |
121 | #ifdef DILIGENT_DEBUG | |
122 | 122 | IMemoryAllocator& m_DbgAllocator; |
123 | 123 | #endif |
124 | 124 | // clang-format on |
387 | 387 | { |
388 | 388 | VERIFY(m_Desc.Usage == USAGE_DYNAMIC, "Dynamic buffer is expected"); |
389 | 389 | auto* pCtxD3D12 = ValidatedCast<DeviceContextD3D12Impl>(pContext); |
390 | #ifdef DEVELOPMENT | |
390 | #ifdef DILIGENT_DEVELOPMENT | |
391 | 391 | DvpVerifyDynamicAllocation(pCtxD3D12); |
392 | 392 | #endif |
393 | 393 | auto ContextId = pCtxD3D12->GetContextId(); |
396 | 396 | } |
397 | 397 | } |
398 | 398 | |
399 | #ifdef DEVELOPMENT | |
399 | #ifdef DILIGENT_DEVELOPMENT | |
400 | 400 | void BufferD3D12Impl::DvpVerifyDynamicAllocation(DeviceContextD3D12Impl* pCtx) const |
401 | 401 | { |
402 | 402 | auto ContextId = pCtx->GetContextId(); |
166 | 166 | pd3d12Resource = pBufferD3D12Impl->GetD3D12Resource(); |
167 | 167 | OldState = pBufferD3D12Impl->GetState(); |
168 | 168 | |
169 | #ifdef DEVELOPMENT | |
169 | #ifdef DILIGENT_DEVELOPMENT | |
170 | 170 | // Dynamic buffers with no SRV/UAV bind flags are suballocated in
171 | 171 | // the upload heap when Map() is called and must always be in |
172 | 172 | // D3D12_RESOURCE_STATE_GENERIC_READ state |
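
The comment above explains why the check exists: USAGE_DYNAMIC buffers without SRV/UAV bind flags are suballocated from the upload heap when Map() is called and must therefore stay in D3D12_RESOURCE_STATE_GENERIC_READ. A hedged sketch of such a development-only check, using hypothetical standalone types rather than the engine's own (DemoBufferDesc and its fields are assumptions):

    #include <cassert>
    #include <d3d12.h>

    #ifdef DILIGENT_DEVELOPMENT
    // Hypothetical standalone illustration of the check described above.
    struct DemoBufferDesc
    {
        bool     IsDynamic   = false;
        unsigned SrvUavFlags = 0; // nonzero if SRV/UAV bind flags are set
    };

    inline void DvpCheckUploadHeapState(const DemoBufferDesc& Desc, D3D12_RESOURCE_STATES State)
    {
        if (Desc.IsDynamic && Desc.SrvUavFlags == 0)
        {
            // Upload-heap suballocations must never leave GENERIC_READ
            assert(State == D3D12_RESOURCE_STATE_GENERIC_READ);
        }
    }
    #endif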
80 | 80 | swprintf(AllocatorName, _countof(AllocatorName), L"Cmd list allocator %ld", Atomics::AtomicIncrement(m_NumAllocators) - 1); |
81 | 81 | (*ppAllocator)->SetName(AllocatorName); |
82 | 82 | } |
83 | #ifdef DEVELOPMENT | |
83 | #ifdef DILIGENT_DEVELOPMENT | |
84 | 84 | Atomics::AtomicIncrement(m_AllocatorCounter); |
85 | 85 | #endif |
86 | 86 | } |
124 | 124 | { |
125 | 125 | std::lock_guard<std::mutex> LockGuard(m_AllocatorMutex); |
126 | 126 | m_FreeAllocators.emplace_back(std::move(Allocator)); |
127 | #ifdef DEVELOPMENT | |
127 | #ifdef DILIGENT_DEVELOPMENT | |
128 | 128 | Atomics::AtomicDecrement(m_AllocatorCounter); |
129 | 129 | #endif |
130 | 130 | } |
91 | 91 | D3D12DynamicPage D3D12DynamicMemoryManager::AllocatePage(Uint64 SizeInBytes) |
92 | 92 | { |
93 | 93 | std::lock_guard<std::mutex> AvailablePagesLock(m_AvailablePagesMtx); |
94 | #ifdef DEVELOPMENT | |
94 | #ifdef DILIGENT_DEVELOPMENT | |
95 | 95 | ++m_AllocatedPageCounter; |
96 | 96 | #endif |
97 | 97 | auto PageIt = m_AvailablePages.lower_bound(SizeInBytes); // Returns an iterator pointing to the first element that is not less than key |
139 | 139 | if (Mgr != nullptr) |
140 | 140 | { |
141 | 141 | std::lock_guard<std::mutex> Lock{Mgr->m_AvailablePagesMtx}; |
142 | #ifdef DEVELOPMENT | |
142 | #ifdef DILIGENT_DEVELOPMENT | |
143 | 143 | --Mgr->m_AllocatedPageCounter; |
144 | 144 | #endif |
145 | 145 | auto PageSize = Page.GetSize(); |
237 | 237 | SizeInBytes, |
238 | 238 | CurrPage.GetCPUAddress(AlignedOffset), |
239 | 239 | CurrPage.GetGPUAddress(AlignedOffset) |
240 | #ifdef DEVELOPMENT | |
240 | #ifdef DILIGENT_DEVELOPMENT | |
241 | 241 | , DvpCtxFrameNumber |
242 | 242 | #endif |
243 | 243 | }; |
121 | 121 | |
122 | 122 | m_MaxAllocatedSize = std::max(m_MaxAllocatedSize, m_FreeBlockManager.GetUsedSize()); |
123 | 123 | |
124 | #ifdef DEVELOPMENT | |
124 | #ifdef DILIGENT_DEVELOPMENT | |
125 | 125 | ++m_AllocationsCounter; |
126 | 126 | #endif |
127 | 127 | |
143 | 143 | |
144 | 144 | // Clear the allocation |
145 | 145 | Allocation.Reset(); |
146 | #ifdef DEVELOPMENT | |
146 | #ifdef DILIGENT_DEVELOPMENT | |
147 | 147 | --m_AllocationsCounter; |
148 | 148 | #endif |
149 | 149 | } |
195 | 195 | " (", std::fixed, std::setprecision(2), m_MaxSize * 100.0 / std::max(TotalDescriptors, 1u), "%)."); |
196 | 196 | } |
197 | 197 | |
198 | #ifdef DEVELOPMENT | |
198 | #ifdef DILIGENT_DEVELOPMENT | |
199 | 199 | int32_t CPUDescriptorHeap::DvpGetTotalAllocationCount() |
200 | 200 | { |
201 | 201 | int32_t AllocationCount = 0; |
316 | 316 | //GraphicsCtx.AddReferencedObject(pd3d12Resource); |
317 | 317 | |
318 | 318 | bool IsDynamic = m_pIndexBuffer->GetDesc().Usage == USAGE_DYNAMIC; |
319 | #ifdef DEVELOPMENT | |
319 | #ifdef DILIGENT_DEVELOPMENT | |
320 | 320 | if (IsDynamic) |
321 | 321 | m_pIndexBuffer->DvpVerifyDynamicAllocation(this); |
322 | 322 | #endif |
358 | 358 | if (pBufferD3D12->GetDesc().Usage == USAGE_DYNAMIC) |
359 | 359 | { |
360 | 360 | DynamicBufferPresent = true; |
361 | #ifdef DEVELOPMENT | |
361 | #ifdef DILIGENT_DEVELOPMENT | |
362 | 362 | pBufferD3D12->DvpVerifyDynamicAllocation(this); |
363 | 363 | #endif |
364 | 364 | } |
389 | 389 | |
390 | 390 | void DeviceContextD3D12Impl::PrepareForDraw(GraphicsContext& GraphCtx, DRAW_FLAGS Flags) |
391 | 391 | { |
392 | #ifdef DEVELOPMENT | |
392 | #ifdef DILIGENT_DEVELOPMENT | |
393 | 393 | if ((Flags & DRAW_FLAG_VERIFY_RENDER_TARGETS) != 0) |
394 | 394 | DvpVerifyRenderTargets(); |
395 | 395 | #endif |
399 | 399 | CommitD3D12VertexBuffers(GraphCtx); |
400 | 400 | } |
401 | 401 | |
402 | #ifdef DEVELOPMENT | |
402 | #ifdef DILIGENT_DEVELOPMENT | |
403 | 403 | if ((Flags & DRAW_FLAG_VERIFY_STATES) != 0) |
404 | 404 | { |
405 | 405 | for (Uint32 Buff = 0; Buff < m_NumVertexStreams; ++Buff) |
439 | 439 | } |
440 | 440 | } |
441 | 441 | } |
442 | #ifdef DEVELOPMENT | |
442 | #ifdef DILIGENT_DEVELOPMENT | |
443 | 443 | else |
444 | 444 | { |
445 | 445 | if (m_pPipelineState->ContainsShaderResources()) |
457 | 457 | { |
458 | 458 | CommitD3D12IndexBuffer(GraphCtx, IndexType); |
459 | 459 | } |
460 | #ifdef DEVELOPMENT | |
460 | #ifdef DILIGENT_DEVELOPMENT | |
461 | 461 | if ((Flags & DRAW_FLAG_VERIFY_STATES) != 0) |
462 | 462 | { |
463 | 463 | DvpVerifyBufferState(*m_pIndexBuffer, RESOURCE_STATE_INDEX_BUFFER, "Indexed draw (DeviceContextD3D12Impl::Draw())"); |
496 | 496 | DEV_CHECK_ERR(pAttribsBuffer != nullptr, "Indirect draw attribs buffer must not be null"); |
497 | 497 | |
498 | 498 | auto* pIndirectDrawAttribsD3D12 = ValidatedCast<BufferD3D12Impl>(pAttribsBuffer); |
499 | #ifdef DEVELOPMENT | |
499 | #ifdef DILIGENT_DEVELOPMENT | |
500 | 500 | if (pIndirectDrawAttribsD3D12->GetDesc().Usage == USAGE_DYNAMIC) |
501 | 501 | pIndirectDrawAttribsD3D12->DvpVerifyDynamicAllocation(this); |
502 | 502 | #endif |
562 | 562 | ); |
563 | 563 | } |
564 | 564 | } |
565 | #ifdef DEVELOPMENT | |
565 | #ifdef DILIGENT_DEVELOPMENT | |
566 | 566 | else |
567 | 567 | { |
568 | 568 | if (m_pPipelineState->ContainsShaderResources()) |
592 | 592 | |
593 | 593 | auto* pBufferD3D12 = ValidatedCast<BufferD3D12Impl>(pAttribsBuffer); |
594 | 594 | |
595 | #ifdef DEVELOPMENT | |
595 | #ifdef DILIGENT_DEVELOPMENT | |
596 | 596 | if (pBufferD3D12->GetDesc().Usage == USAGE_DYNAMIC) |
597 | 597 | pBufferD3D12->DvpVerifyDynamicAllocation(this); |
598 | 598 | #endif |
711 | 711 | |
712 | 712 | void DeviceContextD3D12Impl::FinishFrame() |
713 | 713 | { |
714 | #ifdef _DEBUG | |
714 | #ifdef DILIGENT_DEBUG | |
715 | 715 | for (const auto& MappedBuffIt : m_DbgMappedBuffers) |
716 | 716 | { |
717 | 717 | const auto& BuffDesc = MappedBuffIt.first->GetDesc(); |
1301 | 1301 | { |
1302 | 1302 | StateTransitionRequired = TextureD3D12.IsInKnownState() && !TextureD3D12.CheckState(RESOURCE_STATE_COPY_DEST); |
1303 | 1303 | } |
1304 | #ifdef DEVELOPMENT | |
1304 | #ifdef DILIGENT_DEVELOPMENT | |
1305 | 1305 | else if (TextureTransitionMode == RESOURCE_STATE_TRANSITION_MODE_VERIFY) |
1306 | 1306 | { |
1307 | 1307 | DvpVerifyTextureState(TextureD3D12, RESOURCE_STATE_COPY_DEST, "Using texture as copy destination (DeviceContextD3D12Impl::CopyTextureRegion)"); |
1336 | 1336 | |
1337 | 1337 | Footprint.Footprint.RowPitch = static_cast<UINT>(SrcStride); |
1338 | 1338 | |
1339 | #ifdef _DEBUG | |
1339 | #ifdef DILIGENT_DEBUG | |
1340 | 1340 | { |
1341 | 1341 | const auto& FmtAttribs = GetTextureFormatAttribs(TexDesc.Format); |
1342 | 1342 | const Uint32 RowCount = std::max((Footprint.Footprint.Height / FmtAttribs.BlockHeight), 1u); |
1387 | 1387 | if (pBufferD3D12->IsInKnownState() && pBufferD3D12->GetState() != RESOURCE_STATE_GENERIC_READ) |
1388 | 1388 | GetCmdContext().TransitionResource(pBufferD3D12, RESOURCE_STATE_GENERIC_READ); |
1389 | 1389 | } |
1390 | #ifdef DEVELOPMENT | |
1390 | #ifdef DILIGENT_DEVELOPMENT | |
1391 | 1391 | else if (BufferTransitionMode == RESOURCE_STATE_TRANSITION_MODE_VERIFY) |
1392 | 1392 | { |
1393 | 1393 | DvpVerifyBufferState(*pBufferD3D12, RESOURCE_STATE_COPY_SOURCE, "Using buffer as copy source (DeviceContextD3D12Impl::CopyTextureRegion)"); |
1446 | 1446 | const auto& TexDesc = TextureD3D12.GetDesc(); |
1447 | 1447 | auto UploadSpace = AllocateTextureUploadSpace(TexDesc.Format, DstBox); |
1448 | 1448 | auto UpdateRegionDepth = DstBox.MaxZ - DstBox.MinZ; |
1449 | #ifdef _DEBUG | |
1449 | #ifdef DILIGENT_DEBUG | |
1450 | 1450 | { |
1451 | 1451 | VERIFY(SrcStride >= UploadSpace.RowSize, "Source data stride (", SrcStride, ") is below the image row size (", UploadSpace.RowSize, ")"); |
1452 | 1452 | const Uint32 PlaneSize = SrcStride * UploadSpace.RowCount; |
1731 | 1731 | auto& CmdCtx = GetCmdContext(); |
1732 | 1732 | for (Uint32 i = 0; i < BarrierCount; ++i) |
1733 | 1733 | { |
1734 | #ifdef DEVELOPMENT | |
1734 | #ifdef DILIGENT_DEVELOPMENT | |
1735 | 1735 | DvpVerifyStateTransitionDesc(pResourceBarriers[i]); |
1736 | 1736 | #endif |
1737 | 1737 | CmdCtx.TransitionResource(pResourceBarriers[i]); |
1749 | 1749 | if (Buffer.IsInKnownState() && !Buffer.CheckState(RequiredState)) |
1750 | 1750 | CmdCtx.TransitionResource(&Buffer, RequiredState); |
1751 | 1751 | } |
1752 | #ifdef DEVELOPMENT | |
1752 | #ifdef DILIGENT_DEVELOPMENT | |
1753 | 1753 | else if (TransitionMode == RESOURCE_STATE_TRANSITION_MODE_VERIFY) |
1754 | 1754 | { |
1755 | 1755 | DvpVerifyBufferState(Buffer, RequiredState, OperationName); |
1768 | 1768 | if (Texture.IsInKnownState() && !Texture.CheckState(RequiredState)) |
1769 | 1769 | CmdCtx.TransitionResource(&Texture, RequiredState); |
1770 | 1770 | } |
1771 | #ifdef DEVELOPMENT | |
1771 | #ifdef DILIGENT_DEVELOPMENT | |
1772 | 1772 | else if (TransitionMode == RESOURCE_STATE_TRANSITION_MODE_VERIFY) |
1773 | 1773 | { |
1774 | 1774 | DvpVerifyTextureState(Texture, RequiredState, OperationName); |
91 | 91 | m_pStaticVarManagers = ALLOCATE(ShaderVarMgrAllocator, "Raw memory for ShaderVariableManagerD3D12", ShaderVariableManagerD3D12, m_NumShaders); |
92 | 92 | } |
93 | 93 | |
94 | #ifdef DEVELOPMENT | |
94 | #ifdef DILIGENT_DEVELOPMENT | |
95 | 95 | { |
96 | 96 | const ShaderResources* pResources[MAX_SHADERS_IN_PIPELINE] = {}; |
97 | 97 | for (Uint32 s = 0; s < m_NumShaders; ++s) |
347 | 347 | |
348 | 348 | auto IsSameRootSignature = m_RootSig.IsSameAs(pPSOD3D12->m_RootSig); |
349 | 349 | |
350 | #ifdef _DEBUG | |
350 | #ifdef DILIGENT_DEBUG | |
351 | 351 | { |
352 | 352 | bool IsCompatibleShaders = true; |
353 | 353 | if (m_NumShaders != pPSOD3D12->m_NumShaders) |
386 | 386 | class CommandContext& CmdCtx, |
387 | 387 | CommitAndTransitionResourcesAttribs& Attrib) const |
388 | 388 | { |
389 | #ifdef DEVELOPMENT | |
389 | #ifdef DILIGENT_DEVELOPMENT | |
390 | 390 | if (Attrib.pShaderResourceBinding == nullptr && ContainsShaderResources()) |
391 | 391 | { |
392 | 392 | LOG_ERROR_MESSAGE("Pipeline state '", m_Desc.Name, "' requires shader resource binding object to ", |
407 | 407 | return nullptr; |
408 | 408 | } |
409 | 409 | |
410 | #ifdef DEVELOPMENT | |
410 | #ifdef DILIGENT_DEVELOPMENT | |
411 | 411 | if (IsIncompatibleWith(pResBindingD3D12Impl->GetPipelineState())) |
412 | 412 | { |
413 | 413 | LOG_ERROR_MESSAGE("Shader resource binding is incompatible with the pipeline state '", m_Desc.Name, "'. Operation will be ignored."); |
182 | 182 | |
183 | 183 | auto& HeapInfo = m_Heaps[Type]; |
184 | 184 | VERIFY(Index < HeapInfo.HeapSize, "Query index ", Index, " is out of range"); |
185 | #ifdef _DEBUG | |
185 | #ifdef DILIGENT_DEBUG | |
186 | 186 | for (const auto& ind : HeapInfo.AvailableQueries) |
187 | 187 | { |
188 | 188 | VERIFY(ind != Index, "Index ", Index, " already present in available queries list"); |
206 | 206 | IdleGPU(); |
207 | 207 | ReleaseStaleResources(true); |
208 | 208 | |
209 | #ifdef DEVELOPMENT | |
209 | #ifdef DILIGENT_DEVELOPMENT | |
210 | 210 | for (auto i = 0; i < _countof(m_CPUDescriptorHeaps); ++i) |
211 | 211 | { |
212 | 212 | DEV_CHECK_ERR(m_CPUDescriptorHeaps[i].DvpGetTotalAllocationCount() == 0, "All CPU descriptor heap allocations must be released"); |
239 | 239 | { |
240 | 240 | std::lock_guard<std::mutex> LockGuard(m_ContextPoolMutex); |
241 | 241 | m_ContextPool.emplace_back(std::move(Ctx)); |
242 | #ifdef DEVELOPMENT | |
242 | #ifdef DILIGENT_DEVELOPMENT | |
243 | 243 | Atomics::AtomicDecrement(m_AllocatedCtxCounter); |
244 | 244 | #endif |
245 | 245 | } |
338 | 338 | m_ContextPool.pop_back(); |
339 | 339 | Ctx->Reset(m_CmdListManager); |
340 | 340 | Ctx->SetID(ID); |
341 | #ifdef DEVELOPMENT | |
341 | #ifdef DILIGENT_DEVELOPMENT | |
342 | 342 | Atomics::AtomicIncrement(m_AllocatedCtxCounter); |
343 | 343 | #endif |
344 | 344 | return Ctx; |
349 | 349 | auto* pRawMem = ALLOCATE(CmdCtxAllocator, "CommandContext instance", CommandContext, 1); |
350 | 350 | auto pCtx = new (pRawMem) CommandContext(m_CmdListManager); |
351 | 351 | pCtx->SetID(ID); |
352 | #ifdef DEVELOPMENT | |
352 | #ifdef DILIGENT_DEVELOPMENT | |
353 | 353 | Atomics::AtomicIncrement(m_AllocatedCtxCounter); |
354 | 354 | #endif |
355 | 355 | return PooledCommandContext(pCtx, CmdCtxAllocator); |
188 | 188 | { |
189 | 189 | auto ShaderInd = GetShaderTypeIndex(ShaderType); |
190 | 190 | auto ShaderVisibility = ShaderTypeInd2ShaderVisibilityMap[ShaderInd]; |
191 | #ifdef _DEBUG | |
191 | #ifdef DILIGENT_DEBUG | |
192 | 192 | switch (ShaderType) |
193 | 193 | { |
194 | 194 | // clang-format off |
221 | 221 | { |
222 | 222 | VERIFY_EXPR(ShaderVisibility >= D3D12_SHADER_VISIBILITY_ALL && ShaderVisibility <= D3D12_SHADER_VISIBILITY_PIXEL); |
223 | 223 | auto ShaderType = ShaderVisibility2ShaderTypeMap[ShaderVisibility]; |
224 | #ifdef _DEBUG | |
224 | #ifdef DILIGENT_DEBUG | |
225 | 225 | switch (ShaderVisibility) |
226 | 226 | { |
227 | 227 | // clang-format off |
253 | 253 | VERIFY_EXPR(RangeType >= D3D12_DESCRIPTOR_RANGE_TYPE_SRV && RangeType <= D3D12_DESCRIPTOR_RANGE_TYPE_SAMPLER); |
254 | 254 | auto HeapType = RangeType2HeapTypeMap[RangeType]; |
255 | 255 | |
256 | #ifdef _DEBUG | |
256 | #ifdef DILIGENT_DEBUG | |
257 | 257 | switch (RangeType) |
258 | 258 | { |
259 | 259 | // clang-format off |
366 | 366 | } |
367 | 367 | |
368 | 368 | |
369 | #ifdef _DEBUG | |
369 | #ifdef DILIGENT_DEBUG | |
370 | 370 | void RootSignature::dbgVerifyRootParameters() const |
371 | 371 | { |
372 | 372 | Uint32 dbgTotalSrvCbvUavSlots = 0; |
472 | 472 | ++m_TotalRootViews[RootView.GetShaderVariableType()]; |
473 | 473 | } |
474 | 474 | |
475 | #ifdef _DEBUG | |
475 | #ifdef DILIGENT_DEBUG | |
476 | 476 | dbgVerifyRootParameters(); |
477 | 477 | #endif |
478 | 478 | |
644 | 644 | auto& RootTableCache = ResourceCache.GetRootTable(RootParam.GetRootIndex()); |
645 | 645 | |
646 | 646 | SHADER_TYPE dbgShaderType = SHADER_TYPE_UNKNOWN; |
647 | #ifdef _DEBUG | |
647 | #ifdef DILIGENT_DEBUG | |
648 | 648 | dbgShaderType = ShaderTypeFromShaderVisibility(D3D12RootParam.ShaderVisibility); |
649 | 649 | #endif |
650 | 650 | VERIFY_EXPR(D3D12RootParam.ParameterType == D3D12_ROOT_PARAMETER_TYPE_DESCRIPTOR_TABLE); |
654 | 654 | |
655 | 655 | auto HeapType = HeapTypeFromRangeType(D3D12RootParam.DescriptorTable.pDescriptorRanges[0].RangeType); |
656 | 656 | |
657 | #ifdef _DEBUG | |
657 | #ifdef DILIGENT_DEBUG | |
658 | 658 | RootTableCache.SetDebugAttribs(TableSize, HeapType, dbgShaderType); |
659 | 659 | #endif |
660 | 660 | |
678 | 678 | } |
679 | 679 | } |
680 | 680 | |
681 | #ifdef _DEBUG | |
681 | #ifdef DILIGENT_DEBUG | |
682 | 682 | for (Uint32 rv = 0; rv < m_RootParams.GetNumRootViews(); ++rv) |
683 | 683 | { |
684 | 684 | auto& RootParam = m_RootParams.GetRootView(rv); |
775 | 775 | } |
776 | 776 | |
777 | 777 | |
778 | #ifdef DEVELOPMENT | |
778 | #ifdef DILIGENT_DEVELOPMENT | |
779 | 779 | void RootSignature::DvpVerifyResourceState(const ShaderResourceCacheD3D12::Resource& Res, |
780 | 780 | D3D12_DESCRIPTOR_RANGE_TYPE RangeType) |
781 | 781 | { |
871 | 871 | VERIFY(Res.pObject == nullptr && Res.CPUDescriptorHandle.ptr == 0, "Bound resource is unexpected"); |
872 | 872 | } |
873 | 873 | } |
874 | #endif // DEVELOPMENT | |
874 | #endif // DILIGENT_DEVELOPMENT | |
875 | 875 | |
876 | 876 | template <class TOperation> |
877 | 877 | __forceinline void RootSignature::RootParamsManager::ProcessRootTables(TOperation Operation) const |
888 | 888 | VERIFY(d3d12Table.NumDescriptorRanges > 0 && RootTable.GetDescriptorTableSize() > 0, "Unexpected empty descriptor table");
889 | 889 | bool IsResourceTable = d3d12Table.pDescriptorRanges[0].RangeType != D3D12_DESCRIPTOR_RANGE_TYPE_SAMPLER; |
890 | 890 | D3D12_DESCRIPTOR_HEAP_TYPE dbgHeapType = D3D12_DESCRIPTOR_HEAP_TYPE_NUM_TYPES; |
891 | #ifdef _DEBUG | |
891 | #ifdef DILIGENT_DEBUG | |
892 | 892 | dbgHeapType = IsResourceTable ? D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV : D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER; |
893 | 893 | #endif |
894 | 894 | Operation(RootInd, RootTable, D3D12Param, IsResourceTable, dbgHeapType); |
908 | 908 | for (UINT d = 0; d < range.NumDescriptors; ++d) |
909 | 909 | { |
910 | 910 | SHADER_TYPE dbgShaderType = SHADER_TYPE_UNKNOWN; |
911 | #ifdef _DEBUG | |
911 | #ifdef DILIGENT_DEBUG | |
912 | 912 | dbgShaderType = ShaderTypeFromShaderVisibility(D3D12Param.ShaderVisibility); |
913 | 913 | VERIFY(dbgHeapType == HeapTypeFromRangeType(range.RangeType), "Mismatch between descriptor heap type and descriptor range type");
914 | 914 | #endif |
1015 | 1015 | { |
1016 | 1016 | TransitionResource(Ctx, Res, range.RangeType); |
1017 | 1017 | } |
1018 | #ifdef DEVELOPMENT | |
1018 | #ifdef DILIGENT_DEVELOPMENT | |
1019 | 1019 | else if (ValidateStates) |
1020 | 1020 | { |
1021 | 1021 | DvpVerifyResourceState(Res, range.RangeType); |
1032 | 1032 | { |
1033 | 1033 | pd3d12Device->CopyDescriptorsSimple(1, DynamicCbvSrvUavDescriptors.GetCpuHandle(DynamicCbvSrvUavTblOffset), Res.CPUDescriptorHandle, D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV); |
1034 | 1034 | } |
1035 | #ifdef DEVELOPMENT | |
1035 | #ifdef DILIGENT_DEVELOPMENT | |
1036 | 1036 | else |
1037 | 1037 | { |
1038 | 1038 | LOG_ERROR_MESSAGE("No valid CbvSrvUav descriptor handle found for root parameter ", RootInd, ", descriptor slot ", OffsetFromTableStart); |
1049 | 1049 | { |
1050 | 1050 | pd3d12Device->CopyDescriptorsSimple(1, DynamicSamplerDescriptors.GetCpuHandle(DynamicSamplerTblOffset), Res.CPUDescriptorHandle, D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER); |
1051 | 1051 | } |
1052 | #ifdef DEVELOPMENT | |
1052 | #ifdef DILIGENT_DEVELOPMENT | |
1053 | 1053 | else |
1054 | 1054 | { |
1055 | 1055 | LOG_ERROR_MESSAGE("No valid sampler descriptor handle found for root parameter ", RootInd, ", descriptor slot ", OffsetFromTableStart); |
1107 | 1107 | } // |
1108 | 1108 | ); |
1109 | 1109 | } |
1110 | #ifdef DEVELOPMENT | |
1110 | #ifdef DILIGENT_DEVELOPMENT | |
1111 | 1111 | else if (ValidateStates) |
1112 | 1112 | { |
1113 | 1113 | ProcessCachedTableResources( |
154 | 154 | } |
155 | 155 | |
156 | 156 | |
157 | #ifdef DEVELOPMENT | |
157 | #ifdef DILIGENT_DEVELOPMENT | |
158 | 158 | void ShaderResourceBindingD3D12Impl::dvpVerifyResourceBindings(const PipelineStateD3D12Impl* pPSO) const |
159 | 159 | { |
160 | 160 | auto* pRefPSO = GetPipelineState<const PipelineStateD3D12Impl>(); |
169 | 169 | const auto& ShaderResLayout = pRefPSO->GetShaderResLayout(l); |
170 | 170 | ShaderResLayout.dvpVerifyBindings(m_ShaderResourceCache); |
171 | 171 | } |
172 | # ifdef _DEBUG | |
172 | # ifdef DILIGENT_DEBUG | |
173 | 173 | m_ShaderResourceCache.DbgVerifyBoundDynamicCBsCounter(); |
174 | 174 | # endif |
175 | 175 | } |
202 | 202 | const auto& ShaderResLayout = pPSO12->GetShaderResLayout(s); |
203 | 203 | auto& StaticResLayout = pPSO12->GetStaticShaderResLayout(s); |
204 | 204 | auto& StaticResCache = pPSO12->GetStaticShaderResCache(s); |
205 | #ifdef DEVELOPMENT | |
205 | #ifdef DILIGENT_DEVELOPMENT | |
206 | 206 | if (!StaticResLayout.dvpVerifyBindings(StaticResCache)) |
207 | 207 | { |
208 | 208 | auto* pShader = pPSO12->GetShader<ShaderD3D12Impl>(s); |
216 | 216 | StaticResLayout.CopyStaticResourceDesriptorHandles(StaticResCache, ShaderResLayout, m_ShaderResourceCache); |
217 | 217 | } |
218 | 218 | |
219 | #ifdef _DEBUG | |
219 | #ifdef DILIGENT_DEBUG | |
220 | 220 | m_ShaderResourceCache.DbgVerifyBoundDynamicCBsCounter(); |
221 | 221 | #endif |
222 | 222 |
98 | 98 | } |
99 | 99 | } |
100 | 100 | |
101 | #ifdef _DEBUG | |
101 | #ifdef DILIGENT_DEBUG | |
102 | 102 | void ShaderResourceCacheD3D12::DbgVerifyBoundDynamicCBsCounter() const |
103 | 103 | { |
104 | 104 | Uint32 NumDynamicCBsBound = 0; |
292 | 292 | if (StaticSamplerInd >= 0) |
293 | 293 | { |
294 | 294 | // Static samplers are never copied, and SamplerId == InvalidSamplerId |
295 | #ifdef _DEBUG | |
295 | #ifdef DILIGENT_DEBUG | |
296 | 296 | auto SamplerCount = GetTotalSamplerCount(); |
297 | 297 | for (Uint32 s = 0; s < SamplerCount; ++s) |
298 | 298 | { |
345 | 345 | } // |
346 | 346 | ); |
347 | 347 | |
348 | #ifdef _DEBUG | |
348 | #ifdef DILIGENT_DEBUG | |
349 | 349 | for (SHADER_RESOURCE_VARIABLE_TYPE VarType = SHADER_RESOURCE_VARIABLE_TYPE_STATIC; VarType < SHADER_RESOURCE_VARIABLE_TYPE_NUM_TYPES; VarType = static_cast<SHADER_RESOURCE_VARIABLE_TYPE>(VarType + 1)) |
350 | 350 | { |
351 | 351 | VERIFY(CurrCbvSrvUav[VarType] == CbvSrvUavCount[VarType], "Not all Srv/Cbv/Uavs are initialized, which will result in a crash when the dtor is called");
360 | 360 | // http://diligentgraphics.com/diligent-engine/architecture/d3d12/shader-resource-cache#Initializing-Shader-Objects |
361 | 361 | VERIFY_EXPR(pRootSig == nullptr); |
362 | 362 | pResourceCache->Initialize(GetRawAllocator(), _countof(StaticResCacheTblSizes), StaticResCacheTblSizes); |
363 | #ifdef _DEBUG | |
363 | #ifdef DILIGENT_DEBUG | |
364 | 364 | pResourceCache->GetRootTable(D3D12_DESCRIPTOR_RANGE_TYPE_SRV).SetDebugAttribs(StaticResCacheTblSizes[D3D12_DESCRIPTOR_RANGE_TYPE_SRV], D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV, m_pResources->GetShaderType()); |
365 | 365 | pResourceCache->GetRootTable(D3D12_DESCRIPTOR_RANGE_TYPE_UAV).SetDebugAttribs(StaticResCacheTblSizes[D3D12_DESCRIPTOR_RANGE_TYPE_UAV], D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV, m_pResources->GetShaderType()); |
366 | 366 | pResourceCache->GetRootTable(D3D12_DESCRIPTOR_RANGE_TYPE_CBV).SetDebugAttribs(StaticResCacheTblSizes[D3D12_DESCRIPTOR_RANGE_TYPE_CBV], D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV, m_pResources->GetShaderType()); |
381 | 381 | // We cannot use ValidatedCast<> here as the resource retrieved from the |
382 | 382 | // resource mapping can be of wrong type |
383 | 383 | RefCntAutoPtr<BufferD3D12Impl> pBuffD3D12(pBuffer, IID_BufferD3D12); |
384 | #ifdef DEVELOPMENT | |
384 | #ifdef DILIGENT_DEVELOPMENT | |
385 | 385 | VerifyConstantBufferBinding(Attribs, GetVariableType(), ArrayInd, pBuffer, pBuffD3D12.RawPtr(), DstRes.pObject.RawPtr(), ParentResLayout.GetShaderName()); |
386 | 386 | #endif |
387 | 387 | if (pBuffD3D12) |
450 | 450 | // We cannot use ValidatedCast<> here as the resource retrieved from the |
451 | 451 | // resource mapping can be of wrong type |
452 | 452 | RefCntAutoPtr<TResourceViewType> pViewD3D12(pView, ResourceViewTraits<TResourceViewType>::IID); |
453 | #ifdef DEVELOPMENT | |
453 | #ifdef DILIGENT_DEVELOPMENT | |
454 | 454 | VerifyResourceViewBinding(Attribs, GetVariableType(), ArrayIndex, pView, pViewD3D12.RawPtr(), {dbgExpectedViewType}, DstRes.pObject.RawPtr(), ParentResLayout.GetShaderName()); |
455 | 455 | #endif |
456 | 456 | if (pViewD3D12) |
562 | 562 | ResourceCache.GetShaderVisibleTableCPUDescriptorHandle<D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER>(RootIndex, OffsetFromTableStart + ArrayIndex) : |
563 | 563 | ResourceCache.GetShaderVisibleTableCPUDescriptorHandle<D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV>(RootIndex, OffsetFromTableStart + ArrayIndex); |
564 | 564 | |
565 | #ifdef _DEBUG | |
565 | #ifdef DILIGENT_DEBUG | |
566 | 566 | { |
567 | 567 | if (ResourceCache.DbgGetContentType() == ShaderResourceCacheD3D12::DbgCacheContentType::StaticShaderResources) |
568 | 568 | { |
612 | 612 | auto ShdrVisibleSamplerHeapCPUDescriptorHandle = ResourceCache.GetShaderVisibleTableCPUDescriptorHandle<D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER>(Sam.RootIndex, Sam.OffsetFromTableStart + SamplerArrInd); |
613 | 613 | |
614 | 614 | auto& DstSam = ResourceCache.GetRootTable(Sam.RootIndex).GetResource(Sam.OffsetFromTableStart + SamplerArrInd, D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER, ParentResLayout.m_pResources->GetShaderType()); |
615 | #ifdef _DEBUG | |
615 | #ifdef DILIGENT_DEBUG | |
616 | 616 | { |
617 | 617 | if (ResourceCache.DbgGetContentType() == ShaderResourceCacheD3D12::DbgCacheContentType::StaticShaderResources) |
618 | 618 | { |
827 | 827 | } |
828 | 828 | |
829 | 829 | |
830 | #ifdef DEVELOPMENT | |
830 | #ifdef DILIGENT_DEVELOPMENT | |
831 | 831 | bool ShaderResourceLayoutD3D12::dvpVerifyBindings(const ShaderResourceCacheD3D12& ResourceCache) const |
832 | 832 | { |
833 | 833 | bool BindingsOK = true; |
873 | 873 | } |
874 | 874 | } |
875 | 875 | |
876 | # ifdef _DEBUG | |
876 | # ifdef DILIGENT_DEBUG | |
877 | 877 | { |
878 | 878 | const auto ShdrVisibleHeapCPUDescriptorHandle = ResourceCache.GetShaderVisibleTableCPUDescriptorHandle<D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV>(res.RootIndex, res.OffsetFromTableStart + ArrInd); |
879 | 879 | if (ResourceCache.DbgGetContentType() == ShaderResourceCacheD3D12::DbgCacheContentType::StaticShaderResources) |
922 | 922 | BindingsOK = false; |
923 | 923 | } |
924 | 924 | |
925 | # ifdef _DEBUG | |
925 | # ifdef DILIGENT_DEBUG | |
926 | 926 | { |
927 | 927 | const auto ShdrVisibleHeapCPUDescriptorHandle = ResourceCache.GetShaderVisibleTableCPUDescriptorHandle<D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER>(SamInfo.RootIndex, SamInfo.OffsetFromTableStart + ArrInd); |
928 | 928 | if (ResourceCache.DbgGetContentType() == ShaderResourceCacheD3D12::DbgCacheContentType::StaticShaderResources) |
62 | 62 | // clang-format off |
63 | 63 | m_Owner {Owner}, |
64 | 64 | m_ResourceCache {ResourceCache} |
65 | #ifdef _DEBUG | |
65 | #ifdef DILIGENT_DEBUG | |
66 | 66 | , m_DbgAllocator {Allocator} |
67 | 67 | #endif |
68 | 68 | // clang-format on |
126 | 126 | Name.erase(OpenBracketPos, Name.length() - OpenBracketPos); |
127 | 127 | // Name == "g_tex2DDiffuse" |
128 | 128 | VERIFY_EXPR(Name.length() == OpenBracketPos); |
129 | #ifdef _DEBUG | |
129 | #ifdef DILIGENT_DEBUG | |
130 | 130 | for (const auto& ExistingRes : Resources) |
131 | 131 | { |
132 | 132 | VERIFY(Name.compare(ExistingRes.Name) != 0, "Resource with the same name has already been enumerated. All array elements are expected to be enumerated one after another"); |
127 | 127 | SamplerOrTexSRVId {_SamplerId} |
128 | 128 | // clang-format on |
129 | 129 | { |
130 | #ifdef _DEBUG | |
130 | #ifdef DILIGENT_DEBUG | |
131 | 131 | // clang-format off |
132 | 132 | VERIFY(_BindPoint <= MaxBindPoint || _BindPoint == InvalidBindPoint, "Bind Point is out of allowed range"); |
133 | 133 | VERIFY(_BindCount <= MaxBindCount, "Bind Count is out of allowed range"); |
375 | 375 | const SHADER_RESOURCE_VARIABLE_TYPE* AllowedVarTypes, |
376 | 376 | Uint32 NumAllowedTypes, |
377 | 377 | bool CountStaticSamplers) const noexcept; |
378 | #ifdef DEVELOPMENT | |
378 | #ifdef DILIGENT_DEVELOPMENT | |
379 | 379 | static void DvpVerifyResourceLayout(const PipelineResourceLayoutDesc& ResourceLayout, |
380 | 380 | const ShaderResources* const pShaderResources[], |
381 | 381 | Uint32 NumShaders); |
539 | 539 | { |
540 | 540 | m_SamplerSuffix = m_ResourceNames.CopyString(CombinedSamplerSuffix); |
541 | 541 | |
542 | #ifdef DEVELOPMENT | |
542 | #ifdef DILIGENT_DEVELOPMENT | |
543 | 543 | for (Uint32 n = 0; n < GetNumSamplers(); ++n) |
544 | 544 | { |
545 | 545 | const auto& Sampler = GetSampler(n); |
91 | 91 | ID3DBlob** ppCompilerOutput) |
92 | 92 | { |
93 | 93 | DWORD dwShaderFlags = D3DCOMPILE_ENABLE_STRICTNESS; |
94 | #if defined(DEBUG) || defined(_DEBUG) | |
94 | #if defined(DILIGENT_DEBUG) | |
95 | 95 | // Set the D3D10_SHADER_DEBUG flag to embed debug information in the shaders. |
96 | 96 | // Setting this flag improves the shader debugging experience, but still allows |
97 | 97 | // the shaders to be optimized and to run exactly the way they will run in |
212 | 212 | return Counters; |
213 | 213 | } |
214 | 214 | |
215 | #ifdef DEVELOPMENT | |
215 | #ifdef DILIGENT_DEVELOPMENT | |
216 | 216 | void ShaderResources::DvpVerifyResourceLayout(const PipelineResourceLayoutDesc& ResourceLayout, |
217 | 217 | const ShaderResources* const pShaderResources[], |
218 | 218 | Uint32 NumShaders) |
104 | 104 | Uint32 Size) : |
105 | 105 | m_AllocationsMgr{Size, Allocator} |
106 | 106 | { |
107 | #ifdef DEVELOPMENT | |
107 | #ifdef DILIGENT_DEVELOPMENT | |
108 | 108 | m_MasterBlockCounter = 0; |
109 | 109 | #endif |
110 | 110 | } |
154 | 154 | if (Mgr != nullptr) |
155 | 155 | { |
156 | 156 | std::lock_guard<std::mutex> Lock{Mgr->m_AllocationsMgrMtx}; |
157 | #ifdef DEVELOPMENT | |
157 | #ifdef DILIGENT_DEVELOPMENT | |
158 | 158 | --Mgr->m_MasterBlockCounter; |
159 | 159 | #endif |
160 | 160 | Mgr->m_AllocationsMgr.Free(std::move(Block)); |
173 | 173 | OffsetType GetUsedSize() const { return m_AllocationsMgr.GetUsedSize();} |
174 | 174 | // clang-format on |
175 | 175 | |
176 | #ifdef DEVELOPMENT | |
176 | #ifdef DILIGENT_DEVELOPMENT | |
177 | 177 | int32_t GetMasterBlockCounter() const |
178 | 178 | { |
179 | 179 | return m_MasterBlockCounter; |
185 | 185 | { |
186 | 186 | std::lock_guard<std::mutex> Lock{m_AllocationsMgrMtx}; |
187 | 187 | auto NewBlock = m_AllocationsMgr.Allocate(SizeInBytes, Alignment); |
188 | #ifdef DEVELOPMENT | |
188 | #ifdef DILIGENT_DEVELOPMENT | |
189 | 189 | if (NewBlock.IsValid()) |
190 | 190 | { |
191 | 191 | ++m_MasterBlockCounter; |
198 | 198 | std::mutex m_AllocationsMgrMtx; |
199 | 199 | VariableSizeAllocationsManager m_AllocationsMgr; |
200 | 200 | |
201 | #ifdef DEVELOPMENT | |
201 | #ifdef DILIGENT_DEVELOPMENT | |
202 | 202 | std::atomic_int32_t m_MasterBlockCounter; |
203 | 203 | #endif |
204 | 204 | }; |
282 | 282 | // while Layout is alive |
283 | 283 | void BindResources(SHADER_TYPE ShaderStage, IResourceMapping* pResourceMapping, Uint32 Flags, const GLProgramResourceCache& dbgResourceCache); |
284 | 284 | |
285 | #ifdef DEVELOPMENT | |
285 | #ifdef DILIGENT_DEVELOPMENT | |
286 | 286 | bool dvpVerifyBindings(const GLProgramResourceCache& ResourceCache) const; |
287 | 287 | #endif |
288 | 288 |
267 | 267 | |
268 | 268 | Uint8* m_pResourceData = nullptr; |
269 | 269 | |
270 | #ifdef _DEBUG | |
270 | #ifdef DILIGENT_DEBUG | |
271 | 271 | IMemoryAllocator* m_pdbgMemoryAllocator = nullptr; |
272 | 272 | #endif |
273 | 273 | }; |
141 | 141 | LogError<true>(/*IsFatal=*/false, __FUNCTION__, __FILE__, __LINE__, __VA_ARGS__, "\nGL Error Code: ", err); \ |
142 | 142 | } while (false) |
143 | 143 | |
144 | #ifdef DEVELOPMENT | |
144 | #ifdef DILIGENT_DEVELOPMENT | |
145 | 145 | # define DEV_CHECK_GL_ERROR CHECK_GL_ERROR |
146 | 146 | #else |
147 | 147 | # define DEV_CHECK_GL_ERROR(...) \ |
341 | 341 | void BufferGLImpl::BufferMemoryBarrier(Uint32 RequiredBarriers, GLContextState& GLContextState) |
342 | 342 | { |
343 | 343 | #if GL_ARB_shader_image_load_store |
344 | # ifdef _DEBUG | |
344 | # ifdef DILIGENT_DEBUG | |
345 | 345 | { |
346 | 346 | // clang-format off |
347 | 347 | constexpr Uint32 BufferBarriers = |
415 | 415 | |
416 | 416 | auto* pShaderResBindingGL = ValidatedCast<ShaderResourceBindingGLImpl>(pResBinding); |
417 | 417 | const auto& ResourceCache = pShaderResBindingGL->GetResourceCache(m_pPipelineState); |
418 | #ifdef DEVELOPMENT | |
418 | #ifdef DILIGENT_DEVELOPMENT | |
419 | 419 | m_pPipelineState->GetResourceLayout().dvpVerifyBindings(ResourceCache); |
420 | 420 | #endif |
421 | 421 | |
516 | 516 | m_BoundWritableTextures.push_back(pTextureGL); |
517 | 517 | } |
518 | 518 | |
519 | # ifdef _DEBUG | |
519 | # ifdef DILIGENT_DEBUG | |
520 | 520 | // Check that the texture being bound has immutable storage
521 | 521 | { |
522 | 522 | m_ContextState.BindTexture(-1, pTexViewGL->GetBindTarget(), pTexViewGL->GetHandle()); |
654 | 654 | |
655 | 655 | void DeviceContextGLImpl::PrepareForDraw(DRAW_FLAGS Flags, bool IsIndexed, GLenum& GlTopology) |
656 | 656 | { |
657 | #ifdef DEVELOPMENT | |
657 | #ifdef DILIGENT_DEVELOPMENT | |
658 | 658 | if ((Flags & DRAW_FLAG_VERIFY_RENDER_TARGETS) != 0) |
659 | 659 | DvpVerifyRenderTargets(); |
660 | 660 | #endif |
185 | 185 | if (DSVDesc.Format == TEX_FORMAT_D32_FLOAT || |
186 | 186 | DSVDesc.Format == TEX_FORMAT_D16_UNORM) |
187 | 187 | { |
188 | #ifdef _DEBUG | |
188 | #ifdef DILIGENT_DEBUG | |
189 | 189 | { |
190 | 190 | const auto GLTexFmt = pDepthTexGL->GetGLTexFormat(); |
191 | 191 | VERIFY(GLTexFmt == GL_DEPTH_COMPONENT32F || GLTexFmt == GL_DEPTH_COMPONENT16, |
198 | 198 | else if (DSVDesc.Format == TEX_FORMAT_D32_FLOAT_S8X24_UINT || |
199 | 199 | DSVDesc.Format == TEX_FORMAT_D24_UNORM_S8_UINT) |
200 | 200 | { |
201 | #ifdef _DEBUG | |
201 | #ifdef DILIGENT_DEBUG | |
202 | 202 | { |
203 | 203 | const auto GLTexFmt = pDepthTexGL->GetGLTexFormat(); |
204 | 204 | VERIFY(GLTexFmt == GL_DEPTH24_STENCIL8 || GLTexFmt == GL_DEPTH32F_STENCIL8, |
206 | 206 | 0, 0 // |
207 | 207 | }; |
208 | 208 | |
209 | #ifdef _DEBUG | |
209 | #ifdef DILIGENT_DEBUG | |
210 | 210 | attribs[5] |= WGL_CONTEXT_DEBUG_BIT_ARB; |
211 | 211 | #endif |
212 | 212 |
118 | 118 | Uint32 ImageBindingSlots = 0; |
119 | 119 | Uint32 SSBOBindingSlots = 0; |
120 | 120 | |
121 | #ifdef _DEBUG | |
121 | #ifdef DILIGENT_DEBUG | |
122 | 122 | const Uint32 DbgAllowedTypeBits = GetAllowedTypeBits(AllowedVarTypes, NumAllowedTypes); |
123 | 123 | #endif |
124 | 124 | for (Uint32 prog = 0; prog < NumPrograms; ++prog) |
255 | 255 | // We cannot use ValidatedCast<> here as the resource retrieved from the |
256 | 256 | // resource mapping can be of wrong type |
257 | 257 | RefCntAutoPtr<BufferGLImpl> pBuffGLImpl(pBuffer, IID_BufferGL); |
258 | #ifdef DEVELOPMENT | |
258 | #ifdef DILIGENT_DEVELOPMENT | |
259 | 259 | { |
260 | 260 | const auto& CachedUB = ResourceCache.GetConstUB(m_Attribs.Binding + ArrayIndex); |
261 | 261 | VerifyConstantBufferBinding(m_Attribs, GetType(), ArrayIndex, pBuffer, pBuffGLImpl.RawPtr(), CachedUB.pBuffer.RawPtr()); |
282 | 282 | // We cannot use ValidatedCast<> here as the resource retrieved from the |
283 | 283 | // resource mapping can be of wrong type |
284 | 284 | RefCntAutoPtr<TextureViewGLImpl> pViewGL(pView, IID_TextureViewGL); |
285 | #ifdef DEVELOPMENT | |
285 | #ifdef DILIGENT_DEVELOPMENT | |
286 | 286 | { |
287 | 287 | auto& CachedTexSampler = ResourceCache.GetConstSampler(m_Attribs.Binding + ArrayIndex); |
288 | 288 | VerifyResourceViewBinding(m_Attribs, GetType(), ArrayIndex, pView, pViewGL.RawPtr(), {TEXTURE_VIEW_SHADER_RESOURCE}, CachedTexSampler.pView.RawPtr()); |
299 | 299 | // We cannot use ValidatedCast<> here as the resource retrieved from the |
300 | 300 | // resource mapping can be of wrong type |
301 | 301 | RefCntAutoPtr<BufferViewGLImpl> pViewGL(pView, IID_BufferViewGL); |
302 | #ifdef DEVELOPMENT | |
302 | #ifdef DILIGENT_DEVELOPMENT | |
303 | 303 | { |
304 | 304 | auto& CachedBuffSampler = ResourceCache.GetConstSampler(m_Attribs.Binding + ArrayIndex); |
305 | 305 | VerifyResourceViewBinding(m_Attribs, GetType(), ArrayIndex, pView, pViewGL.RawPtr(), {BUFFER_VIEW_SHADER_RESOURCE}, CachedBuffSampler.pView.RawPtr()); |
326 | 326 | // We cannot use ValidatedCast<> here as the resource retrieved from the |
327 | 327 | // resource mapping can be of wrong type |
328 | 328 | RefCntAutoPtr<TextureViewGLImpl> pViewGL(pView, IID_TextureViewGL); |
329 | #ifdef DEVELOPMENT | |
329 | #ifdef DILIGENT_DEVELOPMENT | |
330 | 330 | { |
331 | 331 | auto& CachedUAV = ResourceCache.GetConstImage(m_Attribs.Binding + ArrayIndex); |
332 | 332 | VerifyResourceViewBinding(m_Attribs, GetType(), ArrayIndex, pView, pViewGL.RawPtr(), {TEXTURE_VIEW_UNORDERED_ACCESS}, CachedUAV.pView.RawPtr()); |
339 | 339 | // We cannot use ValidatedCast<> here as the resource retrieved from the |
340 | 340 | // resource mapping can be of wrong type |
341 | 341 | RefCntAutoPtr<BufferViewGLImpl> pViewGL(pView, IID_BufferViewGL); |
342 | #ifdef DEVELOPMENT | |
342 | #ifdef DILIGENT_DEVELOPMENT | |
343 | 343 | { |
344 | 344 | auto& CachedUAV = ResourceCache.GetConstImage(m_Attribs.Binding + ArrayIndex); |
345 | 345 | VerifyResourceViewBinding(m_Attribs, GetType(), ArrayIndex, pView, pViewGL.RawPtr(), {BUFFER_VIEW_UNORDERED_ACCESS}, CachedUAV.pView.RawPtr()); |
366 | 366 | // We cannot use ValidatedCast<> here as the resource retrieved from the |
367 | 367 | // resource mapping can be of wrong type |
368 | 368 | RefCntAutoPtr<BufferViewGLImpl> pViewGL(pView, IID_BufferViewGL); |
369 | #ifdef DEVELOPMENT | |
369 | #ifdef DILIGENT_DEVELOPMENT | |
370 | 370 | { |
371 | 371 | auto& CachedSSBO = ResourceCache.GetConstSSBO(m_Attribs.Binding + ArrayIndex); |
372 | 372 | // HLSL structured buffers are mapped to SSBOs in GLSL |
520 | 520 | VariableEndOffset.NumStorageBlocks - VariableStartOffset.NumStorageBlocks; |
521 | 521 | // clang-format on |
522 | 522 | |
523 | #ifdef _DEBUG | |
523 | #ifdef DILIGENT_DEBUG | |
524 | 524 | { |
525 | 525 | Uint32 DbgNumVars = 0; |
526 | 526 | auto CountVar = [&](const GLVariableBase& Var) { |
720 | 720 | ); |
721 | 721 | } |
722 | 722 | |
723 | #ifdef DEVELOPMENT | |
723 | #ifdef DILIGENT_DEVELOPMENT | |
724 | 724 | bool GLPipelineResourceLayout::dvpVerifyBindings(const GLProgramResourceCache& ResourceCache) const |
725 | 725 | { |
726 | 726 | # define LOG_MISSING_BINDING(VarType, BindInfo, BindPt) LOG_ERROR_MESSAGE("No resource is bound to ", VarType, " variable '", BindInfo.m_Attribs.GetPrintName(BindPt - BindInfo.m_Attribs.Binding), "'") |
61 | 61 | |
62 | 62 | VERIFY_EXPR(BufferSize == GetRequriedMemorySize(UBCount, SamplerCount, ImageCount, SSBOCount)); |
63 | 63 | |
64 | #ifdef _DEBUG | |
64 | #ifdef DILIGENT_DEBUG | |
65 | 65 | m_pdbgMemoryAllocator = &MemAllocator; |
66 | 66 | #endif |
67 | 67 | if (BufferSize > 0) |
510 | 510 | } |
511 | 511 | else |
512 | 512 | { |
513 | #ifdef _DEBUG | |
513 | #ifdef DILIGENT_DEBUG | |
514 | 514 | for (const auto& ub : UniformBlocks) |
515 | 515 | VERIFY(strcmp(ub.Name, Name.data()) != 0, "Uniform block with the name '", ub.Name, "' has already been enumerated"); |
516 | 516 | #endif |
566 | 566 | } |
567 | 567 | else |
568 | 568 | { |
569 | # ifdef _DEBUG | |
569 | # ifdef DILIGENT_DEBUG | |
570 | 570 | for (const auto& sb : StorageBlocks) |
571 | 571 | VERIFY(strcmp(sb.Name, Name.data()) != 0, "Storage block with the name \"", sb.Name, "\" has already been enumerated"); |
572 | 572 | # endif |
638 | 638 | FLAG_FORMAT(TEX_FORMAT_BC7_UNORM_SRGB, bBPTC ); |
639 | 639 | // clang-format on |
640 | 640 | |
641 | #ifdef _DEBUG | |
641 | #ifdef DILIGENT_DEBUG | |
642 | 642 | bool bGL43OrAbove = DeviceCaps.DevType == RENDER_DEVICE_TYPE_GL && |
643 | 643 | (DeviceCaps.MajorVersion >= 5 || (DeviceCaps.MajorVersion == 4 && DeviceCaps.MinorVersion >= 3)); |
644 | 644 |
84 | 84 | |
85 | 85 | const GLProgramResourceCache& ShaderResourceBindingGLImpl::GetResourceCache(PipelineStateGLImpl* pdbgPSO) |
86 | 86 | { |
87 | #ifdef _DEBUG | |
87 | #ifdef DILIGENT_DEBUG | |
88 | 88 | if (pdbgPSO->IsIncompatibleWith(GetPipelineState())) |
89 | 89 | { |
90 | 90 | LOG_ERROR("Shader resource binding is incompatible with the currently bound pipeline state."); |
113 | 113 | const auto* pPSOGL = ValidatedCast<const PipelineStateGLImpl>(pPipelineState); |
114 | 114 | const auto& StaticResLayout = pPSOGL->GetStaticResourceLayout(); |
115 | 115 | |
116 | #ifdef DEVELOPMENT | |
116 | #ifdef DILIGENT_DEVELOPMENT | |
117 | 117 | if (!StaticResLayout.dvpVerifyBindings(pPSOGL->GetStaticResourceCache())) |
118 | 118 | { |
119 | 119 | LOG_ERROR_MESSAGE("Static resources in SRB of PSO '", pPSOGL->GetDesc().Name, |
185 | 185 | ((DstBox.MaxX % 4) == 0 || DstBox.MaxX == MipWidth) && |
186 | 186 | ((DstBox.MaxY % 4) == 0 || DstBox.MaxY == MipHeight), |
187 | 187 | "Compressed texture update region must be 4 pixel-aligned"); |
188 | #ifdef _DEBUG | |
188 | #ifdef DILIGENT_DEBUG | |
189 | 189 | { |
190 | 190 | const auto& FmtAttribs = GetTextureFormatAttribs(m_Desc.Format); |
191 | 191 | auto BlockBytesInRow = ((DstBox.MaxX - DstBox.MinX + 3) / 4) * Uint32{FmtAttribs.ComponentSize}; |
207 | 207 | ((DstBox.MaxX % 4) == 0 || DstBox.MaxX == MipWidth) && |
208 | 208 | ((DstBox.MaxY % 4) == 0 || DstBox.MaxY == MipHeight), |
209 | 209 | "Compressed texture update region must be 4 pixel-aligned"); |
210 | #ifdef _DEBUG | |
210 | #ifdef DILIGENT_DEBUG | |
211 | 211 | { |
212 | 212 | const auto& FmtAttribs = GetTextureFormatAttribs(m_Desc.Format); |
213 | 213 | auto BlockBytesInRow = ((DstBox.MaxX - DstBox.MinX + 3) / 4) * Uint32{FmtAttribs.ComponentSize}; |
516 | 516 | } |
517 | 517 | |
518 | 518 | auto* pRenderDeviceGL = ValidatedCast<RenderDeviceGLImpl>(GetDevice()); |
519 | #ifdef _DEBUG | |
519 | #ifdef DILIGENT_DEBUG | |
520 | 520 | { |
521 | 521 | auto& TexViewObjAllocator = pRenderDeviceGL->GetTexViewObjAllocator(); |
522 | 522 | VERIFY(&TexViewObjAllocator == &m_dbgTexViewObjAllocator, "Texture view allocator does not match allocator provided during texture initialization"); |
581 | 581 | void TextureBaseGL::TextureMemoryBarrier(Uint32 RequiredBarriers, GLContextState& GLContextState) |
582 | 582 | { |
583 | 583 | #if GL_ARB_shader_image_load_store |
584 | # ifdef _DEBUG | |
584 | # ifdef DILIGENT_DEBUG | |
585 | 585 | { |
586 | 586 | // clang-format off |
587 | 587 | constexpr Uint32 TextureBarriers = |
602 | 602 | |
603 | 603 | void TextureBaseGL::SetDefaultGLParameters() |
604 | 604 | { |
605 | #ifdef _DEBUG | |
606 | GLint BoundTex; | |
607 | GLint TextureBinding = 0; | |
608 | switch (m_BindTarget) | |
609 | { | |
610 | // clang-format off | |
611 | case GL_TEXTURE_1D: TextureBinding = GL_TEXTURE_BINDING_1D; break; | |
612 | case GL_TEXTURE_1D_ARRAY: TextureBinding = GL_TEXTURE_BINDING_1D_ARRAY; break; | |
613 | case GL_TEXTURE_2D: TextureBinding = GL_TEXTURE_BINDING_2D; break; | |
614 | case GL_TEXTURE_2D_ARRAY: TextureBinding = GL_TEXTURE_BINDING_2D_ARRAY; break; | |
615 | case GL_TEXTURE_2D_MULTISAMPLE: TextureBinding = GL_TEXTURE_BINDING_2D_MULTISAMPLE; break; | |
616 | case GL_TEXTURE_2D_MULTISAMPLE_ARRAY: TextureBinding = GL_TEXTURE_BINDING_2D_MULTISAMPLE_ARRAY; break; | |
617 | case GL_TEXTURE_3D: TextureBinding = GL_TEXTURE_BINDING_3D; break; | |
618 | case GL_TEXTURE_CUBE_MAP: TextureBinding = GL_TEXTURE_BINDING_CUBE_MAP; break; | |
619 | case GL_TEXTURE_CUBE_MAP_ARRAY: TextureBinding = GL_TEXTURE_BINDING_CUBE_MAP_ARRAY; break; | |
620 | default: UNEXPECTED("Unknown bind target"); | |
621 | // clang-format on | |
622 | } | |
623 | glGetIntegerv(TextureBinding, &BoundTex); | |
624 | CHECK_GL_ERROR("Failed to query the currently bound texture"); |
625 | VERIFY(static_cast<GLuint>(BoundTex) == m_GlTexture, "Current texture is not bound to GL context"); | |
605 | #ifdef DILIGENT_DEBUG | |
606 | { | |
607 | GLint BoundTex; | |
608 | GLint TextureBinding = 0; | |
609 | switch (m_BindTarget) | |
610 | { | |
611 | // clang-format off | |
612 | case GL_TEXTURE_1D: TextureBinding = GL_TEXTURE_BINDING_1D; break; | |
613 | case GL_TEXTURE_1D_ARRAY: TextureBinding = GL_TEXTURE_BINDING_1D_ARRAY; break; | |
614 | case GL_TEXTURE_2D: TextureBinding = GL_TEXTURE_BINDING_2D; break; | |
615 | case GL_TEXTURE_2D_ARRAY: TextureBinding = GL_TEXTURE_BINDING_2D_ARRAY; break; | |
616 | case GL_TEXTURE_2D_MULTISAMPLE: TextureBinding = GL_TEXTURE_BINDING_2D_MULTISAMPLE; break; | |
617 | case GL_TEXTURE_2D_MULTISAMPLE_ARRAY: TextureBinding = GL_TEXTURE_BINDING_2D_MULTISAMPLE_ARRAY; break; | |
618 | case GL_TEXTURE_3D: TextureBinding = GL_TEXTURE_BINDING_3D; break; | |
619 | case GL_TEXTURE_CUBE_MAP: TextureBinding = GL_TEXTURE_BINDING_CUBE_MAP; break; | |
620 | case GL_TEXTURE_CUBE_MAP_ARRAY: TextureBinding = GL_TEXTURE_BINDING_CUBE_MAP_ARRAY; break; | |
621 | default: UNEXPECTED("Unknown bind target"); | |
622 | // clang-format on | |
623 | } | |
624 | glGetIntegerv(TextureBinding, &BoundTex); | |
625 | CHECK_GL_ERROR("Failed to query the currently bound texture"); |
626 | VERIFY(static_cast<GLuint>(BoundTex) == m_GlTexture, "Current texture is not bound to GL context"); | |
627 | } | |
626 | 628 | #endif |
627 | 629 | |
628 | 630 | if (m_BindTarget != GL_TEXTURE_2D_MULTISAMPLE && |
175 | 175 | ((DstBox.MaxY % 4) == 0 || DstBox.MaxY == MipHeight), |
176 | 176 | "Compressed texture update region must be 4 pixel-aligned"); |
177 | 177 | // clang-format on |
178 | #ifdef _DEBUG | |
178 | #ifdef DILIGENT_DEBUG | |
179 | 179 | { |
180 | 180 | const auto& FmtAttribs = GetTextureFormatAttribs(m_Desc.Format); |
181 | 181 | auto BlockBytesInRow = ((DstBox.MaxX - DstBox.MinX + 3) / 4) * Uint32{FmtAttribs.ComponentSize}; |
192 | 192 | ((DstBox.MaxY % 4) == 0 || DstBox.MaxY == MipHeight), |
193 | 193 | "Compressed texture update region must be 4 pixel-aligned" ); |
194 | 194 | // clang-format on |
195 | #ifdef _DEBUG | |
195 | #ifdef DILIGENT_DEBUG | |
196 | 196 | { |
197 | 197 | const auto& FmtAttribs = GetTextureFormatAttribs(m_Desc.Format); |
198 | 198 | auto BlockBytesInRow = ((DstBox.MaxX - DstBox.MinX + 3) / 4) * Uint32{FmtAttribs.ComponentSize}; |
67 | 67 | |
68 | 68 | virtual void DILIGENT_CALL_TYPE QueryInterface(const INTERFACE_ID& IID, IObject** ppInterface) override; |
69 | 69 | |
70 | #ifdef DEVELOPMENT | |
70 | #ifdef DILIGENT_DEVELOPMENT | |
71 | 71 | void DvpVerifyDynamicAllocation(DeviceContextVkImpl* pCtx) const; |
72 | 72 | #endif |
73 | 73 | |
81 | 81 | { |
82 | 82 | VERIFY(m_Desc.Usage == USAGE_DYNAMIC, "Dynamic buffer is expected"); |
83 | 83 | VERIFY_EXPR(!m_DynamicAllocations.empty()); |
84 | #ifdef DEVELOPMENT | |
84 | #ifdef DILIGENT_DEVELOPMENT | |
85 | 85 | DvpVerifyDynamicAllocation(pCtx); |
86 | 86 | #endif |
87 | 87 | auto& DynAlloc = m_DynamicAllocations[CtxId]; |
61 | 61 | |
62 | 62 | void DestroyPools(); |
63 | 63 | |
64 | #ifdef DEVELOPMENT | |
64 | #ifdef DILIGENT_DEVELOPMENT | |
65 | 65 | int32_t GetAllocatedPoolCount() const |
66 | 66 | { |
67 | 67 | return m_AllocatedPoolCounter; |
80 | 80 | std::mutex m_Mutex; |
81 | 81 | std::deque<VulkanUtilities::CommandPoolWrapper, STDAllocatorRawMem<VulkanUtilities::CommandPoolWrapper>> m_CmdPools; |
82 | 82 | |
83 | #ifdef DEVELOPMENT | |
83 | #ifdef DILIGENT_DEVELOPMENT | |
84 | 84 | std::atomic_int32_t m_AllocatedPoolCounter; |
85 | 85 | #endif |
86 | 86 | }; |
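This counter, and the near-identical ones in the descriptor pool and descriptor set allocators below, follow one pattern: a std::atomic_int32_t compiled in only for development builds, incremented on every allocation and decremented on every release, so a non-zero value at shutdown points at a leak. Condensed into a sketch (the class and names are illustrative):

    #include <atomic>
    #include <cstdio>

    class PoolManager
    {
    public:
        void* AllocPool()
        {
    #ifdef DILIGENT_DEVELOPMENT
            ++m_AllocatedPoolCounter;
    #endif
            return new char[64]; // stand-in for a real pool allocation
        }

        void FreePool(void* pPool)
        {
    #ifdef DILIGENT_DEVELOPMENT
            --m_AllocatedPoolCounter;
    #endif
            delete[] static_cast<char*>(pPool);
        }

        ~PoolManager()
        {
    #ifdef DILIGENT_DEVELOPMENT
            // A non-zero counter at destruction means a pool was never returned.
            if (m_AllocatedPoolCounter != 0)
                std::fprintf(stderr, "%d pool(s) leaked\n", static_cast<int>(m_AllocatedPoolCounter.load()));
    #endif
        }

    private:
    #ifdef DILIGENT_DEVELOPMENT
        std::atomic_int32_t m_AllocatedPoolCounter{0};
    #endif
    };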
143 | 143 | m_MaxSets {MaxSets }, |
144 | 144 | m_AllowFreeing{AllowFreeing } |
145 | 145 | { |
146 | #ifdef DEVELOPMENT | |
146 | #ifdef DILIGENT_DEVELOPMENT | |
147 | 147 | m_AllocatedPoolCounter = 0; |
148 | 148 | #endif |
149 | 149 | } |
161 | 161 | |
162 | 162 | RenderDeviceVkImpl& GetDeviceVkImpl() { return m_DeviceVkImpl; } |
163 | 163 | |
164 | #ifdef DEVELOPMENT | |
164 | #ifdef DILIGENT_DEVELOPMENT | |
165 | 165 | int32_t GetAllocatedPoolCounter() const |
166 | 166 | { |
167 | 167 | return m_AllocatedPoolCounter; |
184 | 184 | private: |
185 | 185 | void FreePool(VulkanUtilities::DescriptorPoolWrapper&& Pool); |
186 | 186 | |
187 | #ifdef DEVELOPMENT | |
187 | #ifdef DILIGENT_DEVELOPMENT | |
188 | 188 | std::atomic_int32_t m_AllocatedPoolCounter; |
189 | 189 | #endif |
190 | 190 | }; |
212 | 212 | } |
213 | 213 | // clang-format on |
214 | 214 | { |
215 | #ifdef DEVELOPMENT | |
215 | #ifdef DILIGENT_DEVELOPMENT | |
216 | 216 | m_AllocatedSetCounter = 0; |
217 | 217 | #endif |
218 | 218 | } |
221 | 221 | |
222 | 222 | DescriptorSetAllocation Allocate(Uint64 CommandQueueMask, VkDescriptorSetLayout SetLayout, const char* DebugName = ""); |
223 | 223 | |
224 | #ifdef DEVELOPMENT | |
224 | #ifdef DILIGENT_DEVELOPMENT | |
225 | 225 | int32_t GetAllocatedDescriptorSetCounter() const |
226 | 226 | { |
227 | 227 | return m_AllocatedSetCounter; |
231 | 231 | private: |
232 | 232 | void FreeDescriptorSet(VkDescriptorSet Set, VkDescriptorPool Pool, Uint64 QueueMask); |
233 | 233 | |
234 | #ifdef DEVELOPMENT | |
234 | #ifdef DILIGENT_DEVELOPMENT | |
235 | 235 | std::atomic_int32_t m_AllocatedSetCounter; |
236 | 236 | #endif |
237 | 237 | }; |
101 | 101 | Uint32 DynamicOffsetCount = 0; |
102 | 102 | bool DynamicBuffersPresent = false; |
103 | 103 | bool DynamicDescriptorsBound = false; |
104 | #ifdef _DEBUG | |
104 | #ifdef DILIGENT_DEBUG | |
105 | 105 | const PipelineLayout* pDbgPipelineLayout = nullptr; |
106 | 106 | #endif |
107 | 107 | DescriptorSetBindInfo() : |
121 | 121 | DynamicBuffersPresent = false; |
122 | 122 | DynamicDescriptorsBound = false; |
123 | 123 | |
124 | #ifdef _DEBUG | |
124 | #ifdef DILIGENT_DEBUG | |
125 | 125 | // In release mode, do not clear vectors as this causes unnecessary work |
126 | 126 | vkSets.clear(); |
127 | 127 | DynamicOffsets.clear(); |
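As the comment notes, the two vectors are cleared only in debug builds: release builds keep the old contents and capacity, since the next bind rewrites them in place, while debug builds clear so that stale handles trip the VK_NULL_HANDLE verification performed at bind time. The pattern in isolation:

    #include <vector>

    struct DescriptorSetBindInfoSketch
    {
        std::vector<void*>    vkSets;         // stand-in for VkDescriptorSet handles
        std::vector<unsigned> DynamicOffsets;

        void Reset()
        {
    #ifdef DILIGENT_DEBUG
            // Debug: clear so stale entries are caught by later null checks.
            vkSets.clear();
            DynamicOffsets.clear();
    #endif
            // Release: keep the storage; the next bind overwrites it in place.
        }
    };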
234 | 234 | VERIFY(BindInfo.DynamicOffsetCount > 0, "This function should only be called for pipelines that contain dynamic descriptors"); |
235 | 235 | |
236 | 236 | VERIFY_EXPR(BindInfo.pResourceCache != nullptr); |
237 | #ifdef _DEBUG | |
237 | #ifdef DILIGENT_DEBUG | |
238 | 238 | Uint32 TotalDynamicDescriptors = 0; |
239 | 239 | for (SHADER_RESOURCE_VARIABLE_TYPE VarType = SHADER_RESOURCE_VARIABLE_TYPE_MUTABLE; VarType <= SHADER_RESOURCE_VARIABLE_TYPE_DYNAMIC; VarType = static_cast<SHADER_RESOURCE_VARIABLE_TYPE>(VarType + 1)) |
240 | 240 | { |
75 | 75 | |
76 | 76 | // clang-format off |
77 | 77 | ShaderResourceCacheVk(DbgCacheContentType dbgContentType) |
78 | #ifdef _DEBUG | |
78 | #ifdef DILIGENT_DEBUG | |
79 | 79 | : m_DbgContentType{dbgContentType} |
80 | 80 | #endif |
81 | 81 | { |
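Note how the constructor handles a member that only exists in debug builds: the entire mem-initializer, including the leading colon, sits inside the #ifdef, so the same constructor compiles in both configurations. Reduced to a sketch:

    struct Cache
    {
        explicit Cache(int dbgContentType)
    #ifdef DILIGENT_DEBUG
            : m_DbgContentType{dbgContentType}
    #endif
        {
            (void)dbgContentType; // avoid an unused-parameter warning in release builds
        }

    #ifdef DILIGENT_DEBUG
        const int m_DbgContentType; // debug-only: records what the cache holds
    #endif
    };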
185 | 185 | |
186 | 186 | Uint16& GetDynamicBuffersCounter() { return m_NumDynamicBuffers; } |
187 | 187 | |
188 | #ifdef _DEBUG | |
188 | #ifdef DILIGENT_DEBUG | |
189 | 189 | // Only for debug purposes: indicates what types of resources are stored in the cache |
190 | 190 | DbgCacheContentType DbgGetContentType() const { return m_DbgContentType; } |
191 | 191 | void DbgVerifyResourceInitialization() const; |
214 | 214 | Uint16 m_NumDynamicBuffers = 0; |
215 | 215 | Uint32 m_TotalResources = 0; |
216 | 216 | |
217 | #ifdef _DEBUG | |
217 | #ifdef DILIGENT_DEBUG | |
218 | 218 | // Only for debug purposes: indicates what types of resources are stored in the cache |
219 | 219 | const DbgCacheContentType m_DbgContentType; |
220 | 220 | // Debug array that stores flags indicating if resources in the cache have been initialized |
268 | 268 | ++res; |
269 | 269 | } |
270 | 270 | |
271 | #ifdef _DEBUG | |
271 | #ifdef DILIGENT_DEBUG | |
272 | 272 | for (; res < DescrSet.GetSize(); ++res) |
273 | 273 | { |
274 | 274 | const auto& Res = DescrSet.GetResource(res); |
269 | 269 | const ShaderResourceCacheVk& SrcResourceCache, |
270 | 270 | ShaderResourceCacheVk& DstResourceCache) const; |
271 | 271 | |
272 | #ifdef DEVELOPMENT | |
272 | #ifdef DILIGENT_DEVELOPMENT | |
273 | 273 | bool dvpVerifyBindings(const ShaderResourceCacheVk& ResourceCache) const; |
274 | 274 | static void dvpVerifyResourceLayoutDesc(Uint32 NumShaders, |
275 | 275 | const std::shared_ptr<const SPIRVShaderResources> pShaderResources[], |
113 | 113 | ShaderVariableVkImpl* m_pVariables = nullptr; |
114 | 114 | Uint32 m_NumVariables = 0; |
115 | 115 | |
116 | #ifdef _DEBUG | |
116 | #ifdef DILIGENT_DEBUG | |
117 | 117 | IMemoryAllocator& m_DbgAllocator; |
118 | 118 | #endif |
119 | 119 | }; |
64 | 64 | pDynamicMemMgr{rhs.pDynamicMemMgr}, |
65 | 65 | AlignedOffset {rhs.AlignedOffset }, |
66 | 66 | Size {rhs.Size } |
67 | #ifdef DEVELOPMENT | |
67 | #ifdef DILIGENT_DEVELOPMENT | |
68 | 68 | , dvpFrameNumber{rhs.dvpFrameNumber} |
69 | 69 | #endif |
70 | 70 | { |
71 | 71 | rhs.pDynamicMemMgr = nullptr; |
72 | 72 | rhs.AlignedOffset = 0; |
73 | 73 | rhs.Size = 0; |
74 | #ifdef DEVELOPMENT | |
74 | #ifdef DILIGENT_DEVELOPMENT | |
75 | 75 | rhs.dvpFrameNumber = 0; |
76 | 76 | #endif |
77 | 77 | } |
85 | 85 | rhs.pDynamicMemMgr = nullptr; |
86 | 86 | rhs.AlignedOffset = 0; |
87 | 87 | rhs.Size = 0; |
88 | #ifdef DEVELOPMENT | |
88 | #ifdef DILIGENT_DEVELOPMENT | |
89 | 89 | dvpFrameNumber = rhs.dvpFrameNumber; |
90 | 90 | rhs.dvpFrameNumber = 0; |
91 | 91 | #endif |
95 | 95 | VulkanDynamicMemoryManager* pDynamicMemMgr = nullptr; |
96 | 96 | size_t AlignedOffset = 0; // Offset from the start of the buffer |
97 | 97 | size_t Size = 0; // Reserved size of this allocation |
98 | #ifdef DEVELOPMENT | |
98 | #ifdef DILIGENT_DEVELOPMENT | |
99 | 99 | Int64 dvpFrameNumber = 0; |
100 | 100 | #endif |
101 | 101 | }; |
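dvpFrameNumber exists only in development builds and records the frame in which a dynamic allocation was carved out; since the backing pages are recycled every frame, an allocation referenced in a later frame is a bug that DvpVerifyDynamicAllocation can flag. A simplified sketch of that check (types and names reduced):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    struct DynamicAllocation
    {
        size_t AlignedOffset = 0;
        size_t Size          = 0;
    #ifdef DILIGENT_DEVELOPMENT
        int64_t dvpFrameNumber = 0;
    #endif
    };

    // Development-only: a dynamic allocation is valid only within the frame
    // that produced it, because the backing pages are recycled every frame.
    inline void DvpVerifyAllocation(const DynamicAllocation& Alloc, int64_t CurrentFrame)
    {
    #ifdef DILIGENT_DEVELOPMENT
        if (Alloc.dvpFrameNumber != CurrentFrame)
            std::fprintf(stderr, "Dynamic allocation made in frame %lld is used in frame %lld\n",
                         static_cast<long long>(Alloc.dvpFrameNumber),
                         static_cast<long long>(CurrentFrame));
    #else
        (void)Alloc; (void)CurrentFrame;
    #endif
    }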
59 | 59 | |
60 | 60 | CommandPoolWrapper&& Release(); |
61 | 61 | |
62 | #ifdef DEVELOPMENT | |
62 | #ifdef DILIGENT_DEVELOPMENT | |
63 | 63 | int32_t DvpGetBufferCounter() const |
64 | 64 | { |
65 | 65 | return m_BuffCounter; |
73 | 73 | |
74 | 74 | std::mutex m_Mutex; |
75 | 75 | std::deque<VkCommandBuffer> m_CmdBuffers; |
76 | #ifdef DEVELOPMENT | |
76 | #ifdef DILIGENT_DEVELOPMENT | |
77 | 77 | std::atomic_int32_t m_BuffCounter; |
78 | 78 | #endif |
79 | 79 | }; |
159 | 159 | RESOURCE_STATE_INDIRECT_ARGUMENT); |
160 | 160 | SetState(State); |
161 | 161 | |
162 | #ifdef _DEBUG | |
162 | #ifdef DILIGENT_DEBUG | |
163 | 163 | { |
164 | 164 | VkAccessFlags AccessFlags = |
165 | 165 | VK_ACCESS_INDIRECT_COMMAND_READ_BIT | |
404 | 404 | return ResourceStateFlagsToVkAccessFlags(GetState()); |
405 | 405 | } |
406 | 406 | |
407 | #ifdef DEVELOPMENT | |
407 | #ifdef DILIGENT_DEVELOPMENT | |
408 | 408 | void BufferVkImpl::DvpVerifyDynamicAllocation(DeviceContextVkImpl* pCtx) const |
409 | 409 | { |
410 | 410 | auto ContextId = pCtx->GetContextId(); |
43 | 43 | m_CmdPools (STD_ALLOCATOR_RAW_MEM(VulkanUtilities::CommandPoolWrapper, GetRawAllocator(), "Allocator for deque<VulkanUtilities::CommandPoolWrapper>")) |
44 | 44 | // clang-format on |
45 | 45 | { |
46 | #ifdef DEVELOPMENT | |
46 | #ifdef DILIGENT_DEVELOPMENT | |
47 | 47 | m_AllocatedPoolCounter = 0; |
48 | 48 | #endif |
49 | 49 | } |
75 | 75 | |
76 | 76 | LogicalDevice.ResetCommandPool(CmdPool); |
77 | 77 | |
78 | #ifdef DEVELOPMENT | |
78 | #ifdef DILIGENT_DEVELOPMENT | |
79 | 79 | ++m_AllocatedPoolCounter; |
80 | 80 | #endif |
81 | 81 | return std::move(CmdPool); |
129 | 129 | void CommandPoolManager::FreeCommandPool(VulkanUtilities::CommandPoolWrapper&& CmdPool) |
130 | 130 | { |
131 | 131 | std::lock_guard<std::mutex> LockGuard(m_Mutex); |
132 | #ifdef DEVELOPMENT | |
132 | #ifdef DILIGENT_DEVELOPMENT | |
133 | 133 | --m_AllocatedPoolCounter; |
134 | 134 | #endif |
135 | 135 | m_CmdPools.emplace_back(std::move(CmdPool)); |
67 | 67 | VulkanUtilities::DescriptorPoolWrapper DescriptorPoolManager::GetPool(const char* DebugName) |
68 | 68 | { |
69 | 69 | std::lock_guard<std::mutex> Lock{m_Mutex}; |
70 | #ifdef DEVELOPMENT | |
70 | #ifdef DILIGENT_DEVELOPMENT | |
71 | 71 | ++m_AllocatedPoolCounter; |
72 | 72 | #endif |
73 | 73 | if (m_Pools.empty()) |
127 | 127 | std::lock_guard<std::mutex> Lock{m_Mutex}; |
128 | 128 | m_DeviceVkImpl.GetLogicalDevice().ResetDescriptorPool(Pool); |
129 | 129 | m_Pools.emplace_back(std::move(Pool)); |
130 | #ifdef DEVELOPMENT | |
130 | #ifdef DILIGENT_DEVELOPMENT | |
131 | 131 | --m_AllocatedPoolCounter; |
132 | 132 | #endif |
133 | 133 | } |
176 | 176 | std::swap(*it, m_Pools.front()); |
177 | 177 | } |
178 | 178 | |
179 | #ifdef DEVELOPMENT | |
179 | #ifdef DILIGENT_DEVELOPMENT | |
180 | 180 | ++m_AllocatedSetCounter; |
181 | 181 | #endif |
182 | 182 | return {Set, Pool, CommandQueueMask, *this}; |
191 | 191 | auto Set = AllocateDescriptorSet(LogicalDevice, NewPool, SetLayout, DebugName); |
192 | 192 | DEV_CHECK_ERR(Set != VK_NULL_HANDLE, "Failed to allocate descriptor set"); |
193 | 193 | |
194 | #ifdef DEVELOPMENT | |
194 | #ifdef DILIGENT_DEVELOPMENT | |
195 | 195 | ++m_AllocatedSetCounter; |
196 | 196 | #endif |
197 | 197 | |
233 | 233 | { |
234 | 234 | std::lock_guard<std::mutex> Lock{Allocator->m_Mutex}; |
235 | 235 | Allocator->m_DeviceVkImpl.GetLogicalDevice().FreeDescriptorSet(Pool, Set); |
236 | #ifdef DEVELOPMENT | |
236 | #ifdef DILIGENT_DEVELOPMENT | |
237 | 237 | --Allocator->m_AllocatedSetCounter; |
238 | 238 | #endif |
239 | 239 | } |
326 | 326 | |
327 | 327 | void DeviceContextVkImpl::CommitVkVertexBuffers() |
328 | 328 | { |
329 | #ifdef DEVELOPMENT | |
329 | #ifdef DILIGENT_DEVELOPMENT | |
330 | 330 | if (m_NumVertexStreams < m_pPipelineState->GetNumBufferSlotsUsed()) |
331 | 331 | LOG_ERROR("Currently bound pipeline state '", m_pPipelineState->GetDesc().Name, "' expects ", m_pPipelineState->GetNumBufferSlotsUsed(), " input buffer slots, but only ", m_NumVertexStreams, " is bound"); |
332 | 332 | #endif |
343 | 343 | if (pBufferVk->GetDesc().Usage == USAGE_DYNAMIC) |
344 | 344 | { |
345 | 345 | DynamicBufferPresent = true; |
346 | #ifdef DEVELOPMENT | |
346 | #ifdef DILIGENT_DEVELOPMENT | |
347 | 347 | pBufferVk->DvpVerifyDynamicAllocation(this); |
348 | 348 | #endif |
349 | 349 | } |
413 | 413 | |
414 | 414 | void DeviceContextVkImpl::PrepareForDraw(DRAW_FLAGS Flags) |
415 | 415 | { |
416 | #ifdef DEVELOPMENT | |
416 | #ifdef DILIGENT_DEVELOPMENT | |
417 | 417 | if ((Flags & DRAW_FLAG_VERIFY_RENDER_TARGETS) != 0) |
418 | 418 | DvpVerifyRenderTargets(); |
419 | 419 | #endif |
425 | 425 | CommitVkVertexBuffers(); |
426 | 426 | } |
427 | 427 | |
428 | #ifdef DEVELOPMENT | |
428 | #ifdef DILIGENT_DEVELOPMENT | |
429 | 429 | if ((Flags & DRAW_FLAG_VERIFY_STATES) != 0) |
430 | 430 | { |
431 | 431 | for (Uint32 slot = 0; slot < m_NumVertexStreams; ++slot) |
450 | 450 | } |
451 | 451 | } |
452 | 452 | #if 0 |
453 | # ifdef _DEBUG | |
453 | # ifdef DILIGENT_DEBUG | |
454 | 454 | else |
455 | 455 | { |
456 | 456 | if ( m_pPipelineState->dbgContainsShaderResources() ) |
459 | 459 | # endif |
460 | 460 | #endif |
461 | 461 | |
462 | #ifdef DEVELOPMENT | |
462 | #ifdef DILIGENT_DEVELOPMENT | |
463 | 463 | if (m_pPipelineState->GetVkRenderPass() != m_RenderPass) |
464 | 464 | { |
465 | 465 | DvpLogRenderPass_PSOMismatch(); |
474 | 474 | DEV_CHECK_ERR(pAttribsBuffer, "Indirect draw attribs buffer must not be null"); |
475 | 475 | auto* pIndirectDrawAttribsVk = ValidatedCast<BufferVkImpl>(pAttribsBuffer); |
476 | 476 | |
477 | #ifdef DEVELOPMENT | |
477 | #ifdef DILIGENT_DEVELOPMENT | |
478 | 478 | if (pIndirectDrawAttribsVk->GetDesc().Usage == USAGE_DYNAMIC) |
479 | 479 | pIndirectDrawAttribsVk->DvpVerifyDynamicAllocation(this); |
480 | 480 | #endif |
489 | 489 | { |
490 | 490 | PrepareForDraw(Flags); |
491 | 491 | |
492 | #ifdef DEVELOPMENT | |
492 | #ifdef DILIGENT_DEVELOPMENT | |
493 | 493 | if ((Flags & DRAW_FLAG_VERIFY_STATES) != 0) |
494 | 494 | { |
495 | 495 | DvpVerifyBufferState(*m_pIndexBuffer, RESOURCE_STATE_INDEX_BUFFER, "Indexed draw call (DeviceContextVkImpl::Draw)"); |
569 | 569 | } |
570 | 570 | } |
571 | 571 | #if 0 |
572 | # ifdef _DEBUG | |
572 | # ifdef DILIGENT_DEBUG | |
573 | 573 | else |
574 | 574 | { |
575 | 575 | if ( m_pPipelineState->dbgContainsShaderResources() ) |
598 | 598 | |
599 | 599 | auto* pBufferVk = ValidatedCast<BufferVkImpl>(pAttribsBuffer); |
600 | 600 | |
601 | #ifdef DEVELOPMENT | |
601 | #ifdef DILIGENT_DEVELOPMENT | |
602 | 602 | if (pBufferVk->GetDesc().Usage == USAGE_DYNAMIC) |
603 | 603 | pBufferVk->DvpVerifyDynamicAllocation(this); |
604 | 604 | #endif |
802 | 802 | |
803 | 803 | void DeviceContextVkImpl::FinishFrame() |
804 | 804 | { |
805 | #ifdef _DEBUG | |
805 | #ifdef DILIGENT_DEBUG | |
806 | 806 | for (const auto& MappedBuffIt : m_DbgMappedBuffers) |
807 | 807 | { |
808 | 808 | const auto& BuffDesc = MappedBuffIt.first->GetDesc(); |
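FinishFrame walks m_DbgMappedBuffers, a debug-only registry of buffers that were mapped but never unmapped, and reports each offender at the end of the frame. The bookkeeping pattern, sketched with simplified types:

    #include <cstdio>
    #include <string>
    #include <unordered_map>

    class ContextDbg
    {
    public:
        void OnMap(const void* pBuffer, std::string Name)
        {
    #ifdef DILIGENT_DEBUG
            m_DbgMappedBuffers.emplace(pBuffer, std::move(Name));
    #else
            (void)pBuffer; (void)Name;
    #endif
        }

        void OnUnmap(const void* pBuffer)
        {
    #ifdef DILIGENT_DEBUG
            m_DbgMappedBuffers.erase(pBuffer);
    #else
            (void)pBuffer;
    #endif
        }

        // Debug builds report every buffer still mapped when the frame ends.
        void FinishFrame() const
        {
    #ifdef DILIGENT_DEBUG
            for (const auto& It : m_DbgMappedBuffers)
                std::fprintf(stderr, "Buffer '%s' is still mapped at the end of the frame\n", It.second.c_str());
    #endif
        }

    private:
    #ifdef DILIGENT_DEBUG
        std::unordered_map<const void*, std::string> m_DbgMappedBuffers;
    #endif
    };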
1106 | 1106 | if (m_Framebuffer != VK_NULL_HANDLE) |
1107 | 1107 | { |
1108 | 1108 | VERIFY_EXPR(m_RenderPass != VK_NULL_HANDLE); |
1109 | #ifdef DEVELOPMENT | |
1109 | #ifdef DILIGENT_DEVELOPMENT | |
1110 | 1110 | if (VerifyStates) |
1111 | 1111 | { |
1112 | 1112 | TransitionRenderTargets(RESOURCE_STATE_TRANSITION_MODE_VERIFY); |
1196 | 1196 | Uint64 SrcOffset, |
1197 | 1197 | RESOURCE_STATE_TRANSITION_MODE TransitionMode) |
1198 | 1198 | { |
1199 | #ifdef DEVELOPMENT | |
1199 | #ifdef DILIGENT_DEVELOPMENT | |
1200 | 1200 | if (DstOffset + NumBytes > pBuffVk->GetDesc().uiSizeInBytes) |
1201 | 1201 | { |
1202 | 1202 | LOG_ERROR("Update region is out of buffer bounds which will result in an undefined behavior"); |
1227 | 1227 | // be resource barrier issues in the cmd list in the device context |
1228 | 1228 | auto* pBuffVk = ValidatedCast<BufferVkImpl>(pBuffer); |
1229 | 1229 | |
1230 | #ifdef DEVELOPMENT | |
1230 | #ifdef DILIGENT_DEVELOPMENT | |
1231 | 1231 | if (pBuffVk->GetDesc().Usage == USAGE_DYNAMIC) |
1232 | 1232 | { |
1233 | 1233 | LOG_ERROR("Dynamic buffers must be updated via Map()"); |
1257 | 1257 | auto* pSrcBuffVk = ValidatedCast<BufferVkImpl>(pSrcBuffer); |
1258 | 1258 | auto* pDstBuffVk = ValidatedCast<BufferVkImpl>(pDstBuffer); |
1259 | 1259 | |
1260 | #ifdef DEVELOPMENT | |
1260 | #ifdef DILIGENT_DEVELOPMENT | |
1261 | 1261 | if (pDstBuffVk->GetDesc().Usage == USAGE_DYNAMIC) |
1262 | 1262 | { |
1263 | 1263 | LOG_ERROR("Dynamic buffers cannot be copy destinations"); |
1633 | 1633 | // pages will be discarded |
1634 | 1634 | VERIFY((Allocation.AlignedOffset % BufferOffsetAlignment) == 0, "Allocation offset must be at least 32-bit aligned"); |
1635 | 1635 | |
1636 | #ifdef _DEBUG | |
1636 | #ifdef DILIGENT_DEBUG | |
1637 | 1637 | { |
1638 | 1638 | VERIFY(SrcStride >= CopyInfo.RowSize, "Source data stride (", SrcStride, ") is below the image row size (", CopyInfo.RowSize, ")"); |
1639 | 1639 | const Uint32 PlaneSize = SrcStride * CopyInfo.RowCount; |
2204 | 2204 | VERIFY_EXPR(Texture.GetLayout() == ExpectedLayout); |
2205 | 2205 | } |
2206 | 2206 | } |
2207 | #ifdef DEVELOPMENT | |
2207 | #ifdef DILIGENT_DEVELOPMENT | |
2208 | 2208 | else if (TransitionMode == RESOURCE_STATE_TRANSITION_MODE_VERIFY) |
2209 | 2209 | { |
2210 | 2210 | DvpVerifyTextureState(Texture, RequiredState, OperationName); |
2300 | 2300 | VERIFY_EXPR(Buffer.CheckAccessFlags(ExpectedAccessFlags)); |
2301 | 2301 | } |
2302 | 2302 | } |
2303 | #ifdef DEVELOPMENT | |
2303 | #ifdef DILIGENT_DEVELOPMENT | |
2304 | 2304 | else if (TransitionMode == RESOURCE_STATE_TRANSITION_MODE_VERIFY) |
2305 | 2305 | { |
2306 | 2306 | DvpVerifyBufferState(Buffer, RequiredState, OperationName); |
2311 | 2311 | VulkanDynamicAllocation DeviceContextVkImpl::AllocateDynamicSpace(Uint32 SizeInBytes, Uint32 Alignment) |
2312 | 2312 | { |
2313 | 2313 | auto DynAlloc = m_DynamicHeap.Allocate(SizeInBytes, Alignment); |
2314 | #ifdef DEVELOPMENT | |
2314 | #ifdef DILIGENT_DEVELOPMENT | |
2315 | 2315 | DynAlloc.dvpFrameNumber = m_ContextFrameNumber; |
2316 | 2316 | #endif |
2317 | 2317 | return DynAlloc; |
2327 | 2327 | for (Uint32 i = 0; i < BarrierCount; ++i) |
2328 | 2328 | { |
2329 | 2329 | const auto& Barrier = pResourceBarriers[i]; |
2330 | #ifdef DEVELOPMENT | |
2330 | #ifdef DILIGENT_DEVELOPMENT | |
2331 | 2331 | DvpVerifyStateTransitionDesc(Barrier); |
2332 | 2332 | #endif |
2333 | 2333 | if (Barrier.TransitionType == STATE_TRANSITION_TYPE_BEGIN) |
145 | 145 | } |
146 | 146 | VERIFY_EXPR(((NumBindings & (NumBindings - 1)) == 0) && NumBindings == MemSize || NumBindings < MemSize); |
147 | 147 | |
148 | #ifdef _DEBUG | |
148 | #ifdef DILIGENT_DEBUG | |
149 | 149 | static constexpr size_t MinMemSize = 1; |
150 | 150 | #else |
151 | 151 | static constexpr size_t MinMemSize = 16; |
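Note the inverted sizes here: the minimum reservation is 1 in debug builds but 16 in release. Starting tiny in debug forces the growth/reallocation path to run on almost every use, so bugs in it surface early, while release builds avoid the churn. In isolation:

    #include <cstddef>
    #include <vector>

    #ifdef DILIGENT_DEBUG
    static constexpr size_t MinMemSize = 1;  // stress the reallocation path
    #else
    static constexpr size_t MinMemSize = 16; // avoid churn in optimized builds
    #endif

    struct BindingStorage
    {
        BindingStorage() { Bindings.reserve(MinMemSize); }
        std::vector<int> Bindings;
    };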
441 | 441 | if (StaticAndMutSet.SetIndex >= 0) |
442 | 442 | { |
443 | 443 | const char* DescrSetName = "Static/Mutable Descriptor Set"; |
444 | #ifdef DEVELOPMENT | |
444 | #ifdef DILIGENT_DEVELOPMENT | |
445 | 445 | std::string _DescrSetName(DbgPipelineName); |
446 | 446 | _DescrSetName.append(" - static/mutable set"); |
447 | 447 | DescrSetName = _DescrSetName.c_str(); |
457 | 457 | DescriptorSetBindInfo& BindInfo, |
458 | 458 | VkDescriptorSet VkDynamicDescrSet) const |
459 | 459 | { |
460 | #ifdef _DEBUG | |
460 | #ifdef DILIGENT_DEBUG | |
461 | 461 | BindInfo.vkSets.clear(); |
462 | 462 | #endif |
463 | 463 | |
490 | 490 | TotalDynamicDescriptors += Set.NumDynamicDescriptors; |
491 | 491 | } |
492 | 492 | |
493 | #ifdef _DEBUG | |
493 | #ifdef DILIGENT_DEBUG | |
494 | 494 | for (const auto& set : BindInfo.vkSets) |
495 | 495 | VERIFY(set != VK_NULL_HANDLE, "Descriptor set must not be null"); |
496 | 496 | #endif |
500 | 500 | BindInfo.DynamicOffsets.resize(TotalDynamicDescriptors); |
501 | 501 | BindInfo.BindPoint = IsCompute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS; |
502 | 502 | BindInfo.pResourceCache = &ResourceCache; |
503 | #ifdef _DEBUG | |
503 | #ifdef DILIGENT_DEBUG | |
504 | 504 | BindInfo.pDbgPipelineLayout = this; |
505 | 505 | #endif |
506 | 506 | BindInfo.DynamicBuffersPresent = ResourceCache.GetNumDynamicBuffers() > 0; |
270 | 270 | |
271 | 271 | PipelineCI.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO; |
272 | 272 | PipelineCI.pNext = nullptr; |
273 | #ifdef _DEBUG | |
273 | #ifdef DILIGENT_DEBUG | |
274 | 274 | PipelineCI.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT; |
275 | 275 | #endif |
276 | 276 | PipelineCI.basePipelineHandle = VK_NULL_HANDLE; // a pipeline to derive from |
298 | 298 | |
299 | 299 | PipelineCI.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; |
300 | 300 | PipelineCI.pNext = nullptr; |
301 | #ifdef _DEBUG | |
301 | #ifdef DILIGENT_DEBUG | |
302 | 302 | PipelineCI.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT; |
303 | 303 | #endif |
304 | 304 | |
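Both pipeline-creation hunks set VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT in debug builds only: the driver skips shader optimization, so pipelines are created faster at the cost of runtime performance, a reasonable trade while iterating. A hedged sketch of the flag selection (Vulkan headers assumed; the caller still fills in the shader stage):

    #include <vulkan/vulkan.h>

    // Prepare compute pipeline create info; debug builds ask the driver to
    // skip optimization so pipeline creation is faster during development.
    inline VkComputePipelineCreateInfo MakeComputePipelineCI(VkPipelineLayout Layout)
    {
        VkComputePipelineCreateInfo PipelineCI{};
        PipelineCI.sType  = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
        PipelineCI.layout = Layout;
    #ifdef DILIGENT_DEBUG
        PipelineCI.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
    #endif
        PipelineCI.basePipelineHandle = VK_NULL_HANDLE; // no pipeline to derive from
        PipelineCI.basePipelineIndex  = -1;
        return PipelineCI;
    }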
511 | 511 | |
512 | 512 | auto IsSamePipelineLayout = m_PipelineLayout.IsSameAs(pPSOVk->m_PipelineLayout); |
513 | 513 | |
514 | #ifdef _DEBUG | |
514 | #ifdef DILIGENT_DEBUG | |
515 | 515 | { |
516 | 516 | bool IsCompatibleShaders = true; |
517 | 517 | if (m_NumShaders != pPSOVk->m_NumShaders) |
558 | 558 | if (!m_HasStaticResources && !m_HasNonStaticResources) |
559 | 559 | return; |
560 | 560 | |
561 | #ifdef DEVELOPMENT | |
561 | #ifdef DILIGENT_DEVELOPMENT | |
562 | 562 | if (pShaderResourceBinding == nullptr) |
563 | 563 | { |
564 | 564 | LOG_ERROR_MESSAGE("Pipeline state '", m_Desc.Name, "' requires shader resource binding object to ", |
569 | 569 | |
570 | 570 | auto* pResBindingVkImpl = ValidatedCast<ShaderResourceBindingVkImpl>(pShaderResourceBinding); |
571 | 571 | |
572 | #ifdef DEVELOPMENT | |
572 | #ifdef DILIGENT_DEVELOPMENT | |
573 | 573 | { |
574 | 574 | auto* pRefPSO = pResBindingVkImpl->GetPipelineState(); |
575 | 575 | if (IsIncompatibleWith(pRefPSO)) |
587 | 587 | |
588 | 588 | auto& ResourceCache = pResBindingVkImpl->GetResourceCache(); |
589 | 589 | |
590 | #ifdef DEVELOPMENT | |
590 | #ifdef DILIGENT_DEVELOPMENT | |
591 | 591 | for (Uint32 s = 0; s < m_NumShaders; ++s) |
592 | 592 | { |
593 | 593 | m_ShaderResourceLayouts[s].dvpVerifyBindings(ResourceCache); |
594 | 594 | } |
595 | 595 | #endif |
596 | #ifdef _DEBUG | |
596 | #ifdef DILIGENT_DEBUG | |
597 | 597 | ResourceCache.DbgVerifyDynamicBuffersCounter(); |
598 | 598 | #endif |
599 | 599 | |
601 | 601 | { |
602 | 602 | ResourceCache.TransitionResources<false>(pCtxVkImpl); |
603 | 603 | } |
604 | #ifdef DEVELOPMENT | |
604 | #ifdef DILIGENT_DEVELOPMENT | |
605 | 605 | else if (StateTransitionMode == RESOURCE_STATE_TRANSITION_MODE_VERIFY) |
606 | 606 | { |
607 | 607 | ResourceCache.TransitionResources<true>(pCtxVkImpl); |
615 | 615 | if (DynamicDescriptorSetVkLayout != VK_NULL_HANDLE) |
616 | 616 | { |
617 | 617 | const char* DynamicDescrSetName = "Dynamic Descriptor Set"; |
618 | #ifdef DEVELOPMENT | |
618 | #ifdef DILIGENT_DEVELOPMENT | |
619 | 619 | std::string _DynamicDescrSetName(m_Desc.Name); |
620 | 620 | _DynamicDescrSetName.append(" - dynamic set"); |
621 | 621 | DynamicDescrSetName = _DynamicDescrSetName.c_str(); |
688 | 688 | { |
689 | 689 | const auto& StaticResLayout = GetStaticShaderResLayout(s); |
690 | 690 | const auto& StaticResCache = GetStaticResCache(s); |
691 | #ifdef DEVELOPMENT | |
691 | #ifdef DILIGENT_DEVELOPMENT | |
692 | 692 | if (!StaticResLayout.dvpVerifyBindings(StaticResCache)) |
693 | 693 | { |
694 | 694 | const auto* pShaderVk = GetShader<const ShaderVkImpl>(s); |
702 | 702 | const auto& ShaderResourceLayouts = GetShaderResLayout(s); |
703 | 703 | ShaderResourceLayouts.InitializeStaticResources(StaticResLayout, StaticResCache, ResourceCache); |
704 | 704 | } |
705 | #ifdef _DEBUG | |
705 | #ifdef DILIGENT_DEBUG | |
706 | 706 | ResourceCache.DbgVerifyDynamicBuffersCounter(); |
707 | 707 | #endif |
708 | 708 | } |
185 | 185 | |
186 | 186 | auto& HeapInfo = m_Heaps[Type]; |
187 | 187 | VERIFY(Index < HeapInfo.PoolSize, "Query index ", Index, " is out of range"); |
188 | #ifdef _DEBUG | |
188 | #ifdef DILIGENT_DEBUG | |
189 | 189 | for (const auto& ind : HeapInfo.AvailableQueries) |
190 | 190 | { |
191 | 191 | VERIFY(ind != Index, "Index ", Index, " already present in available queries list"); |
58 | 58 | SamplerCI.addressModeW = AddressModeToVkAddressMode(m_Desc.AddressW); |
59 | 59 | SamplerCI.mipLodBias = m_Desc.MipLODBias; |
60 | 60 | SamplerCI.anisotropyEnable = IsAnisotropicFilter(m_Desc.MinFilter); |
61 | #ifdef DEVELOPMENT | |
61 | #ifdef DILIGENT_DEVELOPMENT | |
62 | 62 | if (!((SamplerCI.anisotropyEnable && IsAnisotropicFilter(m_Desc.MagFilter)) || |
63 | 63 | (!SamplerCI.anisotropyEnable && !IsAnisotropicFilter(m_Desc.MagFilter)))) |
64 | 64 | { |
68 | 68 | |
69 | 69 |
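Taken together, the hunks follow a consistent division of labor: DILIGENT_DEBUG guards heavyweight checks that only make sense in debug configurations (bound-state queries, counter verification, debug-only members), while DILIGENT_DEVELOPMENT guards lighter validation that is also worth keeping in optimized development builds (dynamic-allocation checks, leak counters, human-readable object names). A project whose own code still tests the old DEVELOPMENT macro can bridge to the new name with a small shim like the following sketch, which is not part of the library:

    // Compatibility sketch (not part of the library): derive the retired
    // DEVELOPMENT macro from the new name before including Diligent headers.
    // (_DEBUG needs no shim: MSVC debug runtimes define it themselves.)
    #if defined(DILIGENT_DEVELOPMENT) && !defined(DEVELOPMENT)
    #    define DEVELOPMENT 1
    #endif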