git.s-ol.nu ~forks/DiligentCore / e84c0e4
Vulkan: fixed tests on Mac. azhirnov authored 6 months ago assiduous committed 6 months ago
22 changed file(s) with 627 addition(s) and 74 deletion(s). Raw diff Collapse all Expand all
12151215
12161216 PIPELINE_RESOURCE_FLAGS GetValidPipelineResourceFlags(SHADER_RESOURCE_TYPE ResourceType)
12171217 {
1218
12191218 static_assert(SHADER_RESOURCE_TYPE_LAST == 8, "Please update the switch below to handle the new shader resource type");
12201219 switch (ResourceType)
12211220 {
106106 // when resource is bound.
107107 using SRBArray = std::array<ShaderResourceBindingVkImpl*, MAX_RESOURCE_SIGNATURES>;
108108 void DvpVerifySRBResources(const SRBArray& SRBs) const;
109
110 void DvpValidateResourceLimits() const;
109111 #endif
110112
111113 private:
3030 # define VK_NO_PROTOTYPES
3131 #endif
3232
33 #include "vulkan/vulkan.h"
34
3533 #define VK_FORMAT_RANGE_SIZE (VK_FORMAT_ASTC_12x12_SRGB_BLOCK - VK_FORMAT_UNDEFINED + 1)
3634
3735 #if DILIGENT_USE_VOLK
36 # define VK_ENABLE_BETA_EXTENSIONS
3837 # include "volk/volk.h"
38 #else
39 # include "vulkan/vulkan.h"
3940 #endif
4041
4142 #if defined(VK_USE_PLATFORM_XLIB_KHR) || defined(_X11_XLIB_H_)
3838 public:
3939 struct ExtensionFeatures
4040 {
41 VkPhysicalDeviceMeshShaderFeaturesNV MeshShader = {};
42 VkPhysicalDevice16BitStorageFeaturesKHR Storage16Bit = {};
43 VkPhysicalDevice8BitStorageFeaturesKHR Storage8Bit = {};
44 VkPhysicalDeviceShaderFloat16Int8FeaturesKHR ShaderFloat16Int8 = {};
45 VkPhysicalDeviceAccelerationStructureFeaturesKHR AccelStruct = {};
46 VkPhysicalDeviceRayTracingPipelineFeaturesKHR RayTracingPipeline = {};
47 VkPhysicalDeviceRayQueryFeaturesKHR RayQuery = {};
48 bool Spirv14 = false; // Ray tracing requires Vulkan 1.2 or SPIRV 1.4 extension
49 bool Spirv15 = false; // DXC shaders with ray tracing requires Vulkan 1.2 with SPIRV 1.5
50 VkPhysicalDeviceBufferDeviceAddressFeaturesKHR BufferDeviceAddress = {};
51 VkPhysicalDeviceDescriptorIndexingFeaturesEXT DescriptorIndexing = {};
41 VkPhysicalDeviceMeshShaderFeaturesNV MeshShader = {};
42 VkPhysicalDevice16BitStorageFeaturesKHR Storage16Bit = {};
43 VkPhysicalDevice8BitStorageFeaturesKHR Storage8Bit = {};
44 VkPhysicalDeviceShaderFloat16Int8FeaturesKHR ShaderFloat16Int8 = {};
45 VkPhysicalDeviceAccelerationStructureFeaturesKHR AccelStruct = {};
46 VkPhysicalDeviceRayTracingPipelineFeaturesKHR RayTracingPipeline = {};
47 VkPhysicalDeviceRayQueryFeaturesKHR RayQuery = {};
48 bool Spirv14 = false; // Ray tracing requires Vulkan 1.2 or SPIRV 1.4 extension
49 bool Spirv15 = false; // DXC shaders with ray tracing requires Vulkan 1.2 with SPIRV 1.5
50 VkPhysicalDeviceBufferDeviceAddressFeaturesKHR BufferDeviceAddress = {};
51 VkPhysicalDeviceDescriptorIndexingFeaturesEXT DescriptorIndexing = {};
52 bool HasPortabilitySubset = false;
53 VkPhysicalDevicePortabilitySubsetFeaturesKHR PortabilitySubset = {};
5254 };
5355
5456 struct ExtensionProperties
5759 VkPhysicalDeviceAccelerationStructurePropertiesKHR AccelStruct = {};
5860 VkPhysicalDeviceRayTracingPipelinePropertiesKHR RayTracingPipeline = {};
5961 VkPhysicalDeviceDescriptorIndexingPropertiesEXT DescriptorIndexing = {};
62 VkPhysicalDevicePortabilitySubsetPropertiesKHR PortabilitySubset = {};
6063 };
6164
6265 public:
32153215 vkASBuildInfo.ppGeometries = nullptr;
32163216 vkASBuildInfo.scratchData.deviceAddress = pScratchVk->GetVkDeviceAddress() + Attribs.ScratchBufferOffset;
32173217
3218 const auto& ASLimits = m_pDevice->GetPhysicalDevice().GetExtProperties().AccelStruct;
3219 VERIFY(vkASBuildInfo.scratchData.deviceAddress % ASLimits.minAccelerationStructureScratchOffsetAlignment == 0, "Scratch buffer start address is not properly aligned");
3220
32183221 EnsureVkCmdBuffer();
32193222 m_CommandBuffer.BuildAccelerationStructure(1, &vkASBuildInfo, &VkRangePtr);
32203223 ++m_State.NumCommands;
33193322 vkASBuildInfo.ppGeometries = nullptr;
33203323 vkASBuildInfo.scratchData.deviceAddress = pScratchVk->GetVkDeviceAddress() + Attribs.ScratchBufferOffset;
33213324
3325 const auto& ASLimits = m_pDevice->GetPhysicalDevice().GetExtProperties().AccelStruct;
3326 VERIFY(vkASBuildInfo.scratchData.deviceAddress % ASLimits.minAccelerationStructureScratchOffsetAlignment == 0, "Scratch buffer start address is not properly aligned");
3327
33223328 m_CommandBuffer.BuildAccelerationStructure(1, &vkASBuildInfo, &vkRangePtr);
33233329 ++m_State.NumCommands;
33243330 }
245245 const VulkanUtilities::VulkanPhysicalDevice::ExtensionFeatures& DeviceExtFeatures = PhysicalDevice->GetExtFeatures();
246246 VulkanUtilities::VulkanPhysicalDevice::ExtensionFeatures EnabledExtFeats = {};
247247
248 // SPIRV 1.5 is in Vulkan 1.2 core
249 EnabledExtFeats.Spirv15 = DeviceExtFeatures.Spirv15;
250
251248 #define ENABLE_FEATURE(IsFeatureSupported, Feature, FeatureName) \
252249 do \
253250 { \
437434 VERIFY_EXPR(DeviceExtFeatures.Spirv14);
438435 }
439436
437 // SPIRV 1.5 is in Vulkan 1.2 core
438 EnabledExtFeats.Spirv15 = DeviceExtFeatures.Spirv15;
439
440440 DeviceExtensions.push_back(VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME); // required for VK_KHR_acceleration_structure
441441 DeviceExtensions.push_back(VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME); // required for VK_KHR_acceleration_structure
442442 DeviceExtensions.push_back(VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME); // required for ray tracing
478478 EnabledExtFeats.RayTracingPipeline.rayTraversalPrimitiveCulling = false; // for GLSL_EXT_ray_flags_primitive_culling
479479 }
480480 }
481
482 #ifdef PLATFORM_MACOS
483 if (DeviceExtFeatures.HasPortabilitySubset)
484 {
485 EnabledExtFeats.HasPortabilitySubset = DeviceExtFeatures.HasPortabilitySubset;
486 EnabledExtFeats.PortabilitySubset = DeviceExtFeatures.PortabilitySubset;
487 DeviceExtensions.push_back(VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME);
488
489 *NextExt = &EnabledExtFeats.PortabilitySubset;
490 NextExt = &EnabledExtFeats.PortabilitySubset.pNext;
491 }
492 #endif
481493
482494 // make sure that last pNext is null
483495 *NextExt = nullptr;
876876 VERIFY_EXPR(!m_Signatures[0] || m_Signatures[0]->GetDesc().BindingIndex == 0);
877877 }
878878
879 #ifdef DILIGENT_DEVELOPMENT
880 DvpValidateResourceLimits();
881 #endif
882
879883 m_PipelineLayout.Create(GetDevice(), m_Signatures, m_SignatureCount);
880884
881885 // Verify that pipeline layout is compatible with shader resources and
10951099 }
10961100 VERIFY_EXPR(res_info == m_ResourceAttibutions.end());
10971101 }
1098 #endif
1102
// Development-only validation helper: accumulates the descriptor counts of all
// resource signatures bound to this pipeline and checks them against the
// device's Vulkan limits (VkPhysicalDeviceLimits plus the acceleration-structure
// and descriptor-indexing extension properties). Problems are reported through
// DEV_CHECK_ERR / LOG_WARNING_MESSAGE; the function does not modify any state.
1103 void PipelineStateVkImpl::DvpValidateResourceLimits() const
1104 {
1105     const auto& Limits = GetDevice()->GetPhysicalDevice().GetProperties().limits;
1106     const auto& ASLimits = GetDevice()->GetPhysicalDevice().GetExtProperties().AccelStruct;
1107     const auto& DescIndFeats = GetDevice()->GetPhysicalDevice().GetExtFeatures().DescriptorIndexing;
1108     const auto& DescIndProps = GetDevice()->GetPhysicalDevice().GetExtProperties().DescriptorIndexing;
1109     const auto DescCount = static_cast<Uint32>(DescriptorType::Count);
1110
         // Per-descriptor-type totals across the whole pipeline, per-type totals
         // for each shader stage, and which stages actually reference resources.
1111     std::array<Uint32, DescCount> DescriptorCount = {};
1112     std::array<std::array<Uint32, DescCount>, MAX_SHADERS_IN_PIPELINE> PerStageDescriptorCount = {};
1113     std::array<bool, MAX_SHADERS_IN_PIPELINE> ShaderStagePresented = {};
1114
1115     for (Uint32 s = 0; s < GetResourceSignatureCount(); ++s)
1116     {
1117         const auto* pSignature = GetResourceSignature(s);
1118         if (pSignature == nullptr)
1119             continue;
1120
1121         for (Uint32 r = 0; r < pSignature->GetTotalResourceCount(); ++r)
1122         {
1123             const auto& ResDesc = pSignature->GetResourceDesc(r);
1124             const auto& ResAttr = pSignature->GetResourceAttribs(r);
1125             const auto DescIndex = static_cast<Uint32>(ResAttr.DescrType);
1126
1127             DescriptorCount[DescIndex] += ResAttr.ArraySize;
1128
                 // A resource may be visible in several stages; the loop has no
                 // explicit increment, so it relies on ExtractLSB removing the
                 // lowest set bit from ShaderStages each iteration to terminate.
1129             for (auto ShaderStages = ResDesc.ShaderStages; ShaderStages != 0;)
1130             {
1131                 const auto ShaderInd = GetShaderTypePipelineIndex(ExtractLSB(ShaderStages), m_Desc.PipelineType);
1132                 PerStageDescriptorCount[ShaderInd][DescIndex] += ResAttr.ArraySize;
1133                 ShaderStagePresented[ShaderInd] = true;
1134             }
1135
                 // RUNTIME_ARRAY resources require non-uniform indexing support;
                 // map each descriptor type to the corresponding descriptor-indexing
                 // feature (supported at all) and property (natively, vs emulated).
1136             if ((ResDesc.Flags & PIPELINE_RESOURCE_FLAG_RUNTIME_ARRAY) != 0)
1137             {
1138                 bool NonUniformIndexingSupported = false;
1139                 bool NonUniformIndexingIsNative = false;
1140                 switch (ResAttr.GetDescriptorType())
1141                 {
1142                     case DescriptorType::Sampler:
1143                         NonUniformIndexingSupported = true;
1144                         NonUniformIndexingIsNative = true;
1145                         break;
1146                     case DescriptorType::CombinedImageSampler:
1147                     case DescriptorType::SeparateImage:
1148                         NonUniformIndexingSupported = DescIndFeats.shaderSampledImageArrayNonUniformIndexing;
1149                         NonUniformIndexingIsNative = DescIndProps.shaderSampledImageArrayNonUniformIndexingNative;
1150                         break;
1151                     case DescriptorType::StorageImage:
1152                         NonUniformIndexingSupported = DescIndFeats.shaderStorageImageArrayNonUniformIndexing;
1153                         NonUniformIndexingIsNative = DescIndProps.shaderStorageImageArrayNonUniformIndexingNative;
1154                         break;
1155                     case DescriptorType::UniformTexelBuffer:
1156                         NonUniformIndexingSupported = DescIndFeats.shaderUniformTexelBufferArrayNonUniformIndexing;
                         // NOTE(review): uses the sampled-image "native" property here;
                         // VkPhysicalDeviceDescriptorIndexingProperties has no dedicated
                         // uniform-texel-buffer field — confirm this mapping is intended.
1157                         NonUniformIndexingIsNative = DescIndProps.shaderSampledImageArrayNonUniformIndexingNative;
1158                         break;
1159                     case DescriptorType::StorageTexelBuffer:
1160                     case DescriptorType::StorageTexelBuffer_ReadOnly:
1161                         NonUniformIndexingSupported = DescIndFeats.shaderStorageTexelBufferArrayNonUniformIndexing;
1162                         NonUniformIndexingIsNative = DescIndProps.shaderStorageBufferArrayNonUniformIndexingNative;
1163                         break;
1164                     case DescriptorType::UniformBuffer:
1165                     case DescriptorType::UniformBufferDynamic:
1166                         NonUniformIndexingSupported = DescIndFeats.shaderUniformBufferArrayNonUniformIndexing;
1167                         NonUniformIndexingIsNative = DescIndProps.shaderUniformBufferArrayNonUniformIndexingNative;
1168                         break;
1169                     case DescriptorType::StorageBuffer:
1170                     case DescriptorType::StorageBuffer_ReadOnly:
1171                     case DescriptorType::StorageBufferDynamic:
1172                     case DescriptorType::StorageBufferDynamic_ReadOnly:
1173                         NonUniformIndexingSupported = DescIndFeats.shaderStorageBufferArrayNonUniformIndexing;
1174                         NonUniformIndexingIsNative = DescIndProps.shaderStorageBufferArrayNonUniformIndexingNative;
1175                         break;
1176                     case DescriptorType::InputAttachment:
1177                         NonUniformIndexingSupported = DescIndFeats.shaderInputAttachmentArrayNonUniformIndexing;
1178                         NonUniformIndexingIsNative = DescIndProps.shaderInputAttachmentArrayNonUniformIndexingNative;
1179                         break;
1180                     case DescriptorType::AccelerationStructure:
1181                         // There is no separate feature for acceleration structures, GLSL spec says:
1182                         // "If GL_EXT_nonuniform_qualifier is supported
1183                         // When aggregated into arrays within a shader, accelerationStructureEXT can
1184                         // be indexed with a non-uniform integral expressions, when decorated with the
1185                         // nonuniformEXT qualifier."
1186                         // Descriptor indexing is supported here, otherwise error will be generated in ValidatePipelineResourceSignatureDesc().
1187                         NonUniformIndexingSupported = true;
1188                         NonUniformIndexingIsNative = true;
1189                         break;
1190                 }
1191
1192                 // TODO: We don't know if this resource is used for non-uniform indexing or not.
1193                 if (!NonUniformIndexingSupported)
1194                 {
1195                     LOG_WARNING_MESSAGE("PSO '", m_Desc.Name, "', resource signature '", pSignature->GetDesc().Name, "' contains shader resource '",
1196                                         ResDesc.Name, "' that is defined with RUNTIME_ARRAY flag, but current device does not support non-uniform indexing for this resource type.");
1197                 }
1198                 else if (!NonUniformIndexingIsNative)
1199                 {
1200                     LOG_WARNING_MESSAGE("Performance warning in PSO '", m_Desc.Name, "', resource signature '", pSignature->GetDesc().Name, "': shader resource '",
1201                                         ResDesc.Name, "' is defined with RUNTIME_ARRAY flag, but non-uniform indexing is emulated on this device.");
1202                 }
1203             }
1204         }
1205     }
1206
1207     // Check total descriptor count
     // (groups descriptor types the same way Vulkan counts them against the
     // maxDescriptorSet* limits, e.g. uniform texel buffers count as sampled images).
1208     {
1209         const Uint32 NumSampledImages =
1210             DescriptorCount[static_cast<Uint32>(DescriptorType::CombinedImageSampler)] +
1211             DescriptorCount[static_cast<Uint32>(DescriptorType::SeparateImage)] +
1212             DescriptorCount[static_cast<Uint32>(DescriptorType::UniformTexelBuffer)];
1213         const Uint32 NumStorageImages =
1214             DescriptorCount[static_cast<Uint32>(DescriptorType::StorageImage)] +
1215             DescriptorCount[static_cast<Uint32>(DescriptorType::StorageTexelBuffer)] +
1216             DescriptorCount[static_cast<Uint32>(DescriptorType::StorageTexelBuffer_ReadOnly)];
1217         const Uint32 NumStorageBuffers =
1218             DescriptorCount[static_cast<Uint32>(DescriptorType::StorageBuffer)] +
1219             DescriptorCount[static_cast<Uint32>(DescriptorType::StorageBuffer_ReadOnly)];
1220         const Uint32 NumDynamicStorageBuffers =
1221             DescriptorCount[static_cast<Uint32>(DescriptorType::StorageBufferDynamic)] +
1222             DescriptorCount[static_cast<Uint32>(DescriptorType::StorageBufferDynamic_ReadOnly)];
1223         const Uint32 NumSamplers = DescriptorCount[static_cast<Uint32>(DescriptorType::Sampler)];
1224         const Uint32 NumUniformBuffers = DescriptorCount[static_cast<Uint32>(DescriptorType::UniformBuffer)];
1225         const Uint32 NumDynamicUniformBuffers = DescriptorCount[static_cast<Uint32>(DescriptorType::UniformBufferDynamic)];
1226         const Uint32 NumInputAttachments = DescriptorCount[static_cast<Uint32>(DescriptorType::InputAttachment)];
1227         const Uint32 NumAccelerationStructures = DescriptorCount[static_cast<Uint32>(DescriptorType::AccelerationStructure)];
1228
1229         DEV_CHECK_ERR(NumSamplers <= Limits.maxDescriptorSetSamplers,
1230                       "In PSO '", m_Desc.Name, "', the number of samplers (", NumSamplers, ") exceeds the limit (", Limits.maxDescriptorSetSamplers, ").");
1231         DEV_CHECK_ERR(NumSampledImages <= Limits.maxDescriptorSetSampledImages,
1232                       "In PSO '", m_Desc.Name, "', the number of sampled images (", NumSampledImages, ") exceeds the limit (", Limits.maxDescriptorSetSampledImages, ").");
1233         DEV_CHECK_ERR(NumStorageImages <= Limits.maxDescriptorSetStorageImages,
1234                       "In PSO '", m_Desc.Name, "', the number of storage images (", NumStorageImages, ") exceeds the limit (", Limits.maxDescriptorSetStorageImages, ").");
1235         DEV_CHECK_ERR(NumStorageBuffers <= Limits.maxDescriptorSetStorageBuffers,
1236                       "In PSO '", m_Desc.Name, "', the number of storage buffers (", NumStorageBuffers, ") exceeds the limit (", Limits.maxDescriptorSetStorageBuffers, ").");
1237         DEV_CHECK_ERR(NumDynamicStorageBuffers <= Limits.maxDescriptorSetStorageBuffersDynamic,
1238                       "In PSO '", m_Desc.Name, "', the number of dynamic storage buffers (", NumDynamicStorageBuffers, ") exceeds the limit (", Limits.maxDescriptorSetStorageBuffersDynamic, ").");
1239         DEV_CHECK_ERR(NumUniformBuffers <= Limits.maxDescriptorSetUniformBuffers,
1240                       "In PSO '", m_Desc.Name, "', the number of uniform buffers (", NumUniformBuffers, ") exceeds the limit (", Limits.maxDescriptorSetUniformBuffers, ").");
1241         DEV_CHECK_ERR(NumDynamicUniformBuffers <= Limits.maxDescriptorSetUniformBuffersDynamic,
1242                       "In PSO '", m_Desc.Name, "', the number of dynamic uniform buffers (", NumDynamicUniformBuffers, ") exceeds the limit (", Limits.maxDescriptorSetUniformBuffersDynamic, ").");
1243         DEV_CHECK_ERR(NumInputAttachments <= Limits.maxDescriptorSetInputAttachments,
1244                       "In PSO '", m_Desc.Name, "', the number of input attachments (", NumInputAttachments, ") exceeds the limit (", Limits.maxDescriptorSetInputAttachments, ").");
1245         DEV_CHECK_ERR(NumAccelerationStructures <= ASLimits.maxDescriptorSetAccelerationStructures,
1246                       "In PSO '", m_Desc.Name, "', the number of acceleration structures (", NumAccelerationStructures, ") exceeds the limit (", ASLimits.maxDescriptorSetAccelerationStructures, ").");
1247     }
1248
1249     // Check per stage descriptor count
     // (skips pipeline stage slots that no resource is visible in; note that
     // unlike the total check above, dynamic buffers are folded into the
     // per-stage storage/uniform buffer counts).
1250     for (Uint32 ShaderInd = 0; ShaderInd < PerStageDescriptorCount.size(); ++ShaderInd)
1251     {
1252         if (!ShaderStagePresented[ShaderInd])
1253             continue;
1254
1255         const auto& NumDesc = PerStageDescriptorCount[ShaderInd];
1256         const auto ShaderType = GetShaderTypeFromPipelineIndex(ShaderInd, m_Desc.PipelineType);
1257         const char* StageName = GetShaderTypeLiteralName(ShaderType);
1258
1259         const Uint32 NumSampledImages =
1260             NumDesc[static_cast<Uint32>(DescriptorType::CombinedImageSampler)] +
1261             NumDesc[static_cast<Uint32>(DescriptorType::SeparateImage)] +
1262             NumDesc[static_cast<Uint32>(DescriptorType::UniformTexelBuffer)];
1263         const Uint32 NumStorageImages =
1264             NumDesc[static_cast<Uint32>(DescriptorType::StorageImage)] +
1265             NumDesc[static_cast<Uint32>(DescriptorType::StorageTexelBuffer)] +
1266             NumDesc[static_cast<Uint32>(DescriptorType::StorageTexelBuffer_ReadOnly)];
1267         const Uint32 NumStorageBuffers =
1268             NumDesc[static_cast<Uint32>(DescriptorType::StorageBuffer)] +
1269             NumDesc[static_cast<Uint32>(DescriptorType::StorageBuffer_ReadOnly)] +
1270             NumDesc[static_cast<Uint32>(DescriptorType::StorageBufferDynamic)] +
1271             NumDesc[static_cast<Uint32>(DescriptorType::StorageBufferDynamic_ReadOnly)];
1272         const Uint32 NumUniformBuffers =
1273             NumDesc[static_cast<Uint32>(DescriptorType::UniformBuffer)] +
1274             NumDesc[static_cast<Uint32>(DescriptorType::UniformBufferDynamic)];
1275         const Uint32 NumSamplers = NumDesc[static_cast<Uint32>(DescriptorType::Sampler)];
1276         const Uint32 NumInputAttachments = NumDesc[static_cast<Uint32>(DescriptorType::InputAttachment)];
1277         const Uint32 NumAccelerationStructures = NumDesc[static_cast<Uint32>(DescriptorType::AccelerationStructure)];
1278         const Uint32 NumResources = NumSampledImages + NumStorageImages + NumStorageBuffers + NumUniformBuffers + NumSamplers + NumInputAttachments + NumAccelerationStructures;
1279
1280         DEV_CHECK_ERR(NumResources <= Limits.maxPerStageResources,
1281                       "In PSO '", m_Desc.Name, "' shader stage '", StageName, "', the total number of resources (", NumResources, ") exceeds the per-stage limit (", Limits.maxPerStageResources, ").");
1282         DEV_CHECK_ERR(NumSamplers <= Limits.maxPerStageDescriptorSamplers,
1283                       "In PSO '", m_Desc.Name, "' shader stage '", StageName, "', the number of samplers (", NumSamplers, ") exceeds the per-stage limit (", Limits.maxPerStageDescriptorSamplers, ").");
1284         DEV_CHECK_ERR(NumSampledImages <= Limits.maxPerStageDescriptorSampledImages,
1285                       "In PSO '", m_Desc.Name, "' shader stage '", StageName, "', the number of sampled images (", NumSampledImages, ") exceeds the per-stage limit (", Limits.maxPerStageDescriptorSampledImages, ").");
1286         DEV_CHECK_ERR(NumStorageImages <= Limits.maxPerStageDescriptorStorageImages,
1287                       "In PSO '", m_Desc.Name, "' shader stage '", StageName, "', the number of storage images (", NumStorageImages, ") exceeds the per-stage limit (", Limits.maxPerStageDescriptorStorageImages, ").");
1288         DEV_CHECK_ERR(NumStorageBuffers <= Limits.maxPerStageDescriptorStorageBuffers,
1289                       "In PSO '", m_Desc.Name, "' shader stage '", StageName, "', the number of storage buffers (", NumStorageBuffers, ") exceeds the per-stage limit (", Limits.maxPerStageDescriptorStorageBuffers, ").");
1290         DEV_CHECK_ERR(NumUniformBuffers <= Limits.maxPerStageDescriptorUniformBuffers,
1291                       "In PSO '", m_Desc.Name, "' shader stage '", StageName, "', the number of uniform buffers (", NumUniformBuffers, ") exceeds the per-stage limit (", Limits.maxPerStageDescriptorUniformBuffers, ").");
1292         DEV_CHECK_ERR(NumInputAttachments <= Limits.maxPerStageDescriptorInputAttachments,
1293                       "In PSO '", m_Desc.Name, "' shader stage '", StageName, "', the number of input attachments (", NumInputAttachments, ") exceeds the per-stage limit (", Limits.maxPerStageDescriptorInputAttachments, ").");
1294         DEV_CHECK_ERR(NumAccelerationStructures <= ASLimits.maxPerStageDescriptorAccelerationStructures,
1295                       "In PSO '", m_Desc.Name, "' shader stage '", StageName, "', the number of acceleration structures (", NumAccelerationStructures, ") exceeds the per-stage limit (", ASLimits.maxPerStageDescriptorAccelerationStructures, ").");
1296     }
1297 }
1298 #endif // DILIGENT_DEVELOPMENT
10991299
11001300 } // namespace Diligent
3232 #include "VulkanTypeConversions.hpp"
3333 #include "EngineMemory.h"
3434
35 #ifdef VK_USE_PLATFORM_WIN32_KHR
36 # define WIN32_LEAN_AND_MEAN
37 # include <Windows.h>
38 # undef CreateSemaphore
39 #endif
40
3541 namespace Diligent
3642 {
3743
5959 const auto& LogicalDevice = pRenderDeviceVk->GetLogicalDevice();
6060
6161 const bool bInitializeTexture = (pInitData != nullptr && pInitData->pSubResources != nullptr && pInitData->NumSubresources > 0);
62 const bool ImageView2DSupported =
63 (m_Desc.Type == RESOURCE_DIM_TEX_3D && LogicalDevice.GetEnabledExtFeatures().HasPortabilitySubset) ?
64 LogicalDevice.GetEnabledExtFeatures().PortabilitySubset.imageView2DOn3DImage == VK_TRUE :
65 true;
66
6267 if (m_Desc.Usage == USAGE_IMMUTABLE || m_Desc.Usage == USAGE_DEFAULT || m_Desc.Usage == USAGE_DYNAMIC)
6368 {
6469 VkImageCreateInfo ImageCI = {};
7984 else if (m_Desc.Type == RESOURCE_DIM_TEX_3D)
8085 {
8186 ImageCI.imageType = VK_IMAGE_TYPE_3D;
82 ImageCI.flags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
87 if (ImageView2DSupported)
88 ImageCI.flags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
8389 }
8490 else
8591 {
124130 {
125131 // VK_IMAGE_USAGE_TRANSFER_DST_BIT is required for vkCmdClearColorImage()
126132 ImageCI.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
133 DEV_CHECK_ERR(ImageView2DSupported, "imageView2DOn3DImage in VkPhysicalDevicePortabilitySubsetFeaturesKHR is not enabled, can not create render target with 2D image view");
127134 }
128135 if (m_Desc.BindFlags & BIND_DEPTH_STENCIL)
129136 {
130137 // VK_IMAGE_USAGE_TRANSFER_DST_BIT is required for vkCmdClearDepthStencilImage()
131138 ImageCI.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
139 DEV_CHECK_ERR(ImageView2DSupported, "imageView2DOn3DImage in VkPhysicalDevicePortabilitySubsetFeaturesKHR is not enabled, can not create depth-stencil target with 2D image view");
132140 }
133141 if (m_Desc.BindFlags & BIND_UNORDERED_ACCESS)
134142 {
145153
146154 if (m_Desc.MiscFlags & MISC_TEXTURE_FLAG_GENERATE_MIPS)
147155 {
148 if (CheckCSBasedMipGenerationSupport(ImageCI.format))
156 if (CheckCSBasedMipGenerationSupport(ImageCI.format) && ImageView2DSupported)
149157 {
150158 ImageCI.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
151159 m_bCSBasedMipGenerationSupported = true;
5454 // Temporarily disable false warnings from validation layers (1.2.170).
5555 // TODO: check in next Vulkan SDK
5656 #if 1
57 if (std::string{"VUID-vkCmdPipelineBarrier-dstAccessMask-02816"} == callbackData->pMessageIdName)
58 {
59 std::string msg{callbackData->pMessage};
60 if (msg.find("dstAccessMask bit VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR is not supported by stage mask (VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR)") != std::string::npos)
61 return VK_FALSE;
57 if (callbackData->pMessageIdName)
58 {
59 if (std::string{"VUID-vkCmdPipelineBarrier-dstAccessMask-02816"} == callbackData->pMessageIdName)
60 {
61 std::string msg{callbackData->pMessage};
62 if (msg.find("dstAccessMask bit VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR is not supported by stage mask (VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR)") != std::string::npos)
63 return VK_FALSE;
64 }
6265 }
6366 #endif
6467
188188 m_ExtFeatures.Spirv15 = true;
189189 }
190190
191 # ifdef PLATFORM_MACOS
192 // Extension required for MoltenVk
193 if (IsExtensionSupported(VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME))
194 {
195 *NextFeat = &m_ExtFeatures.PortabilitySubset;
196 NextFeat = &m_ExtFeatures.PortabilitySubset.pNext;
197
198 m_ExtFeatures.HasPortabilitySubset = true;
199 m_ExtFeatures.PortabilitySubset.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR;
200
201 *NextProp = &m_ExtProperties.PortabilitySubset;
202 NextProp = &m_ExtProperties.PortabilitySubset.pNext;
203
204 m_ExtProperties.PortabilitySubset.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR;
205 }
206 # endif
207
191208 // make sure that last pNext is null
192209 *NextFeat = nullptr;
193210 *NextProp = nullptr;
11 #extension GL_ARB_shading_language_420pack : enable
22 #extension GL_EXT_nonuniform_qualifier : require
33
4 uniform texture2D g_Textures[];
4 #if TEXTURES_NONUNIFORM_INDEXING
5 # define TEXTURES_NONUNIFORM(x) nonuniformEXT(x)
6 # define TEXTURES_COUNT // unsized array
7 #else
8 # define TEXTURES_NONUNIFORM(x) x
9 # define TEXTURES_COUNT NUM_TEXTURES
10 #endif
11
12 #if CONST_BUFFERS_NONUNIFORM_INDEXING
13 # define CONST_BUFFERS_NONUNIFORM(x) nonuniformEXT(x)
14 # define CONST_BUFFERS_COUNT // unsized array
15 #else
16 # define CONST_BUFFERS_NONUNIFORM(x) x
17 # define CONST_BUFFERS_COUNT NUM_CONST_BUFFERS
18 #endif
19
20 #if FMT_BUFFERS_NONUNIFORM_INDEXING
21 # define FMT_BUFFERS_NONUNIFORM(x) nonuniformEXT(x)
22 # define FMT_BUFFERS_COUNT // unsized array
23 #else
24 # define FMT_BUFFERS_NONUNIFORM(x) x
25 # define FMT_BUFFERS_COUNT NUM_FMT_BUFFERS
26 #endif
27
28 #if STRUCT_BUFFERS_NONUNIFORM_INDEXING
29 # define STRUCT_BUFFERS_NONUNIFORM(x) nonuniformEXT(x)
30 # define STRUCT_BUFFERS_COUNT // unsized array
31 #else
32 # define STRUCT_BUFFERS_NONUNIFORM(x) x
33 # define STRUCT_BUFFERS_COUNT NUM_STRUCT_BUFFERS
34 #endif
35
36 #if RWTEXTURES_NONUNIFORM_INDEXING
37 # define RWTEXTURES_NONUNIFORM(x) nonuniformEXT(x)
38 # define RWTEXTURES_COUNT // unsized array
39 #else
40 # define RWTEXTURES_NONUNIFORM(x) x
41 # define RWTEXTURES_COUNT NUM_RWTEXTURES
42 #endif
43
44 #if RWSTRUCT_BUFFERS_NONUNIFORM_INDEXING
45 # define RWSTRUCT_BUFFERS_NONUNIFORM(x) nonuniformEXT(x)
46 # define RWSTRUCT_BUFFERS_COUNT // unsized array
47 #else
48 # define RWSTRUCT_BUFFERS_NONUNIFORM(x) x
49 # define RWSTRUCT_BUFFERS_COUNT NUM_RWSTRUCT_BUFFERS
50 #endif
51
52 #if RWFMT_BUFFERS_NONUNIFORM_INDEXING
53 # define RWFMT_BUFFERS_NONUNIFORM(x) nonuniformEXT(x)
54 # define RWFMT_BUFFERS_COUNT // unsized array
55 #else
56 # define RWFMT_BUFFERS_NONUNIFORM(x) x
57 # define RWFMT_BUFFERS_COUNT NUM_RWFMT_BUFFERS
58 #endif
59
60 uniform texture2D g_Textures[TEXTURES_COUNT];
561 uniform sampler g_Samplers[];
662
763 uniform g_ConstantBuffers
864 {
965 vec4 Data;
10 }g_ConstantBufferInst[];
66 }g_ConstantBufferInst[CONST_BUFFERS_COUNT];
1167
12 uniform samplerBuffer g_FormattedBuffers[];
68 uniform samplerBuffer g_FormattedBuffers[FMT_BUFFERS_COUNT];
1369
1470 layout(std140) readonly buffer g_StructuredBuffers
1571 {
1672 vec4 Data;
17 }g_StructuredBufferInst[];
73 }g_StructuredBufferInst[STRUCT_BUFFERS_COUNT];
1874
19 layout(rgba8) uniform image2D g_RWTextures[];
75 layout(rgba8) uniform image2D g_RWTextures[RWTEXTURES_COUNT];
2076
2177 layout(std140) buffer g_RWStructBuffers
2278 {
2379 vec4 Data;
24 }g_RWStructBuffersInst[];
80 }g_RWStructBuffersInst[RWSTRUCT_BUFFERS_COUNT];
2581
2682
27 layout(rgba32f) uniform imageBuffer g_RWFormattedBuffers[];
83 layout(rgba32f) uniform imageBuffer g_RWFormattedBuffers[RWFMT_BUFFERS_COUNT];
84
2885
2986 vec4 CheckValue(vec4 Val, vec4 Expected)
3087 {
68125 StructBuffRefValues[1] = StructBuff_Ref1;
69126 StructBuffRefValues[2] = StructBuff_Ref2;
70127
71 vec4 RWTexRefValues[NUM_TEXTURES];
128 vec4 RWTexRefValues[NUM_RWTEXTURES];
72129 RWTexRefValues[0] = RWTex2D_Ref0;
73130 RWTexRefValues[1] = RWTex2D_Ref1;
74131 RWTexRefValues[2] = RWTex2D_Ref2;
93150 uint RWFmtBuffIdx = index % NUM_RWFMT_BUFFERS;
94151
95152 vec4 AllCorrect = vec4(1.0, 1.0, 1.0, 1.0);
96 AllCorrect *= CheckValue(textureLod(sampler2D(g_Textures[nonuniformEXT(TexIdx)], g_Samplers[nonuniformEXT(SamIdx)]), coord, 0.0), TexRefValues[TexIdx]);
97 AllCorrect *= CheckValue(g_ConstantBufferInst[nonuniformEXT(BuffIdx)].Data, ConstBuffRefValues[BuffIdx]);
98 AllCorrect *= CheckValue(texelFetch(g_FormattedBuffers[nonuniformEXT(FmtBuffIdx)], 0), FmtBuffRefValues[FmtBuffIdx]);
99 AllCorrect *= CheckValue(g_StructuredBufferInst[nonuniformEXT(StructBuffIdx)].Data, StructBuffRefValues[StructBuffIdx]);
100 AllCorrect *= CheckValue(imageLoad(g_RWTextures[nonuniformEXT(RWTexIdx)], ivec2(coord * 10)), RWTexRefValues[RWTexIdx]);
101 AllCorrect *= CheckValue(g_RWStructBuffersInst[nonuniformEXT(RWStructBuffIdx)].Data, RWStructBuffRefValues[RWStructBuffIdx]);
102 AllCorrect *= CheckValue(imageLoad(g_RWFormattedBuffers[nonuniformEXT(RWFmtBuffIdx)], 0), RWFmtBuffRefValues[RWFmtBuffIdx]);
153 AllCorrect *= CheckValue(textureLod(sampler2D(g_Textures[TEXTURES_NONUNIFORM(TexIdx)], g_Samplers[nonuniformEXT(SamIdx)]), coord, 0.0), TexRefValues[TexIdx]);
154 AllCorrect *= CheckValue(g_ConstantBufferInst[CONST_BUFFERS_NONUNIFORM(BuffIdx)].Data, ConstBuffRefValues[BuffIdx]);
155 AllCorrect *= CheckValue(texelFetch(g_FormattedBuffers[FMT_BUFFERS_NONUNIFORM(FmtBuffIdx)], 0), FmtBuffRefValues[FmtBuffIdx]);
156 AllCorrect *= CheckValue(g_StructuredBufferInst[STRUCT_BUFFERS_NONUNIFORM(StructBuffIdx)].Data, StructBuffRefValues[StructBuffIdx]);
157 AllCorrect *= CheckValue(imageLoad(g_RWTextures[RWTEXTURES_NONUNIFORM(RWTexIdx)], ivec2(coord * 10)), RWTexRefValues[RWTexIdx]);
158 AllCorrect *= CheckValue(g_RWStructBuffersInst[RWSTRUCT_BUFFERS_NONUNIFORM(RWStructBuffIdx)].Data, RWStructBuffRefValues[RWStructBuffIdx]);
159 AllCorrect *= CheckValue(imageLoad(g_RWFormattedBuffers[RWFMT_BUFFERS_NONUNIFORM(RWFmtBuffIdx)], 0), RWFmtBuffRefValues[RWFmtBuffIdx]);
103160
104161 return AllCorrect;
105162 }
0 Texture2D g_Textures[] : register(t0, space1);
0
1 #if TEXTURES_NONUNIFORM_INDEXING
2 # define TEXTURES_NONUNIFORM(x) NonUniformResourceIndex(x)
3 # define TEXTURES_COUNT // unsized array
4 #else
5 # define TEXTURES_NONUNIFORM(x) x
6 # define TEXTURES_COUNT NUM_TEXTURES
7 #endif
8
9 #if CONST_BUFFERS_NONUNIFORM_INDEXING
10 # define CONST_BUFFERS_NONUNIFORM(x) NonUniformResourceIndex(x)
11 # define CONST_BUFFERS_COUNT // unsized array
12 #else
13 # define CONST_BUFFERS_NONUNIFORM(x) x
14 # define CONST_BUFFERS_COUNT NUM_CONST_BUFFERS
15 #endif
16
17 #if FMT_BUFFERS_NONUNIFORM_INDEXING
18 # define FMT_BUFFERS_NONUNIFORM(x) NonUniformResourceIndex(x)
19 # define FMT_BUFFERS_COUNT // unsized array
20 #else
21 # define FMT_BUFFERS_NONUNIFORM(x) x
22 # define FMT_BUFFERS_COUNT NUM_FMT_BUFFERS
23 #endif
24
25 #if STRUCT_BUFFERS_NONUNIFORM_INDEXING
26 # define STRUCT_BUFFERS_NONUNIFORM(x) NonUniformResourceIndex(x)
27 # define STRUCT_BUFFERS_COUNT // unsized array
28 #else
29 # define STRUCT_BUFFERS_NONUNIFORM(x) x
30 # define STRUCT_BUFFERS_COUNT NUM_STRUCT_BUFFERS
31 #endif
32
33 #if RWTEXTURES_NONUNIFORM_INDEXING
34 # define RWTEXTURES_NONUNIFORM(x) NonUniformResourceIndex(x)
35 # define RWTEXTURES_COUNT // unsized array
36 #else
37 # define RWTEXTURES_NONUNIFORM(x) x
38 # define RWTEXTURES_COUNT NUM_RWTEXTURES
39 #endif
40
41 #if RWSTRUCT_BUFFERS_NONUNIFORM_INDEXING
42 # define RWSTRUCT_BUFFERS_NONUNIFORM(x) NonUniformResourceIndex(x)
43 # define RWSTRUCT_BUFFERS_COUNT // unsized array
44 #else
45 # define RWSTRUCT_BUFFERS_NONUNIFORM(x) x
46 # define RWSTRUCT_BUFFERS_COUNT NUM_RWSTRUCT_BUFFERS
47 #endif
48
49 #if RWFMT_BUFFERS_NONUNIFORM_INDEXING
50 # define RWFMT_BUFFERS_NONUNIFORM(x) NonUniformResourceIndex(x)
51 # define RWFMT_BUFFERS_COUNT // unsized array
52 #else
53 # define RWFMT_BUFFERS_NONUNIFORM(x) x
54 # define RWFMT_BUFFERS_COUNT NUM_RWFMT_BUFFERS
55 #endif
56
57 Texture2D g_Textures[TEXTURES_COUNT] : register(t0, space1);
158 SamplerState g_Samplers[] : register(s4, space27);
259
360 struct CBData
461 {
562 float4 Data;
663 };
7 ConstantBuffer<CBData> g_ConstantBuffers[] : register(b10, space5);
64 ConstantBuffer<CBData> g_ConstantBuffers[CONST_BUFFERS_COUNT] : register(b10, space5);
865
9 Buffer g_FormattedBuffers[]: register(t15, space7);
66 Buffer g_FormattedBuffers[FMT_BUFFERS_COUNT]: register(t15, space7);
1067
1168 struct StructBuffData
1269 {
1370 float4 Data;
1471 };
15 StructuredBuffer<StructBuffData> g_StructuredBuffers[];
72 StructuredBuffer<StructBuffData> g_StructuredBuffers[STRUCT_BUFFERS_COUNT];
1673
17 RWTexture2D<unorm float4 /*format=rgba8*/> g_RWTextures[] : register(u10, space5);
74 RWTexture2D<unorm float4 /*format=rgba8*/> g_RWTextures[RWTEXTURES_COUNT] : register(u10, space5);
1875
1976
2077 #ifndef VULKAN // RW structured buffers are not supported by DXC
2279 {
2380 float4 Data;
2481 };
25 RWStructuredBuffer<RWStructBuffData> g_RWStructBuffers[] : register(u10, space6);
82 RWStructuredBuffer<RWStructBuffData> g_RWStructBuffers[RWSTRUCT_BUFFERS_COUNT] : register(u10, space6);
2683 #endif
2784
28 RWBuffer<float4> g_RWFormattedBuffers[] : register(u10, space41);
29
85 RWBuffer<float4> g_RWFormattedBuffers[RWFMT_BUFFERS_COUNT] : register(u10, space41);
3086
3187 float4 CheckValue(float4 Val, float4 Expected)
3288 {
70126 StructBuffRefValues[1] = StructBuff_Ref1;
71127 StructBuffRefValues[2] = StructBuff_Ref2;
72128
73 float4 RWTexRefValues[NUM_TEXTURES];
129 float4 RWTexRefValues[NUM_RWTEXTURES];
74130 RWTexRefValues[0] = RWTex2D_Ref0;
75131 RWTexRefValues[1] = RWTex2D_Ref1;
76132 RWTexRefValues[2] = RWTex2D_Ref2;
95151 uint RWFmtBuffIdx = index % NUM_RWFMT_BUFFERS;
96152
97153 float4 AllCorrect = float4(1.0, 1.0, 1.0, 1.0);
98 AllCorrect *= CheckValue(g_Textures[NonUniformResourceIndex(TexIdx)].SampleLevel(g_Samplers[NonUniformResourceIndex(SamIdx)], coord, 0.0), TexRefValues[TexIdx]);
99 AllCorrect *= CheckValue(g_ConstantBuffers[NonUniformResourceIndex(ConstBuffIdx)].Data, ConstBuffRefValues[ConstBuffIdx]);
100 AllCorrect *= CheckValue(g_FormattedBuffers[NonUniformResourceIndex(FmtBuffIdx)].Load(0), FmtBuffRefValues[FmtBuffIdx]);
101 AllCorrect *= CheckValue(g_StructuredBuffers[NonUniformResourceIndex(StructBuffIdx)][0].Data, StructBuffRefValues[StructBuffIdx]);
102 AllCorrect *= CheckValue(g_RWTextures[NonUniformResourceIndex(RWTexIdx)][int2(coord*10)], RWTexRefValues[RWTexIdx]);
154 AllCorrect *= CheckValue(g_Textures[TEXTURES_NONUNIFORM(TexIdx)].SampleLevel(g_Samplers[NonUniformResourceIndex(SamIdx)], coord, 0.0), TexRefValues[TexIdx]);
155 AllCorrect *= CheckValue(g_ConstantBuffers[CONST_BUFFERS_NONUNIFORM(ConstBuffIdx)].Data, ConstBuffRefValues[ConstBuffIdx]);
156 AllCorrect *= CheckValue(g_FormattedBuffers[FMT_BUFFERS_NONUNIFORM(FmtBuffIdx)].Load(0), FmtBuffRefValues[FmtBuffIdx]);
157 AllCorrect *= CheckValue(g_StructuredBuffers[STRUCT_BUFFERS_NONUNIFORM(StructBuffIdx)][0].Data, StructBuffRefValues[StructBuffIdx]);
158 AllCorrect *= CheckValue(g_RWTextures[RWTEXTURES_NONUNIFORM(RWTexIdx)][int2(coord*10)], RWTexRefValues[RWTexIdx]);
103159 #ifndef VULKAN
104 AllCorrect *= CheckValue(g_RWStructBuffers[NonUniformResourceIndex(RWStructBuffIdx)][0].Data, RWStructBuffRefValues[RWStructBuffIdx]);
160 AllCorrect *= CheckValue(g_RWStructBuffers[RWSTRUCT_BUFFERS_NONUNIFORM(RWStructBuffIdx)][0].Data, RWStructBuffRefValues[RWStructBuffIdx]);
105161 #endif
106 AllCorrect *= CheckValue(g_RWFormattedBuffers[NonUniformResourceIndex(RWFmtBuffIdx)][0], RWFmtBuffRefValues[RWFmtBuffIdx]);
162 AllCorrect *= CheckValue(g_RWFormattedBuffers[RWFMT_BUFFERS_NONUNIFORM(RWFmtBuffIdx)][0], RWFmtBuffRefValues[RWFmtBuffIdx]);
107163
108164 return AllCorrect;
109165 }
7474
7575 void ExecuteCommandList(ID3D12CommandList* pCmdList, bool WaitForIdle);
7676
77 virtual bool HasDXCompiler() const override final
78 {
79 return m_pDxCompiler != nullptr && m_pDxCompiler->IsLoaded();
80 }
81
7782 HRESULT CompileDXILShader(const std::string& Source,
7883 LPCWSTR strFunctionName,
7984 const DxcDefine* Defines,
6767 void TearDown() override final;
6868
6969 virtual void Reset();
70
71 virtual bool HasDXCompiler() const { return false; }
7072
7173 void ReleaseResources();
7274
2929 #include <array>
3030
3131 #include "TestingEnvironment.hpp"
32 #include "DXCompiler.hpp"
3233
3334 #define VK_NO_PROTOTYPES
3435 #include "vulkan/vulkan.h"
35 #include "vulkan/vulkan_beta.h"
3636
3737 namespace Diligent
3838 {
7777 return m_vkPhysicalDevice;
7878 }
7979
80 bool HasDXCompiler() const override final
81 {
82 return m_pDxCompiler != nullptr && m_pDxCompiler->IsLoaded();
83 }
84
8085 VkShaderModule CreateShaderModule(const SHADER_TYPE ShaderType, const std::string& ShaderSource);
8186
8287 static VkRenderPassCreateInfo GetRenderPassCreateInfo(
109114 VkCommandPool m_vkCmdPool = VK_NULL_HANDLE;
110115 VkFence m_vkFence = VK_NULL_HANDLE;
111116
117 std::unique_ptr<IDXCompiler> m_pDxCompiler;
118
112119 VkPhysicalDeviceMemoryProperties m_MemoryProperties = {};
120
121 public:
122 VkPhysicalDeviceDescriptorIndexingFeaturesEXT DescriptorIndexing = {};
123 VkPhysicalDeviceProperties DeviceProps = {};
113124 };
114125
115126 } // namespace Testing
136136 auto* const pEnv = TestingEnvironment::GetInstance();
137137 auto* const pDevice = pEnv->GetDevice();
138138
139 sm_HasMeshShader = pDevice->GetDeviceCaps().Features.MeshShaders;
140 sm_HasRayTracing = pDevice->GetDeviceCaps().Features.RayTracing;
139 sm_HasMeshShader = pDevice->GetDeviceCaps().Features.MeshShaders && pEnv->HasDXCompiler();
140 sm_HasRayTracing = pDevice->GetDeviceCaps().Features.RayTracing && pEnv->HasDXCompiler();
141141
142142 ShaderCreateInfo ShaderCI;
143143 ShaderCI.Source = g_TrivialVSSource;
13891389 GTEST_SKIP();
13901390 }
13911391
1392 static constexpr char PSSource[] = R"(
1392 static constexpr char PSSource_HLSL[] = R"(
13931393 Texture2D g_Texture[];
13941394 cbuffer ConstBuffer
13951395 {
14011401 }
14021402 )";
14031403
1404 static constexpr char PSSource_GLSL[] = R"(
1405 #version 460 core
1406 #extension GL_EXT_nonuniform_qualifier : require
1407 #extension GL_EXT_samplerless_texture_functions : require
1408
1409 uniform texture2D g_Texture[];
1410 layout(std140) uniform ConstBuffer
1411 {
1412 uint Index;
1413 };
1414 layout(location=0) out vec4 out_Color;
1415
1416 void main()
1417 {
1418 out_Color = texelFetch(g_Texture[nonuniformEXT(Index)], ivec2(0,0), 0);
1419 }
1420 )";
1421
14041422 ShaderCreateInfo ShaderCI;
1405 ShaderCI.Source = PSSource;
14061423 ShaderCI.Desc.ShaderType = SHADER_TYPE_PIXEL;
14071424 ShaderCI.Desc.Name = "Invalid Run-Time Array (PSOCreationFailureTest)";
1408 ShaderCI.SourceLanguage = SHADER_SOURCE_LANGUAGE_HLSL;
1409 if (deviceCaps.IsVulkanDevice())
1410 ShaderCI.ShaderCompiler = SHADER_COMPILER_DXC; // GLSLang does not handle HLSL run-time arrays properly
1425 ShaderCI.ShaderCompiler = pEnv->GetDefaultCompiler(ShaderCI.SourceLanguage);
1426 if (deviceCaps.IsD3DDevice())
1427 {
1428 ShaderCI.Source = PSSource_HLSL;
1429 ShaderCI.SourceLanguage = SHADER_SOURCE_LANGUAGE_HLSL;
1430 }
14111431 else
1412 ShaderCI.ShaderCompiler = pEnv->GetDefaultCompiler(ShaderCI.SourceLanguage);
1432 {
1433 ShaderCI.Source = PSSource_GLSL;
1434 ShaderCI.SourceLanguage = SHADER_SOURCE_LANGUAGE_GLSL_VERBATIM;
1435 }
14131436 ShaderCI.UseCombinedTextureSamplers = true;
14141437 ShaderCI.CompileFlags = SHADER_COMPILE_FLAG_ENABLE_UNBOUNDED_ARRAYS;
14151438 RefCntAutoPtr<IShader> pPS;
3333 #include "GraphicsAccessories.hpp"
3434 #include "ResourceLayoutTestCommon.hpp"
3535
36 #include "Vulkan/TestingEnvironmentVk.hpp"
37
3638 #include "gtest/gtest.h"
3739
3840 using namespace Diligent;
15571559 GTEST_SKIP() << "Direct3D does not support GLSL";
15581560 }
15591561
1562 if (deviceCaps.IsVulkanDevice() && !pEnv->HasDXCompiler())
1563 {
1564 GTEST_SKIP() << "Vulkan requires DXCompiler which is not found";
1565 }
1566
1567 bool ConstantBufferNonUniformIndexing = true;
1568 bool SRVBufferNonUniformIndexing = true;
1569 bool UAVBufferNonUniformIndexing = true;
1570 bool SRVTextureNonUniformIndexing = true;
1571 bool UAVTextureNonUniformIndexing = true;
1572
1573 if (pDevice->GetDeviceCaps().IsVulkanDevice())
1574 {
1575 auto* pEnvVk = static_cast<TestingEnvironmentVk*>(pEnv);
1576 ConstantBufferNonUniformIndexing = (pEnvVk->DescriptorIndexing.shaderUniformBufferArrayNonUniformIndexing == VK_TRUE);
1577 SRVBufferNonUniformIndexing = (pEnvVk->DescriptorIndexing.shaderStorageBufferArrayNonUniformIndexing == VK_TRUE);
1578 UAVBufferNonUniformIndexing = SRVBufferNonUniformIndexing;
1579 SRVTextureNonUniformIndexing = (pEnvVk->DescriptorIndexing.shaderSampledImageArrayNonUniformIndexing == VK_TRUE);
1580 UAVTextureNonUniformIndexing = (pEnvVk->DescriptorIndexing.shaderStorageImageArrayNonUniformIndexing == VK_TRUE);
1581 }
1582
15601583 TestingEnvironment::ScopedReset EnvironmentAutoReset;
15611584
15621585 auto* pContext = pEnv->GetDeviceContext();
16781701 Macros.AddShaderMacro("NUM_RWTEXTURES", RWTexArraySize);
16791702 Macros.AddShaderMacro("NUM_RWSTRUCT_BUFFERS", RWStructBuffArraySize);
16801703 Macros.AddShaderMacro("NUM_RWFMT_BUFFERS", RWFormattedBuffArraySize);
1704
1705 Macros.AddShaderMacro("TEXTURES_NONUNIFORM_INDEXING", SRVTextureNonUniformIndexing ? 1 : 0);
1706 Macros.AddShaderMacro("CONST_BUFFERS_NONUNIFORM_INDEXING", ConstantBufferNonUniformIndexing ? 1 : 0);
1707 Macros.AddShaderMacro("FMT_BUFFERS_NONUNIFORM_INDEXING", SRVBufferNonUniformIndexing ? 1 : 0);
1708 Macros.AddShaderMacro("STRUCT_BUFFERS_NONUNIFORM_INDEXING", SRVBufferNonUniformIndexing ? 1 : 0);
1709 Macros.AddShaderMacro("RWTEXTURES_NONUNIFORM_INDEXING", UAVTextureNonUniformIndexing ? 1 : 0);
1710 Macros.AddShaderMacro("RWSTRUCT_BUFFERS_NONUNIFORM_INDEXING", UAVBufferNonUniformIndexing ? 1 : 0);
1711 Macros.AddShaderMacro("RWFMT_BUFFERS_NONUNIFORM_INDEXING", UAVBufferNonUniformIndexing ? 1 : 0);
1712
16811713 if (IsGLSL)
16821714 Macros.AddShaderMacro("float4", "vec4");
16831715 for (Uint32 i = 0; i < TexArraySize; ++i)
3636 #include "ResourceLayoutTestCommon.hpp"
3737 #include "TestingSwapChainBase.hpp"
3838
39 #include "Vulkan/TestingEnvironmentVk.hpp"
40
3941 #include "gtest/gtest.h"
4042
4143 using namespace Diligent;
746748 ComputeShaderReference(pSwapChain);
747749
748750 const auto& deviceCaps = pDevice->GetDeviceCaps();
749 auto deviceType = deviceCaps.DevType;
750751
751752 constexpr Uint32 MaxStaticBuffArraySize = 4;
752753 constexpr Uint32 MaxMutableBuffArraySize = 3;
753754 constexpr Uint32 MaxDynamicBuffArraySize = 2;
755 constexpr Uint32 MaxUAVBuffers =
756 MaxStaticBuffArraySize +
757 MaxMutableBuffArraySize +
758 MaxDynamicBuffArraySize +
759 3 /*non array resources*/ +
760 1 /*output UAV texture*/;
761
762 bool UseReducedUAVCount = false;
763 switch (deviceCaps.DevType)
764 {
765 case RENDER_DEVICE_TYPE_D3D11:
766 case RENDER_DEVICE_TYPE_GL:
767 case RENDER_DEVICE_TYPE_GLES:
768 UseReducedUAVCount = true;
769 break;
770
771 case RENDER_DEVICE_TYPE_VULKAN:
772 {
773 const auto* pEnvVk = static_cast<const TestingEnvironmentVk*>(pEnv);
774 const auto& Limits = pEnvVk->DeviceProps.limits;
775 if (Limits.maxPerStageDescriptorStorageImages < 8)
776 {
777 GTEST_SKIP() << "The number of supported UAV buffers is too small.";
778 }
779 else if (Limits.maxPerStageDescriptorStorageImages < MaxUAVBuffers)
780 UseReducedUAVCount = true;
781 break;
782 }
783 }
754784
755785 // Prepare buffers with reference values
756786 ReferenceBuffers RefBuffers{
761791 IsFormatted ? BUFFER_MODE_FORMATTED : BUFFER_MODE_STRUCTURED //
762792 };
763793
764 const Uint32 StaticBuffArraySize = deviceType == RENDER_DEVICE_TYPE_D3D11 || deviceCaps.IsGLDevice() ? 1 : MaxStaticBuffArraySize;
765 const Uint32 MutableBuffArraySize = deviceType == RENDER_DEVICE_TYPE_D3D11 || deviceCaps.IsGLDevice() ? 1 : MaxMutableBuffArraySize;
794 const Uint32 StaticBuffArraySize = UseReducedUAVCount ? 1 : MaxStaticBuffArraySize;
795 const Uint32 MutableBuffArraySize = UseReducedUAVCount ? 1 : MaxMutableBuffArraySize;
766796 const Uint32 DynamicBuffArraySize = MaxDynamicBuffArraySize;
767797
768798 static constexpr size_t Buff_StaticIdx = 0;
919949 ComputeShaderReference(pSwapChain);
920950
921951 const auto& deviceCaps = pDevice->GetDeviceCaps();
922 auto deviceType = deviceCaps.DevType;
923952
924953 constexpr Uint32 MaxStaticTexArraySize = 2;
925954 constexpr Uint32 MaxMutableTexArraySize = 4;
926955 constexpr Uint32 MaxDynamicTexArraySize = 3;
956 constexpr Uint32 MaxUAVTextures =
957 MaxStaticTexArraySize +
958 MaxMutableTexArraySize +
959 MaxDynamicTexArraySize +
960 3 /*non array resources*/ +
961 1 /*output UAV texture*/;
962
963 bool UseReducedUAVCount = false;
964 switch (deviceCaps.DevType)
965 {
966 case RENDER_DEVICE_TYPE_D3D11:
967 case RENDER_DEVICE_TYPE_GL:
968 case RENDER_DEVICE_TYPE_GLES:
969 UseReducedUAVCount = true;
970 break;
971
972 case RENDER_DEVICE_TYPE_VULKAN:
973 {
974 const auto* pEnvVk = static_cast<TestingEnvironmentVk*>(pEnv);
975 const auto& Limits = pEnvVk->DeviceProps.limits;
976 if (Limits.maxPerStageDescriptorStorageImages < 8)
977 {
978 GTEST_SKIP() << "The number of supported UAV textures is too small.";
979 }
980 else if (Limits.maxPerStageDescriptorStorageImages < MaxUAVTextures)
981 UseReducedUAVCount = true;
982 break;
983 }
984 }
927985
928986 const Uint32 StaticTexArraySize = MaxStaticTexArraySize;
929 const Uint32 MutableTexArraySize = deviceType == RENDER_DEVICE_TYPE_D3D11 || deviceCaps.IsGLDevice() ? 1 : MaxMutableTexArraySize;
930 const Uint32 DynamicTexArraySize = deviceType == RENDER_DEVICE_TYPE_D3D11 || deviceCaps.IsGLDevice() ? 1 : MaxDynamicTexArraySize;
987 const Uint32 MutableTexArraySize = UseReducedUAVCount ? 1 : MaxMutableTexArraySize;
988 const Uint32 DynamicTexArraySize = UseReducedUAVCount ? 1 : MaxDynamicTexArraySize;
931989
932990 ReferenceTextures RefTextures{
933991 3 + MaxStaticTexArraySize + MaxMutableTexArraySize + MaxDynamicTexArraySize + 1, // Extra texture for dynamic variables
528528 {
529529 case SHADER_COMPILER_DEFAULT:
530530 case SHADER_COMPILER_GLSLANG:
531 m_ShaderCompiler = compiler;
532
531533 case SHADER_COMPILER_DXC:
532 m_ShaderCompiler = compiler;
534 if (HasDXCompiler())
535 m_ShaderCompiler = compiler;
533536 break;
534537
535538 default:
644644
645645 // Test texture 3D
646646 if (FmtInfo.Dimensions & RESOURCE_DIMENSION_SUPPORT_TEX_3D)
647 CreateTestTexture(RESOURCE_DIM_TEX_3D, TestInfo.Fmt, TestInfo.BindFlags, 1, TestInfo.TestDataUpload);
647 {
648 auto TexInfo2 = TestInfo;
649 #ifdef PLATFORM_MACOS
650 // in MoltenVk 2D image view from 3D texture may be unsupported.
651 TexInfo2.BindFlags &= ~(BIND_RENDER_TARGET | BIND_DEPTH_STENCIL);
652 if (TexInfo2.Fmt == TEX_FORMAT_D32_FLOAT || TexInfo2.Fmt == TEX_FORMAT_D16_UNORM)
653 return;
654 #endif
655 CreateTestTexture(RESOURCE_DIM_TEX_3D, TexInfo2.Fmt, TexInfo2.BindFlags, 1, TexInfo2.TestDataUpload);
656 }
648657 }
649658
650659
4646
4747 TestingEnvironmentVk::TestingEnvironmentVk(const CreateInfo& CI,
4848 const SwapChainDesc& SCDesc) :
49 TestingEnvironment{CI, SCDesc}
49 TestingEnvironment{CI, SCDesc},
50 m_pDxCompiler{CreateDXCompiler(DXCompilerTarget::Vulkan, nullptr)}
5051 {
5152 #if !DILIGENT_NO_GLSLANG
5253 GLSLangUtils::InitializeGlslang();
7071
7172 auto vkPhysicalDevice = pRenderDeviceVk->GetVkPhysicalDevice();
7273 vkGetPhysicalDeviceMemoryProperties(vkPhysicalDevice, &m_MemoryProperties);
74
75 vkGetPhysicalDeviceProperties(vkPhysicalDevice, &DeviceProps);
76
77 {
78 // Enumerate available extensions
79 uint32_t ExtensionCount = 0;
80 std::vector<VkExtensionProperties> InstanceExtensions;
81
82 vkEnumerateInstanceExtensionProperties(nullptr, &ExtensionCount, nullptr);
83 InstanceExtensions.resize(ExtensionCount);
84 vkEnumerateInstanceExtensionProperties(nullptr, &ExtensionCount, InstanceExtensions.data());
85
86 bool HasPhysicalDeviceProps2 = false;
87 bool HasDescriptorIndexing = false;
88 for (uint32_t i = 0; i < ExtensionCount; ++i)
89 {
90 if (!HasPhysicalDeviceProps2 && strcmp(InstanceExtensions[i].extensionName, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) == 0)
91 HasPhysicalDeviceProps2 = true;
92 if (!HasDescriptorIndexing && strcmp(InstanceExtensions[i].extensionName, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME) == 0)
93 HasDescriptorIndexing = true;
94 }
95
96 // Get extension features and properties.
97 if (HasPhysicalDeviceProps2)
98 {
99 VkPhysicalDeviceFeatures2 Feats2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2};
100 void** NextFeat = &Feats2.pNext;
101
102 if (HasDescriptorIndexing)
103 {
104 *NextFeat = &DescriptorIndexing;
105 NextFeat = &DescriptorIndexing.pNext;
106
107 DescriptorIndexing.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT;
108 }
109
110 vkGetPhysicalDeviceFeatures2KHR(vkPhysicalDevice, &Feats2);
111 }
112 }
73113
74114 {
75115 VkCommandPoolCreateInfo CmdPoolCI = {};