git.s-ol.nu forks/DiligentCore / 8d13276
External Vulkan SwapChain support s-ol 2 years ago
7 changed file(s) with 117 addition(s) and 819 deletion(s).
0 function(custom_post_configure_target TARGET)
1 set_target_properties(${TARGET} PROPERTIES
2 LINK_OPTIONS -static-libstdc++
3 )
4 endfunction()
3535 #include "VulkanUtilities/VulkanInstance.hpp"
3636 #include "VulkanUtilities/VulkanObjectWrappers.hpp"
3737 #include "ManagedVulkanObject.hpp"
38 #include "../interface/EngineFactoryVk.h"
3839
3940 namespace Diligent
4041 {
4950 const SwapChainDesc& SwapChainDesc,
5051 class RenderDeviceVkImpl* pRenderDeviceVk,
5152 class DeviceContextVkImpl* pDeviceContextVk,
52 const NativeWindow& Window);
53 uint32_t SwapChainImageCount,
54 const VkImage* pSwapChainImages,
55 SwapChainImageCallbacks SwapChainCallbacks);
5356 ~SwapChainVkImpl();
5457
5558 IMPLEMENT_QUERY_INTERFACE_IN_PLACE(IID_SwapChainVk, TSwapChainBase)
5659
5760 /// Implementation of ISwapChain::Present() in Vulkan backend.
5861 virtual void DILIGENT_CALL_TYPE Present(Uint32 SyncInterval) override final;
62
63 /// Implementation of ISwapChainVk::PresentImage() in Vulkan backend.
64 virtual void DILIGENT_CALL_TYPE PresentImage() override final;
65
66 /// Implementation of ISwapChainVk::AcquireNextImage() in Vulkan backend.
67 virtual VkResult DILIGENT_CALL_TYPE AcquireNextImage() override final;
5968
6069 /// Implementation of ISwapChain::Resize() in Vulkan backend.
6170 virtual void DILIGENT_CALL_TYPE Resize(Uint32 NewWidth, Uint32 NewHeight, SURFACE_TRANSFORM NewPreTransform) override final;
6574
6675 /// Implementation of ISwapChain::SetWindowedMode() in Vulkan backend.
6776 virtual void DILIGENT_CALL_TYPE SetWindowedMode() override final;
68
69 /// Implementation of ISwapChainVk::GetVkSwapChain().
70 virtual VkSwapchainKHR DILIGENT_CALL_TYPE GetVkSwapChain() override final { return m_VkSwapChain; }
7177
7278 /// Implementation of ISwapChain::GetCurrentBackBufferRTV() in Vulkan backend.
7379 virtual ITextureViewVk* DILIGENT_CALL_TYPE GetCurrentBackBufferRTV() override final
8086 virtual ITextureViewVk* DILIGENT_CALL_TYPE GetDepthBufferDSV() override final { return m_pDepthBufferDSV; }
8187
8288 private:
83 void CreateSurface();
84 void CreateVulkanSwapChain();
85 void InitBuffersAndViews();
86 VkResult AcquireNextImage(DeviceContextVkImpl* pDeviceCtxVk);
87 void RecreateVulkanSwapchain(DeviceContextVkImpl* pImmediateCtxVk);
88 void WaitForImageAcquiredFences();
89 void ReleaseSwapChainResources(DeviceContextVkImpl* pImmediateCtxVk, bool DestroyVkSwapChain);
90
91 const NativeWindow m_Window;
89 void InitBuffersAndViews(uint32_t SwapChainImageCount, const VkImage* pSwapChainImages);
90 void ReleaseSwapChainResources(DeviceContextVkImpl* pImmediateCtxVk);
9291
9392 std::shared_ptr<const VulkanUtilities::VulkanInstance> m_VulkanInstance;
94
95 Uint32 m_DesiredBufferCount = 0;
96
97 VkSurfaceKHR m_VkSurface = VK_NULL_HANDLE;
98 VkSwapchainKHR m_VkSwapChain = VK_NULL_HANDLE;
99 VkFormat m_VkColorFormat = VK_FORMAT_UNDEFINED;
100
101 #if PLATFORM_ANDROID
102 // Surface extent corresponding to identity transform. We have to store this value,
103 // because on Android vkGetPhysicalDeviceSurfaceCapabilitiesKHR is not reliable and
104 // starts reporting incorrect dimensions after a few rotations.
105 VkExtent2D m_SurfaceIdentityExtent = {};
106
107 // Keep track of current surface transform to detect orientation changes.
108 VkSurfaceTransformFlagBitsKHR m_CurrentSurfaceTransform = VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR;
109 #endif
110
111 std::vector<RefCntAutoPtr<ManagedSemaphore>> m_ImageAcquiredSemaphores;
112 std::vector<RefCntAutoPtr<ManagedSemaphore>> m_DrawCompleteSemaphores;
113 std::vector<VulkanUtilities::FenceWrapper> m_ImageAcquiredFences;
114
11593 std::vector<RefCntAutoPtr<ITextureViewVk>, STDAllocatorRawMem<RefCntAutoPtr<ITextureViewVk>>> m_pBackBufferRTV;
116
11794 std::vector<bool, STDAllocatorRawMem<bool>> m_SwapChainImagesInitialized;
118 std::vector<bool, STDAllocatorRawMem<bool>> m_ImageAcquiredFenceSubmitted;
11995
12096 RefCntAutoPtr<ITextureViewVk> m_pDepthBufferDSV;
12197
122 Uint32 m_SemaphoreIndex = 0;
98 void* m_Cookie;
99 VkResult (*m_AcquireImageCallback)(void* Cookie, uint32_t* Image);
100 void (*m_ReleaseImageCallback)(void* Cookie);
101
123102 uint32_t m_BackBufferIndex = 0;
124103 bool m_IsMinimized = false;
125104 bool m_VSyncEnabled = true;
6363
6464 // clang-format off
6565
66 struct SwapChainImageCallbacks {
67 void* Cookie;
68 VkResult (*ImageCallbackAcquire)(void* Cookie, uint32_t* Image);
69 void (*ImageCallbackRelease)(void* Cookie);
70 };
71
72 typedef struct SwapChainImageCallbacks SwapChainImageCallbacks;
73
6674 DILIGENT_BEGIN_INTERFACE(IEngineFactoryVk, IEngineFactory)
6775 {
6876 /// Creates a render device and device contexts for Vulkan backend
100108 /// \param [out] ppSwapChain - Address of the memory location where pointer to the new
101109 /// swap chain will be written
102110 VIRTUAL void METHOD(CreateSwapChainVk)(THIS_
103 IRenderDevice* pDevice,
104 IDeviceContext* pImmediateContext,
105 const SwapChainDesc REF SwapChainDesc,
106 const NativeWindow REF Window,
107 ISwapChain** ppSwapChain) PURE;
111 IRenderDevice* pDevice,
112 IDeviceContext* pImmediateContext,
113 const SwapChainDesc REF SwapChainDesc,
114 uint32_t SwapChainImageCount,
115 const VkImage* pSwapChainImages,
116 const SwapChainImageCallbacks REF SwapChainCallbacks,
117 ISwapChain** ppSwapChain) PURE;
108118 };
109119 DILIGENT_END_INTERFACE
110120
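With this change, IEngineFactoryVk::CreateSwapChainVk() no longer takes a NativeWindow: the caller hands in externally created VkImages together with a pair of acquire/release callbacks. The sketch below shows how an application might call the new entry point; it is not part of this commit, and MyPresenter, MyAcquireImage/MyReleaseImage, and the factory/device/context/image parameters are illustrative assumptions (for example, images owned by an XR runtime or another compositor). It assumes using namespace Diligent; and that the Vulkan headers are included.

// Hypothetical glue for an externally owned set of swap chain images (sketch, not in the commit).
struct MyPresenter
{
    uint32_t NextImageIndex() { return 0; } // placeholder: the external system picks the index
    void     ReleaseImage()   {}            // placeholder: hand the image back to the external system
};

static VkResult MyAcquireImage(void* Cookie, uint32_t* Image)
{
    *Image = static_cast<MyPresenter*>(Cookie)->NextImageIndex();
    return VK_SUCCESS;
}

static void MyReleaseImage(void* Cookie)
{
    static_cast<MyPresenter*>(Cookie)->ReleaseImage();
}

void CreateExternalSwapChain(IEngineFactoryVk*          pFactoryVk,
                             IRenderDevice*             pDevice,
                             IDeviceContext*            pImmediateContext,
                             MyPresenter&               Presenter,
                             uint32_t                   ImageCount,
                             const VkImage*             pVkImages, // externally created images
                             uint32_t                   Width,
                             uint32_t                   Height,
                             RefCntAutoPtr<ISwapChain>& pSwapChain)
{
    SwapChainImageCallbacks Callbacks{};
    Callbacks.Cookie               = &Presenter;
    Callbacks.ImageCallbackAcquire = MyAcquireImage;
    Callbacks.ImageCallbackRelease = MyReleaseImage;

    SwapChainDesc SCDesc;
    SCDesc.Width             = Width;      // must match the external VkImages
    SCDesc.Height            = Height;
    SCDesc.ColorBufferFormat = TEX_FORMAT_RGBA8_UNORM;
    SCDesc.BufferCount       = ImageCount; // keep consistent with the number of images passed in

    pFactoryVk->CreateSwapChainVk(pDevice, pImmediateContext, SCDesc,
                                  ImageCount, pVkImages, Callbacks, &pSwapChain);
}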
3030 /// Definition of the Diligent::IRenderDeviceVk interface
3131
3232 #include "../../GraphicsEngine/interface/RenderDevice.h"
33 #include "CommandQueueVk.h"
3334
3435 DILIGENT_BEGIN_NAMESPACE(Diligent)
3536
149150 const TopLevelASDesc REF Desc,
150151 RESOURCE_STATE InitialState,
151152 ITopLevelAS** ppTLAS) PURE;
153
154 VIRTUAL ICommandQueueVk* METHOD(LockCommandQueue)(THIS_
155 Uint32 QueueIndex) PURE;
156
157 VIRTUAL void METHOD(UnlockCommandQueue)(THIS_
158 Uint32 QueueIndex) PURE;
159
152160 };
153161 DILIGENT_END_INTERFACE
154162
168176 # define IRenderDeviceVk_CreateBufferFromVulkanResource(This, ...) CALL_IFACE_METHOD(RenderDeviceVk, CreateBufferFromVulkanResource, This, __VA_ARGS__)
169177 # define IRenderDeviceVk_CreateBLASFromVulkanResource(This, ...) CALL_IFACE_METHOD(RenderDeviceVk, CreateBLASFromVulkanResource, This, __VA_ARGS__)
170178 # define IRenderDeviceVk_CreateTLASFromVulkanResource(This, ...) CALL_IFACE_METHOD(RenderDeviceVk, CreateTLASFromVulkanResource, This, __VA_ARGS__)
179 # define IRenderDeviceVk_LockCommandQueue(This, ...) CALL_IFACE_METHOD(RenderDeviceVk, LockCommandQueue, This, __VA_ARGS__)
180 # define IRenderDeviceVk_UnlockCommandQueue(This, ...) CALL_IFACE_METHOD(RenderDeviceVk, UnlockCommandQueue, This, __VA_ARGS__)
171181
172182 // clang-format on
173183
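The new LockCommandQueue()/UnlockCommandQueue() pair lets code outside the engine serialize its own Vulkan queue access against Diligent's submissions; the reworked swap chain below wraps its acquire/release callbacks in exactly this way. Here is a rough sketch of how an application might bracket an externally recorded submission; the GetVkQueue() call, the SubmitInfo, and the fence are assumptions layered on top of this diff rather than something it adds:

// Serialize an external vkQueueSubmit against Diligent's own use of the queue (sketch).
void SubmitExternalWork(IRenderDeviceVk* pDeviceVk, const VkSubmitInfo& SubmitInfo, VkFence Fence)
{
    ICommandQueueVk* pQueueVk = pDeviceVk->LockCommandQueue(0 /*QueueIndex*/);
    VkQueue          vkQueue  = pQueueVk->GetVkQueue(); // raw handle of the locked queue

    // Nothing else can submit to this queue through Diligent until it is unlocked.
    vkQueueSubmit(vkQueue, 1, &SubmitInfo, Fence);

    pDeviceVk->UnlockCommandQueue(0);
}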
4848 /// Exposes Vulkan-specific functionality of a swap chain.
4949 DILIGENT_BEGIN_INTERFACE(ISwapChainVk, ISwapChain)
5050 {
51 /// Returns a handle to the Vulkan swap chain object.
52 VIRTUAL VkSwapchainKHR METHOD(GetVkSwapChain)(THIS) PURE;
51 VIRTUAL void METHOD(PresentImage)(THIS) PURE;
52 VIRTUAL VkResult METHOD(AcquireNextImage)(THIS) PURE;
5353 };
5454 DILIGENT_END_INTERFACE
5555
5959
6060 // clang-format off
6161
62 # define ISwapChainVk_GetVkSwapChain(This) CALL_IFACE_METHOD(SwapChainVk, GetVkSwapChain, This)
62 # define ISwapChainVk_PresentImage(This) CALL_IFACE_METHOD(SwapChainVk, PresentImage, This)
63 # define ISwapChainVk_AcquireNextImage(This) CALL_IFACE_METHOD(SwapChainVk, AcquireNextImage, This)
6364
6465 // clang-format on
6566
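On the ISwapChainVk side, GetVkSwapChain() disappears and presentation becomes an explicit acquire/present pair that the application (or the external presentation loop) drives itself. A per-frame sketch, assuming pSwapChainVk was queried from the swap chain created by the factory and pImmediateContext is the immediate device context; error handling and the actual draw calls are omitted:

// Per-frame flow with an external swap chain (sketch, not part of the commit).
void RenderFrame(ISwapChainVk* pSwapChainVk, IDeviceContext* pImmediateContext)
{
    // Invokes the user-provided acquire callback under the command-queue lock.
    if (pSwapChainVk->AcquireNextImage() != VK_SUCCESS)
        return;

    ITextureView* pRTV = pSwapChainVk->GetCurrentBackBufferRTV();
    pImmediateContext->SetRenderTargets(1, &pRTV, nullptr, RESOURCE_STATE_TRANSITION_MODE_TRANSITION);

    const float ClearColor[] = {0.f, 0.f, 0.f, 1.f};
    pImmediateContext->ClearRenderTarget(pRTV, ClearColor, RESOURCE_STATE_TRANSITION_MODE_TRANSITION);
    // ... record rendering commands ...

    // Flushes the context, transitions the back buffer, and invokes the release callback.
    pSwapChainVk->PresentImage();
}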
8383 IRenderDevice** ppDevice,
8484 IDeviceContext** ppContexts); //override final;
8585
86 virtual void DILIGENT_CALL_TYPE CreateSwapChainVk(IRenderDevice* pDevice,
87 IDeviceContext* pImmediateContext,
88 const SwapChainDesc& SwapChainDesc,
89 const NativeWindow& Window,
90 ISwapChain** ppSwapChain) override final;
86 virtual void DILIGENT_CALL_TYPE CreateSwapChainVk(IRenderDevice* pDevice,
87 IDeviceContext* pImmediateContext,
88 const SwapChainDesc& SwapChainDesc,
89 uint32_t SwapChainImageCount,
90 const VkImage* pSwapChainImages,
91 const SwapChainImageCallbacks& SwapChainCallbacks,
92 ISwapChain** ppSwapChain) override final;
9193
9294 #if PLATFORM_ANDROID
9395 virtual void InitAndroidFileSystem(struct ANativeActivity* NativeActivity,
572574 };
573575
574576 AttachToVulkanDevice(
575 VulkanInstance,
576 std::move(VulkanPhysicalDevice),
577 VulkanInstance,
578 std::move(VulkanPhysicalDevice),
577579 VulkanLogicalDevice,
578580 CommandQueues.size(), CommandQueues.data(),
579581 EngineCI,
668670 }
669671
670672
671 void EngineFactoryVkImpl::CreateSwapChainVk(IRenderDevice* pDevice,
672 IDeviceContext* pImmediateContext,
673 const SwapChainDesc& SCDesc,
674 const NativeWindow& Window,
675 ISwapChain** ppSwapChain)
673 void EngineFactoryVkImpl::CreateSwapChainVk(IRenderDevice* pDevice,
674 IDeviceContext* pImmediateContext,
675 const SwapChainDesc& SCDesc,
676 uint32_t SwapChainImageCount,
677 const VkImage* pSwapChainImages,
678 const SwapChainImageCallbacks& SwapChainCallbacks,
679 ISwapChain** ppSwapChain)
676680 {
677681 VERIFY(ppSwapChain, "Null pointer provided");
678682 if (!ppSwapChain)
686690 auto* pDeviceContextVk = ValidatedCast<DeviceContextVkImpl>(pImmediateContext);
687691 auto& RawMemAllocator = GetRawAllocator();
688692
689 auto* pSwapChainVk = NEW_RC_OBJ(RawMemAllocator, "SwapChainVkImpl instance", SwapChainVkImpl)(SCDesc, pDeviceVk, pDeviceContextVk, Window);
693 auto* pSwapChainVk = NEW_RC_OBJ(RawMemAllocator, "SwapChainVkImpl instance", SwapChainVkImpl)(SCDesc, pDeviceVk, pDeviceContextVk, SwapChainImageCount, pSwapChainImages, SwapChainCallbacks);
690694 pSwapChainVk->QueryInterface(IID_SwapChain, reinterpret_cast<IObject**>(ppSwapChain));
691695 }
692696 catch (const std::runtime_error&)
3535 namespace Diligent
3636 {
3737
38 SwapChainVkImpl::SwapChainVkImpl(IReferenceCounters* pRefCounters,
39 const SwapChainDesc& SCDesc,
40 RenderDeviceVkImpl* pRenderDeviceVk,
41 DeviceContextVkImpl* pDeviceContextVk,
42 const NativeWindow& Window) :
38 SwapChainVkImpl::SwapChainVkImpl(IReferenceCounters* pRefCounters,
39 const SwapChainDesc& SCDesc,
40 RenderDeviceVkImpl* pRenderDeviceVk,
41 DeviceContextVkImpl* pDeviceContextVk,
42 uint32_t SwapChainImageCount,
43 const VkImage* pSwapChainImages,
44 SwapChainImageCallbacks SwapChainCallbacks) :
4345 // clang-format off
4446 TSwapChainBase {pRefCounters, pRenderDeviceVk, pDeviceContextVk, SCDesc},
45 m_Window {Window},
4647 m_VulkanInstance {pRenderDeviceVk->GetVulkanInstance()},
47 m_DesiredBufferCount {SCDesc.BufferCount},
4848 m_pBackBufferRTV (STD_ALLOCATOR_RAW_MEM(RefCntAutoPtr<ITextureView>, GetRawAllocator(), "Allocator for vector<RefCntAutoPtr<ITextureView>>")),
4949 m_SwapChainImagesInitialized (STD_ALLOCATOR_RAW_MEM(bool, GetRawAllocator(), "Allocator for vector<bool>")),
50 m_ImageAcquiredFenceSubmitted(STD_ALLOCATOR_RAW_MEM(bool, GetRawAllocator(), "Allocator for vector<bool>"))
50 m_Cookie (SwapChainCallbacks.Cookie),
51 m_AcquireImageCallback (SwapChainCallbacks.ImageCallbackAcquire),
52 m_ReleaseImageCallback (SwapChainCallbacks.ImageCallbackRelease)
5153 // clang-format on
5254 {
53 CreateSurface();
54 CreateVulkanSwapChain();
55 InitBuffersAndViews();
56 auto res = AcquireNextImage(pDeviceContextVk);
57 DEV_CHECK_ERR(res == VK_SUCCESS, "Failed to acquire next image for the newly created swap chain");
58 (void)res;
59 }
60
61 void SwapChainVkImpl::CreateSurface()
62 {
63 if (m_VkSurface != VK_NULL_HANDLE)
64 {
65 vkDestroySurfaceKHR(m_VulkanInstance->GetVkInstance(), m_VkSurface, NULL);
66 m_VkSurface = VK_NULL_HANDLE;
67 }
68
69 // Create OS-specific surface
70 VkResult err = VK_ERROR_INITIALIZATION_FAILED;
71 #if defined(VK_USE_PLATFORM_WIN32_KHR)
72 if (m_Window.hWnd != NULL)
73 {
74 VkWin32SurfaceCreateInfoKHR surfaceCreateInfo = {};
75
76 surfaceCreateInfo.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
77 surfaceCreateInfo.hinstance = GetModuleHandle(NULL);
78 surfaceCreateInfo.hwnd = (HWND)m_Window.hWnd;
79
80 err = vkCreateWin32SurfaceKHR(m_VulkanInstance->GetVkInstance(), &surfaceCreateInfo, nullptr, &m_VkSurface);
81 }
82 #elif defined(VK_USE_PLATFORM_ANDROID_KHR)
83 if (m_Window.pAWindow != nullptr)
84 {
85 VkAndroidSurfaceCreateInfoKHR surfaceCreateInfo = {};
86
87 surfaceCreateInfo.sType = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
88 surfaceCreateInfo.window = (ANativeWindow*)m_Window.pAWindow;
89
90 err = vkCreateAndroidSurfaceKHR(m_VulkanInstance->GetVkInstance(), &surfaceCreateInfo, NULL, &m_VkSurface);
91 }
92 #elif defined(VK_USE_PLATFORM_IOS_MVK)
93 if (m_Window.pCALayer != nullptr)
94 {
95 VkIOSSurfaceCreateInfoMVK surfaceCreateInfo = {};
96
97 surfaceCreateInfo.sType = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK;
98 surfaceCreateInfo.pView = m_Window.pCALayer;
99
100 err = vkCreateIOSSurfaceMVK(m_VulkanInstance->GetVkInstance(), &surfaceCreateInfo, nullptr, &m_VkSurface);
101 }
102 #elif defined(VK_USE_PLATFORM_MACOS_MVK)
103 if (m_Window.pNSView != nullptr)
104 {
105 VkMacOSSurfaceCreateInfoMVK surfaceCreateInfo = {};
106
107 surfaceCreateInfo.sType = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK;
108 surfaceCreateInfo.pView = m_Window.pNSView;
109
110 err = vkCreateMacOSSurfaceMVK(m_VulkanInstance->GetVkInstance(), &surfaceCreateInfo, NULL, &m_VkSurface);
111 }
112 #elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
113 if (m_Window.pDisplay != nullptr)
114 {
115 VkWaylandSurfaceCreateInfoKHR surfaceCreateInfo = {};
116
117 surfaceCreateInfo.sType = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR;
118 surfaceCreateInfo.display = reinterpret_cast<struct wl_display*>(m_Window.pDisplay);
119 surfaceCreateInfo.surface = reinterpret_cast<struct wl_surface*>(nullptr);
120
121 err = vkCreateWaylandSurfaceKHR(m_VulkanInstance->GetVkInstance(), &surfaceCreateInfo, nullptr, &m_VkSurface);
122 }
123 #elif defined(VK_USE_PLATFORM_XCB_KHR) || defined(VK_USE_PLATFORM_XLIB_KHR)
124
125 # if defined(VK_USE_PLATFORM_XCB_KHR)
126 if (m_Window.pXCBConnection != nullptr && m_Window.WindowId != 0)
127 {
128 VkXcbSurfaceCreateInfoKHR surfaceCreateInfo = {};
129
130 surfaceCreateInfo.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
131 surfaceCreateInfo.connection = reinterpret_cast<xcb_connection_t*>(m_Window.pXCBConnection);
132 surfaceCreateInfo.window = m_Window.WindowId;
133
134 err = vkCreateXcbSurfaceKHR(m_VulkanInstance->GetVkInstance(), &surfaceCreateInfo, nullptr, &m_VkSurface);
135 }
136 # endif
137
138 # if defined(VK_USE_PLATFORM_XLIB_KHR)
139 if ((m_Window.pDisplay != nullptr && m_Window.WindowId != 0) && m_VkSurface == VK_NULL_HANDLE)
140 {
141 VkXlibSurfaceCreateInfoKHR surfaceCreateInfo = {};
142
143 surfaceCreateInfo.sType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
144 surfaceCreateInfo.dpy = reinterpret_cast<Display*>(m_Window.pDisplay);
145 surfaceCreateInfo.window = m_Window.WindowId;
146
147 err = vkCreateXlibSurfaceKHR(m_VulkanInstance->GetVkInstance(), &surfaceCreateInfo, nullptr, &m_VkSurface);
148 }
149 # endif
150
151 #endif
152
153 CHECK_VK_ERROR_AND_THROW(err, "Failed to create OS-specific surface");
154
155 auto* pRenderDeviceVk = m_pRenderDevice.RawPtr<RenderDeviceVkImpl>();
156 const auto& PhysicalDevice = pRenderDeviceVk->GetPhysicalDevice();
157 auto& CmdQueueVK = pRenderDeviceVk->GetCommandQueue(0);
158 auto QueueFamilyIndex = CmdQueueVK.GetQueueFamilyIndex();
159 if (!PhysicalDevice.CheckPresentSupport(QueueFamilyIndex, m_VkSurface))
160 {
161 LOG_ERROR_AND_THROW("Selected physical device does not support present capability.\n"
162 "There could be few ways to mitigate this problem. One is to try to find another queue that supports present, but does not support graphics and compute capabilities."
163 "Another way is to find another physical device that exposes queue family that supports present and graphics capability. Neither apporach is currently implemented in Diligent Engine.");
164 }
165 }
166
167 void SwapChainVkImpl::CreateVulkanSwapChain()
168 {
169 auto* pRenderDeviceVk = m_pRenderDevice.RawPtr<RenderDeviceVkImpl>();
170 const auto& PhysicalDevice = pRenderDeviceVk->GetPhysicalDevice();
171 auto vkDeviceHandle = PhysicalDevice.GetVkDeviceHandle();
172 // Get the list of VkFormats that are supported:
173 uint32_t formatCount = 0;
174
175 auto err = vkGetPhysicalDeviceSurfaceFormatsKHR(vkDeviceHandle, m_VkSurface, &formatCount, NULL);
176 CHECK_VK_ERROR_AND_THROW(err, "Failed to query number of supported formats");
177 VERIFY_EXPR(formatCount > 0);
178 std::vector<VkSurfaceFormatKHR> SupportedFormats(formatCount);
179 err = vkGetPhysicalDeviceSurfaceFormatsKHR(vkDeviceHandle, m_VkSurface, &formatCount, SupportedFormats.data());
180 CHECK_VK_ERROR_AND_THROW(err, "Failed to query supported format properties");
181 VERIFY_EXPR(formatCount == SupportedFormats.size());
182 m_VkColorFormat = TexFormatToVkFormat(m_SwapChainDesc.ColorBufferFormat);
183
184 VkColorSpaceKHR ColorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
185 if (formatCount == 1 && SupportedFormats[0].format == VK_FORMAT_UNDEFINED)
186 {
187 // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
188 // the surface has no preferred format. Otherwise, at least one
189 // supported format will be returned.
190
191 // Do nothing
192 }
193 else
194 {
195 bool FmtFound = false;
196 for (const auto& SrfFmt : SupportedFormats)
197 {
198 if (SrfFmt.format == m_VkColorFormat)
199 {
200 FmtFound = true;
201 ColorSpace = SrfFmt.colorSpace;
202 break;
203 }
204 }
205 if (!FmtFound)
206 {
207 VkFormat VkReplacementColorFormat = VK_FORMAT_UNDEFINED;
208 switch (m_VkColorFormat)
209 {
210 // clang-format off
211 case VK_FORMAT_R8G8B8A8_UNORM: VkReplacementColorFormat = VK_FORMAT_B8G8R8A8_UNORM; break;
212 case VK_FORMAT_B8G8R8A8_UNORM: VkReplacementColorFormat = VK_FORMAT_R8G8B8A8_UNORM; break;
213 case VK_FORMAT_B8G8R8A8_SRGB: VkReplacementColorFormat = VK_FORMAT_R8G8B8A8_SRGB; break;
214 case VK_FORMAT_R8G8B8A8_SRGB: VkReplacementColorFormat = VK_FORMAT_B8G8R8A8_SRGB; break;
215 default: VkReplacementColorFormat = VK_FORMAT_UNDEFINED;
216 // clang-format on
217 }
218
219 bool ReplacementFmtFound = false;
220 for (const auto& SrfFmt : SupportedFormats)
221 {
222 if (SrfFmt.format == VkReplacementColorFormat)
223 {
224 ReplacementFmtFound = true;
225 ColorSpace = SrfFmt.colorSpace;
226 break;
227 }
228 }
229
230 if (ReplacementFmtFound)
231 {
232 m_VkColorFormat = VkReplacementColorFormat;
233 auto NewColorBufferFormat = VkFormatToTexFormat(VkReplacementColorFormat);
234 LOG_INFO_MESSAGE("Requested color buffer format ", GetTextureFormatAttribs(m_SwapChainDesc.ColorBufferFormat).Name, " is not supported by the surface and will be replaced with ", GetTextureFormatAttribs(NewColorBufferFormat).Name);
235 m_SwapChainDesc.ColorBufferFormat = NewColorBufferFormat;
236 }
237 else
238 {
239 LOG_WARNING_MESSAGE("Requested color buffer format ", GetTextureFormatAttribs(m_SwapChainDesc.ColorBufferFormat).Name, " is not supported by the surface");
240 }
241 }
242 }
243
244 VkSurfaceCapabilitiesKHR surfCapabilities = {};
245
246 err = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(vkDeviceHandle, m_VkSurface, &surfCapabilities);
247 CHECK_VK_ERROR_AND_THROW(err, "Failed to query physical device surface capabilities");
248
249 uint32_t presentModeCount = 0;
250
251 err = vkGetPhysicalDeviceSurfacePresentModesKHR(vkDeviceHandle, m_VkSurface, &presentModeCount, NULL);
252 CHECK_VK_ERROR_AND_THROW(err, "Failed to query surface present mode count");
253 VERIFY_EXPR(presentModeCount > 0);
254 std::vector<VkPresentModeKHR> presentModes(presentModeCount);
255 err = vkGetPhysicalDeviceSurfacePresentModesKHR(vkDeviceHandle, m_VkSurface, &presentModeCount, presentModes.data());
256 CHECK_VK_ERROR_AND_THROW(err, "Failed to query surface present modes");
257 VERIFY_EXPR(presentModeCount == presentModes.size());
258
259
260 VkSurfaceTransformFlagBitsKHR vkPreTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
261 if (m_DesiredPreTransform != SURFACE_TRANSFORM_OPTIMAL)
262 {
263 vkPreTransform = SurfaceTransformToVkSurfaceTransformFlag(m_DesiredPreTransform);
264 if ((surfCapabilities.supportedTransforms & vkPreTransform) != 0)
265 {
266 m_SwapChainDesc.PreTransform = m_DesiredPreTransform;
267 }
268 else
269 {
270 LOG_WARNING_MESSAGE(GetSurfaceTransformString(m_DesiredPreTransform),
271 " is not supported by the presentation engine. Optimal surface transform will be used instead."
272 " Query the swap chain description to get the actual surface transform.");
273 m_DesiredPreTransform = SURFACE_TRANSFORM_OPTIMAL;
274 }
275 }
276
277 if (m_DesiredPreTransform == SURFACE_TRANSFORM_OPTIMAL)
278 {
279 // Use current surface transform to avoid extra cost of presenting the image.
280 // If preTransform does not match the currentTransform value returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR,
281 // the presentation engine will transform the image content as part of the presentation operation.
282 // https://android-developers.googleblog.com/2020/02/handling-device-orientation-efficiently.html
283 // https://community.arm.com/developer/tools-software/graphics/b/blog/posts/appropriate-use-of-surface-rotation
284 vkPreTransform = surfCapabilities.currentTransform;
285 m_SwapChainDesc.PreTransform = VkSurfaceTransformFlagToSurfaceTransform(vkPreTransform);
286 LOG_INFO_MESSAGE("Using ", GetSurfaceTransformString(m_SwapChainDesc.PreTransform), " swap chain pretransform");
287 }
288
289 VkExtent2D swapchainExtent = {};
290 // width and height are either both 0xFFFFFFFF, or both not 0xFFFFFFFF.
291 if (surfCapabilities.currentExtent.width == 0xFFFFFFFF && m_SwapChainDesc.Width != 0 && m_SwapChainDesc.Height != 0)
292 {
293 // If the surface size is undefined, the size is set to
294 // the size of the images requested.
295 swapchainExtent.width = std::min(std::max(m_SwapChainDesc.Width, surfCapabilities.minImageExtent.width), surfCapabilities.maxImageExtent.width);
296 swapchainExtent.height = std::min(std::max(m_SwapChainDesc.Height, surfCapabilities.minImageExtent.height), surfCapabilities.maxImageExtent.height);
297 }
298 else
299 {
300 // If the surface size is defined, the swap chain size must match
301 swapchainExtent = surfCapabilities.currentExtent;
302 }
303
304 #if PLATFORM_ANDROID
305 // On Android, vkGetPhysicalDeviceSurfaceCapabilitiesKHR is not reliable and starts reporting incorrect
306 // dimensions after a few rotations. To alleviate the problem, we store the surface extent corresponding to
307 // identity rotation.
308 // https://android-developers.googleblog.com/2020/02/handling-device-orientation-efficiently.html
309 if (m_SurfaceIdentityExtent.width == 0 || m_SurfaceIdentityExtent.height == 0)
310 {
311 m_SurfaceIdentityExtent = surfCapabilities.currentExtent;
312 constexpr auto Rotate90TransformFlags =
313 VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR |
314 VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR |
315 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR |
316 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR;
317 if ((surfCapabilities.currentTransform & Rotate90TransformFlags) != 0)
318 std::swap(m_SurfaceIdentityExtent.width, m_SurfaceIdentityExtent.height);
319 }
320
321 if (m_DesiredPreTransform == SURFACE_TRANSFORM_OPTIMAL)
322 {
323 swapchainExtent = m_SurfaceIdentityExtent;
324 }
325 m_CurrentSurfaceTransform = surfCapabilities.currentTransform;
326 #endif
327
328 swapchainExtent.width = std::max(swapchainExtent.width, 1u);
329 swapchainExtent.height = std::max(swapchainExtent.height, 1u);
330 m_SwapChainDesc.Width = swapchainExtent.width;
331 m_SwapChainDesc.Height = swapchainExtent.height;
332
333 // The FIFO present mode is guaranteed by the spec to always be supported.
334 VkPresentModeKHR PresentMode = VK_PRESENT_MODE_FIFO_KHR;
335 {
336 std::vector<VkPresentModeKHR> PreferredPresentModes;
337 if (m_VSyncEnabled)
338 {
339 // FIFO relaxed waits for the next VSync, but if the frame is late,
340 // it still shows it even if VSync has already passed, which may
341 // result in tearing.
342 PreferredPresentModes.push_back(VK_PRESENT_MODE_FIFO_RELAXED_KHR);
343 PreferredPresentModes.push_back(VK_PRESENT_MODE_FIFO_KHR);
344 }
345 else
346 {
347 // Mailbox is the lowest latency non-tearing presentation mode.
348 PreferredPresentModes.push_back(VK_PRESENT_MODE_MAILBOX_KHR);
349 PreferredPresentModes.push_back(VK_PRESENT_MODE_IMMEDIATE_KHR);
350 PreferredPresentModes.push_back(VK_PRESENT_MODE_FIFO_KHR);
351 }
352
353 for (auto PreferredMode : PreferredPresentModes)
354 {
355 if (std::find(presentModes.begin(), presentModes.end(), PreferredMode) != presentModes.end())
356 {
357 PresentMode = PreferredMode;
358 break;
359 }
360 }
361
362 const char* PresentModeName = nullptr;
363 #define PRESENT_MODE_CASE(Mode) \
364 case Mode: PresentModeName = #Mode; break;
365 switch (PresentMode)
366 {
367 PRESENT_MODE_CASE(VK_PRESENT_MODE_IMMEDIATE_KHR)
368 PRESENT_MODE_CASE(VK_PRESENT_MODE_MAILBOX_KHR)
369 PRESENT_MODE_CASE(VK_PRESENT_MODE_FIFO_KHR)
370 PRESENT_MODE_CASE(VK_PRESENT_MODE_FIFO_RELAXED_KHR)
371 PRESENT_MODE_CASE(VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR)
372 PRESENT_MODE_CASE(VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR)
373 default: PresentModeName = "<UNKNOWN>";
374 }
375 #undef PRESENT_MODE_CASE
376 LOG_INFO_MESSAGE("Using ", PresentModeName, " swap chain present mode");
377 }
378
379 // Determine the number of VkImage's to use in the swap chain.
380 // We need to acquire only 1 presentable image at a time.
381 // Asking for minImageCount images ensures that we can acquire
382 // 1 presentable image as long as we present it before attempting
383 // to acquire another.
384 if (m_DesiredBufferCount < surfCapabilities.minImageCount)
385 {
386 LOG_INFO_MESSAGE("Desired back buffer count (", m_DesiredBufferCount, ") is smaller than the minimal image count supported for this surface (", surfCapabilities.minImageCount, "). Resetting to ", surfCapabilities.minImageCount);
387 m_DesiredBufferCount = surfCapabilities.minImageCount;
388 }
389 if (surfCapabilities.maxImageCount != 0 && m_DesiredBufferCount > surfCapabilities.maxImageCount)
390 {
391 LOG_INFO_MESSAGE("Desired back buffer count (", m_DesiredBufferCount, ") is greater than the maximal image count supported for this surface (", surfCapabilities.maxImageCount, "). Resetting to ", surfCapabilities.maxImageCount);
392 m_DesiredBufferCount = surfCapabilities.maxImageCount;
393 }
394 // We must use m_DesiredBufferCount instead of m_SwapChainDesc.BufferCount, because Vulkan on Android
395 // may decide to always add extra buffers, causing infinite growth of the swap chain when it is recreated:
396 // m_SwapChainDesc.BufferCount
397 // CreateVulkanSwapChain() 2 -> 4
398 // CreateVulkanSwapChain() 4 -> 6
399 // CreateVulkanSwapChain() 6 -> 8
400 uint32_t desiredNumberOfSwapChainImages = m_DesiredBufferCount;
401
402 // Find a supported composite alpha mode - one of these is guaranteed to be set
403 VkCompositeAlphaFlagBitsKHR compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
404 VkCompositeAlphaFlagBitsKHR compositeAlphaFlags[4] = //
405 {
406 VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
407 VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
408 VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
409 VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR,
410 };
411 for (uint32_t i = 0; i < _countof(compositeAlphaFlags); i++)
412 {
413 if (surfCapabilities.supportedCompositeAlpha & compositeAlphaFlags[i])
414 {
415 compositeAlpha = compositeAlphaFlags[i];
416 break;
417 }
418 }
419
420 auto oldSwapchain = m_VkSwapChain;
421 m_VkSwapChain = VK_NULL_HANDLE;
422
423 VkSwapchainCreateInfoKHR swapchain_ci = {};
424
425 swapchain_ci.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
426 swapchain_ci.pNext = NULL;
427 swapchain_ci.surface = m_VkSurface;
428 swapchain_ci.minImageCount = desiredNumberOfSwapChainImages;
429 swapchain_ci.imageFormat = m_VkColorFormat;
430 swapchain_ci.imageExtent.width = swapchainExtent.width;
431 swapchain_ci.imageExtent.height = swapchainExtent.height;
432 swapchain_ci.preTransform = vkPreTransform;
433 swapchain_ci.compositeAlpha = compositeAlpha;
434 swapchain_ci.imageArrayLayers = 1;
435 swapchain_ci.presentMode = PresentMode;
436 swapchain_ci.oldSwapchain = oldSwapchain;
437 swapchain_ci.clipped = VK_TRUE;
438 swapchain_ci.imageColorSpace = ColorSpace;
439
440 DEV_CHECK_ERR(m_SwapChainDesc.Usage != 0, "No swap chain usage flags defined");
441 static_assert(SWAP_CHAIN_USAGE_LAST == SWAP_CHAIN_USAGE_COPY_SOURCE, "Please update this function to handle the new swapchain usage");
442 if (m_SwapChainDesc.Usage & SWAP_CHAIN_USAGE_RENDER_TARGET)
443 swapchain_ci.imageUsage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
444 if (m_SwapChainDesc.Usage & SWAP_CHAIN_USAGE_SHADER_INPUT)
445 swapchain_ci.imageUsage |= VK_IMAGE_USAGE_SAMPLED_BIT;
446 if (m_SwapChainDesc.Usage & SWAP_CHAIN_USAGE_COPY_SOURCE)
447 swapchain_ci.imageUsage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
448
449 // vkCmdClearColorImage() command requires the image to use VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL layout
450 // that requires VK_IMAGE_USAGE_TRANSFER_DST_BIT to be set
451 swapchain_ci.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
452 swapchain_ci.queueFamilyIndexCount = 0;
453 swapchain_ci.pQueueFamilyIndices = NULL;
454 //uint32_t queueFamilyIndices[] = { (uint32_t)info.graphics_queue_family_index, (uint32_t)info.present_queue_family_index };
455 //if (info.graphics_queue_family_index != info.present_queue_family_index) {
456 // // If the graphics and present queues are from different queue families,
457 // // we either have to explicitly transfer ownership of images between
458 // // the queues, or we have to create the swapchain with imageSharingMode
459 // // as VK_SHARING_MODE_CONCURRENT
460 // swapchain_ci.imageSharingMode = VK_SHARING_MODE_CONCURRENT;
461 // swapchain_ci.queueFamilyIndexCount = 2;
462 // swapchain_ci.pQueueFamilyIndices = queueFamilyIndices;
463 //}
464
465 const auto& LogicalDevice = pRenderDeviceVk->GetLogicalDevice();
466 auto vkDevice = pRenderDeviceVk->GetVkDevice();
467
468 err = vkCreateSwapchainKHR(vkDevice, &swapchain_ci, NULL, &m_VkSwapChain);
469 CHECK_VK_ERROR_AND_THROW(err, "Failed to create Vulkan swapchain");
470
471 if (oldSwapchain != VK_NULL_HANDLE)
472 {
473 vkDestroySwapchainKHR(vkDevice, oldSwapchain, NULL);
474 oldSwapchain = VK_NULL_HANDLE;
475 }
476
477 uint32_t swapchainImageCount = 0;
478
479 err = vkGetSwapchainImagesKHR(vkDevice, m_VkSwapChain, &swapchainImageCount, NULL);
480 CHECK_VK_ERROR_AND_THROW(err, "Failed to request swap chain image count");
481 VERIFY_EXPR(swapchainImageCount > 0);
482 if (swapchainImageCount != m_SwapChainDesc.BufferCount)
483 {
484 LOG_INFO_MESSAGE("Created swap chain with ", swapchainImageCount,
485 " images vs ", m_SwapChainDesc.BufferCount, " requested.");
486 m_SwapChainDesc.BufferCount = swapchainImageCount;
487 }
488
489 m_ImageAcquiredSemaphores.resize(swapchainImageCount);
490 m_DrawCompleteSemaphores.resize(swapchainImageCount);
491 m_ImageAcquiredFences.resize(swapchainImageCount);
492 for (uint32_t i = 0; i < swapchainImageCount; ++i)
493 {
494 VkSemaphoreCreateInfo SemaphoreCI = {};
495
496 SemaphoreCI.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
497 SemaphoreCI.pNext = nullptr;
498 SemaphoreCI.flags = 0; // reserved for future use
499
500 {
501 std::stringstream ss;
502 ss << "Swap chain image acquired semaphore " << i;
503 auto Name = ss.str();
504 auto Semaphore = LogicalDevice.CreateSemaphore(SemaphoreCI, Name.c_str());
505 ManagedSemaphore::Create(pRenderDeviceVk, std::move(Semaphore), Name.c_str(), &m_ImageAcquiredSemaphores[i]);
506 }
507
508 {
509 std::stringstream ss;
510 ss << "Swap chain draw complete semaphore " << i;
511 auto Name = ss.str();
512 auto Semaphore = LogicalDevice.CreateSemaphore(SemaphoreCI, Name.c_str());
513 ManagedSemaphore::Create(pRenderDeviceVk, std::move(Semaphore), Name.c_str(), &m_DrawCompleteSemaphores[i]);
514 }
515
516 VkFenceCreateInfo FenceCI = {};
517
518 FenceCI.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
519 FenceCI.pNext = nullptr;
520 FenceCI.flags = 0;
521 m_ImageAcquiredFences[i] = LogicalDevice.CreateFence(FenceCI);
522 }
55 InitBuffersAndViews(SwapChainImageCount, pSwapChainImages);
52356 }
52457
52558 SwapChainVkImpl::~SwapChainVkImpl()
52659 {
527 if (m_VkSwapChain != VK_NULL_HANDLE)
528 {
529 auto pDeviceContext = m_wpDeviceContext.Lock();
530 auto* pImmediateCtxVk = pDeviceContext.RawPtr<DeviceContextVkImpl>();
531 ReleaseSwapChainResources(pImmediateCtxVk, /*DestroyVkSwapChain=*/true);
532 VERIFY_EXPR(m_VkSwapChain == VK_NULL_HANDLE);
533 }
534
535 if (m_VkSurface != VK_NULL_HANDLE)
536 {
537 vkDestroySurfaceKHR(m_VulkanInstance->GetVkInstance(), m_VkSurface, NULL);
538 }
539 }
540
541 void SwapChainVkImpl::InitBuffersAndViews()
60 auto pDeviceContext = m_wpDeviceContext.Lock();
61 auto* pImmediateCtxVk = pDeviceContext.RawPtr<DeviceContextVkImpl>();
62 ReleaseSwapChainResources(pImmediateCtxVk);
63 }
64
65 void SwapChainVkImpl::InitBuffersAndViews(uint32_t SwapChainImageCount, const VkImage* pSwapChainImages)
54266 {
54367 auto* pDeviceVkImpl = m_pRenderDevice.RawPtr<RenderDeviceVkImpl>();
54468 auto LogicalVkDevice = pDeviceVkImpl->GetVkDevice();
54569
546 #ifdef DILIGENT_DEBUG
547 {
548 uint32_t swapchainImageCount = 0;
549 auto err = vkGetSwapchainImagesKHR(LogicalVkDevice, m_VkSwapChain, &swapchainImageCount, NULL);
550 VERIFY_EXPR(err == VK_SUCCESS);
551 VERIFY(swapchainImageCount == m_SwapChainDesc.BufferCount, "Unexpected swap chain buffer count");
552 }
553 #endif
554
55570 m_pBackBufferRTV.resize(m_SwapChainDesc.BufferCount);
55671 m_SwapChainImagesInitialized.resize(m_pBackBufferRTV.size(), false);
557 m_ImageAcquiredFenceSubmitted.resize(m_pBackBufferRTV.size(), false);
558
559 uint32_t swapchainImageCount = m_SwapChainDesc.BufferCount;
560 std::vector<VkImage> swapchainImages(swapchainImageCount);
561 auto err = vkGetSwapchainImagesKHR(LogicalVkDevice, m_VkSwapChain, &swapchainImageCount, swapchainImages.data());
562 CHECK_VK_ERROR_AND_THROW(err, "Failed to get swap chain images");
563 VERIFY_EXPR(swapchainImageCount == swapchainImages.size());
564
565 for (uint32_t i = 0; i < swapchainImageCount; i++)
72
73 for (uint32_t i = 0; i < SwapChainImageCount; i++)
56674 {
56775 TextureDesc BackBufferDesc;
56876 std::stringstream name_ss;
57785 BackBufferDesc.MipLevels = 1;
57886
57987 RefCntAutoPtr<TextureVkImpl> pBackBufferTex;
580 m_pRenderDevice.RawPtr<RenderDeviceVkImpl>()->CreateTexture(BackBufferDesc, swapchainImages[i], RESOURCE_STATE_UNDEFINED, &pBackBufferTex);
88 m_pRenderDevice.RawPtr<RenderDeviceVkImpl>()->CreateTexture(BackBufferDesc, pSwapChainImages[i], RESOURCE_STATE_UNDEFINED, &pBackBufferTex);
58189
58290 TextureViewDesc RTVDesc;
58391 RTVDesc.ViewType = TEXTURE_VIEW_RENDER_TARGET;
608116 }
609117 }
610118
611 VkResult SwapChainVkImpl::AcquireNextImage(DeviceContextVkImpl* pDeviceCtxVk)
612 {
613 auto* pDeviceVk = m_pRenderDevice.RawPtr<RenderDeviceVkImpl>();
614 const auto& LogicalDevice = pDeviceVk->GetLogicalDevice();
615
616 // Applications should not rely on vkAcquireNextImageKHR blocking in order to
617 // meter their rendering speed. The implementation may return from this function
618 // immediately regardless of how many presentation requests are queued, and regardless
619 // of when queued presentation requests will complete relative to the call. Instead,
620 // applications can use a fence to meter their frame generation work to match the
621 // presentation rate.
119 VkResult SwapChainVkImpl::AcquireNextImage()
120 {
121 auto pDeviceContext = m_wpDeviceContext.Lock();
122 if (!pDeviceContext)
123 {
124 LOG_ERROR_MESSAGE("Immediate context has been released");
125 return VK_ERROR_UNKNOWN;
126 }
127
128 auto* pDeviceCtxVk = pDeviceContext.RawPtr<DeviceContextVkImpl>();
129 auto* pDeviceVk = m_pRenderDevice.RawPtr<RenderDeviceVkImpl>();
130
622131
623132 // Explicitly make sure that there are no more pending frames in the command queue
624133 // than the number of the swap chain images.
633142 // When acquiring swap chain image for frame N, we need to make sure that
634143 // frame N-Nsc has completed. To achieve that, we wait for the image acquire
635144 // fence for frame N-Nsc-1. Thus we will have no more than Nsc frames in the queue.
636 auto OldestSubmittedImageFenceInd = (m_SemaphoreIndex + 1u) % static_cast<Uint32>(m_ImageAcquiredFenceSubmitted.size());
637 if (m_ImageAcquiredFenceSubmitted[OldestSubmittedImageFenceInd])
638 {
639 VkFence OldestSubmittedFence = m_ImageAcquiredFences[OldestSubmittedImageFenceInd];
640 if (LogicalDevice.GetFenceStatus(OldestSubmittedFence) == VK_NOT_READY)
641 {
642 auto res = LogicalDevice.WaitForFences(1, &OldestSubmittedFence, VK_TRUE, UINT64_MAX);
643 VERIFY_EXPR(res == VK_SUCCESS);
644 (void)res;
645 }
646 LogicalDevice.ResetFence(OldestSubmittedFence);
647 m_ImageAcquiredFenceSubmitted[OldestSubmittedImageFenceInd] = false;
648 }
649
650 VkFence ImageAcquiredFence = m_ImageAcquiredFences[m_SemaphoreIndex];
651 VkSemaphore ImageAcquiredSemaphore = m_ImageAcquiredSemaphores[m_SemaphoreIndex]->Get();
652
653 auto res = vkAcquireNextImageKHR(LogicalDevice.GetVkDevice(), m_VkSwapChain, UINT64_MAX, ImageAcquiredSemaphore, ImageAcquiredFence, &m_BackBufferIndex);
654
655 m_ImageAcquiredFenceSubmitted[m_SemaphoreIndex] = (res == VK_SUCCESS);
145
146 pDeviceVk->LockCommandQueue(0);
147 VkResult res = m_AcquireImageCallback(m_Cookie, &m_BackBufferIndex);
148 pDeviceVk->UnlockCommandQueue(0);
149
656150 if (res == VK_SUCCESS)
657151 {
658152 // Next command in the device context must wait for the next image to be acquired.
659153 // Unlike fences or events, the act of waiting for a semaphore also unsignals that semaphore (6.4.2).
660154 // Swapchain image may be used as render target or as destination for copy command.
661 pDeviceCtxVk->AddWaitSemaphore(m_ImageAcquiredSemaphores[m_SemaphoreIndex], VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT);
662155 if (!m_SwapChainImagesInitialized[m_BackBufferIndex])
663156 {
664157 // Vulkan validation layers do not like uninitialized memory.
681174 if (SyncInterval != 0 && SyncInterval != 1)
682175 LOG_WARNING_MESSAGE_ONCE("Vulkan only supports 0 and 1 present intervals");
683176
177 PresentImage();
178 }
179
180 void SwapChainVkImpl::PresentImage()
181 {
684182 auto pDeviceContext = m_wpDeviceContext.Lock();
685183 if (!pDeviceContext)
686184 {
697195 if (!m_IsMinimized)
698196 {
699197 // TransitionImageLayout() never triggers flush
700 pImmediateCtxVk->TransitionImageLayout(pBackBuffer, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
198 pImmediateCtxVk->TransitionImageLayout(pBackBuffer, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
701199 // The context can be empty if no render commands were issued by the app
702200 //VERIFY(pImmediateCtxVk->GetNumCommandsInCtx() != 0, "The context must not be flushed");
703 pImmediateCtxVk->AddSignalSemaphore(m_DrawCompleteSemaphores[m_SemaphoreIndex]);
704201 }
705202
706203 pImmediateCtxVk->Flush();
707204
708205 if (!m_IsMinimized)
709206 {
710 VkPresentInfoKHR PresentInfo = {};
711
712 PresentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
713 PresentInfo.pNext = nullptr;
714 PresentInfo.waitSemaphoreCount = 1;
715 // Unlike fences or events, the act of waiting for a semaphore also unsignals that semaphore (6.4.2)
716 VkSemaphore WaitSemaphore[] = {m_DrawCompleteSemaphores[m_SemaphoreIndex]->Get()};
717 PresentInfo.pWaitSemaphores = WaitSemaphore;
718 PresentInfo.swapchainCount = 1;
719 PresentInfo.pSwapchains = &m_VkSwapChain;
720 PresentInfo.pImageIndices = &m_BackBufferIndex;
721 VkResult Result = VK_SUCCESS;
722 PresentInfo.pResults = &Result;
723 pDeviceVk->LockCmdQueueAndRun(
724 0,
725 [&PresentInfo](ICommandQueueVk* pCmdQueueVk) //
726 {
727 pCmdQueueVk->Present(PresentInfo);
728 } //
729 );
730
731 if (Result == VK_SUBOPTIMAL_KHR || Result == VK_ERROR_OUT_OF_DATE_KHR)
732 {
733 RecreateVulkanSwapchain(pImmediateCtxVk);
734 m_SemaphoreIndex = m_SwapChainDesc.BufferCount - 1; // To start with index 0 when acquiring the next image
735 }
736 else
737 {
738 DEV_CHECK_ERR(Result == VK_SUCCESS, "Present failed");
739 }
207 pDeviceVk->LockCommandQueue(0);
208 m_ReleaseImageCallback(m_Cookie);
209 pDeviceVk->UnlockCommandQueue(0);
740210 }
741211
742212 if (m_SwapChainDesc.IsPrimary)
744214 pImmediateCtxVk->FinishFrame();
745215 pDeviceVk->ReleaseStaleResources();
746216 }
747
748 if (!m_IsMinimized)
749 {
750 ++m_SemaphoreIndex;
751 if (m_SemaphoreIndex >= m_SwapChainDesc.BufferCount)
752 m_SemaphoreIndex = 0;
753
754 bool EnableVSync = SyncInterval != 0;
755
756 auto res = (m_VSyncEnabled == EnableVSync) ? AcquireNextImage(pImmediateCtxVk) : VK_ERROR_OUT_OF_DATE_KHR;
757 if (res == VK_SUBOPTIMAL_KHR || res == VK_ERROR_OUT_OF_DATE_KHR)
758 {
759 m_VSyncEnabled = EnableVSync;
760 RecreateVulkanSwapchain(pImmediateCtxVk);
761 m_SemaphoreIndex = m_SwapChainDesc.BufferCount - 1; // To start with index 0 when acquiring the next image
762
763 res = AcquireNextImage(pImmediateCtxVk);
764 }
765 DEV_CHECK_ERR(res == VK_SUCCESS, "Failed to acquire next swap chain image");
766 }
767 }
768
769 void SwapChainVkImpl::WaitForImageAcquiredFences()
770 {
771 const auto& LogicalDevice = m_pRenderDevice.RawPtr<RenderDeviceVkImpl>()->GetLogicalDevice();
772 for (size_t i = 0; i < m_ImageAcquiredFences.size(); ++i)
773 {
774 if (m_ImageAcquiredFenceSubmitted[i])
775 {
776 VkFence vkFence = m_ImageAcquiredFences[i];
777 if (LogicalDevice.GetFenceStatus(vkFence) == VK_NOT_READY)
778 LogicalDevice.WaitForFences(1, &vkFence, VK_TRUE, UINT64_MAX);
779 }
780 }
781 }
782
783 void SwapChainVkImpl::ReleaseSwapChainResources(DeviceContextVkImpl* pImmediateCtxVk, bool DestroyVkSwapChain)
784 {
785 if (m_VkSwapChain == VK_NULL_HANDLE)
786 return;
787
217 }
218
219 void SwapChainVkImpl::ReleaseSwapChainResources(DeviceContextVkImpl* pImmediateCtxVk)
220 {
788221 if (pImmediateCtxVk != nullptr)
789222 {
790223 // Flush to submit all pending commands and semaphores to the queue.
809242 // m_pBackBufferRTV[].
810243 pDeviceVk->IdleGPU();
811244
812 // We need to explicitly wait for all submitted Image Acquired Fences to signal.
813 // Just idling the GPU is not enough and results in validation warnings.
814 // As a matter of fact, it is only required to check the fence status.
815 WaitForImageAcquiredFences();
816
817245 // All references to the swap chain must be released before it can be destroyed
818246 m_pBackBufferRTV.clear();
819247 m_SwapChainImagesInitialized.clear();
820 m_ImageAcquiredFenceSubmitted.clear();
821248 m_pDepthBufferDSV.Release();
822
823 // We must wait until the GPU is idle before destroying the fences, as they
824 // are destroyed immediately. The semaphores are managed and will be kept alive
825 // by the device context they are submitted to.
826 m_ImageAcquiredSemaphores.clear();
827 m_DrawCompleteSemaphores.clear();
828 m_ImageAcquiredFences.clear();
829 m_SemaphoreIndex = 0;
830
831 if (DestroyVkSwapChain)
832 {
833 vkDestroySwapchainKHR(pDeviceVk->GetVkDevice(), m_VkSwapChain, NULL);
834 m_VkSwapChain = VK_NULL_HANDLE;
835 }
836 }
837
838 void SwapChainVkImpl::RecreateVulkanSwapchain(DeviceContextVkImpl* pImmediateCtxVk)
839 {
840 // Do not destroy Vulkan swap chain as we will use it as oldSwapchain parameter.
841 ReleaseSwapChainResources(pImmediateCtxVk, /*DestroyVkSwapChain*/ false);
842
843 // Check if the surface is lost
844 {
845 RenderDeviceVkImpl* pDeviceVk = m_pRenderDevice.RawPtr<RenderDeviceVkImpl>();
846 const auto vkDeviceHandle = pDeviceVk->GetPhysicalDevice().GetVkDeviceHandle();
847
848 VkSurfaceCapabilitiesKHR surfCapabilities;
849 // Call vkGetPhysicalDeviceSurfaceCapabilitiesKHR only to check the return code
850 auto err = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(vkDeviceHandle, m_VkSurface, &surfCapabilities);
851 if (err == VK_ERROR_SURFACE_LOST_KHR)
852 {
853 // Destroy the swap chain associated with the surface
854 if (m_VkSwapChain != VK_NULL_HANDLE)
855 {
856 vkDestroySwapchainKHR(pDeviceVk->GetVkDevice(), m_VkSwapChain, NULL);
857 m_VkSwapChain = VK_NULL_HANDLE;
858 }
859
860 // Recreate the surface
861 CreateSurface();
862 }
863 }
864
865 CreateVulkanSwapChain();
866 InitBuffersAndViews();
867249 }
868250
869251 void SwapChainVkImpl::Resize(Uint32 NewWidth, Uint32 NewHeight, SURFACE_TRANSFORM NewPreTransform)
870252 {
871 bool RecreateSwapChain = false;
872
873 #if PLATFORM_ANDROID
874 if (m_VkSurface != VK_NULL_HANDLE)
875 {
876 // Check orientation
877 const auto* pRenderDeviceVk = m_pRenderDevice.RawPtr<const RenderDeviceVkImpl>();
878 const auto& PhysicalDevice = pRenderDeviceVk->GetPhysicalDevice();
879 const auto vkDeviceHandle = PhysicalDevice.GetVkDeviceHandle();
880
881 VkSurfaceCapabilitiesKHR surfCapabilities = {};
882
883 auto err = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(vkDeviceHandle, m_VkSurface, &surfCapabilities);
884 if (err == VK_SUCCESS)
885 {
886 if (m_CurrentSurfaceTransform != surfCapabilities.currentTransform)
887 {
888 // Surface orientation has changed - we need to recreate the swap chain
889 RecreateSwapChain = true;
890 }
891
892 constexpr auto Rotate90TransformFlags =
893 VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR |
894 VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR |
895 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR |
896 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR;
897
898 if (NewWidth == 0 || NewHeight == 0)
899 {
900 NewWidth = m_SurfaceIdentityExtent.width;
901 NewHeight = m_SurfaceIdentityExtent.height;
902
903 if ((surfCapabilities.currentTransform & Rotate90TransformFlags) != 0)
904 {
905 // Swap to get logical dimensions as input NewWidth and NewHeight are
906 // expected to be logical sizes.
907 std::swap(NewWidth, NewHeight);
908 }
909 }
910
911 if (NewPreTransform == SURFACE_TRANSFORM_OPTIMAL)
912 {
913 if ((surfCapabilities.currentTransform & Rotate90TransformFlags) != 0)
914 {
915 // Swap to get physical dimensions
916 std::swap(NewWidth, NewHeight);
917 }
918 }
919 else
920 {
921 // Swap if necessary to get desired sizes after pre-transform
922 if (NewPreTransform == SURFACE_TRANSFORM_ROTATE_90 ||
923 NewPreTransform == SURFACE_TRANSFORM_ROTATE_270 ||
924 NewPreTransform == SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90 ||
925 NewPreTransform == SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270)
926 {
927 std::swap(NewWidth, NewHeight);
928 }
929 }
930 }
931 else
932 {
933 LOG_ERROR_MESSAGE(err, "Failed to query physical device surface capabilities");
934 }
935 }
936 #endif
937
938 if (TSwapChainBase::Resize(NewWidth, NewHeight, NewPreTransform))
939 RecreateSwapChain = true;
940
941 if (RecreateSwapChain)
942 {
943 auto pDeviceContext = m_wpDeviceContext.Lock();
944 VERIFY(pDeviceContext, "Immediate context has been released");
945 if (pDeviceContext)
946 {
947 try
948 {
949 auto* pImmediateCtxVk = pDeviceContext.RawPtr<DeviceContextVkImpl>();
950 // RecreateVulkanSwapchain() unbinds default FB
951 RecreateVulkanSwapchain(pImmediateCtxVk);
952
953 auto res = AcquireNextImage(pImmediateCtxVk);
954 DEV_CHECK_ERR(res == VK_SUCCESS, "Failed to acquire next image for the just resized swap chain");
955 (void)res;
956 }
957 catch (const std::runtime_error&)
958 {
959 LOG_ERROR("Failed to resize the swap chain");
960 }
961 }
962 }
963
964253 m_IsMinimized = (NewWidth == 0 && NewHeight == 0);
965254 }
966255