1#if defined(CONF_BACKEND_VULKAN)
2
3#include <base/dbg.h>
4#include <base/log.h>
5#include <base/math.h>
6#include <base/mem.h>
7#include <base/str.h>
8#include <base/time.h>
9
10#include <engine/client/backend/backend_base.h>
11#include <engine/client/backend/vulkan/backend_vulkan.h>
12#include <engine/client/backend_sdl.h>
13#include <engine/client/graphics_threaded.h>
14#include <engine/gfx/image_manipulation.h>
15#include <engine/graphics.h>
16#include <engine/shared/config.h>
17#include <engine/shared/localization.h>
18#include <engine/storage.h>
19
20#include <SDL_video.h>
21#include <SDL_vulkan.h>
22#include <vulkan/vk_platform.h>
23#include <vulkan/vulkan_core.h>
24
25#include <algorithm>
26#include <array>
27#include <condition_variable>
28#include <cstddef>
29#include <cstdlib>
30#include <functional>
31#include <limits>
32#include <map>
33#include <memory>
34#include <mutex>
35#include <optional>
36#include <set>
37#include <string>
38#include <thread>
39#include <unordered_map>
40#include <utility>
41#include <vector>
42
43#ifndef VK_API_VERSION_MAJOR
44#define VK_API_VERSION_MAJOR VK_VERSION_MAJOR
45#define VK_API_VERSION_MINOR VK_VERSION_MINOR
46#define VK_API_VERSION_PATCH VK_VERSION_PATCH
47#endif
48
49using namespace std::chrono_literals;
50
51class CCommandProcessorFragment_Vulkan : public CCommandProcessorFragment_GLBase
52{
	// usage categories for device memory allocations; used to pick the right
	// cache/counter and for verbose log output (see MemoryUsageName)
	enum EMemoryBlockUsage
	{
		MEMORY_BLOCK_USAGE_TEXTURE = 0,
		MEMORY_BLOCK_USAGE_BUFFER,
		MEMORY_BLOCK_USAGE_STREAM,
		MEMORY_BLOCK_USAGE_STAGING,

		// whenever dummy is used, make sure to deallocate all memory
		MEMORY_BLOCK_USAGE_DUMMY,
	};
63
64 [[nodiscard]] bool IsVerbose()
65 {
66 return g_Config.m_DbgGfx == DEBUG_GFX_MODE_VERBOSE || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL;
67 }
68
69 static const char *MemoryUsageName(EMemoryBlockUsage MemUsage)
70 {
71 switch(MemUsage)
72 {
73 case MEMORY_BLOCK_USAGE_TEXTURE:
74 return "texture";
75 case MEMORY_BLOCK_USAGE_BUFFER:
76 return "buffer";
77 case MEMORY_BLOCK_USAGE_STREAM:
78 return "stream";
79 case MEMORY_BLOCK_USAGE_STAGING:
80 return "staging buffer";
81 default:
82 dbg_assert_failed("Invalid MemUsage: %d", (int)MemUsage);
83 }
84 }
85
86 void VerboseAllocatedMemory(VkDeviceSize Size, size_t FrameImageIndex, EMemoryBlockUsage MemUsage) const
87 {
88 log_debug("gfx/vulkan", "Allocated chunk of memory with size %" PRIzu " for frame %" PRIzu " (%s).",
89 (size_t)Size, (size_t)m_CurImageIndex, MemoryUsageName(MemUsage));
90 }
91
92 void VerboseDeallocatedMemory(VkDeviceSize Size, size_t FrameImageIndex, EMemoryBlockUsage MemUsage) const
93 {
94 log_debug("gfx/vulkan", "Deallocated chunk of memory with size %" PRIzu " for frame %" PRIzu " (%s).",
95 (size_t)Size, (size_t)m_CurImageIndex, MemoryUsageName(MemUsage));
96 }
97
98 /************************
99 * STRUCT DEFINITIONS
100 ************************/
101
	// ids used as the template argument of SMemoryBlock/SMemoryBlockCache to
	// keep the different cache instantiations as distinct types
	static constexpr size_t STAGING_BUFFER_CACHE_ID = 0;
	static constexpr size_t STAGING_BUFFER_IMAGE_CACHE_ID = 1;
	static constexpr size_t VERTEX_BUFFER_CACHE_ID = 2;
	static constexpr size_t IMAGE_BUFFER_CACHE_ID = 3;
106
	// one device memory allocation together with its size and usage category
	struct SDeviceMemoryBlock
	{
		VkDeviceMemory m_Mem = VK_NULL_HANDLE;
		VkDeviceSize m_Size = 0;
		// NOTE(review): intentionally not default-initialized — presumably always
		// set by the allocating code; confirm before relying on it
		EMemoryBlockUsage m_UsageType;
	};
113
	struct SDeviceDescriptorPools;

	// a descriptor set plus a back-reference to the pool list (and the index of
	// the pool within it) it was allocated from; max() marks "no pool assigned"
	struct SDeviceDescriptorSet
	{
		VkDescriptorSet m_Descriptor = VK_NULL_HANDLE;
		SDeviceDescriptorPools *m_pPools = nullptr;
		size_t m_PoolIndex = std::numeric_limits<size_t>::max();
	};
122
123 struct SDeviceDescriptorPool
124 {
125 VkDescriptorPool m_Pool;
126 VkDeviceSize m_Size = 0;
127 VkDeviceSize m_CurSize = 0;
128 };
129
	// a growable list of descriptor pools; new pools are created with
	// m_DefaultAllocSize descriptors when the existing ones are exhausted
	struct SDeviceDescriptorPools
	{
		std::vector<SDeviceDescriptorPool> m_vPools;
		VkDeviceSize m_DefaultAllocSize = 0;
		// uniform-buffer pools are handled separately from image-sampler pools
		bool m_IsUniformPool = false;
	};
136
137 // some mix of queue and binary tree
138 struct SMemoryHeap
139 {
140 struct SMemoryHeapElement;
141 struct SMemoryHeapQueueElement
142 {
143 size_t m_AllocationSize;
144 // only useful information for the heap
145 size_t m_OffsetInHeap;
146 // useful for the user of this element
147 size_t m_OffsetToAlign;
148 SMemoryHeapElement *m_pElementInHeap;
149 [[nodiscard]] bool operator>(const SMemoryHeapQueueElement &Other) const { return m_AllocationSize > Other.m_AllocationSize; }
150 // respects alignment requirements
151 constexpr bool CanFitAllocation(size_t AllocSize, size_t AllocAlignment) const
152 {
153 size_t ExtraSizeAlign = m_OffsetInHeap % AllocAlignment;
154 if(ExtraSizeAlign != 0)
155 ExtraSizeAlign = AllocAlignment - ExtraSizeAlign;
156 size_t RealAllocSize = AllocSize + ExtraSizeAlign;
157 return m_AllocationSize >= RealAllocSize;
158 }
159 };
160
161 typedef std::multiset<SMemoryHeapQueueElement, std::greater<>> TMemoryHeapQueue;
162
163 struct SMemoryHeapElement
164 {
165 size_t m_AllocationSize;
166 size_t m_Offset;
167 SMemoryHeapElement *m_pParent;
168 std::unique_ptr<SMemoryHeapElement> m_pLeft;
169 std::unique_ptr<SMemoryHeapElement> m_pRight;
170
171 bool m_InUse;
172 TMemoryHeapQueue::iterator m_InQueue;
173 };
174
175 SMemoryHeapElement m_Root;
176 TMemoryHeapQueue m_Elements;
177
178 void Init(size_t Size, size_t Offset)
179 {
180 m_Root.m_AllocationSize = Size;
181 m_Root.m_Offset = Offset;
182 m_Root.m_pParent = nullptr;
183 m_Root.m_InUse = false;
184
185 SMemoryHeapQueueElement QueueEl;
186 QueueEl.m_AllocationSize = Size;
187 QueueEl.m_OffsetInHeap = Offset;
188 QueueEl.m_OffsetToAlign = Offset;
189 QueueEl.m_pElementInHeap = &m_Root;
190 m_Root.m_InQueue = m_Elements.insert(x: QueueEl);
191 }
192
193 [[nodiscard]] bool Allocate(size_t AllocSize, size_t AllocAlignment, SMemoryHeapQueueElement &AllocatedMemory)
194 {
195 if(m_Elements.empty())
196 {
197 return false;
198 }
199 else
200 {
201 // check if there is enough space in this instance
202 if(!m_Elements.begin()->CanFitAllocation(AllocSize, AllocAlignment))
203 {
204 return false;
205 }
206 else
207 {
208 // see SMemoryHeapQueueElement::operator>
209 SMemoryHeapQueueElement FindAllocSize;
210 FindAllocSize.m_AllocationSize = AllocSize;
211 // find upper bound for a allocation size
212 auto Upper = m_Elements.upper_bound(x: FindAllocSize);
213 // then find the first entry that respects alignment, this is a linear search!
214 auto FoundEl = m_Elements.rend();
215 for(auto AllocIterator = std::make_reverse_iterator(i: Upper); AllocIterator != m_Elements.rend(); ++AllocIterator)
216 {
217 if(AllocIterator->CanFitAllocation(AllocSize, AllocAlignment))
218 {
219 FoundEl = AllocIterator;
220 break;
221 }
222 }
223
224 auto TopEl = *FoundEl;
225 m_Elements.erase(position: TopEl.m_pElementInHeap->m_InQueue);
226
227 TopEl.m_pElementInHeap->m_InUse = true;
228
229 // calculate the real alloc size + alignment offset
230 size_t ExtraSizeAlign = TopEl.m_OffsetInHeap % AllocAlignment;
231 if(ExtraSizeAlign != 0)
232 ExtraSizeAlign = AllocAlignment - ExtraSizeAlign;
233 size_t RealAllocSize = AllocSize + ExtraSizeAlign;
234
235 // the heap element gets children
236 TopEl.m_pElementInHeap->m_pLeft = std::make_unique<SMemoryHeapElement>();
237 TopEl.m_pElementInHeap->m_pLeft->m_AllocationSize = RealAllocSize;
238 TopEl.m_pElementInHeap->m_pLeft->m_Offset = TopEl.m_OffsetInHeap;
239 TopEl.m_pElementInHeap->m_pLeft->m_pParent = TopEl.m_pElementInHeap;
240 TopEl.m_pElementInHeap->m_pLeft->m_InUse = true;
241
242 if(RealAllocSize < TopEl.m_AllocationSize)
243 {
244 SMemoryHeapQueueElement RemainingEl;
245 RemainingEl.m_OffsetInHeap = TopEl.m_OffsetInHeap + RealAllocSize;
246 RemainingEl.m_AllocationSize = TopEl.m_AllocationSize - RealAllocSize;
247
248 TopEl.m_pElementInHeap->m_pRight = std::make_unique<SMemoryHeapElement>();
249 TopEl.m_pElementInHeap->m_pRight->m_AllocationSize = RemainingEl.m_AllocationSize;
250 TopEl.m_pElementInHeap->m_pRight->m_Offset = RemainingEl.m_OffsetInHeap;
251 TopEl.m_pElementInHeap->m_pRight->m_pParent = TopEl.m_pElementInHeap;
252 TopEl.m_pElementInHeap->m_pRight->m_InUse = false;
253
254 RemainingEl.m_pElementInHeap = TopEl.m_pElementInHeap->m_pRight.get();
255 RemainingEl.m_pElementInHeap->m_InQueue = m_Elements.insert(x: RemainingEl);
256 }
257
258 AllocatedMemory.m_pElementInHeap = TopEl.m_pElementInHeap->m_pLeft.get();
259 AllocatedMemory.m_AllocationSize = RealAllocSize;
260 AllocatedMemory.m_OffsetInHeap = TopEl.m_OffsetInHeap;
261 AllocatedMemory.m_OffsetToAlign = TopEl.m_OffsetInHeap + ExtraSizeAlign;
262 return true;
263 }
264 }
265 }
266
267 void Free(const SMemoryHeapQueueElement &AllocatedMemory)
268 {
269 bool ContinueFree = true;
270 SMemoryHeapQueueElement ThisEl = AllocatedMemory;
271 while(ContinueFree)
272 {
273 // first check if the other block is in use, if not merge them again
274 SMemoryHeapElement *pThisHeapObj = ThisEl.m_pElementInHeap;
275 SMemoryHeapElement *pThisParent = pThisHeapObj->m_pParent;
276 pThisHeapObj->m_InUse = false;
277 SMemoryHeapElement *pOtherHeapObj = nullptr;
278 if(pThisParent != nullptr && pThisHeapObj == pThisParent->m_pLeft.get())
279 pOtherHeapObj = pThisHeapObj->m_pParent->m_pRight.get();
280 else if(pThisParent != nullptr)
281 pOtherHeapObj = pThisHeapObj->m_pParent->m_pLeft.get();
282
283 if((pThisParent != nullptr && pOtherHeapObj == nullptr) || (pOtherHeapObj != nullptr && !pOtherHeapObj->m_InUse))
284 {
285 // merge them
286 if(pOtherHeapObj != nullptr)
287 {
288 m_Elements.erase(position: pOtherHeapObj->m_InQueue);
289 pOtherHeapObj->m_InUse = false;
290 }
291
292 SMemoryHeapQueueElement ParentEl;
293 ParentEl.m_OffsetInHeap = pThisParent->m_Offset;
294 ParentEl.m_AllocationSize = pThisParent->m_AllocationSize;
295 ParentEl.m_pElementInHeap = pThisParent;
296
297 pThisParent->m_pLeft = nullptr;
298 pThisParent->m_pRight = nullptr;
299
300 ThisEl = ParentEl;
301 }
302 else
303 {
304 // else just put this back into queue
305 ThisEl.m_pElementInHeap->m_InQueue = m_Elements.insert(x: ThisEl);
306 ContinueFree = false;
307 }
308 }
309 }
310
311 [[nodiscard]] bool IsUnused() const
312 {
313 return !m_Root.m_InUse;
314 }
315 };
316
	// a suballocation handed out by a cache; Id selects which cache type it
	// belongs to (see the *_CACHE_ID constants)
	template<size_t Id>
	struct SMemoryBlock
	{
		// handle into the owning SMemoryHeap, needed to free the block again
		SMemoryHeap::SMemoryHeapQueueElement m_HeapData;

		VkDeviceSize m_UsedSize;

		// optional
		VkBuffer m_Buffer;

		SDeviceMemoryBlock m_BufferMem;
		void *m_pMappedBuffer;

		// whether this block came from a cached heap; if so m_pHeap is the heap
		// it must be returned to (see SMemoryBlockCache::Cleanup)
		bool m_IsCached;
		SMemoryHeap *m_pHeap;
	};
333
	// memory block for images; additionally remembers the memory type bits the
	// image was allocated with
	template<size_t Id>
	struct SMemoryImageBlock : public SMemoryBlock<Id>
	{
		uint32_t m_ImageMemoryBits;
	};
339
340 template<size_t Id>
341 struct SMemoryBlockCache
342 {
343 struct SMemoryCacheType
344 {
345 struct SMemoryCacheHeap
346 {
347 SMemoryHeap m_Heap;
348 VkBuffer m_Buffer;
349
350 SDeviceMemoryBlock m_BufferMem;
351 void *m_pMappedBuffer;
352 };
353 std::vector<SMemoryCacheHeap *> m_vpMemoryHeaps;
354 };
355 SMemoryCacheType m_MemoryCaches;
356 std::vector<std::vector<SMemoryBlock<Id>>> m_vvFrameDelayedCachedBufferCleanup;
357
358 bool m_CanShrink = false;
359
360 void Init(size_t SwapChainImageCount)
361 {
362 m_vvFrameDelayedCachedBufferCleanup.resize(SwapChainImageCount);
363 }
364
365 void DestroyFrameData(size_t ImageCount)
366 {
367 for(size_t i = 0; i < ImageCount; ++i)
368 Cleanup(ImgIndex: i);
369 m_vvFrameDelayedCachedBufferCleanup.clear();
370 }
371
372 void Destroy(VkDevice &Device)
373 {
374 for(auto HeapIterator = m_MemoryCaches.m_vpMemoryHeaps.begin(); HeapIterator != m_MemoryCaches.m_vpMemoryHeaps.end();)
375 {
376 auto *pHeap = *HeapIterator;
377 if(pHeap->m_pMappedBuffer != nullptr)
378 vkUnmapMemory(Device, pHeap->m_BufferMem.m_Mem);
379 if(pHeap->m_Buffer != VK_NULL_HANDLE)
380 vkDestroyBuffer(Device, pHeap->m_Buffer, nullptr);
381 vkFreeMemory(Device, pHeap->m_BufferMem.m_Mem, nullptr);
382
383 delete pHeap;
384 HeapIterator = m_MemoryCaches.m_vpMemoryHeaps.erase(HeapIterator);
385 }
386
387 m_MemoryCaches.m_vpMemoryHeaps.clear();
388 m_vvFrameDelayedCachedBufferCleanup.clear();
389 }
390
391 void Cleanup(size_t ImgIndex)
392 {
393 for(auto &MemBlock : m_vvFrameDelayedCachedBufferCleanup[ImgIndex])
394 {
395 MemBlock.m_UsedSize = 0;
396 MemBlock.m_pHeap->Free(MemBlock.m_HeapData);
397
398 m_CanShrink = true;
399 }
400 m_vvFrameDelayedCachedBufferCleanup[ImgIndex].clear();
401 }
402
403 void FreeMemBlock(SMemoryBlock<Id> &Block, size_t ImgIndex)
404 {
405 m_vvFrameDelayedCachedBufferCleanup[ImgIndex].push_back(Block);
406 }
407
408 // returns the total free'd memory
409 size_t Shrink(VkDevice &Device)
410 {
411 size_t FreedMemory = 0;
412 if(m_CanShrink)
413 {
414 m_CanShrink = false;
415 if(m_MemoryCaches.m_vpMemoryHeaps.size() > 1)
416 {
417 for(auto HeapIterator = m_MemoryCaches.m_vpMemoryHeaps.begin(); HeapIterator != m_MemoryCaches.m_vpMemoryHeaps.end();)
418 {
419 auto *pHeap = *HeapIterator;
420 if(pHeap->m_Heap.IsUnused())
421 {
422 if(pHeap->m_pMappedBuffer != nullptr)
423 vkUnmapMemory(Device, pHeap->m_BufferMem.m_Mem);
424 if(pHeap->m_Buffer != VK_NULL_HANDLE)
425 vkDestroyBuffer(Device, pHeap->m_Buffer, nullptr);
426 vkFreeMemory(Device, pHeap->m_BufferMem.m_Mem, nullptr);
427 FreedMemory += pHeap->m_BufferMem.m_Size;
428
429 delete pHeap;
430 HeapIterator = m_MemoryCaches.m_vpMemoryHeaps.erase(HeapIterator);
431 if(m_MemoryCaches.m_vpMemoryHeaps.size() == 1)
432 break;
433 }
434 else
435 ++HeapIterator;
436 }
437 }
438 }
439
440 return FreedMemory;
441 }
442 };
443
	// all per-texture state: the 2D image, an optional "3D" variant, samplers
	// and the descriptor sets referencing them
	struct CTexture
	{
		VkImage m_Img = VK_NULL_HANDLE;
		SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> m_ImgMem;
		VkImageView m_ImgView = VK_NULL_HANDLE;
		// two samplers — presumably one per EVulkanBackendAddressModes entry; confirm
		VkSampler m_aSamplers[2] = {VK_NULL_HANDLE, VK_NULL_HANDLE};

		// 3D variant of the texture (used by the 3D textured pipelines)
		VkImage m_Img3D = VK_NULL_HANDLE;
		SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> m_Img3DMem;
		VkImageView m_Img3DView = VK_NULL_HANDLE;
		VkSampler m_Sampler3D = VK_NULL_HANDLE;

		uint32_t m_Width = 0;
		uint32_t m_Height = 0;
		// how often the texture was downscaled (e.g. to fit size limits) — confirm
		uint32_t m_RescaleCount = 0;

		uint32_t m_MipMapCount = 1;

		// descriptor sets for the standard (per sampler), 3D and text pipelines
		std::array<SDeviceDescriptorSet, 2> m_aVKStandardTexturedDescrSets;
		SDeviceDescriptorSet m_VKStandard3DTexturedDescrSet;
		SDeviceDescriptorSet m_VKTextDescrSet;
	};
466
	// GPU-side vertex buffer memory of a buffer object
	struct SBufferObject
	{
		SMemoryBlock<VERTEX_BUFFER_CACHE_ID> m_Mem;
	};

	struct SBufferObjectFrame
	{
		SBufferObject m_BufferObject;

		// since stream buffers can be used the cur buffer should always be used for rendering
		bool m_IsStreamedBuffer = false;
		VkBuffer m_CurBuffer = VK_NULL_HANDLE;
		size_t m_CurBufferOffset = 0;
	};

	struct SBufferContainer
	{
		// presumably an index into m_vBufferObjects; confirm against the users
		int m_BufferObjectIndex;
	};
486
	// one streamed buffer of a frame: the Vulkan buffer and memory, the
	// sub-range currently in use and the host-mapped pointer for writing
	struct SFrameBuffers
	{
		VkBuffer m_Buffer;
		SDeviceMemoryBlock m_BufferMem;
		size_t m_OffsetInBuffer = 0;
		size_t m_Size;
		size_t m_UsedSize;
		uint8_t *m_pMappedBufferData;

		SFrameBuffers(VkBuffer Buffer, SDeviceMemoryBlock BufferMem, size_t OffsetInBuffer, size_t Size, size_t UsedSize, uint8_t *pMappedBufferData) :
			m_Buffer(Buffer), m_BufferMem(BufferMem), m_OffsetInBuffer(OffsetInBuffer), m_Size(Size), m_UsedSize(UsedSize), m_pMappedBufferData(pMappedBufferData)
		{
		}
	};
501
	// streamed uniform buffer; additionally carries two descriptor sets that
	// reference it (presumably the two uniform set layout variants; confirm)
	struct SFrameUniformBuffers : public SFrameBuffers
	{
		std::array<SDeviceDescriptorSet, 2> m_aUniformSets;

		SFrameUniformBuffers(VkBuffer Buffer, SDeviceMemoryBlock BufferMem, size_t OffsetInBuffer, size_t Size, size_t UsedSize, uint8_t *pMappedBufferData) :
			SFrameBuffers(Buffer, BufferMem, OffsetInBuffer, Size, UsedSize, pMappedBufferData) {}
	};
509
510 template<typename TName>
511 struct SStreamMemory
512 {
513 typedef std::vector<std::vector<TName>> TBufferObjectsOfFrame;
514 typedef std::vector<std::vector<VkMappedMemoryRange>> TMemoryMapRangesOfFrame;
515 typedef std::vector<size_t> TStreamUseCount;
516 TBufferObjectsOfFrame m_vvBufferObjectsOfFrame;
517 TMemoryMapRangesOfFrame m_vvBufferObjectsOfFrameRangeData;
518 TStreamUseCount m_vCurrentUsedCount;
519
520 std::vector<TName> &GetBuffers(size_t FrameImageIndex)
521 {
522 return m_vvBufferObjectsOfFrame[FrameImageIndex];
523 }
524
525 std::vector<VkMappedMemoryRange> &GetRanges(size_t FrameImageIndex)
526 {
527 return m_vvBufferObjectsOfFrameRangeData[FrameImageIndex];
528 }
529
530 size_t GetUsedCount(size_t FrameImageIndex)
531 {
532 return m_vCurrentUsedCount[FrameImageIndex];
533 }
534
535 void IncreaseUsedCount(size_t FrameImageIndex)
536 {
537 ++m_vCurrentUsedCount[FrameImageIndex];
538 }
539
540 [[nodiscard]] bool IsUsed(size_t FrameImageIndex)
541 {
542 return GetUsedCount(FrameImageIndex) > 0;
543 }
544
545 void ResetFrame(size_t FrameImageIndex)
546 {
547 m_vCurrentUsedCount[FrameImageIndex] = 0;
548 }
549
550 void Init(size_t FrameImageCount)
551 {
552 m_vvBufferObjectsOfFrame.resize(FrameImageCount);
553 m_vvBufferObjectsOfFrameRangeData.resize(new_size: FrameImageCount);
554 m_vCurrentUsedCount.resize(new_size: FrameImageCount);
555 }
556
557 typedef std::function<void(size_t, TName &)> TDestroyBufferFunc;
558
559 void Destroy(TDestroyBufferFunc &&DestroyBuffer)
560 {
561 size_t ImageIndex = 0;
562 for(auto &vBuffersOfFrame : m_vvBufferObjectsOfFrame)
563 {
564 for(auto &BufferOfFrame : vBuffersOfFrame)
565 {
566 VkDeviceMemory BufferMem = BufferOfFrame.m_BufferMem.m_Mem;
567 DestroyBuffer(ImageIndex, BufferOfFrame);
568
569 // delete similar buffers
570 for(auto &BufferOfFrameDel : vBuffersOfFrame)
571 {
572 if(BufferOfFrameDel.m_BufferMem.m_Mem == BufferMem)
573 {
574 BufferOfFrameDel.m_Buffer = VK_NULL_HANDLE;
575 BufferOfFrameDel.m_BufferMem.m_Mem = VK_NULL_HANDLE;
576 }
577 }
578 }
579 ++ImageIndex;
580 }
581 m_vvBufferObjectsOfFrame.clear();
582 m_vvBufferObjectsOfFrameRangeData.clear();
583 m_vCurrentUsedCount.clear();
584 }
585 };
586
587 struct SShaderModule
588 {
589 VkShaderModule m_VertShaderModule = VK_NULL_HANDLE;
590 VkShaderModule m_FragShaderModule = VK_NULL_HANDLE;
591
592 VkDevice m_VKDevice = VK_NULL_HANDLE;
593
594 ~SShaderModule()
595 {
596 if(m_VKDevice != VK_NULL_HANDLE)
597 {
598 if(m_VertShaderModule != VK_NULL_HANDLE)
599 vkDestroyShaderModule(device: m_VKDevice, shaderModule: m_VertShaderModule, pAllocator: nullptr);
600
601 if(m_FragShaderModule != VK_NULL_HANDLE)
602 vkDestroyShaderModule(device: m_VKDevice, shaderModule: m_FragShaderModule, pAllocator: nullptr);
603 }
604 }
605 };
606
	// sampler address modes the backend creates samplers for
	enum EVulkanBackendAddressModes
	{
		VULKAN_BACKEND_ADDRESS_MODE_REPEAT = 0,
		VULKAN_BACKEND_ADDRESS_MODE_CLAMP_EDGES,

		VULKAN_BACKEND_ADDRESS_MODE_COUNT,
	};

	// blend modes a pipeline can be created with
	enum EVulkanBackendBlendModes
	{
		VULKAN_BACKEND_BLEND_MODE_ALPHA = 0,
		VULKAN_BACKEND_BLEND_MODE_NONE,
		// NOTE(review): "ADDITATIVE" is a typo for "additive", but renaming the
		// identifier would break its users elsewhere in the file
		VULKAN_BACKEND_BLEND_MODE_ADDITATIVE,

		VULKAN_BACKEND_BLEND_MODE_COUNT,
	};

	// whether a pipeline uses dynamic scissor/viewport state
	enum EVulkanBackendClipModes
	{
		VULKAN_BACKEND_CLIP_MODE_NONE = 0,
		VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT,

		VULKAN_BACKEND_CLIP_MODE_COUNT,
	};

	// textured vs. untextured pipeline variants
	enum EVulkanBackendTextureModes
	{
		VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED = 0,
		VULKAN_BACKEND_TEXTURE_MODE_TEXTURED,

		VULKAN_BACKEND_TEXTURE_MODE_COUNT,
	};
639
640 struct SPipelineContainer
641 {
642 // 3 blend modes - 2 viewport & scissor modes - 2 texture modes
643 std::array<std::array<std::array<VkPipelineLayout, VULKAN_BACKEND_TEXTURE_MODE_COUNT>, VULKAN_BACKEND_CLIP_MODE_COUNT>, VULKAN_BACKEND_BLEND_MODE_COUNT> m_aaaPipelineLayouts;
644 std::array<std::array<std::array<VkPipeline, VULKAN_BACKEND_TEXTURE_MODE_COUNT>, VULKAN_BACKEND_CLIP_MODE_COUNT>, VULKAN_BACKEND_BLEND_MODE_COUNT> m_aaaPipelines;
645
646 SPipelineContainer()
647 {
648 for(auto &aaPipeLayouts : m_aaaPipelineLayouts)
649 {
650 for(auto &aPipeLayouts : aaPipeLayouts)
651 {
652 for(auto &PipeLayout : aPipeLayouts)
653 {
654 PipeLayout = VK_NULL_HANDLE;
655 }
656 }
657 }
658 for(auto &aaPipe : m_aaaPipelines)
659 {
660 for(auto &aPipe : aaPipe)
661 {
662 for(auto &Pipe : aPipe)
663 {
664 Pipe = VK_NULL_HANDLE;
665 }
666 }
667 }
668 }
669
670 void Destroy(VkDevice &Device)
671 {
672 for(auto &aaPipeLayouts : m_aaaPipelineLayouts)
673 {
674 for(auto &aPipeLayouts : aaPipeLayouts)
675 {
676 for(auto &PipeLayout : aPipeLayouts)
677 {
678 if(PipeLayout != VK_NULL_HANDLE)
679 vkDestroyPipelineLayout(device: Device, pipelineLayout: PipeLayout, pAllocator: nullptr);
680 PipeLayout = VK_NULL_HANDLE;
681 }
682 }
683 }
684 for(auto &aaPipe : m_aaaPipelines)
685 {
686 for(auto &aPipe : aaPipe)
687 {
688 for(auto &Pipe : aPipe)
689 {
690 if(Pipe != VK_NULL_HANDLE)
691 vkDestroyPipeline(device: Device, pipeline: Pipe, pAllocator: nullptr);
692 Pipe = VK_NULL_HANDLE;
693 }
694 }
695 }
696 }
697 };
698
699 /*******************************
700 * UNIFORM PUSH CONSTANT LAYOUTS
701 ********************************/
702
	// push-constant layout of the standard vertex shader: 4*2 floats of
	// projection/position data (exact interpretation defined by the shader)
	struct SUniformGPos
	{
		float m_aPos[4 * 2];
	};

	// like SUniformGPos plus the texture size, for the text vertex shader
	struct SUniformGTextPos
	{
		float m_aPos[4 * 2];
		float m_TextureSize;
	};

	typedef vec3 SUniformTextGFragmentOffset;

	// fragment-shader constants of the text pipeline
	struct SUniformTextGFragmentConstants
	{
		ColorRGBA m_TextColor;
		ColorRGBA m_TextOutlineColor;
	};

	struct SUniformTextFragment
	{
		SUniformTextGFragmentConstants m_Constants;
	};
726
	// tile pipeline vertex push constants
	struct SUniformTileGPos
	{
		float m_aPos[4 * 2];
	};

	// tile border drawing additionally offsets and scales the tile coordinates
	struct SUniformTileGPosBorder : public SUniformTileGPos
	{
		vec2 m_Offset;
		vec2 m_Scale;
	};

	typedef ColorRGBA SUniformTileGVertColor;

	// pads the combined tile push-constant data from 48 to 64 bytes
	struct SUniformTileGVertColorAlign
	{
		float m_aPad[(64 - 48) / 4];
	};
744
	// "primitives extended" pipeline push constants, rotationless variant
	struct SUniformPrimExGPosRotationless
	{
		float m_aPos[4 * 2];
	};

	// rotated variant: rotates around m_Center by m_Rotation
	struct SUniformPrimExGPos : public SUniformPrimExGPosRotationless
	{
		vec2 m_Center;
		float m_Rotation;
	};

	typedef ColorRGBA SUniformPrimExGVertColor;

	// pads the combined prim-ex push-constant data from 44 to 48 bytes
	struct SUniformPrimExGVertColorAlign
	{
		float m_aPad[(48 - 44) / 4];
	};
762
	// sprite-multi pipeline push constants (uniform-buffer variant)
	struct SUniformSpriteMultiGPos
	{
		float m_aPos[4 * 2];
		vec2 m_Center;
	};

	typedef ColorRGBA SUniformSpriteMultiGVertColor;

	// pads the combined sprite-multi push-constant data from 40 to 48 bytes
	struct SUniformSpriteMultiGVertColorAlign
	{
		float m_aPad[(48 - 40) / 4];
	};

	// sprite-multi push variant: per-instance PSR (pos/scale/rotation — confirm)
	// values are appended directly as push constants
	struct SUniformSpriteMultiPushGPosBase
	{
		float m_aPos[4 * 2];
		vec2 m_Center;
		vec2 m_Padding;
	};

	struct SUniformSpriteMultiPushGPos : public SUniformSpriteMultiPushGPosBase
	{
		// flexible-array-style tail; actual count depends on the draw call
		vec4 m_aPSR[1];
	};

	typedef ColorRGBA SUniformSpriteMultiPushGVertColor;
789
	// quad pipeline push constants: projection data plus the index of the first
	// quad of the draw call
	struct SUniformQuadGPosBase
	{
		float m_aPos[4 * 2];
		int32_t m_QuadOffset;
	};

	// per-quad-group data pushed for the grouped quad pipeline
	struct SUniformQuadPushGBufferObject
	{
		ColorRGBA m_VertColor;
		vec2 m_Offset;
		float m_Rotation;
		float m_Padding;
	};

	struct SUniformQuadGroupedGPos
	{
		float m_aPos[4 * 2];
		SUniformQuadPushGBufferObject m_BOPush;
	};

	struct SUniformQuadGPos
	{
		float m_aPos[4 * 2];
		int32_t m_QuadOffset;
	};
815
	// global sampler kinds created by the backend (see m_aSamplers)
	enum ESupportedSamplerTypes
	{
		SUPPORTED_SAMPLER_TYPE_REPEAT = 0,
		SUPPORTED_SAMPLER_TYPE_CLAMP_TO_EDGE,
		SUPPORTED_SAMPLER_TYPE_2D_TEXTURE_ARRAY,

		SUPPORTED_SAMPLER_TYPE_COUNT,
	};

	// compiled shader binary loaded from storage (presumably SPIR-V; confirm)
	struct SShaderFileCache
	{
		std::vector<uint8_t> m_vBinary;
	};
829
830 struct SSwapImgViewportExtent
831 {
832 VkExtent2D m_SwapImageViewport;
833 bool m_HasForcedViewport = false;
834 VkExtent2D m_ForcedViewport;
835
836 // the viewport of the resulting presented image on the screen
837 // if there is a forced viewport the resulting image is smaller
838 // than the full swap image size
839 VkExtent2D GetPresentedImageViewport() const
840 {
841 uint32_t ViewportWidth = m_SwapImageViewport.width;
842 uint32_t ViewportHeight = m_SwapImageViewport.height;
843 if(m_HasForcedViewport)
844 {
845 ViewportWidth = m_ForcedViewport.width;
846 ViewportHeight = m_ForcedViewport.height;
847 }
848
849 return {.width: ViewportWidth, .height: ViewportHeight};
850 }
851 };
852
	// extra render target used for one swap chain image when multi-sampling is
	// active (see m_vSwapChainMultiSamplingImages)
	struct SSwapChainMultiSampleImage
	{
		VkImage m_Image = VK_NULL_HANDLE;
		SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> m_ImgMem;
		VkImageView m_ImgView = VK_NULL_HANDLE;
	};
859
860 /************************
861 * MEMBER VARIABLES
862 ************************/
863
	// loaded shader binaries, keyed by name (presumably the file path; confirm)
	std::unordered_map<std::string, SShaderFileCache> m_ShaderFiles;

	// memory caches per usage; image caches are additionally keyed by memory type bits
	SMemoryBlockCache<STAGING_BUFFER_CACHE_ID> m_StagingBufferCache;
	SMemoryBlockCache<STAGING_BUFFER_IMAGE_CACHE_ID> m_StagingBufferCacheImage;
	SMemoryBlockCache<VERTEX_BUFFER_CACHE_ID> m_VertexBufferCache;
	std::map<uint32_t, SMemoryBlockCache<IMAGE_BUFFER_CACHE_ID>> m_ImageBufferCaches;

	// staging memory ranges that still have to be flushed — confirm flush site
	std::vector<VkMappedMemoryRange> m_vNonFlushedStagingBufferRange;

	std::vector<CTexture> m_vTextures;

	// memory usage counters per EMemoryBlockUsage category; atomic, so they are
	// presumably read from another thread — confirm
	std::atomic<uint64_t> *m_pTextureMemoryUsage;
	std::atomic<uint64_t> *m_pBufferMemoryUsage;
	std::atomic<uint64_t> *m_pStreamMemoryUsage;
	std::atomic<uint64_t> *m_pStagingMemoryUsage;

	TTwGraphicsGpuList *m_pGpuList;

	int m_GlobalTextureLodBIAS;
	uint32_t m_MultiSamplingCount = 1;

	// requested multi-sampling count to apply on swap chain recreation; max() = none
	uint32_t m_NextMultiSamplingCount = std::numeric_limits<uint32_t>::max();

	bool m_RecreateSwapChain = false;
	bool m_SwapchainCreated = false;
	bool m_RenderingPaused = false;
	bool m_HasDynamicViewport = false;
	VkOffset2D m_DynamicViewportOffset;
	VkExtent2D m_DynamicViewportSize;

	// capabilities detected from the device/surface formats
	bool m_AllowsLinearBlitting = false;
	bool m_OptimalSwapChainImageBlitting = false;
	bool m_OptimalRGBAImageBlitting = false;
	bool m_LinearRGBAImageBlitting = false;

	VkBuffer m_IndexBuffer;
	SDeviceMemoryBlock m_IndexBufferMemory;

	VkBuffer m_RenderIndexBuffer;
	SDeviceMemoryBlock m_RenderIndexBufferMemory;
	size_t m_CurRenderIndexPrimitiveCount;

	// device limits queried at initialization
	VkDeviceSize m_NonCoherentMemAlignment;
	VkDeviceSize m_OptimalImageCopyMemAlignment;
	uint32_t m_MaxTextureSize;
	uint32_t m_MaxSamplerAnisotropy;
	VkSampleCountFlags m_MaxMultiSample;

	uint32_t m_MinUniformAlign;

	std::vector<uint8_t> m_vReadPixelHelper;
	std::vector<uint8_t> m_vScreenshotHelper;

	// helper image + mapping used to read back the presented image (screenshots)
	SDeviceMemoryBlock m_GetPresentedImgDataHelperMem;
	VkImage m_GetPresentedImgDataHelperImage = VK_NULL_HANDLE;
	uint8_t *m_pGetPresentedImgDataHelperMappedMemory = nullptr;
	VkDeviceSize m_GetPresentedImgDataHelperMappedLayoutOffset = 0;
	VkDeviceSize m_GetPresentedImgDataHelperMappedLayoutPitch = 0;
	uint32_t m_GetPresentedImgDataHelperWidth = 0;
	uint32_t m_GetPresentedImgDataHelperHeight = 0;
	VkFence m_GetPresentedImgDataHelperFence = VK_NULL_HANDLE;

	// one sampler per ESupportedSamplerTypes entry
	std::array<VkSampler, SUPPORTED_SAMPLER_TYPE_COUNT> m_aSamplers;

	class IStorage *m_pStorage;

	// buffer whose destruction is deferred until its frame is no longer in flight
	struct SDelayedBufferCleanupItem
	{
		VkBuffer m_Buffer;
		SDeviceMemoryBlock m_Mem;
		void *m_pMappedData = nullptr;
	};

	// per swap chain image lists of objects to destroy once that image is reused
	std::vector<std::vector<SDelayedBufferCleanupItem>> m_vvFrameDelayedBufferCleanup;
	std::vector<std::vector<CTexture>> m_vvFrameDelayedTextureCleanup;
	std::vector<std::vector<std::pair<CTexture, CTexture>>> m_vvFrameDelayedTextTexturesCleanup;

	// command-pipe bookkeeping for splitting render commands across threads
	size_t m_ThreadCount = 1;
	static constexpr size_t MAIN_THREAD_INDEX = 0;
	size_t m_CurCommandInPipe = 0;
	size_t m_CurRenderCallCountInPipe = 0;
	size_t m_CommandsInPipe = 0;
	size_t m_RenderCallsInPipe = 0;
	size_t m_LastCommandsInPipeThreadIndex = 0;

	// worker thread state; the mutex/condition pair synchronizes hand-off of work
	struct SRenderThread
	{
		bool m_IsRendering = false;
		std::thread m_Thread;
		std::mutex m_Mutex;
		std::condition_variable m_Cond;
		bool m_Finished = false;
		bool m_Started = false;
	};
	std::vector<std::unique_ptr<SRenderThread>> m_vpRenderThreads;
959
private:
	// per swap chain image render targets and command buffers
	std::vector<VkImageView> m_vSwapChainImageViewList;
	std::vector<SSwapChainMultiSampleImage> m_vSwapChainMultiSamplingImages;
	std::vector<VkFramebuffer> m_vFramebufferList;
	std::vector<VkCommandBuffer> m_vMainDrawCommandBuffers;

	// per render thread, per swap chain image draw command buffers
	std::vector<std::vector<VkCommandBuffer>> m_vvThreadDrawCommandBuffers;
	std::vector<VkCommandBuffer> m_vHelperThreadDrawCommandBuffers;
	std::vector<std::vector<bool>> m_vvUsedThreadDrawCommandBuffer;

	// command buffers for memory transfer operations
	std::vector<VkCommandBuffer> m_vMemoryCommandBuffers;
	std::vector<bool> m_vUsedMemoryCommandBuffer;

	// synchronization objects for queue submission and image acquisition
	std::vector<VkSemaphore> m_vQueueSubmitSemaphores;
	std::vector<VkSemaphore> m_vBusyAcquireImageSemaphores;
	VkSemaphore m_AcquireImageSemaphore;

	std::vector<VkFence> m_vQueueSubmitFences;

	uint64_t m_CurFrame = 0;
	// last m_CurFrame value seen per swap chain image
	std::vector<uint64_t> m_vImageLastFrameCheck;

	uint32_t m_LastPresentedSwapChainImageIndex;

	std::vector<SBufferObjectFrame> m_vBufferObjects;

	std::vector<SBufferContainer> m_vBufferContainers;

	// core Vulkan objects
	VkInstance m_VKInstance;
	VkPhysicalDevice m_VKGPU;
	uint32_t m_VKGraphicsQueueIndex = std::numeric_limits<uint32_t>::max();
	VkDevice m_VKDevice;
	VkQueue m_VKGraphicsQueue, m_VKPresentQueue;
	VkSurfaceKHR m_VKPresentSurface;
	SSwapImgViewportExtent m_VKSwapImgAndViewportExtent;

#ifdef VK_EXT_debug_utils
	VkDebugUtilsMessengerEXT m_DebugMessenger;
#endif

	// descriptor set layouts for the different shader binding interfaces
	VkDescriptorSetLayout m_StandardTexturedDescriptorSetLayout;
	VkDescriptorSetLayout m_Standard3DTexturedDescriptorSetLayout;

	VkDescriptorSetLayout m_TextDescriptorSetLayout;

	VkDescriptorSetLayout m_SpriteMultiUniformDescriptorSetLayout;
	VkDescriptorSetLayout m_QuadUniformDescriptorSetLayout;

	// one pipeline container per draw-call kind
	SPipelineContainer m_StandardPipeline;
	SPipelineContainer m_StandardLinePipeline;
	SPipelineContainer m_Standard3DPipeline;
	SPipelineContainer m_TextPipeline;
	SPipelineContainer m_TilePipeline;
	SPipelineContainer m_TileBorderPipeline;
	SPipelineContainer m_PrimExPipeline;
	SPipelineContainer m_PrimExRotationlessPipeline;
	SPipelineContainer m_SpriteMultiPipeline;
	SPipelineContainer m_SpriteMultiPushPipeline;
	SPipelineContainer m_QuadPipeline;
	SPipelineContainer m_QuadGroupedPipeline;

	// last pipeline bound per thread — presumably to skip redundant binds; confirm
	std::vector<VkPipeline> m_vLastPipeline;

	std::vector<VkCommandPool> m_vCommandPools;

	VkRenderPass m_VKRenderPass;

	VkSurfaceFormatKHR m_VKSurfFormat;

	SDeviceDescriptorPools m_StandardTextureDescrPool;
	SDeviceDescriptorPools m_TextTextureDescrPool;

	std::vector<SDeviceDescriptorPools> m_vUniformBufferDescrPools;

	VkSwapchainKHR m_VKSwapChain = VK_NULL_HANDLE;
	std::vector<VkImage> m_vSwapChainImages;
	uint32_t m_SwapChainImageCount = 0;

	// streamed vertex/uniform buffer pools, one entry per thread — confirm indexing
	std::vector<SStreamMemory<SFrameBuffers>> m_vStreamedVertexBuffers;
	std::vector<SStreamMemory<SFrameUniformBuffers>> m_vStreamedUniformBuffers;

	uint32_t m_CurImageIndex = 0;

	uint32_t m_CanvasWidth;
	uint32_t m_CanvasHeight;

	SDL_Window *m_pWindow;

	std::array<float, 4> m_aClearColor = {0, 0, 0, 0};

	// one render command prepared for execution on a specific thread, together
	// with all data precomputed while filling the command lists
	struct SRenderCommandExecuteBuffer
	{
		CCommandBuffer::ECommandBufferCMD m_Command;
		const CCommandBuffer::SCommand *m_pRawCommand;
		uint32_t m_ThreadIndex;

		// must be calculated when the buffer gets filled
		size_t m_EstimatedRenderCallCount = 0;

		// useful data
		VkBuffer m_Buffer;
		size_t m_BufferOff;
		std::array<SDeviceDescriptorSet, 2> m_aDescriptors;

		VkBuffer m_IndexBuffer;

		bool m_ClearColorInRenderThread = false;

		bool m_HasDynamicState = false;
		VkViewport m_Viewport;
		VkRect2D m_Scissor;
	};

	typedef std::vector<SRenderCommandExecuteBuffer> TCommandList;
	typedef std::vector<TCommandList> TThreadCommandList;

	// one command list per render thread
	TThreadCommandList m_vvThreadCommandLists;
	std::vector<bool> m_vThreadHelperHadCommands;

	typedef std::function<bool(const CCommandBuffer::SCommand *, SRenderCommandExecuteBuffer &)> TCommandBufferCommandCallback;
	typedef std::function<void(SRenderCommandExecuteBuffer &, const CCommandBuffer::SCommand *)> TCommandBufferFillExecuteBufferFunc;

	// dispatch entry for one command id: fill step + execute step
	struct SCommandCallback
	{
		bool m_IsRenderCommand;
		TCommandBufferFillExecuteBufferFunc m_FillExecuteBuffer;
		TCommandBufferCommandCallback m_CommandCB;
		// command should be considered handled after it executed
		bool m_CMDIsHandled = true;
	};
	// one callback entry per command id in [CMD_FIRST, CMD_COUNT)
	std::array<SCommandCallback, static_cast<int>(CCommandBuffer::CMD_COUNT) - static_cast<int>(CCommandBuffer::CMD_FIRST)> m_aCommandCallbacks;
1091
protected:
	/************************
	* ERROR MANAGEMENT
	************************/
	// Guards the error/warning containers and m_ErrorHelper across threads.
	std::mutex m_ErrWarnMutex;
	// Scratch string for composing error messages (see CheckVulkanCriticalError).
	std::string m_ErrorHelper;

	bool m_HasError = false;
	// While false (e.g. during initialization), SetError downgrades errors to warnings instead of recording them as fatal.
	bool m_CanAssert = false;
1101
	/**
	 * After an error occurred, rendering stops as soon as possible.
	 * Always stop the current code execution after a call to this function (e.g. return false).
	 */
1106 void SetError(EGfxErrorType ErrType, const char *pErr, const char *pErrStrExtra = nullptr)
1107 {
1108 std::unique_lock<std::mutex> Lock(m_ErrWarnMutex);
1109 SGfxErrorContainer::SError Err = {.m_RequiresTranslation: false, .m_Err: pErr};
1110 if(std::find(first: m_Error.m_vErrors.begin(), last: m_Error.m_vErrors.end(), val: Err) == m_Error.m_vErrors.end())
1111 m_Error.m_vErrors.emplace_back(args&: Err);
1112 if(pErrStrExtra != nullptr)
1113 {
1114 SGfxErrorContainer::SError ErrExtra = {.m_RequiresTranslation: false, .m_Err: pErrStrExtra};
1115 if(std::find(first: m_Error.m_vErrors.begin(), last: m_Error.m_vErrors.end(), val: ErrExtra) == m_Error.m_vErrors.end())
1116 m_Error.m_vErrors.emplace_back(args&: ErrExtra);
1117 }
1118 if(m_CanAssert)
1119 {
1120 if(pErrStrExtra != nullptr)
1121 log_error("gfx/vulkan", "%s: %s", pErr, pErrStrExtra);
1122 else
1123 log_error("gfx/vulkan", "%s", pErr);
1124 m_HasError = true;
1125 m_Error.m_ErrorType = ErrType;
1126 }
1127 else
1128 {
1129 Lock.unlock();
1130 // during initialization vulkan should not throw any errors but warnings instead
1131 // since most code in the swapchain is shared with runtime code, add this extra code path
1132 SetWarning(WarningType: EGfxWarningType::GFX_WARNING_TYPE_INIT_FAILED, pWarning: pErr);
1133 }
1134 }
1135
1136 void SetWarningPreMsg(const char *pWarningPre)
1137 {
1138 std::unique_lock<std::mutex> Lock(m_ErrWarnMutex);
1139 if(std::find(first: m_Warning.m_vWarnings.begin(), last: m_Warning.m_vWarnings.end(), val: pWarningPre) == m_Warning.m_vWarnings.end())
1140 m_Warning.m_vWarnings.emplace(position: m_Warning.m_vWarnings.begin(), args&: pWarningPre);
1141 }
1142
1143 void SetWarning(EGfxWarningType WarningType, const char *pWarning)
1144 {
1145 std::unique_lock<std::mutex> Lock(m_ErrWarnMutex);
1146 log_warn("gfx/vulkan", "%s", pWarning);
1147 if(std::find(first: m_Warning.m_vWarnings.begin(), last: m_Warning.m_vWarnings.end(), val: pWarning) == m_Warning.m_vWarnings.end())
1148 m_Warning.m_vWarnings.emplace_back(args&: pWarning);
1149 m_Warning.m_WarningType = WarningType;
1150 }
1151
1152 const char *CheckVulkanCriticalError(VkResult CallResult)
1153 {
1154 const char *pCriticalError = nullptr;
1155 switch(CallResult)
1156 {
1157 case VK_ERROR_OUT_OF_HOST_MEMORY:
1158 pCriticalError = "Host ran out of memory.";
1159 log_error("gfx/vulkan", "%s", pCriticalError);
1160 break;
1161 case VK_ERROR_OUT_OF_DEVICE_MEMORY:
1162 pCriticalError = "Device ran out of memory.";
1163 log_error("gfx/vulkan", "%s", pCriticalError);
1164 break;
1165 case VK_ERROR_DEVICE_LOST:
1166 pCriticalError = "Device lost.";
1167 log_error("gfx/vulkan", "%s", pCriticalError);
1168 break;
1169 case VK_ERROR_OUT_OF_DATE_KHR:
1170 {
1171 if(IsVerbose())
1172 {
1173 log_debug("gfx/vulkan", "Queueing swap chain recreation because the current is out of date.");
1174 }
1175 m_RecreateSwapChain = true;
1176 break;
1177 }
1178 case VK_ERROR_SURFACE_LOST_KHR:
1179 log_error("gfx/vulkan", "Surface lost.");
1180 break;
1181 case VK_ERROR_INCOMPATIBLE_DRIVER:
1182 pCriticalError = "No compatible driver found. Vulkan 1.1 is required.";
1183 log_error("gfx/vulkan", "%s", pCriticalError);
1184 break;
1185 case VK_ERROR_INITIALIZATION_FAILED:
1186 pCriticalError = "Initialization failed for unknown reason.";
1187 log_error("gfx/vulkan", "%s", pCriticalError);
1188 break;
1189 case VK_ERROR_LAYER_NOT_PRESENT:
1190 SetWarning(WarningType: EGfxWarningType::GFX_WARNING_MISSING_EXTENSION, pWarning: "At least one Vulkan layer was not present. (Try to disable them.)");
1191 break;
1192 case VK_ERROR_EXTENSION_NOT_PRESENT:
1193 SetWarning(WarningType: EGfxWarningType::GFX_WARNING_MISSING_EXTENSION, pWarning: "At least one Vulkan extension was not present. (Try to disable them.)");
1194 break;
1195 case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
1196 log_error("gfx/vulkan", "Native window in use.");
1197 break;
1198 case VK_SUCCESS:
1199 break;
1200 case VK_SUBOPTIMAL_KHR:
1201 if(IsVerbose())
1202 {
1203 log_debug("gfx/vulkan", "Queueing swap chain recreation because the current is suboptimal.");
1204 }
1205 m_RecreateSwapChain = true;
1206 break;
1207 default:
1208 m_ErrorHelper = "Unknown error: ";
1209 m_ErrorHelper.append(str: std::to_string(val: CallResult));
1210 pCriticalError = m_ErrorHelper.c_str();
1211 log_error("gfx/vulkan", "%s", pCriticalError);
1212 break;
1213 }
1214
1215 return pCriticalError;
1216 }
1217
	// Base-class cleanup hook for unrecoverable errors; delegates to CleanupVulkanSDL().
	void ErroneousCleanup() override
	{
		CleanupVulkanSDL();
	}
1222
1223 /************************
1224 * COMMAND CALLBACKS
1225 ************************/
1226
1227 size_t CommandBufferCMDOff(CCommandBuffer::ECommandBufferCMD CommandBufferCMD)
1228 {
1229 return (size_t)CommandBufferCMD - CCommandBuffer::CMD_FIRST;
1230 }
1231
1232 void RegisterCommands()
1233 {
1234 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TEXTURE_CREATE)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Texture_Create(pCommand: static_cast<const CCommandBuffer::SCommand_Texture_Create *>(pBaseCommand)); }};
1235 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TEXTURE_DESTROY)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Texture_Destroy(pCommand: static_cast<const CCommandBuffer::SCommand_Texture_Destroy *>(pBaseCommand)); }};
1236 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TEXT_TEXTURES_CREATE)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_TextTextures_Create(pCommand: static_cast<const CCommandBuffer::SCommand_TextTextures_Create *>(pBaseCommand)); }};
1237 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TEXT_TEXTURES_DESTROY)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_TextTextures_Destroy(pCommand: static_cast<const CCommandBuffer::SCommand_TextTextures_Destroy *>(pBaseCommand)); }};
1238 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TEXT_TEXTURE_UPDATE)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_TextTexture_Update(pCommand: static_cast<const CCommandBuffer::SCommand_TextTexture_Update *>(pBaseCommand)); }};
1239
1240 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_CLEAR)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_Clear_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_Clear *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Clear(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_Clear *>(pBaseCommand)); }};
1241 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_Render_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_Render *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Render(pCommand: static_cast<const CCommandBuffer::SCommand_Render *>(pBaseCommand), ExecBuffer); }};
1242 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_TEX3D)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderTex3D_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderTex3D *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderTex3D(pCommand: static_cast<const CCommandBuffer::SCommand_RenderTex3D *>(pBaseCommand), ExecBuffer); }};
1243
1244 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_CREATE_BUFFER_OBJECT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_CreateBufferObject(pCommand: static_cast<const CCommandBuffer::SCommand_CreateBufferObject *>(pBaseCommand)); }};
1245 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RECREATE_BUFFER_OBJECT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RecreateBufferObject(pCommand: static_cast<const CCommandBuffer::SCommand_RecreateBufferObject *>(pBaseCommand)); }};
1246 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_UPDATE_BUFFER_OBJECT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_UpdateBufferObject(pCommand: static_cast<const CCommandBuffer::SCommand_UpdateBufferObject *>(pBaseCommand)); }};
1247 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_COPY_BUFFER_OBJECT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_CopyBufferObject(pCommand: static_cast<const CCommandBuffer::SCommand_CopyBufferObject *>(pBaseCommand)); }};
1248 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_DELETE_BUFFER_OBJECT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_DeleteBufferObject(pCommand: static_cast<const CCommandBuffer::SCommand_DeleteBufferObject *>(pBaseCommand)); }};
1249
1250 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_CREATE_BUFFER_CONTAINER)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_CreateBufferContainer(pCommand: static_cast<const CCommandBuffer::SCommand_CreateBufferContainer *>(pBaseCommand)); }};
1251 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_DELETE_BUFFER_CONTAINER)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_DeleteBufferContainer(pCommand: static_cast<const CCommandBuffer::SCommand_DeleteBufferContainer *>(pBaseCommand)); }};
1252 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_UPDATE_BUFFER_CONTAINER)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_UpdateBufferContainer(pCommand: static_cast<const CCommandBuffer::SCommand_UpdateBufferContainer *>(pBaseCommand)); }};
1253
1254 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_INDICES_REQUIRED_NUM_NOTIFY)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_IndicesRequiredNumNotify(pCommand: static_cast<const CCommandBuffer::SCommand_IndicesRequiredNumNotify *>(pBaseCommand)); }};
1255
1256 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_TILE_LAYER)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderTileLayer_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderTileLayer *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderTileLayer(pCommand: static_cast<const CCommandBuffer::SCommand_RenderTileLayer *>(pBaseCommand), ExecBuffer); }};
1257 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_BORDER_TILE)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderBorderTile_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderBorderTile *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderBorderTile(pCommand: static_cast<const CCommandBuffer::SCommand_RenderBorderTile *>(pBaseCommand), ExecBuffer); }};
1258 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_QUAD_LAYER)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderQuadLayer_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadLayer *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderQuadLayer(pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadLayer *>(pBaseCommand), ExecBuffer, Grouped: false); }};
1259 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_QUAD_LAYER_GROUPED)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderQuadLayer_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadLayer *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderQuadLayer(pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadLayer *>(pBaseCommand), ExecBuffer, Grouped: true); }};
1260 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_TEXT)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderText_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderText *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderText(pCommand: static_cast<const CCommandBuffer::SCommand_RenderText *>(pBaseCommand), ExecBuffer); }};
1261 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_QUAD_CONTAINER)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderQuadContainer_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainer *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderQuadContainer(pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainer *>(pBaseCommand), ExecBuffer); }};
1262 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_QUAD_CONTAINER_EX)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderQuadContainerEx_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainerEx *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderQuadContainerEx(pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainerEx *>(pBaseCommand), ExecBuffer); }};
1263 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_QUAD_CONTAINER_SPRITE_MULTIPLE)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderQuadContainerAsSpriteMultiple_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainerAsSpriteMultiple *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderQuadContainerAsSpriteMultiple(pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainerAsSpriteMultiple *>(pBaseCommand), ExecBuffer); }};
1264
1265 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_SWAP)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Swap(pCommand: static_cast<const CCommandBuffer::SCommand_Swap *>(pBaseCommand)); }};
1266
1267 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_VSYNC)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_VSync(pCommand: static_cast<const CCommandBuffer::SCommand_VSync *>(pBaseCommand)); }};
1268 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_MULTISAMPLING)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_MultiSampling(pCommand: static_cast<const CCommandBuffer::SCommand_MultiSampling *>(pBaseCommand)); }};
1269 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TRY_SWAP_AND_READ_PIXEL)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_ReadPixel(pCommand: static_cast<const CCommandBuffer::SCommand_TrySwapAndReadPixel *>(pBaseCommand)); }};
1270 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TRY_SWAP_AND_SCREENSHOT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Screenshot(pCommand: static_cast<const CCommandBuffer::SCommand_TrySwapAndScreenshot *>(pBaseCommand)); }};
1271
1272 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_UPDATE_VIEWPORT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_Update_Viewport_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_Update_Viewport *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Update_Viewport(pCommand: static_cast<const CCommandBuffer::SCommand_Update_Viewport *>(pBaseCommand)); }};
1273
1274 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_WINDOW_CREATE_NTF)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_WindowCreateNtf(pCommand: static_cast<const CCommandBuffer::SCommand_WindowCreateNtf *>(pBaseCommand)); }, .m_CMDIsHandled: false};
1275 m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_WINDOW_DESTROY_NTF)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_WindowDestroyNtf(pCommand: static_cast<const CCommandBuffer::SCommand_WindowDestroyNtf *>(pBaseCommand)); }, .m_CMDIsHandled: false};
1276
1277 for(auto &Callback : m_aCommandCallbacks)
1278 {
1279 if(!(bool)Callback.m_CommandCB)
1280 Callback = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return true; }};
1281 }
1282 }
1283
1284 /*****************************
1285 * VIDEO AND SCREENSHOT HELPER
1286 ******************************/
1287
1288 [[nodiscard]] bool PreparePresentedImageDataImage(uint8_t *&pResImageData, uint32_t Width, uint32_t Height)
1289 {
1290 bool NeedsNewImg = Width != m_GetPresentedImgDataHelperWidth || Height != m_GetPresentedImgDataHelperHeight;
1291 if(m_GetPresentedImgDataHelperImage == VK_NULL_HANDLE || NeedsNewImg)
1292 {
1293 if(m_GetPresentedImgDataHelperImage != VK_NULL_HANDLE)
1294 {
1295 DeletePresentedImageDataImage();
1296 }
1297 m_GetPresentedImgDataHelperWidth = Width;
1298 m_GetPresentedImgDataHelperHeight = Height;
1299
1300 VkImageCreateInfo ImageInfo{};
1301 ImageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
1302 ImageInfo.imageType = VK_IMAGE_TYPE_2D;
1303 ImageInfo.extent.width = Width;
1304 ImageInfo.extent.height = Height;
1305 ImageInfo.extent.depth = 1;
1306 ImageInfo.mipLevels = 1;
1307 ImageInfo.arrayLayers = 1;
1308 ImageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1309 ImageInfo.tiling = VK_IMAGE_TILING_LINEAR;
1310 ImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
1311 ImageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1312 ImageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1313 ImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
1314
1315 vkCreateImage(device: m_VKDevice, pCreateInfo: &ImageInfo, pAllocator: nullptr, pImage: &m_GetPresentedImgDataHelperImage);
1316 // Create memory to back up the image
1317 VkMemoryRequirements MemRequirements;
1318 vkGetImageMemoryRequirements(device: m_VKDevice, image: m_GetPresentedImgDataHelperImage, pMemoryRequirements: &MemRequirements);
1319
1320 VkMemoryAllocateInfo MemAllocInfo{};
1321 MemAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1322 MemAllocInfo.allocationSize = MemRequirements.size;
1323 MemAllocInfo.memoryTypeIndex = FindMemoryType(PhyDevice: m_VKGPU, TypeFilter: MemRequirements.memoryTypeBits, Properties: VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT);
1324
1325 vkAllocateMemory(device: m_VKDevice, pAllocateInfo: &MemAllocInfo, pAllocator: nullptr, pMemory: &m_GetPresentedImgDataHelperMem.m_Mem);
1326 vkBindImageMemory(device: m_VKDevice, image: m_GetPresentedImgDataHelperImage, memory: m_GetPresentedImgDataHelperMem.m_Mem, memoryOffset: 0);
1327
1328 if(!ImageBarrier(Image: m_GetPresentedImgDataHelperImage, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format: VK_FORMAT_R8G8B8A8_UNORM, OldLayout: VK_IMAGE_LAYOUT_UNDEFINED, NewLayout: VK_IMAGE_LAYOUT_GENERAL))
1329 return false;
1330
1331 VkImageSubresource SubResource{.aspectMask: VK_IMAGE_ASPECT_COLOR_BIT, .mipLevel: 0, .arrayLayer: 0};
1332 VkSubresourceLayout SubResourceLayout;
1333 vkGetImageSubresourceLayout(device: m_VKDevice, image: m_GetPresentedImgDataHelperImage, pSubresource: &SubResource, pLayout: &SubResourceLayout);
1334
1335 if(vkMapMemory(device: m_VKDevice, memory: m_GetPresentedImgDataHelperMem.m_Mem, offset: 0, VK_WHOLE_SIZE, flags: 0, ppData: (void **)&m_pGetPresentedImgDataHelperMappedMemory) != VK_SUCCESS)
1336 return false;
1337 m_GetPresentedImgDataHelperMappedLayoutOffset = SubResourceLayout.offset;
1338 m_GetPresentedImgDataHelperMappedLayoutPitch = SubResourceLayout.rowPitch;
1339 m_pGetPresentedImgDataHelperMappedMemory += m_GetPresentedImgDataHelperMappedLayoutOffset;
1340
1341 VkFenceCreateInfo FenceInfo{};
1342 FenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
1343 FenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
1344 vkCreateFence(device: m_VKDevice, pCreateInfo: &FenceInfo, pAllocator: nullptr, pFence: &m_GetPresentedImgDataHelperFence);
1345 }
1346 pResImageData = m_pGetPresentedImgDataHelperMappedMemory;
1347 return true;
1348 }
1349
1350 void DeletePresentedImageDataImage()
1351 {
1352 if(m_GetPresentedImgDataHelperImage != VK_NULL_HANDLE)
1353 {
1354 vkDestroyFence(device: m_VKDevice, fence: m_GetPresentedImgDataHelperFence, pAllocator: nullptr);
1355
1356 m_GetPresentedImgDataHelperFence = VK_NULL_HANDLE;
1357
1358 vkDestroyImage(device: m_VKDevice, image: m_GetPresentedImgDataHelperImage, pAllocator: nullptr);
1359 vkUnmapMemory(device: m_VKDevice, memory: m_GetPresentedImgDataHelperMem.m_Mem);
1360 vkFreeMemory(device: m_VKDevice, memory: m_GetPresentedImgDataHelperMem.m_Mem, pAllocator: nullptr);
1361
1362 m_GetPresentedImgDataHelperImage = VK_NULL_HANDLE;
1363 m_GetPresentedImgDataHelperMem = {};
1364 m_pGetPresentedImgDataHelperMappedMemory = nullptr;
1365
1366 m_GetPresentedImgDataHelperWidth = 0;
1367 m_GetPresentedImgDataHelperHeight = 0;
1368 }
1369 }
1370
1371 [[nodiscard]] bool GetPresentedImageDataImpl(uint32_t &Width, uint32_t &Height, CImageInfo::EImageFormat &Format, std::vector<uint8_t> &vDstData, bool ResetAlpha, std::optional<ivec2> PixelOffset)
1372 {
1373 bool IsB8G8R8A8 = m_VKSurfFormat.format == VK_FORMAT_B8G8R8A8_UNORM;
1374 bool UsesRGBALikeFormat = m_VKSurfFormat.format == VK_FORMAT_R8G8B8A8_UNORM || IsB8G8R8A8;
1375 if(UsesRGBALikeFormat && m_LastPresentedSwapChainImageIndex != std::numeric_limits<decltype(m_LastPresentedSwapChainImageIndex)>::max())
1376 {
1377 auto Viewport = m_VKSwapImgAndViewportExtent.GetPresentedImageViewport();
1378 VkOffset3D SrcOffset;
1379 if(PixelOffset.has_value())
1380 {
1381 SrcOffset.x = PixelOffset.value().x;
1382 SrcOffset.y = PixelOffset.value().y;
1383 Width = 1;
1384 Height = 1;
1385 }
1386 else
1387 {
1388 SrcOffset.x = 0;
1389 SrcOffset.y = 0;
1390 Width = Viewport.width;
1391 Height = Viewport.height;
1392 }
1393 SrcOffset.z = 0;
1394 Format = CImageInfo::FORMAT_RGBA;
1395
1396 const size_t ImageTotalSize = (size_t)Width * Height * CImageInfo::PixelSize(Format);
1397
1398 uint8_t *pResImageData;
1399 if(!PreparePresentedImageDataImage(pResImageData, Width, Height))
1400 return false;
1401
1402 VkCommandBuffer *pCommandBuffer;
1403 if(!GetMemoryCommandBuffer(pMemCommandBuffer&: pCommandBuffer))
1404 return false;
1405 VkCommandBuffer &CommandBuffer = *pCommandBuffer;
1406
1407 auto &SwapImg = m_vSwapChainImages[m_LastPresentedSwapChainImageIndex];
1408
1409 if(!ImageBarrier(Image: m_GetPresentedImgDataHelperImage, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format: VK_FORMAT_R8G8B8A8_UNORM, OldLayout: VK_IMAGE_LAYOUT_GENERAL, NewLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL))
1410 return false;
1411 if(!ImageBarrier(Image: SwapImg, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format: m_VKSurfFormat.format, OldLayout: VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, NewLayout: VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL))
1412 return false;
1413
1414 // If source and destination support blit we'll blit as this also does automatic format conversion (e.g. from BGR to RGB)
1415 if(m_OptimalSwapChainImageBlitting && m_LinearRGBAImageBlitting)
1416 {
1417 VkOffset3D BlitSize;
1418 BlitSize.x = Width;
1419 BlitSize.y = Height;
1420 BlitSize.z = 1;
1421
1422 VkImageBlit ImageBlitRegion{};
1423 ImageBlitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1424 ImageBlitRegion.srcSubresource.layerCount = 1;
1425 ImageBlitRegion.srcOffsets[0] = SrcOffset;
1426 ImageBlitRegion.srcOffsets[1] = {.x: SrcOffset.x + BlitSize.x, .y: SrcOffset.y + BlitSize.y, .z: SrcOffset.z + BlitSize.z};
1427 ImageBlitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1428 ImageBlitRegion.dstSubresource.layerCount = 1;
1429 ImageBlitRegion.dstOffsets[1] = BlitSize;
1430
1431 // Issue the blit command
1432 vkCmdBlitImage(commandBuffer: CommandBuffer, srcImage: SwapImg, srcImageLayout: VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1433 dstImage: m_GetPresentedImgDataHelperImage, dstImageLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1434 regionCount: 1, pRegions: &ImageBlitRegion, filter: VK_FILTER_NEAREST);
1435
1436 // transformed to RGBA
1437 IsB8G8R8A8 = false;
1438 }
1439 else
1440 {
1441 // Otherwise use image copy (requires us to manually flip components)
1442 VkImageCopy ImageCopyRegion{};
1443 ImageCopyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1444 ImageCopyRegion.srcSubresource.layerCount = 1;
1445 ImageCopyRegion.srcOffset = SrcOffset;
1446 ImageCopyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1447 ImageCopyRegion.dstSubresource.layerCount = 1;
1448 ImageCopyRegion.extent.width = Width;
1449 ImageCopyRegion.extent.height = Height;
1450 ImageCopyRegion.extent.depth = 1;
1451
1452 // Issue the copy command
1453 vkCmdCopyImage(commandBuffer: CommandBuffer, srcImage: SwapImg, srcImageLayout: VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1454 dstImage: m_GetPresentedImgDataHelperImage, dstImageLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1455 regionCount: 1, pRegions: &ImageCopyRegion);
1456 }
1457
1458 if(!ImageBarrier(Image: m_GetPresentedImgDataHelperImage, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format: VK_FORMAT_R8G8B8A8_UNORM, OldLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, NewLayout: VK_IMAGE_LAYOUT_GENERAL))
1459 return false;
1460 if(!ImageBarrier(Image: SwapImg, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format: m_VKSurfFormat.format, OldLayout: VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, NewLayout: VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
1461 return false;
1462
1463 vkEndCommandBuffer(commandBuffer: CommandBuffer);
1464 m_vUsedMemoryCommandBuffer[m_CurImageIndex] = false;
1465
1466 VkSubmitInfo SubmitInfo{};
1467 SubmitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
1468 SubmitInfo.commandBufferCount = 1;
1469 SubmitInfo.pCommandBuffers = &CommandBuffer;
1470
1471 vkResetFences(device: m_VKDevice, fenceCount: 1, pFences: &m_GetPresentedImgDataHelperFence);
1472 vkQueueSubmit(queue: m_VKGraphicsQueue, submitCount: 1, pSubmits: &SubmitInfo, fence: m_GetPresentedImgDataHelperFence);
1473 vkWaitForFences(device: m_VKDevice, fenceCount: 1, pFences: &m_GetPresentedImgDataHelperFence, VK_TRUE, timeout: std::numeric_limits<uint64_t>::max());
1474
1475 VkMappedMemoryRange MemRange{};
1476 MemRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
1477 MemRange.memory = m_GetPresentedImgDataHelperMem.m_Mem;
1478 MemRange.offset = m_GetPresentedImgDataHelperMappedLayoutOffset;
1479 MemRange.size = VK_WHOLE_SIZE;
1480 vkInvalidateMappedMemoryRanges(device: m_VKDevice, memoryRangeCount: 1, pMemoryRanges: &MemRange);
1481
1482 size_t RealFullImageSize = maximum(a: ImageTotalSize, b: (size_t)(Height * m_GetPresentedImgDataHelperMappedLayoutPitch));
1483 size_t ExtraRowSize = Width * 4;
1484 if(vDstData.size() < RealFullImageSize + ExtraRowSize)
1485 vDstData.resize(new_size: RealFullImageSize + ExtraRowSize);
1486
1487 mem_copy(dest: vDstData.data(), source: pResImageData, size: RealFullImageSize);
1488
1489 // pack image data together without any offset that the driver might require
1490 if(Width * 4 < m_GetPresentedImgDataHelperMappedLayoutPitch)
1491 {
1492 for(uint32_t Y = 0; Y < Height; ++Y)
1493 {
1494 size_t OffsetImagePacked = (Y * Width * 4);
1495 size_t OffsetImageUnpacked = (Y * m_GetPresentedImgDataHelperMappedLayoutPitch);
1496 mem_copy(dest: vDstData.data() + RealFullImageSize, source: vDstData.data() + OffsetImageUnpacked, size: Width * 4);
1497 mem_copy(dest: vDstData.data() + OffsetImagePacked, source: vDstData.data() + RealFullImageSize, size: Width * 4);
1498 }
1499 }
1500
1501 if(IsB8G8R8A8 || ResetAlpha)
1502 {
1503 // swizzle
1504 for(uint32_t Y = 0; Y < Height; ++Y)
1505 {
1506 for(uint32_t X = 0; X < Width; ++X)
1507 {
1508 size_t ImgOff = (Y * Width * 4) + (X * 4);
1509 if(IsB8G8R8A8)
1510 {
1511 std::swap(a&: vDstData[ImgOff], b&: vDstData[ImgOff + 2]);
1512 }
1513 vDstData[ImgOff + 3] = 255;
1514 }
1515 }
1516 }
1517
1518 return true;
1519 }
1520 else
1521 {
1522 if(!UsesRGBALikeFormat)
1523 {
1524 log_error("gfx/vulkan", "Swap chain image was not in an RGBA-like format.");
1525 }
1526 else
1527 {
1528 log_error("gfx/vulkan", "Swap chain image was not ready to be copied.");
1529 }
1530 return false;
1531 }
1532 }
1533
1534 [[nodiscard]] bool GetPresentedImageData(uint32_t &Width, uint32_t &Height, CImageInfo::EImageFormat &Format, std::vector<uint8_t> &vDstData) override
1535 {
1536 return GetPresentedImageDataImpl(Width, Height, Format, vDstData, ResetAlpha: false, PixelOffset: {});
1537 }
1538
1539 /************************
1540 * MEMORY MANAGEMENT
1541 ************************/
1542
1543 [[nodiscard]] bool AllocateVulkanMemory(const VkMemoryAllocateInfo *pAllocateInfo, VkDeviceMemory *pMemory)
1544 {
1545 VkResult Res = vkAllocateMemory(device: m_VKDevice, pAllocateInfo, pAllocator: nullptr, pMemory);
1546 if(Res != VK_SUCCESS)
1547 {
1548 log_warn("gfx/vulkan", "Memory allocation failed, trying to recover.");
1549 if(Res == VK_ERROR_OUT_OF_HOST_MEMORY || Res == VK_ERROR_OUT_OF_DEVICE_MEMORY)
1550 {
1551 // aggressively try to get more memory
1552 vkDeviceWaitIdle(device: m_VKDevice);
1553 for(size_t i = 0; i < m_SwapChainImageCount + 1; ++i)
1554 {
1555 if(!NextFrame())
1556 return false;
1557 }
1558 Res = vkAllocateMemory(device: m_VKDevice, pAllocateInfo, pAllocator: nullptr, pMemory);
1559 }
1560 if(Res != VK_SUCCESS)
1561 {
1562 log_error("gfx/vulkan", "Memory allocation and recovery failed.");
1563 return false;
1564 }
1565 }
1566 return true;
1567 }
1568
1569 [[nodiscard]] bool GetBufferImpl(VkDeviceSize RequiredSize, EMemoryBlockUsage MemUsage, VkBuffer &Buffer, SDeviceMemoryBlock &BufferMemory, VkBufferUsageFlags BufferUsage, VkMemoryPropertyFlags BufferProperties)
1570 {
1571 return CreateBuffer(BufferSize: RequiredSize, MemUsage, BufferUsage, MemoryProperties: BufferProperties, VKBuffer&: Buffer, VKBufferMemory&: BufferMemory);
1572 }
1573
	// Allocates a buffer block of RequiredSize bytes, preferring sub-allocation
	// out of the given block cache over dedicated allocations.
	//
	// Template parameters:
	//   Id: cache identifier; must match the passed MemoryCache.
	//   MemoryBlockSize, BlockCount: a freshly created cache heap is sized
	//     MemoryBlockSize * BlockCount bytes. Requests of at least
	//     MemoryBlockSize bytes bypass the cache entirely and receive a
	//     dedicated buffer + memory allocation instead.
	//   RequiresMapping: if true the memory is host-mapped and pBufferData
	//     (RequiredSize bytes) is copied into the returned block; selects the
	//     staging usage class for accounting.
	//
	// @return false when buffer creation, mapping or heap allocation failed
	//         (a gfx error is set on most failure paths).
	template<size_t Id,
		int64_t MemoryBlockSize, size_t BlockCount,
		bool RequiresMapping>
	[[nodiscard]] bool GetBufferBlockImpl(SMemoryBlock<Id> &RetBlock, SMemoryBlockCache<Id> &MemoryCache, VkBufferUsageFlags BufferUsage, VkMemoryPropertyFlags BufferProperties, const void *pBufferData, VkDeviceSize RequiredSize, VkDeviceSize TargetAlignment)
	{
		bool Res = true;

		auto &&CreateCacheBlock = [&]() -> bool {
			bool FoundAllocation = false;
			SMemoryHeap::SMemoryHeapQueueElement AllocatedMem;
			SDeviceMemoryBlock TmpBufferMemory;
			typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap *pCacheHeap = nullptr;
			// first try to serve the request from one of the existing cache heaps
			auto &Heaps = MemoryCache.m_MemoryCaches.m_vpMemoryHeaps;
			for(size_t i = 0; i < Heaps.size(); ++i)
			{
				auto *pHeap = Heaps[i];
				if(pHeap->m_Heap.Allocate(RequiredSize, TargetAlignment, AllocatedMem))
				{
					TmpBufferMemory = pHeap->m_BufferMem;
					FoundAllocation = true;
					pCacheHeap = pHeap;
					break;
				}
			}
			if(!FoundAllocation)
			{
				// no heap had room: back a new heap with a fresh Vulkan buffer
				typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap *pNewHeap = new typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap();

				VkBuffer TmpBuffer;
				if(!GetBufferImpl(RequiredSize: MemoryBlockSize * BlockCount, MemUsage: RequiresMapping ? MEMORY_BLOCK_USAGE_STAGING : MEMORY_BLOCK_USAGE_BUFFER, Buffer&: TmpBuffer, BufferMemory&: TmpBufferMemory, BufferUsage, BufferProperties))
				{
					delete pNewHeap;
					return false;
				}

				void *pMapData = nullptr;

				if(RequiresMapping)
				{
					// map the whole heap once; individual blocks hand out offsets into it
					if(vkMapMemory(device: m_VKDevice, memory: TmpBufferMemory.m_Mem, offset: 0, VK_WHOLE_SIZE, flags: 0, ppData: &pMapData) != VK_SUCCESS)
					{
						// NOTE(review): TmpBuffer/TmpBufferMemory are not released on
						// this path — looks like a leak on map failure; confirm.
						SetError(ErrType: RequiresMapping ? EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_STAGING : EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Failed to map buffer block memory.");
						delete pNewHeap;
						return false;
					}
				}

				pNewHeap->m_Buffer = TmpBuffer;

				pNewHeap->m_BufferMem = TmpBufferMemory;
				pNewHeap->m_pMappedBuffer = pMapData;

				pCacheHeap = pNewHeap;
				Heaps.emplace_back(pNewHeap);
				Heaps.back()->m_Heap.Init(MemoryBlockSize * BlockCount, 0);
				// a fresh heap is larger than RequiredSize (checked by the caller),
				// so this allocation is expected to always succeed
				if(!Heaps.back()->m_Heap.Allocate(RequiredSize, TargetAlignment, AllocatedMem))
				{
					// NOTE(review): the new heap stays registered in Heaps here —
					// presumably reclaimed by the cache's cleanup/shrink; verify.
					SetError(ErrType: RequiresMapping ? EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_STAGING : EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Heap allocation failed directly after creating fresh heap.");
					return false;
				}
			}

			RetBlock.m_Buffer = pCacheHeap->m_Buffer;
			RetBlock.m_BufferMem = TmpBufferMemory;
			if(RequiresMapping)
				RetBlock.m_pMappedBuffer = ((uint8_t *)pCacheHeap->m_pMappedBuffer) + AllocatedMem.m_OffsetToAlign;
			else
				RetBlock.m_pMappedBuffer = nullptr;
			RetBlock.m_IsCached = true;
			RetBlock.m_pHeap = &pCacheHeap->m_Heap;
			RetBlock.m_HeapData = AllocatedMem;
			RetBlock.m_UsedSize = RequiredSize;

			if(RequiresMapping)
				mem_copy(RetBlock.m_pMappedBuffer, pBufferData, RequiredSize);

			return true;
		};

		if(RequiredSize < (VkDeviceSize)MemoryBlockSize)
		{
			// small request: sub-allocate from the cache
			Res = CreateCacheBlock();
		}
		else
		{
			// large request: dedicated buffer, not owned by any cache heap
			VkBuffer TmpBuffer;
			SDeviceMemoryBlock TmpBufferMemory;
			if(!GetBufferImpl(RequiredSize, MemUsage: RequiresMapping ? MEMORY_BLOCK_USAGE_STAGING : MEMORY_BLOCK_USAGE_BUFFER, Buffer&: TmpBuffer, BufferMemory&: TmpBufferMemory, BufferUsage, BufferProperties))
				return false;

			void *pMapData = nullptr;
			if(RequiresMapping)
			{
				// NOTE(review): unlike the cached path, a map failure here returns
				// without destroying TmpBuffer/TmpBufferMemory and without SetError.
				if(vkMapMemory(device: m_VKDevice, memory: TmpBufferMemory.m_Mem, offset: 0, VK_WHOLE_SIZE, flags: 0, ppData: &pMapData) != VK_SUCCESS)
					return false;
				mem_copy(dest: pMapData, source: pBufferData, size: static_cast<size_t>(RequiredSize));
			}

			RetBlock.m_Buffer = TmpBuffer;
			RetBlock.m_BufferMem = TmpBufferMemory;
			RetBlock.m_pMappedBuffer = pMapData;
			RetBlock.m_pHeap = nullptr;
			RetBlock.m_IsCached = false;
			RetBlock.m_HeapData.m_OffsetToAlign = 0;
			RetBlock.m_HeapData.m_AllocationSize = RequiredSize;
			RetBlock.m_UsedSize = RequiredSize;
		}

		return Res;
	}
1684
1685 [[nodiscard]] bool GetStagingBuffer(SMemoryBlock<STAGING_BUFFER_CACHE_ID> &ResBlock, const void *pBufferData, VkDeviceSize RequiredSize)
1686 {
1687 return GetBufferBlockImpl<STAGING_BUFFER_CACHE_ID, 8 * 1024 * 1024, 3, true>(RetBlock&: ResBlock, MemoryCache&: m_StagingBufferCache, BufferUsage: VK_BUFFER_USAGE_TRANSFER_SRC_BIT, BufferProperties: VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT, pBufferData, RequiredSize, TargetAlignment: maximum<VkDeviceSize>(a: m_NonCoherentMemAlignment, b: 16));
1688 }
1689
1690 [[nodiscard]] bool GetStagingBufferImage(SMemoryBlock<STAGING_BUFFER_IMAGE_CACHE_ID> &ResBlock, const void *pBufferData, VkDeviceSize RequiredSize)
1691 {
1692 return GetBufferBlockImpl<STAGING_BUFFER_IMAGE_CACHE_ID, 8 * 1024 * 1024, 3, true>(RetBlock&: ResBlock, MemoryCache&: m_StagingBufferCacheImage, BufferUsage: VK_BUFFER_USAGE_TRANSFER_SRC_BIT, BufferProperties: VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT, pBufferData, RequiredSize, TargetAlignment: maximum<VkDeviceSize>(a: m_OptimalImageCopyMemAlignment, b: maximum<VkDeviceSize>(a: m_NonCoherentMemAlignment, b: 16)));
1693 }
1694
1695 template<size_t Id>
1696 void PrepareStagingMemRange(SMemoryBlock<Id> &Block)
1697 {
1698 VkMappedMemoryRange UploadRange{};
1699 UploadRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
1700 UploadRange.memory = Block.m_BufferMem.m_Mem;
1701 UploadRange.offset = Block.m_HeapData.m_OffsetToAlign;
1702
1703 auto AlignmentMod = ((VkDeviceSize)Block.m_HeapData.m_AllocationSize % m_NonCoherentMemAlignment);
1704 auto AlignmentReq = (m_NonCoherentMemAlignment - AlignmentMod);
1705 if(AlignmentMod == 0)
1706 AlignmentReq = 0;
1707 UploadRange.size = Block.m_HeapData.m_AllocationSize + AlignmentReq;
1708
1709 if(UploadRange.offset + UploadRange.size > Block.m_BufferMem.m_Size)
1710 UploadRange.size = VK_WHOLE_SIZE;
1711
1712 m_vNonFlushedStagingBufferRange.push_back(x: UploadRange);
1713 }
1714
1715 void UploadAndFreeStagingMemBlock(SMemoryBlock<STAGING_BUFFER_CACHE_ID> &Block)
1716 {
1717 PrepareStagingMemRange(Block);
1718 if(!Block.m_IsCached)
1719 {
1720 m_vvFrameDelayedBufferCleanup[m_CurImageIndex].push_back(x: {.m_Buffer: Block.m_Buffer, .m_Mem: Block.m_BufferMem, .m_pMappedData: Block.m_pMappedBuffer});
1721 }
1722 else
1723 {
1724 m_StagingBufferCache.FreeMemBlock(Block, ImgIndex: m_CurImageIndex);
1725 }
1726 }
1727
1728 void UploadAndFreeStagingImageMemBlock(SMemoryBlock<STAGING_BUFFER_IMAGE_CACHE_ID> &Block)
1729 {
1730 PrepareStagingMemRange(Block);
1731 if(!Block.m_IsCached)
1732 {
1733 m_vvFrameDelayedBufferCleanup[m_CurImageIndex].push_back(x: {.m_Buffer: Block.m_Buffer, .m_Mem: Block.m_BufferMem, .m_pMappedData: Block.m_pMappedBuffer});
1734 }
1735 else
1736 {
1737 m_StagingBufferCacheImage.FreeMemBlock(Block, ImgIndex: m_CurImageIndex);
1738 }
1739 }
1740
1741 [[nodiscard]] bool GetVertexBuffer(SMemoryBlock<VERTEX_BUFFER_CACHE_ID> &ResBlock, VkDeviceSize RequiredSize)
1742 {
1743 return GetBufferBlockImpl<VERTEX_BUFFER_CACHE_ID, 8 * 1024 * 1024, 3, false>(RetBlock&: ResBlock, MemoryCache&: m_VertexBufferCache, BufferUsage: VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, BufferProperties: VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, pBufferData: nullptr, RequiredSize, TargetAlignment: 16);
1744 }
1745
1746 void FreeVertexMemBlock(SMemoryBlock<VERTEX_BUFFER_CACHE_ID> &Block)
1747 {
1748 if(!Block.m_IsCached)
1749 {
1750 m_vvFrameDelayedBufferCleanup[m_CurImageIndex].push_back(x: {.m_Buffer: Block.m_Buffer, .m_Mem: Block.m_BufferMem, .m_pMappedData: nullptr});
1751 }
1752 else
1753 {
1754 m_VertexBufferCache.FreeMemBlock(Block, ImgIndex: m_CurImageIndex);
1755 }
1756 }
1757
1758 static size_t ImageMipLevelCount(size_t Width, size_t Height, size_t Depth)
1759 {
1760 return std::floor(x: std::log2(x: maximum(a: Width, b: maximum(a: Height, b: Depth)))) + 1;
1761 }
1762
1763 static size_t ImageMipLevelCount(const VkExtent3D &ImgExtent)
1764 {
1765 return ImageMipLevelCount(Width: ImgExtent.width, Height: ImgExtent.height, Depth: ImgExtent.depth);
1766 }
1767
	// Heap chunk size used by the image memory cache: approximate footprint of a
	// 1024x1024 image at 4 bytes per pixel including its mip chain (the factor 2
	// generously over-approximates the mip map overhead).
	static constexpr int64_t IMAGE_SIZE_1024X1024_APPROXIMATION = (1024 * 1024 * 4) * 2;
1770
1771 [[nodiscard]] bool GetImageMemoryImpl(VkDeviceSize RequiredSize, uint32_t RequiredMemoryTypeBits, SDeviceMemoryBlock &BufferMemory, VkMemoryPropertyFlags BufferProperties)
1772 {
1773 VkMemoryAllocateInfo MemAllocInfo{};
1774 MemAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1775 MemAllocInfo.allocationSize = RequiredSize;
1776 MemAllocInfo.memoryTypeIndex = FindMemoryType(PhyDevice: m_VKGPU, TypeFilter: RequiredMemoryTypeBits, Properties: BufferProperties);
1777
1778 BufferMemory.m_Size = RequiredSize;
1779 m_pTextureMemoryUsage->store(i: m_pTextureMemoryUsage->load(m: std::memory_order_relaxed) + RequiredSize, m: std::memory_order_relaxed);
1780
1781 if(IsVerbose())
1782 {
1783 VerboseAllocatedMemory(Size: RequiredSize, FrameImageIndex: m_CurImageIndex, MemUsage: MEMORY_BLOCK_USAGE_TEXTURE);
1784 }
1785
1786 if(!AllocateVulkanMemory(pAllocateInfo: &MemAllocInfo, pMemory: &BufferMemory.m_Mem))
1787 {
1788 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_IMAGE, pErr: "Allocation for image memory failed.");
1789 return false;
1790 }
1791
1792 BufferMemory.m_UsageType = MEMORY_BLOCK_USAGE_TEXTURE;
1793
1794 return true;
1795 }
1796
	// Allocates an image memory block of RequiredSize bytes, preferring
	// sub-allocation from the given cache over dedicated allocations.
	//
	// Template parameters:
	//   Id: cache identifier; must match the passed MemoryCache.
	//   MemoryBlockSize, BlockCount: a fresh cache heap is sized
	//     MemoryBlockSize * BlockCount bytes; requests of at least
	//     MemoryBlockSize bytes get a dedicated allocation instead.
	//
	// Unlike the buffer variant, image blocks are memory-only: m_Buffer stays
	// VK_NULL_HANDLE and nothing is host-mapped. The memory type bits of the
	// allocation are recorded in the returned block so it can later be freed
	// back into the matching cache.
	template<size_t Id,
		int64_t MemoryBlockSize, size_t BlockCount>
	[[nodiscard]] bool GetImageMemoryBlockImpl(SMemoryImageBlock<Id> &RetBlock, SMemoryBlockCache<Id> &MemoryCache, VkMemoryPropertyFlags BufferProperties, VkDeviceSize RequiredSize, VkDeviceSize RequiredAlignment, uint32_t RequiredMemoryTypeBits)
	{
		auto &&CreateCacheBlock = [&]() -> bool {
			bool FoundAllocation = false;
			SMemoryHeap::SMemoryHeapQueueElement AllocatedMem;
			SDeviceMemoryBlock TmpBufferMemory;
			typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap *pCacheHeap = nullptr;
			// first try to serve the request from one of the existing cache heaps
			for(size_t i = 0; i < MemoryCache.m_MemoryCaches.m_vpMemoryHeaps.size(); ++i)
			{
				auto *pHeap = MemoryCache.m_MemoryCaches.m_vpMemoryHeaps[i];
				if(pHeap->m_Heap.Allocate(RequiredSize, RequiredAlignment, AllocatedMem))
				{
					TmpBufferMemory = pHeap->m_BufferMem;
					FoundAllocation = true;
					pCacheHeap = pHeap;
					break;
				}
			}
			if(!FoundAllocation)
			{
				// no heap had room: create a new heap backed by a fresh allocation
				typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap *pNewHeap = new typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap();

				if(!GetImageMemoryImpl(RequiredSize: MemoryBlockSize * BlockCount, RequiredMemoryTypeBits, BufferMemory&: TmpBufferMemory, BufferProperties))
				{
					delete pNewHeap;
					return false;
				}

				// image heaps have no Vulkan buffer and no host mapping
				pNewHeap->m_Buffer = VK_NULL_HANDLE;

				pNewHeap->m_BufferMem = TmpBufferMemory;
				pNewHeap->m_pMappedBuffer = nullptr;

				auto &Heaps = MemoryCache.m_MemoryCaches.m_vpMemoryHeaps;
				pCacheHeap = pNewHeap;
				Heaps.emplace_back(pNewHeap);
				Heaps.back()->m_Heap.Init(MemoryBlockSize * BlockCount, 0);
				// a fresh heap is larger than RequiredSize (checked by the caller);
				// NOTE(review): this path hard-asserts instead of returning false
				// like the buffer variant does — confirm this is intentional.
				if(!Heaps.back()->m_Heap.Allocate(RequiredSize, RequiredAlignment, AllocatedMem))
				{
					dbg_assert_failed("Heap allocation failed directly after creating fresh heap for image");
				}
			}

			RetBlock.m_Buffer = VK_NULL_HANDLE;
			RetBlock.m_BufferMem = TmpBufferMemory;
			RetBlock.m_pMappedBuffer = nullptr;
			RetBlock.m_IsCached = true;
			RetBlock.m_pHeap = &pCacheHeap->m_Heap;
			RetBlock.m_HeapData = AllocatedMem;
			RetBlock.m_UsedSize = RequiredSize;

			return true;
		};

		if(RequiredSize < (VkDeviceSize)MemoryBlockSize)
		{
			// small request: sub-allocate from the cache
			if(!CreateCacheBlock())
				return false;
		}
		else
		{
			// large request: dedicated allocation, not owned by any cache heap
			SDeviceMemoryBlock TmpBufferMemory;
			if(!GetImageMemoryImpl(RequiredSize, RequiredMemoryTypeBits, BufferMemory&: TmpBufferMemory, BufferProperties))
				return false;

			RetBlock.m_Buffer = VK_NULL_HANDLE;
			RetBlock.m_BufferMem = TmpBufferMemory;
			RetBlock.m_pMappedBuffer = nullptr;
			RetBlock.m_IsCached = false;
			RetBlock.m_pHeap = nullptr;
			RetBlock.m_HeapData.m_OffsetToAlign = 0;
			RetBlock.m_HeapData.m_AllocationSize = RequiredSize;
			RetBlock.m_UsedSize = RequiredSize;
		}

		// remember the memory type so FreeImageMemBlock can find the right cache
		RetBlock.m_ImageMemoryBits = RequiredMemoryTypeBits;

		return true;
	}
1878
1879 [[nodiscard]] bool GetImageMemory(SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> &RetBlock, VkDeviceSize RequiredSize, VkDeviceSize RequiredAlignment, uint32_t RequiredMemoryTypeBits)
1880 {
1881 auto BufferCacheIterator = m_ImageBufferCaches.find(x: RequiredMemoryTypeBits);
1882 if(BufferCacheIterator == m_ImageBufferCaches.end())
1883 {
1884 BufferCacheIterator = m_ImageBufferCaches.insert(x: {RequiredMemoryTypeBits, {}}).first;
1885
1886 BufferCacheIterator->second.Init(SwapChainImageCount: m_SwapChainImageCount);
1887 }
1888 return GetImageMemoryBlockImpl<IMAGE_BUFFER_CACHE_ID, IMAGE_SIZE_1024X1024_APPROXIMATION, 2>(RetBlock, MemoryCache&: BufferCacheIterator->second, BufferProperties: VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, RequiredSize, RequiredAlignment, RequiredMemoryTypeBits);
1889 }
1890
1891 void FreeImageMemBlock(SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> &Block)
1892 {
1893 if(!Block.m_IsCached)
1894 {
1895 m_vvFrameDelayedBufferCleanup[m_CurImageIndex].push_back(x: {.m_Buffer: Block.m_Buffer, .m_Mem: Block.m_BufferMem, .m_pMappedData: nullptr});
1896 }
1897 else
1898 {
1899 m_ImageBufferCaches[Block.m_ImageMemoryBits].FreeMemBlock(Block, ImgIndex: m_CurImageIndex);
1900 }
1901 }
1902
	// Finalizes the streamed buffer of the current frame image: builds a mapped
	// memory range for every buffer used this frame (rounded up to the
	// non-coherent atom alignment), resets the per-buffer used sizes, optionally
	// flushes all ranges (FlushForRendering), and resets the frame's stream state.
	template<bool FlushForRendering, typename TName>
	void UploadStreamedBuffer(SStreamMemory<TName> &StreamedBuffer)
	{
		size_t RangeUpdateCount = 0;
		if(StreamedBuffer.IsUsed(m_CurImageIndex))
		{
			for(size_t i = 0; i < StreamedBuffer.GetUsedCount(m_CurImageIndex); ++i)
			{
				auto &BufferOfFrame = StreamedBuffer.GetBuffers(m_CurImageIndex)[i];
				auto &MemRange = StreamedBuffer.GetRanges(m_CurImageIndex)[RangeUpdateCount++];
				MemRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
				MemRange.memory = BufferOfFrame.m_BufferMem.m_Mem;
				MemRange.offset = BufferOfFrame.m_OffsetInBuffer;
				// round the range size up to the non-coherent atom alignment
				auto AlignmentMod = ((VkDeviceSize)BufferOfFrame.m_UsedSize % m_NonCoherentMemAlignment);
				auto AlignmentReq = (m_NonCoherentMemAlignment - AlignmentMod);
				if(AlignmentMod == 0)
					AlignmentReq = 0;
				MemRange.size = BufferOfFrame.m_UsedSize + AlignmentReq;

				// if rounding reaches past the end of the allocation, flush to the end
				if(MemRange.offset + MemRange.size > BufferOfFrame.m_BufferMem.m_Size)
					MemRange.size = VK_WHOLE_SIZE;

				// buffer can be refilled from the start in the next frame
				BufferOfFrame.m_UsedSize = 0;
			}
			if(RangeUpdateCount > 0 && FlushForRendering)
			{
				vkFlushMappedMemoryRanges(m_VKDevice, RangeUpdateCount, StreamedBuffer.GetRanges(m_CurImageIndex).data());
			}
		}
		StreamedBuffer.ResetFrame(m_CurImageIndex);
	}
1934
1935 void CleanBufferPair(size_t ImageIndex, VkBuffer &Buffer, SDeviceMemoryBlock &BufferMem)
1936 {
1937 bool IsBuffer = Buffer != VK_NULL_HANDLE;
1938 if(IsBuffer)
1939 {
1940 vkDestroyBuffer(device: m_VKDevice, buffer: Buffer, pAllocator: nullptr);
1941
1942 Buffer = VK_NULL_HANDLE;
1943 }
1944 if(BufferMem.m_Mem != VK_NULL_HANDLE)
1945 {
1946 vkFreeMemory(device: m_VKDevice, memory: BufferMem.m_Mem, pAllocator: nullptr);
1947 if(BufferMem.m_UsageType == MEMORY_BLOCK_USAGE_BUFFER)
1948 m_pBufferMemoryUsage->store(i: m_pBufferMemoryUsage->load(m: std::memory_order_relaxed) - BufferMem.m_Size, m: std::memory_order_relaxed);
1949 else if(BufferMem.m_UsageType == MEMORY_BLOCK_USAGE_TEXTURE)
1950 m_pTextureMemoryUsage->store(i: m_pTextureMemoryUsage->load(m: std::memory_order_relaxed) - BufferMem.m_Size, m: std::memory_order_relaxed);
1951 else if(BufferMem.m_UsageType == MEMORY_BLOCK_USAGE_STREAM)
1952 m_pStreamMemoryUsage->store(i: m_pStreamMemoryUsage->load(m: std::memory_order_relaxed) - BufferMem.m_Size, m: std::memory_order_relaxed);
1953 else if(BufferMem.m_UsageType == MEMORY_BLOCK_USAGE_STAGING)
1954 m_pStagingMemoryUsage->store(i: m_pStagingMemoryUsage->load(m: std::memory_order_relaxed) - BufferMem.m_Size, m: std::memory_order_relaxed);
1955
1956 if(IsVerbose())
1957 {
1958 VerboseDeallocatedMemory(Size: BufferMem.m_Size, FrameImageIndex: ImageIndex, MemUsage: BufferMem.m_UsageType);
1959 }
1960
1961 BufferMem.m_Mem = VK_NULL_HANDLE;
1962 }
1963 }
1964
1965 void DestroyTexture(CTexture &Texture)
1966 {
1967 if(Texture.m_Img != VK_NULL_HANDLE)
1968 {
1969 FreeImageMemBlock(Block&: Texture.m_ImgMem);
1970 vkDestroyImage(device: m_VKDevice, image: Texture.m_Img, pAllocator: nullptr);
1971
1972 vkDestroyImageView(device: m_VKDevice, imageView: Texture.m_ImgView, pAllocator: nullptr);
1973 }
1974
1975 if(Texture.m_Img3D != VK_NULL_HANDLE)
1976 {
1977 FreeImageMemBlock(Block&: Texture.m_Img3DMem);
1978 vkDestroyImage(device: m_VKDevice, image: Texture.m_Img3D, pAllocator: nullptr);
1979
1980 vkDestroyImageView(device: m_VKDevice, imageView: Texture.m_Img3DView, pAllocator: nullptr);
1981 }
1982
1983 DestroyTexturedStandardDescriptorSets(Texture, DescrIndex: 0);
1984 DestroyTexturedStandardDescriptorSets(Texture, DescrIndex: 1);
1985
1986 DestroyTextured3DStandardDescriptorSets(Texture);
1987 }
1988
1989 void DestroyTextTexture(CTexture &Texture, CTexture &TextureOutline)
1990 {
1991 if(Texture.m_Img != VK_NULL_HANDLE)
1992 {
1993 FreeImageMemBlock(Block&: Texture.m_ImgMem);
1994 vkDestroyImage(device: m_VKDevice, image: Texture.m_Img, pAllocator: nullptr);
1995
1996 vkDestroyImageView(device: m_VKDevice, imageView: Texture.m_ImgView, pAllocator: nullptr);
1997 }
1998
1999 if(TextureOutline.m_Img != VK_NULL_HANDLE)
2000 {
2001 FreeImageMemBlock(Block&: TextureOutline.m_ImgMem);
2002 vkDestroyImage(device: m_VKDevice, image: TextureOutline.m_Img, pAllocator: nullptr);
2003
2004 vkDestroyImageView(device: m_VKDevice, imageView: TextureOutline.m_ImgView, pAllocator: nullptr);
2005 }
2006
2007 DestroyTextDescriptorSets(Texture, TextureOutline);
2008 }
2009
2010 void ClearFrameData(size_t FrameImageIndex)
2011 {
2012 UploadStagingBuffers();
2013
2014 // clear pending buffers, that require deletion
2015 for(auto &BufferPair : m_vvFrameDelayedBufferCleanup[FrameImageIndex])
2016 {
2017 if(BufferPair.m_pMappedData != nullptr)
2018 {
2019 vkUnmapMemory(device: m_VKDevice, memory: BufferPair.m_Mem.m_Mem);
2020 }
2021 CleanBufferPair(ImageIndex: FrameImageIndex, Buffer&: BufferPair.m_Buffer, BufferMem&: BufferPair.m_Mem);
2022 }
2023 m_vvFrameDelayedBufferCleanup[FrameImageIndex].clear();
2024
2025 // clear pending textures, that require deletion
2026 for(auto &Texture : m_vvFrameDelayedTextureCleanup[FrameImageIndex])
2027 {
2028 DestroyTexture(Texture);
2029 }
2030 m_vvFrameDelayedTextureCleanup[FrameImageIndex].clear();
2031
2032 for(auto &TexturePair : m_vvFrameDelayedTextTexturesCleanup[FrameImageIndex])
2033 {
2034 DestroyTextTexture(Texture&: TexturePair.first, TextureOutline&: TexturePair.second);
2035 }
2036 m_vvFrameDelayedTextTexturesCleanup[FrameImageIndex].clear();
2037
2038 m_StagingBufferCache.Cleanup(ImgIndex: FrameImageIndex);
2039 m_StagingBufferCacheImage.Cleanup(ImgIndex: FrameImageIndex);
2040 m_VertexBufferCache.Cleanup(ImgIndex: FrameImageIndex);
2041 for(auto &ImageBufferCache : m_ImageBufferCaches)
2042 ImageBufferCache.second.Cleanup(ImgIndex: FrameImageIndex);
2043 }
2044
2045 void ShrinkUnusedCaches()
2046 {
2047 size_t FreedMemory = 0;
2048 FreedMemory += m_StagingBufferCache.Shrink(Device&: m_VKDevice);
2049 FreedMemory += m_StagingBufferCacheImage.Shrink(Device&: m_VKDevice);
2050 if(FreedMemory > 0)
2051 {
2052 m_pStagingMemoryUsage->store(i: m_pStagingMemoryUsage->load(m: std::memory_order_relaxed) - FreedMemory, m: std::memory_order_relaxed);
2053 if(IsVerbose())
2054 {
2055 log_debug("gfx/vulkan", "Deallocated chunks of memory with size %" PRIzu " from all frames (staging buffer).", FreedMemory);
2056 }
2057 }
2058 FreedMemory = 0;
2059 FreedMemory += m_VertexBufferCache.Shrink(Device&: m_VKDevice);
2060 if(FreedMemory > 0)
2061 {
2062 m_pBufferMemoryUsage->store(i: m_pBufferMemoryUsage->load(m: std::memory_order_relaxed) - FreedMemory, m: std::memory_order_relaxed);
2063 if(IsVerbose())
2064 {
2065 log_debug("gfx/vulkan", "Deallocated chunks of memory with size %" PRIzu " from all frames (buffer).", FreedMemory);
2066 }
2067 }
2068 FreedMemory = 0;
2069 for(auto &ImageBufferCache : m_ImageBufferCaches)
2070 FreedMemory += ImageBufferCache.second.Shrink(Device&: m_VKDevice);
2071 if(FreedMemory > 0)
2072 {
2073 m_pTextureMemoryUsage->store(i: m_pTextureMemoryUsage->load(m: std::memory_order_relaxed) - FreedMemory, m: std::memory_order_relaxed);
2074 if(IsVerbose())
2075 {
2076 log_debug("gfx/vulkan", "Deallocated chunks of memory with size %" PRIzu " from all frames (texture).", FreedMemory);
2077 }
2078 }
2079 }
2080
2081 [[nodiscard]] bool MemoryBarrier(VkBuffer Buffer, VkDeviceSize Offset, VkDeviceSize Size, VkAccessFlags BufferAccessType, bool BeforeCommand)
2082 {
2083 VkCommandBuffer *pMemCommandBuffer;
2084 if(!GetMemoryCommandBuffer(pMemCommandBuffer))
2085 return false;
2086 auto &MemCommandBuffer = *pMemCommandBuffer;
2087
2088 VkBufferMemoryBarrier Barrier{};
2089 Barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
2090 Barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2091 Barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2092 Barrier.buffer = Buffer;
2093 Barrier.offset = Offset;
2094 Barrier.size = Size;
2095
2096 VkPipelineStageFlags SourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
2097 VkPipelineStageFlags DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
2098
2099 if(BeforeCommand)
2100 {
2101 Barrier.srcAccessMask = BufferAccessType;
2102 Barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
2103
2104 SourceStage = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
2105 DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
2106 }
2107 else
2108 {
2109 Barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
2110 Barrier.dstAccessMask = BufferAccessType;
2111
2112 SourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
2113 DestinationStage = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
2114 }
2115
2116 vkCmdPipelineBarrier(
2117 commandBuffer: MemCommandBuffer,
2118 srcStageMask: SourceStage, dstStageMask: DestinationStage,
2119 dependencyFlags: 0,
2120 memoryBarrierCount: 0, pMemoryBarriers: nullptr,
2121 bufferMemoryBarrierCount: 1, pBufferMemoryBarriers: &Barrier,
2122 imageMemoryBarrierCount: 0, pImageMemoryBarriers: nullptr);
2123
2124 return true;
2125 }
2126
2127 /************************
2128 * SWAPPING MECHANISM
2129 ************************/
2130
2131 void StartRenderThread(size_t ThreadIndex)
2132 {
2133 auto &List = m_vvThreadCommandLists[ThreadIndex];
2134 if(!List.empty())
2135 {
2136 m_vThreadHelperHadCommands[ThreadIndex] = true;
2137 auto *pThread = m_vpRenderThreads[ThreadIndex].get();
2138 std::unique_lock<std::mutex> Lock(pThread->m_Mutex);
2139 pThread->m_IsRendering = true;
2140 pThread->m_Cond.notify_one();
2141 }
2142 }
2143
2144 void FinishRenderThreads()
2145 {
2146 if(m_ThreadCount > 1)
2147 {
2148 // execute threads
2149
2150 for(size_t ThreadIndex = 0; ThreadIndex < m_ThreadCount - 1; ++ThreadIndex)
2151 {
2152 if(!m_vThreadHelperHadCommands[ThreadIndex])
2153 {
2154 StartRenderThread(ThreadIndex);
2155 }
2156 }
2157
2158 for(size_t ThreadIndex = 0; ThreadIndex < m_ThreadCount - 1; ++ThreadIndex)
2159 {
2160 if(m_vThreadHelperHadCommands[ThreadIndex])
2161 {
2162 auto &pRenderThread = m_vpRenderThreads[ThreadIndex];
2163 m_vThreadHelperHadCommands[ThreadIndex] = false;
2164 std::unique_lock<std::mutex> Lock(pRenderThread->m_Mutex);
2165 pRenderThread->m_Cond.wait(lock&: Lock, p: [&pRenderThread] { return !pRenderThread->m_IsRendering; });
2166 m_vLastPipeline[ThreadIndex + 1] = VK_NULL_HANDLE;
2167 }
2168 }
2169 }
2170 }
2171
2172 void ExecuteMemoryCommandBuffer()
2173 {
2174 if(m_vUsedMemoryCommandBuffer[m_CurImageIndex])
2175 {
2176 auto &MemoryCommandBuffer = m_vMemoryCommandBuffers[m_CurImageIndex];
2177 vkEndCommandBuffer(commandBuffer: MemoryCommandBuffer);
2178
2179 VkSubmitInfo SubmitInfo{};
2180 SubmitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
2181
2182 SubmitInfo.commandBufferCount = 1;
2183 SubmitInfo.pCommandBuffers = &MemoryCommandBuffer;
2184 vkQueueSubmit(queue: m_VKGraphicsQueue, submitCount: 1, pSubmits: &SubmitInfo, VK_NULL_HANDLE);
2185 vkQueueWaitIdle(queue: m_VKGraphicsQueue);
2186
2187 m_vUsedMemoryCommandBuffer[m_CurImageIndex] = false;
2188 }
2189 }
2190
2191 void ClearFrameMemoryUsage()
2192 {
2193 ClearFrameData(FrameImageIndex: m_CurImageIndex);
2194 ShrinkUnusedCaches();
2195 }
2196
	// Finishes recording of the current frame and hands it to the GPU:
	// joins render threads, flushes streamed/staging uploads, executes the
	// per-thread secondary command buffers, ends the render pass, submits
	// the (optionally memory-prefixed) command buffer batch and queues the
	// acquired image for presentation.
	// @return false on a fatal recording, submit or present error.
	[[nodiscard]] bool WaitFrame()
	{
		FinishRenderThreads();
		m_LastCommandsInPipeThreadIndex = 0;

		// make CPU-side streamed vertex/uniform/staging writes visible to the GPU
		UploadNonFlushedBuffers<true>();

		auto &CommandBuffer = GetMainGraphicCommandBuffer();

		// render threads
		if(m_ThreadCount > 1)
		{
			// collect every secondary command buffer the helper threads recorded
			// for this swap chain image and execute them inside the render pass
			size_t ThreadedCommandsUsedCount = 0;
			size_t RenderThreadCount = m_ThreadCount - 1;
			for(size_t i = 0; i < RenderThreadCount; ++i)
			{
				if(m_vvUsedThreadDrawCommandBuffer[i + 1][m_CurImageIndex])
				{
					const auto &GraphicThreadCommandBuffer = m_vvThreadDrawCommandBuffers[i + 1][m_CurImageIndex];
					m_vHelperThreadDrawCommandBuffers[ThreadedCommandsUsedCount++] = GraphicThreadCommandBuffer;

					m_vvUsedThreadDrawCommandBuffer[i + 1][m_CurImageIndex] = false;
				}
			}
			if(ThreadedCommandsUsedCount > 0)
			{
				vkCmdExecuteCommands(commandBuffer: CommandBuffer, commandBufferCount: ThreadedCommandsUsedCount, pCommandBuffers: m_vHelperThreadDrawCommandBuffers.data());
			}

			// special case if swap chain was not completed in one runbuffer call

			// index 0 holds the main thread's own secondary buffer; it is still
			// open here, so it must be ended before it can be executed
			if(m_vvUsedThreadDrawCommandBuffer[0][m_CurImageIndex])
			{
				auto &GraphicThreadCommandBuffer = m_vvThreadDrawCommandBuffers[0][m_CurImageIndex];
				vkEndCommandBuffer(commandBuffer: GraphicThreadCommandBuffer);

				vkCmdExecuteCommands(commandBuffer: CommandBuffer, commandBufferCount: 1, pCommandBuffers: &GraphicThreadCommandBuffer);

				m_vvUsedThreadDrawCommandBuffer[0][m_CurImageIndex] = false;
			}
		}

		vkCmdEndRenderPass(commandBuffer: CommandBuffer);

		if(vkEndCommandBuffer(commandBuffer: CommandBuffer) != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_RENDER_RECORDING, pErr: "Command buffer cannot be ended anymore.");
			return false;
		}

		VkSubmitInfo SubmitInfo{};
		SubmitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;

		SubmitInfo.commandBufferCount = 1;
		SubmitInfo.pCommandBuffers = &CommandBuffer;

		std::array<VkCommandBuffer, 2> aCommandBuffers = {};

		// if texture/buffer uploads were recorded this frame, submit the memory
		// command buffer first in the same batch so transfers complete in order
		if(m_vUsedMemoryCommandBuffer[m_CurImageIndex])
		{
			auto &MemoryCommandBuffer = m_vMemoryCommandBuffers[m_CurImageIndex];
			vkEndCommandBuffer(commandBuffer: MemoryCommandBuffer);

			aCommandBuffers[0] = MemoryCommandBuffer;
			aCommandBuffers[1] = CommandBuffer;
			SubmitInfo.commandBufferCount = 2;
			SubmitInfo.pCommandBuffers = aCommandBuffers.data();

			m_vUsedMemoryCommandBuffer[m_CurImageIndex] = false;
		}

		// wait on the image-acquire semaphore before writing color output
		std::array<VkSemaphore, 1> aWaitSemaphores = {m_AcquireImageSemaphore};
		std::array<VkPipelineStageFlags, 1> aWaitStages = {(VkPipelineStageFlags)VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT};
		SubmitInfo.waitSemaphoreCount = aWaitSemaphores.size();
		SubmitInfo.pWaitSemaphores = aWaitSemaphores.data();
		SubmitInfo.pWaitDstStageMask = aWaitStages.data();

		std::array<VkSemaphore, 1> aSignalSemaphores = {m_vQueueSubmitSemaphores[m_CurImageIndex]};
		SubmitInfo.signalSemaphoreCount = aSignalSemaphores.size();
		SubmitInfo.pSignalSemaphores = aSignalSemaphores.data();

		// the per-image fence guards command buffer reuse in PrepareFrame
		vkResetFences(device: m_VKDevice, fenceCount: 1, pFences: &m_vQueueSubmitFences[m_CurImageIndex]);

		VkResult QueueSubmitRes = vkQueueSubmit(queue: m_VKGraphicsQueue, submitCount: 1, pSubmits: &SubmitInfo, fence: m_vQueueSubmitFences[m_CurImageIndex]);
		if(QueueSubmitRes != VK_SUCCESS)
		{
			const char *pCritErrorMsg = CheckVulkanCriticalError(CallResult: QueueSubmitRes);
			if(pCritErrorMsg != nullptr)
			{
				SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_RENDER_SUBMIT_FAILED, pErr: "Submitting to graphics queue failed.", pErrStrExtra: pCritErrorMsg);
				return false;
			}
		}

		// the acquire semaphore is now in use by this submit; rotate in a fresh
		// one for the next vkAcquireNextImageKHR call
		std::swap(a&: m_vBusyAcquireImageSemaphores[m_CurImageIndex], b&: m_AcquireImageSemaphore);

		VkPresentInfoKHR PresentInfo{};
		PresentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;

		// present waits on the semaphore the submit above signals
		PresentInfo.waitSemaphoreCount = aSignalSemaphores.size();
		PresentInfo.pWaitSemaphores = aSignalSemaphores.data();

		std::array<VkSwapchainKHR, 1> aSwapChains = {m_VKSwapChain};
		PresentInfo.swapchainCount = aSwapChains.size();
		PresentInfo.pSwapchains = aSwapChains.data();

		PresentInfo.pImageIndices = &m_CurImageIndex;

		m_LastPresentedSwapChainImageIndex = m_CurImageIndex;

		// VK_SUBOPTIMAL_KHR is tolerated here; the swap chain is recreated lazily
		VkResult QueuePresentRes = vkQueuePresentKHR(queue: m_VKPresentQueue, pPresentInfo: &PresentInfo);
		if(QueuePresentRes != VK_SUCCESS && QueuePresentRes != VK_SUBOPTIMAL_KHR)
		{
			const char *pCritErrorMsg = CheckVulkanCriticalError(CallResult: QueuePresentRes);
			if(pCritErrorMsg != nullptr)
			{
				SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_SWAP_FAILED, pErr: "Presenting graphics queue failed.", pErrStrExtra: pCritErrorMsg);
				return false;
			}
		}

		return true;
	}
2320
	// Begins the next frame: acquires a swap chain image (recreating the swap
	// chain and retrying once per recreation on VK_ERROR_OUT_OF_DATE_KHR),
	// waits on the image's submit fence, reclaims per-frame resources, then
	// starts the primary command buffer and the render pass.
	// @return false on a fatal acquire/recording error; also returns true with
	//         m_RenderingPaused set when the surface was lost.
	[[nodiscard]] bool PrepareFrame()
	{
		// user-requested recreation (e.g. vsync change) handled before acquire
		if(m_RecreateSwapChain)
		{
			m_RecreateSwapChain = false;
			if(IsVerbose())
			{
				log_debug("gfx/vulkan", "Recreating swap chain requested by user (prepare frame).");
			}
			RecreateSwapChain();
		}

		auto AcqResult = vkAcquireNextImageKHR(device: m_VKDevice, swapchain: m_VKSwapChain, timeout: std::numeric_limits<uint64_t>::max(), semaphore: m_AcquireImageSemaphore, VK_NULL_HANDLE, pImageIndex: &m_CurImageIndex);
		if(AcqResult != VK_SUCCESS)
		{
			if(AcqResult == VK_ERROR_OUT_OF_DATE_KHR || m_RecreateSwapChain)
			{
				m_RecreateSwapChain = false;
				if(IsVerbose())
				{
					log_debug("gfx/vulkan", "Recreating swap chain requested by acquire next image (prepare frame).");
				}
				RecreateSwapChain();
				// recursive retry with the fresh swap chain
				return PrepareFrame();
			}
			else
			{
				const char *pCritErrorMsg = CheckVulkanCriticalError(CallResult: AcqResult);
				if(pCritErrorMsg != nullptr)
				{
					SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_SWAP_FAILED, pErr: "Acquiring next image failed.", pErrStrExtra: pCritErrorMsg);
					return false;
				}
				else if(AcqResult == VK_ERROR_SURFACE_LOST_KHR)
				{
					// surface gone (e.g. window destroyed): pause instead of failing
					m_RenderingPaused = true;
					return true;
				}
				// note: other non-fatal results (e.g. VK_SUBOPTIMAL_KHR) fall
				// through and the frame proceeds with the acquired image
			}
		}

		// ensure the GPU finished the previous submit that used this image's
		// command buffers before resetting/re-recording them
		vkWaitForFences(device: m_VKDevice, fenceCount: 1, pFences: &m_vQueueSubmitFences[m_CurImageIndex], VK_TRUE, timeout: std::numeric_limits<uint64_t>::max());

		// next frame
		m_CurFrame++;
		m_vImageLastFrameCheck[m_CurImageIndex] = m_CurFrame;

		// check if older frames weren't used in a long time
		for(size_t FrameImageIndex = 0; FrameImageIndex < m_vImageLastFrameCheck.size(); ++FrameImageIndex)
		{
			auto LastFrame = m_vImageLastFrameCheck[FrameImageIndex];
			if(m_CurFrame - LastFrame > (uint64_t)m_SwapChainImageCount)
			{
				// stale image: wait for it and free its per-frame data eagerly
				vkWaitForFences(device: m_VKDevice, fenceCount: 1, pFences: &m_vQueueSubmitFences[FrameImageIndex], VK_TRUE, timeout: std::numeric_limits<uint64_t>::max());
				ClearFrameData(FrameImageIndex);
				m_vImageLastFrameCheck[FrameImageIndex] = m_CurFrame;
			}
		}

		// clear frame's memory data
		ClearFrameMemoryUsage();

		// clear frame
		vkResetCommandBuffer(commandBuffer: GetMainGraphicCommandBuffer(), flags: VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);

		auto &CommandBuffer = GetMainGraphicCommandBuffer();
		VkCommandBufferBeginInfo BeginInfo{};
		BeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
		BeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;

		if(vkBeginCommandBuffer(commandBuffer: CommandBuffer, pBeginInfo: &BeginInfo) != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_RENDER_RECORDING, pErr: "Command buffer cannot be filled anymore.");
			return false;
		}

		VkRenderPassBeginInfo RenderPassInfo{};
		RenderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
		RenderPassInfo.renderPass = m_VKRenderPass;
		RenderPassInfo.framebuffer = m_vFramebufferList[m_CurImageIndex];
		RenderPassInfo.renderArea.offset = {.x: 0, .y: 0};
		RenderPassInfo.renderArea.extent = m_VKSwapImgAndViewportExtent.m_SwapImageViewport;

		VkClearValue ClearColorVal = {.color: {.float32: {m_aClearColor[0], m_aClearColor[1], m_aClearColor[2], m_aClearColor[3]}}};
		RenderPassInfo.clearValueCount = 1;
		RenderPassInfo.pClearValues = &ClearColorVal;

		// with render threads the pass only executes secondary command buffers
		vkCmdBeginRenderPass(commandBuffer: CommandBuffer, pRenderPassBegin: &RenderPassInfo, contents: m_ThreadCount > 1 ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS : VK_SUBPASS_CONTENTS_INLINE);

		// pipeline state is per-render-pass; force rebinding on first draw
		for(auto &LastPipe : m_vLastPipeline)
			LastPipe = VK_NULL_HANDLE;

		return true;
	}
2415
2416 void UploadStagingBuffers()
2417 {
2418 if(!m_vNonFlushedStagingBufferRange.empty())
2419 {
2420 vkFlushMappedMemoryRanges(device: m_VKDevice, memoryRangeCount: m_vNonFlushedStagingBufferRange.size(), pMemoryRanges: m_vNonFlushedStagingBufferRange.data());
2421
2422 m_vNonFlushedStagingBufferRange.clear();
2423 }
2424 }
2425
2426 template<bool FlushForRendering>
2427 void UploadNonFlushedBuffers()
2428 {
2429 // streamed vertices
2430 for(auto &StreamVertexBuffer : m_vStreamedVertexBuffers)
2431 UploadStreamedBuffer<FlushForRendering>(StreamVertexBuffer);
2432 // now the buffer objects
2433 for(auto &StreamUniformBuffer : m_vStreamedUniformBuffers)
2434 UploadStreamedBuffer<FlushForRendering>(StreamUniformBuffer);
2435
2436 UploadStagingBuffers();
2437 }
2438
2439 [[nodiscard]] bool PureMemoryFrame()
2440 {
2441 ExecuteMemoryCommandBuffer();
2442
2443 // reset streamed data
2444 UploadNonFlushedBuffers<false>();
2445
2446 ClearFrameMemoryUsage();
2447
2448 return true;
2449 }
2450
2451 [[nodiscard]] bool NextFrame()
2452 {
2453 if(!m_RenderingPaused)
2454 {
2455 if(!WaitFrame())
2456 return false;
2457 if(!PrepareFrame())
2458 return false;
2459 }
2460 // else only execute the memory command buffer
2461 else
2462 {
2463 if(!PureMemoryFrame())
2464 return false;
2465 }
2466
2467 return true;
2468 }
2469
2470 /************************
2471 * TEXTURES
2472 ************************/
2473
2474 size_t VulkanFormatToPixelSize(VkFormat Format)
2475 {
2476 if(Format == VK_FORMAT_R8G8B8_UNORM)
2477 return 3;
2478 else if(Format == VK_FORMAT_R8G8B8A8_UNORM)
2479 return 4;
2480 else if(Format == VK_FORMAT_R8_UNORM)
2481 return 1;
2482 return 4;
2483 }
2484
	// Updates a sub-rectangle of an existing texture from pData via a staging
	// buffer, transitioning the image to TRANSFER_DST, copying, and restoring
	// SHADER_READ_ONLY (or rebuilding mipmaps when the texture has them).
	// pData is taken by reference because the rescale path replaces the buffer.
	// @return false when acquiring staging memory or any barrier/copy fails.
	[[nodiscard]] bool UpdateTexture(size_t TextureSlot, VkFormat Format, uint8_t *&pData, int64_t XOff, int64_t YOff, size_t Width, size_t Height)
	{
		const size_t ImageSize = Width * Height * VulkanFormatToPixelSize(Format);
		// NOTE(review): the staging buffer is filled from pData here, BEFORE the
		// rescale below shrinks Width/Height — so for rescaled textures the copy
		// region uses halved dimensions against staging data laid out at the
		// original resolution. Looks questionable; confirm intended behavior.
		SMemoryBlock<STAGING_BUFFER_IMAGE_CACHE_ID> StagingBuffer;
		if(!GetStagingBufferImage(ResBlock&: StagingBuffer, pBufferData: pData, RequiredSize: ImageSize))
			return false;

		auto &Tex = m_vTextures[TextureSlot];

		// the texture was downscaled at creation time (exceeded m_MaxTextureSize);
		// apply the same number of halvings to the update region
		if(Tex.m_RescaleCount > 0)
		{
			for(uint32_t i = 0; i < Tex.m_RescaleCount; ++i)
			{
				Width >>= 1;
				Height >>= 1;

				XOff /= 2;
				YOff /= 2;
			}

			uint8_t *pTmpData = ResizeImage(pImageData: pData, Width, Height, NewWidth: Width, NewHeight: Height, BPP: VulkanFormatToPixelSize(Format));
			free(ptr: pData);
			pData = pTmpData;
		}

		// all mip levels go to TRANSFER_DST; level 0 is written by the copy and
		// the others are regenerated (or restored) below
		if(!ImageBarrier(Image: Tex.m_Img, MipMapBase: 0, MipMapCount: Tex.m_MipMapCount, LayerBase: 0, LayerCount: 1, Format, OldLayout: VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, NewLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL))
			return false;
		if(!CopyBufferToImage(Buffer: StagingBuffer.m_Buffer, BufferOffset: StagingBuffer.m_HeapData.m_OffsetToAlign, Image: Tex.m_Img, X: XOff, Y: YOff, Width, Height, Depth: 1))
			return false;

		if(Tex.m_MipMapCount > 1)
		{
			// BuildMipmaps also leaves every level in SHADER_READ_ONLY layout
			if(!BuildMipmaps(Image: Tex.m_Img, ImageFormat: Format, Width, Height, Depth: 1, MipMapLevelCount: Tex.m_MipMapCount))
				return false;
		}
		else
		{
			if(!ImageBarrier(Image: Tex.m_Img, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format, OldLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, NewLayout: VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL))
				return false;
		}

		// staging memory is released once the frame's uploads are submitted
		UploadAndFreeStagingImageMemBlock(Block&: StagingBuffer);

		return true;
	}
2530
	// Creates the GPU resources for texture slot `Slot` from raw pixel data:
	// optionally a plain 2D texture and/or a 16x16-split 2D-array texture
	// (used for tile maps), including image, view, samplers and descriptor
	// sets. pData is taken by reference because resize paths replace it.
	// @param Format  format of the uploaded data.
	// @param StoreFormat  currently unused in this function body.
	// @return false when any image/descriptor creation step fails.
	[[nodiscard]] bool CreateTextureCMD(
		int Slot,
		int Width,
		int Height,
		VkFormat Format,
		VkFormat StoreFormat,
		int Flags,
		uint8_t *&pData)
	{
		size_t ImageIndex = (size_t)Slot;
		const size_t PixelSize = VulkanFormatToPixelSize(Format);

		// grow the texture array geometrically until the slot fits
		while(ImageIndex >= m_vTextures.size())
		{
			m_vTextures.resize(new_size: (m_vTextures.size() * 2) + 1);
		}

		// resample if needed
		// halve repeatedly until both dimensions fit the device limit,
		// remembering the count so later UpdateTexture calls can match it
		uint32_t RescaleCount = 0;
		if((size_t)Width > m_MaxTextureSize || (size_t)Height > m_MaxTextureSize)
		{
			do
			{
				Width >>= 1;
				Height >>= 1;
				++RescaleCount;
			} while((size_t)Width > m_MaxTextureSize || (size_t)Height > m_MaxTextureSize);

			uint8_t *pTmpData = ResizeImage(pImageData: pData, Width, Height, NewWidth: Width, NewHeight: Height, BPP: PixelSize);
			free(ptr: pData);
			pData = pTmpData;
		}

		bool Requires2DTexture = (Flags & TextureFlag::NO_2D_TEXTURE) == 0;
		bool Requires2DTextureArray = (Flags & TextureFlag::TO_2D_ARRAY_TEXTURE) != 0;
		bool RequiresMipMaps = (Flags & TextureFlag::NO_MIPMAPS) == 0;
		size_t MipMapLevelCount = 1;
		if(RequiresMipMaps)
		{
			VkExtent3D ImgSize{.width: (uint32_t)Width, .height: (uint32_t)Height, .depth: 1};
			MipMapLevelCount = ImageMipLevelCount(ImgExtent: ImgSize);
			// mipmap generation blits require optimal RGBA blitting support
			if(!m_OptimalRGBAImageBlitting)
				MipMapLevelCount = 1;
		}

		CTexture &Texture = m_vTextures[ImageIndex];

		Texture.m_Width = Width;
		Texture.m_Height = Height;
		Texture.m_RescaleCount = RescaleCount;
		Texture.m_MipMapCount = MipMapLevelCount;

		if(Requires2DTexture)
		{
			if(!CreateTextureImage(ImageIndex, NewImage&: Texture.m_Img, NewImgMem&: Texture.m_ImgMem, pData, Format, Width, Height, Depth: 1, PixelSize, MipMapLevelCount))
				return false;
			VkFormat ImgFormat = Format;
			VkImageView ImgView = CreateTextureImageView(TexImage: Texture.m_Img, ImgFormat, ViewType: VK_IMAGE_VIEW_TYPE_2D, Depth: 1, MipMapLevelCount);
			Texture.m_ImgView = ImgView;
			// sampler slot 0 = repeat, slot 1 = clamp-to-edge
			VkSampler ImgSampler = GetTextureSampler(SamplerType: SUPPORTED_SAMPLER_TYPE_REPEAT);
			Texture.m_aSamplers[0] = ImgSampler;
			ImgSampler = GetTextureSampler(SamplerType: SUPPORTED_SAMPLER_TYPE_CLAMP_TO_EDGE);
			Texture.m_aSamplers[1] = ImgSampler;

			// one descriptor set per sampler variant
			if(!CreateNewTexturedStandardDescriptorSets(TextureSlot: ImageIndex, DescrIndex: 0))
				return false;
			if(!CreateNewTexturedStandardDescriptorSets(TextureSlot: ImageIndex, DescrIndex: 1))
				return false;
		}

		if(Requires2DTextureArray)
		{
			int Image3DWidth = Width;
			int Image3DHeight = Height;

			int ConvertWidth = Width;
			int ConvertHeight = Height;

			// the 16x16 split requires dimensions that are non-zero multiples of 16
			if(ConvertWidth == 0 || (ConvertWidth % 16) != 0 || ConvertHeight == 0 || (ConvertHeight % 16) != 0)
			{
				int NewWidth = maximum<int>(a: HighestBit(OfVar: ConvertWidth), b: 16);
				int NewHeight = maximum<int>(a: HighestBit(OfVar: ConvertHeight), b: 16);
				uint8_t *pNewTexData = ResizeImage(pImageData: pData, Width: ConvertWidth, Height: ConvertHeight, NewWidth, NewHeight, BPP: PixelSize);
				if(IsVerbose())
				{
					log_debug("gfx/vulkan", "3D/2D array texture was resized. Slot=%d Size=(%d, %d) Resized=(%d, %d)", Slot, ConvertWidth, ConvertHeight, NewWidth, NewHeight);
				}

				ConvertWidth = NewWidth;
				ConvertHeight = NewHeight;

				free(ptr: pData);
				pData = pNewTexData;
			}

			// NOTE(review): Needs3DTexDel is unconditionally set true below, so the
			// flag is effectively redundant (pTexData3D is freed whenever non-null).
			bool Needs3DTexDel = false;
			// NOTE(review): malloc result is not checked before use by Texture2DTo3D.
			uint8_t *pTexData3D = static_cast<uint8_t *>(malloc(size: (size_t)PixelSize * ConvertWidth * ConvertHeight));
			// repack the 2D atlas into 16*16 layers; on failure drop the buffer
			// and silently skip the array-texture creation below
			if(!Texture2DTo3D(pImageBuffer: pData, ImageWidth: ConvertWidth, ImageHeight: ConvertHeight, PixelSize, SplitCountWidth: 16, SplitCountHeight: 16, pTarget3DImageData: pTexData3D, Target3DImageWidth&: Image3DWidth, Target3DImageHeight&: Image3DHeight))
			{
				free(ptr: pTexData3D);
				pTexData3D = nullptr;
			}
			Needs3DTexDel = true;

			if(pTexData3D != nullptr)
			{
				const size_t ImageDepth2DArray = (size_t)16 * 16;
				VkExtent3D ImgSize{.width: (uint32_t)Image3DWidth, .height: (uint32_t)Image3DHeight, .depth: 1};
				// mip count must be recomputed for the per-layer dimensions
				if(RequiresMipMaps)
				{
					MipMapLevelCount = ImageMipLevelCount(ImgExtent: ImgSize);
					if(!m_OptimalRGBAImageBlitting)
						MipMapLevelCount = 1;
				}

				if(!CreateTextureImage(ImageIndex, NewImage&: Texture.m_Img3D, NewImgMem&: Texture.m_Img3DMem, pData: pTexData3D, Format, Width: Image3DWidth, Height: Image3DHeight, Depth: ImageDepth2DArray, PixelSize, MipMapLevelCount))
					return false;
				VkFormat ImgFormat = Format;
				VkImageView ImgView = CreateTextureImageView(TexImage: Texture.m_Img3D, ImgFormat, ViewType: VK_IMAGE_VIEW_TYPE_2D_ARRAY, Depth: ImageDepth2DArray, MipMapLevelCount);
				Texture.m_Img3DView = ImgView;
				VkSampler ImgSampler = GetTextureSampler(SamplerType: SUPPORTED_SAMPLER_TYPE_2D_TEXTURE_ARRAY);
				Texture.m_Sampler3D = ImgSampler;

				if(!CreateNew3DTexturedStandardDescriptorSets(TextureSlot: ImageIndex))
					return false;

				if(Needs3DTexDel)
					free(ptr: pTexData3D);
			}
		}
		return true;
	}
2663
	// Generates the mip chain of `Image` on the GPU by repeatedly blitting
	// level i-1 (TRANSFER_SRC) into level i (TRANSFER_DST), transitioning each
	// finished level to SHADER_READ_ONLY. Expects all levels to currently be
	// in TRANSFER_DST layout; on return every level is SHADER_READ_ONLY.
	// @param Depth  number of array layers blitted per level.
	// @return false only when no memory command buffer is available.
	[[nodiscard]] bool BuildMipmaps(VkImage Image, VkFormat ImageFormat, size_t Width, size_t Height, size_t Depth, size_t MipMapLevelCount)
	{
		VkCommandBuffer *pMemCommandBuffer;
		if(!GetMemoryCommandBuffer(pMemCommandBuffer))
			return false;
		auto &MemCommandBuffer = *pMemCommandBuffer;

		// reusable barrier; only layout/access/mip level fields change per step
		VkImageMemoryBarrier Barrier{};
		Barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		Barrier.image = Image;
		Barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		Barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		Barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		Barrier.subresourceRange.levelCount = 1;
		Barrier.subresourceRange.baseArrayLayer = 0;
		Barrier.subresourceRange.layerCount = Depth;

		int32_t TmpMipWidth = (int32_t)Width;
		int32_t TmpMipHeight = (int32_t)Height;

		for(size_t i = 1; i < MipMapLevelCount; ++i)
		{
			// level i-1 was just written (by the initial upload or the previous
			// blit); make it readable as the blit source
			Barrier.subresourceRange.baseMipLevel = i - 1;
			Barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
			Barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
			Barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
			Barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;

			vkCmdPipelineBarrier(commandBuffer: MemCommandBuffer, srcStageMask: VK_PIPELINE_STAGE_TRANSFER_BIT, dstStageMask: VK_PIPELINE_STAGE_TRANSFER_BIT, dependencyFlags: 0, memoryBarrierCount: 0, pMemoryBarriers: nullptr, bufferMemoryBarrierCount: 0, pBufferMemoryBarriers: nullptr, imageMemoryBarrierCount: 1, pImageMemoryBarriers: &Barrier);

			// downscale by half, clamping each dimension at 1
			VkImageBlit Blit{};
			Blit.srcOffsets[0] = {.x: 0, .y: 0, .z: 0};
			Blit.srcOffsets[1] = {.x: TmpMipWidth, .y: TmpMipHeight, .z: 1};
			Blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
			Blit.srcSubresource.mipLevel = i - 1;
			Blit.srcSubresource.baseArrayLayer = 0;
			Blit.srcSubresource.layerCount = Depth;
			Blit.dstOffsets[0] = {.x: 0, .y: 0, .z: 0};
			Blit.dstOffsets[1] = {.x: TmpMipWidth > 1 ? TmpMipWidth / 2 : 1, .y: TmpMipHeight > 1 ? TmpMipHeight / 2 : 1, .z: 1};
			Blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
			Blit.dstSubresource.mipLevel = i;
			Blit.dstSubresource.baseArrayLayer = 0;
			Blit.dstSubresource.layerCount = Depth;

			vkCmdBlitImage(commandBuffer: MemCommandBuffer,
				srcImage: Image, srcImageLayout: VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
				dstImage: Image, dstImageLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
				regionCount: 1, pRegions: &Blit,
				filter: m_AllowsLinearBlitting ? VK_FILTER_LINEAR : VK_FILTER_NEAREST);

			// level i-1 is final now; hand it over to the fragment shader stage
			Barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
			Barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
			Barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
			Barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;

			vkCmdPipelineBarrier(commandBuffer: MemCommandBuffer,
				srcStageMask: VK_PIPELINE_STAGE_TRANSFER_BIT, dstStageMask: VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, dependencyFlags: 0,
				memoryBarrierCount: 0, pMemoryBarriers: nullptr,
				bufferMemoryBarrierCount: 0, pBufferMemoryBarriers: nullptr,
				imageMemoryBarrierCount: 1, pImageMemoryBarriers: &Barrier);

			if(TmpMipWidth > 1)
				TmpMipWidth /= 2;
			if(TmpMipHeight > 1)
				TmpMipHeight /= 2;
		}

		// the last level was only ever a blit destination; transition it too
		Barrier.subresourceRange.baseMipLevel = MipMapLevelCount - 1;
		Barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
		Barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
		Barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
		Barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;

		vkCmdPipelineBarrier(commandBuffer: MemCommandBuffer,
			srcStageMask: VK_PIPELINE_STAGE_TRANSFER_BIT, dstStageMask: VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, dependencyFlags: 0,
			memoryBarrierCount: 0, pMemoryBarriers: nullptr,
			bufferMemoryBarrierCount: 0, pBufferMemoryBarriers: nullptr,
			imageMemoryBarrierCount: 1, pImageMemoryBarriers: &Barrier);

		return true;
	}
2745
	// Creates a sampled GPU image and fills it from pData: stages the pixels,
	// creates the image + backing memory, transitions it to TRANSFER_DST,
	// copies the staging data in, then either builds the mip chain or
	// transitions directly to SHADER_READ_ONLY.
	// @param Depth  array layer count (1 for plain 2D textures).
	// @return false when staging, image creation, a barrier or the copy fails.
	[[nodiscard]] bool CreateTextureImage(size_t ImageIndex, VkImage &NewImage, SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> &NewImgMem, const uint8_t *pData, VkFormat Format, size_t Width, size_t Height, size_t Depth, size_t PixelSize, size_t MipMapLevelCount)
	{
		VkDeviceSize ImageSize = Width * Height * Depth * PixelSize;

		SMemoryBlock<STAGING_BUFFER_IMAGE_CACHE_ID> StagingBuffer;
		if(!GetStagingBufferImage(ResBlock&: StagingBuffer, pBufferData: pData, RequiredSize: ImageSize))
			return false;

		VkFormat ImgFormat = Format;

		if(!CreateImage(Width, Height, Depth, MipMapLevelCount, Format: ImgFormat, Tiling: VK_IMAGE_TILING_OPTIMAL, Image&: NewImage, ImageMemory&: NewImgMem))
			return false;

		// whole mip chain to TRANSFER_DST so both the copy and later mip blits
		// find every level in the expected layout
		if(!ImageBarrier(Image: NewImage, MipMapBase: 0, MipMapCount: MipMapLevelCount, LayerBase: 0, LayerCount: Depth, Format: ImgFormat, OldLayout: VK_IMAGE_LAYOUT_UNDEFINED, NewLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL))
			return false;
		if(!CopyBufferToImage(Buffer: StagingBuffer.m_Buffer, BufferOffset: StagingBuffer.m_HeapData.m_OffsetToAlign, Image: NewImage, X: 0, Y: 0, Width: static_cast<uint32_t>(Width), Height: static_cast<uint32_t>(Height), Depth))
			return false;

		UploadAndFreeStagingImageMemBlock(Block&: StagingBuffer);

		if(MipMapLevelCount > 1)
		{
			// BuildMipmaps leaves all levels in SHADER_READ_ONLY layout
			if(!BuildMipmaps(Image: NewImage, ImageFormat: ImgFormat, Width, Height, Depth, MipMapLevelCount))
				return false;
		}
		else
		{
			if(!ImageBarrier(Image: NewImage, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: Depth, Format: ImgFormat, OldLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, NewLayout: VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL))
				return false;
		}

		return true;
	}
2779
2780 VkImageView CreateTextureImageView(VkImage TexImage, VkFormat ImgFormat, VkImageViewType ViewType, size_t Depth, size_t MipMapLevelCount)
2781 {
2782 return CreateImageView(Image: TexImage, Format: ImgFormat, ViewType, Depth, MipMapLevelCount);
2783 }
2784
2785 [[nodiscard]] bool CreateTextureSamplersImpl(VkSampler &CreatedSampler, VkSamplerAddressMode AddrModeU, VkSamplerAddressMode AddrModeV, VkSamplerAddressMode AddrModeW)
2786 {
2787 VkSamplerCreateInfo SamplerInfo{};
2788 SamplerInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
2789 SamplerInfo.magFilter = VK_FILTER_LINEAR;
2790 SamplerInfo.minFilter = VK_FILTER_LINEAR;
2791 SamplerInfo.addressModeU = AddrModeU;
2792 SamplerInfo.addressModeV = AddrModeV;
2793 SamplerInfo.addressModeW = AddrModeW;
2794 SamplerInfo.anisotropyEnable = VK_FALSE;
2795 SamplerInfo.maxAnisotropy = m_MaxSamplerAnisotropy;
2796 SamplerInfo.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
2797 SamplerInfo.unnormalizedCoordinates = VK_FALSE;
2798 SamplerInfo.compareEnable = VK_FALSE;
2799 SamplerInfo.compareOp = VK_COMPARE_OP_ALWAYS;
2800 SamplerInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
2801 SamplerInfo.mipLodBias = (m_GlobalTextureLodBIAS / 1000.0f);
2802 SamplerInfo.minLod = -1000;
2803 SamplerInfo.maxLod = 1000;
2804
2805 if(vkCreateSampler(device: m_VKDevice, pCreateInfo: &SamplerInfo, pAllocator: nullptr, pSampler: &CreatedSampler) != VK_SUCCESS)
2806 {
2807 log_error("gfx/vulkan", "Failed to create texture sampler.");
2808 return false;
2809 }
2810 return true;
2811 }
2812
2813 [[nodiscard]] bool CreateTextureSamplers()
2814 {
2815 bool Ret = true;
2816 Ret &= CreateTextureSamplersImpl(CreatedSampler&: m_aSamplers[SUPPORTED_SAMPLER_TYPE_REPEAT], AddrModeU: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT, AddrModeV: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT, AddrModeW: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT);
2817 Ret &= CreateTextureSamplersImpl(CreatedSampler&: m_aSamplers[SUPPORTED_SAMPLER_TYPE_CLAMP_TO_EDGE], AddrModeU: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, AddrModeV: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, AddrModeW: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE);
2818 Ret &= CreateTextureSamplersImpl(CreatedSampler&: m_aSamplers[SUPPORTED_SAMPLER_TYPE_2D_TEXTURE_ARRAY], AddrModeU: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, AddrModeV: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, AddrModeW: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT);
2819 return Ret;
2820 }
2821
2822 void DestroyTextureSamplers()
2823 {
2824 vkDestroySampler(device: m_VKDevice, sampler: m_aSamplers[SUPPORTED_SAMPLER_TYPE_REPEAT], pAllocator: nullptr);
2825 vkDestroySampler(device: m_VKDevice, sampler: m_aSamplers[SUPPORTED_SAMPLER_TYPE_CLAMP_TO_EDGE], pAllocator: nullptr);
2826 vkDestroySampler(device: m_VKDevice, sampler: m_aSamplers[SUPPORTED_SAMPLER_TYPE_2D_TEXTURE_ARRAY], pAllocator: nullptr);
2827 }
2828
2829 VkSampler GetTextureSampler(ESupportedSamplerTypes SamplerType)
2830 {
2831 return m_aSamplers[SamplerType];
2832 }
2833
2834 VkImageView CreateImageView(VkImage Image, VkFormat Format, VkImageViewType ViewType, size_t Depth, size_t MipMapLevelCount)
2835 {
2836 VkImageViewCreateInfo ViewCreateInfo{};
2837 ViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
2838 ViewCreateInfo.image = Image;
2839 ViewCreateInfo.viewType = ViewType;
2840 ViewCreateInfo.format = Format;
2841 ViewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
2842 ViewCreateInfo.subresourceRange.baseMipLevel = 0;
2843 ViewCreateInfo.subresourceRange.levelCount = MipMapLevelCount;
2844 ViewCreateInfo.subresourceRange.baseArrayLayer = 0;
2845 ViewCreateInfo.subresourceRange.layerCount = Depth;
2846
2847 VkImageView ImageView;
2848 if(vkCreateImageView(device: m_VKDevice, pCreateInfo: &ViewCreateInfo, pAllocator: nullptr, pView: &ImageView) != VK_SUCCESS)
2849 {
2850 return VK_NULL_HANDLE;
2851 }
2852
2853 return ImageView;
2854 }
2855
2856 [[nodiscard]] bool CreateImage(uint32_t Width, uint32_t Height, uint32_t Depth, size_t MipMapLevelCount, VkFormat Format, VkImageTiling Tiling, VkImage &Image, SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> &ImageMemory, VkImageUsageFlags ImageUsage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT)
2857 {
2858 VkImageCreateInfo ImageInfo{};
2859 ImageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
2860 ImageInfo.imageType = VK_IMAGE_TYPE_2D;
2861 ImageInfo.extent.width = Width;
2862 ImageInfo.extent.height = Height;
2863 ImageInfo.extent.depth = 1;
2864 ImageInfo.mipLevels = MipMapLevelCount;
2865 ImageInfo.arrayLayers = Depth;
2866 ImageInfo.format = Format;
2867 ImageInfo.tiling = Tiling;
2868 ImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2869 ImageInfo.usage = ImageUsage;
2870 ImageInfo.samples = (ImageUsage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) == 0 ? VK_SAMPLE_COUNT_1_BIT : GetSampleCount();
2871 ImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
2872
2873 if(vkCreateImage(device: m_VKDevice, pCreateInfo: &ImageInfo, pAllocator: nullptr, pImage: &Image) != VK_SUCCESS)
2874 {
2875 log_error("gfx/vulkan", "Failed to create image.");
2876 return false;
2877 }
2878
2879 VkMemoryRequirements MemRequirements;
2880 vkGetImageMemoryRequirements(device: m_VKDevice, image: Image, pMemoryRequirements: &MemRequirements);
2881
2882 if(!GetImageMemory(RetBlock&: ImageMemory, RequiredSize: MemRequirements.size, RequiredAlignment: MemRequirements.alignment, RequiredMemoryTypeBits: MemRequirements.memoryTypeBits))
2883 return false;
2884
2885 vkBindImageMemory(device: m_VKDevice, image: Image, memory: ImageMemory.m_BufferMem.m_Mem, memoryOffset: ImageMemory.m_HeapData.m_OffsetToAlign);
2886
2887 return true;
2888 }
2889
	// Records an image layout transition barrier on the memory command buffer
	// for the given mip/layer range. Only the explicitly listed layout pairs
	// are supported; any other pair asserts.
	// @return false only when no memory command buffer is available.
	[[nodiscard]] bool ImageBarrier(const VkImage &Image, size_t MipMapBase, size_t MipMapCount, size_t LayerBase, size_t LayerCount, VkFormat Format, VkImageLayout OldLayout, VkImageLayout NewLayout)
	{
		VkCommandBuffer *pMemCommandBuffer;
		if(!GetMemoryCommandBuffer(pMemCommandBuffer))
			return false;
		auto &MemCommandBuffer = *pMemCommandBuffer;

		VkImageMemoryBarrier Barrier{};
		Barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		Barrier.oldLayout = OldLayout;
		Barrier.newLayout = NewLayout;
		Barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		Barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		Barrier.image = Image;
		Barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		Barrier.subresourceRange.baseMipLevel = MipMapBase;
		Barrier.subresourceRange.levelCount = MipMapCount;
		Barrier.subresourceRange.baseArrayLayer = LayerBase;
		Barrier.subresourceRange.layerCount = LayerCount;

		VkPipelineStageFlags SourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
		VkPipelineStageFlags DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;

		// map each supported (old, new) layout pair to matching access masks
		// and pipeline stages

		// fresh image about to receive an upload
		if(OldLayout == VK_IMAGE_LAYOUT_UNDEFINED && NewLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
		{
			Barrier.srcAccessMask = 0;
			Barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;

			SourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
			DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
		}
		// upload finished, hand over to fragment shader sampling
		else if(OldLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && NewLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
		{
			Barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
			Barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;

			SourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
			DestinationStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
		}
		// sampled texture about to be updated again
		else if(OldLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL && NewLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
		{
			Barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
			Barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;

			SourceStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
		}
		// swap chain image: transfer source back to presentable
		else if(OldLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL && NewLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
		{
			Barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
			Barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;

			SourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
			DestinationStage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
		}
		// swap chain image: presentable to transfer source (e.g. screenshots)
		else if(OldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR && NewLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
		{
			Barrier.srcAccessMask = VK_ACCESS_MEMORY_READ_BIT;
			Barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;

			SourceStage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
			DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
		}
		// fresh image directly into GENERAL layout
		else if(OldLayout == VK_IMAGE_LAYOUT_UNDEFINED && NewLayout == VK_IMAGE_LAYOUT_GENERAL)
		{
			Barrier.srcAccessMask = 0;
			Barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;

			SourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
			DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
		}
		// GENERAL <-> TRANSFER_DST round trips
		else if(OldLayout == VK_IMAGE_LAYOUT_GENERAL && NewLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
		{
			Barrier.srcAccessMask = VK_ACCESS_MEMORY_READ_BIT;
			Barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;

			SourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
			DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
		}
		else if(OldLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && NewLayout == VK_IMAGE_LAYOUT_GENERAL)
		{
			Barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
			Barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;

			SourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
			DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
		}
		else
		{
			dbg_assert_failed("Unsupported layout transition. OldLayout=%d NewLayout=%d", (int)OldLayout, (int)NewLayout);
		}

		vkCmdPipelineBarrier(
			commandBuffer: MemCommandBuffer,
			srcStageMask: SourceStage, dstStageMask: DestinationStage,
			dependencyFlags: 0,
			memoryBarrierCount: 0, pMemoryBarriers: nullptr,
			bufferMemoryBarrierCount: 0, pBufferMemoryBarriers: nullptr,
			imageMemoryBarrierCount: 1, pImageMemoryBarriers: &Barrier);

		return true;
	}
2992
2993 [[nodiscard]] bool CopyBufferToImage(VkBuffer Buffer, VkDeviceSize BufferOffset, VkImage Image, int32_t X, int32_t Y, uint32_t Width, uint32_t Height, size_t Depth)
2994 {
2995 VkCommandBuffer *pCommandBuffer;
2996 if(!GetMemoryCommandBuffer(pMemCommandBuffer&: pCommandBuffer))
2997 return false;
2998 auto &CommandBuffer = *pCommandBuffer;
2999
3000 VkBufferImageCopy Region{};
3001 Region.bufferOffset = BufferOffset;
3002 Region.bufferRowLength = 0;
3003 Region.bufferImageHeight = 0;
3004 Region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
3005 Region.imageSubresource.mipLevel = 0;
3006 Region.imageSubresource.baseArrayLayer = 0;
3007 Region.imageSubresource.layerCount = Depth;
3008 Region.imageOffset = {.x: X, .y: Y, .z: 0};
3009 Region.imageExtent = {
3010 .width: Width,
3011 .height: Height,
3012 .depth: 1};
3013
3014 vkCmdCopyBufferToImage(commandBuffer: CommandBuffer, srcBuffer: Buffer, dstImage: Image, dstImageLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, regionCount: 1, pRegions: &Region);
3015
3016 return true;
3017 }
3018
3019 /************************
3020 * BUFFERS
3021 ************************/
3022
	// Creates (or overwrites) the buffer object at BufferIndex and fills it with
	// BufferDataSize bytes from pUploadData; a null pUploadData uploads zeroed
	// memory of that size instead. Long-lived buffers are staged and copied into
	// vertex memory with barriers around the transfer, one-frame buffers go into
	// the main thread's stream vertex buffer. Returns false on any failure.
	[[nodiscard]] bool CreateBufferObject(size_t BufferIndex, const void *pUploadData, VkDeviceSize BufferDataSize, bool IsOneFrameBuffer)
	{
		std::vector<uint8_t> UploadDataTmp;
		if(pUploadData == nullptr)
		{
			// no data given: upload zero-initialized memory of the requested size
			UploadDataTmp.resize(new_size: BufferDataSize);
			pUploadData = UploadDataTmp.data();
		}

		// grow the buffer object array geometrically until the index fits
		while(BufferIndex >= m_vBufferObjects.size())
		{
			m_vBufferObjects.resize(new_size: (m_vBufferObjects.size() * 2) + 1);
		}
		auto &BufferObject = m_vBufferObjects[BufferIndex];

		VkBuffer VertexBuffer;
		size_t BufferOffset = 0;
		if(!IsOneFrameBuffer)
		{
			// long-lived buffer: upload through a staging buffer into vertex memory
			SMemoryBlock<STAGING_BUFFER_CACHE_ID> StagingBuffer;
			if(!GetStagingBuffer(ResBlock&: StagingBuffer, pBufferData: pUploadData, RequiredSize: BufferDataSize))
				return false;

			SMemoryBlock<VERTEX_BUFFER_CACHE_ID> Mem;
			if(!GetVertexBuffer(ResBlock&: Mem, RequiredSize: BufferDataSize))
				return false;

			BufferObject.m_BufferObject.m_Mem = Mem;
			VertexBuffer = Mem.m_Buffer;
			BufferOffset = Mem.m_HeapData.m_OffsetToAlign;

			// barrier -> copy -> barrier: guard vertex attribute reads on both
			// sides of the transfer
			if(!MemoryBarrier(Buffer: VertexBuffer, Offset: Mem.m_HeapData.m_OffsetToAlign, Size: BufferDataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: true))
				return false;
			if(!CopyBuffer(SrcBuffer: StagingBuffer.m_Buffer, DstBuffer: VertexBuffer, SrcOffset: StagingBuffer.m_HeapData.m_OffsetToAlign, DstOffset: Mem.m_HeapData.m_OffsetToAlign, CopySize: BufferDataSize))
				return false;
			if(!MemoryBarrier(Buffer: VertexBuffer, Offset: Mem.m_HeapData.m_OffsetToAlign, Size: BufferDataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: false))
				return false;
			UploadAndFreeStagingMemBlock(Block&: StagingBuffer);
		}
		else
		{
			// one-frame buffer: write directly into the streamed vertex buffer
			SDeviceMemoryBlock VertexBufferMemory;
			if(!CreateStreamVertexBuffer(RenderThreadIndex: MAIN_THREAD_INDEX, NewBuffer&: VertexBuffer, NewBufferMem&: VertexBufferMemory, BufferOffset, pData: pUploadData, DataSize: BufferDataSize))
				return false;
		}
		BufferObject.m_IsStreamedBuffer = IsOneFrameBuffer;
		BufferObject.m_CurBuffer = VertexBuffer;
		BufferObject.m_CurBufferOffset = BufferOffset;

		return true;
	}
3074
3075 void DeleteBufferObject(size_t BufferIndex)
3076 {
3077 auto &BufferObject = m_vBufferObjects[BufferIndex];
3078 if(!BufferObject.m_IsStreamedBuffer)
3079 {
3080 FreeVertexMemBlock(Block&: BufferObject.m_BufferObject.m_Mem);
3081 }
3082 BufferObject = {};
3083 }
3084
3085 [[nodiscard]] bool CopyBuffer(VkBuffer SrcBuffer, VkBuffer DstBuffer, VkDeviceSize SrcOffset, VkDeviceSize DstOffset, VkDeviceSize CopySize)
3086 {
3087 VkCommandBuffer *pCommandBuffer;
3088 if(!GetMemoryCommandBuffer(pMemCommandBuffer&: pCommandBuffer))
3089 return false;
3090 auto &CommandBuffer = *pCommandBuffer;
3091 VkBufferCopy CopyRegion{};
3092 CopyRegion.srcOffset = SrcOffset;
3093 CopyRegion.dstOffset = DstOffset;
3094 CopyRegion.size = CopySize;
3095 vkCmdCopyBuffer(commandBuffer: CommandBuffer, srcBuffer: SrcBuffer, dstBuffer: DstBuffer, regionCount: 1, pRegions: &CopyRegion);
3096
3097 return true;
3098 }
3099
3100 /************************
3101 * RENDER STATES
3102 ************************/
3103
3104 void GetStateMatrix(const CCommandBuffer::SState &State, std::array<float, (size_t)4 * 2> &Matrix)
3105 {
3106 Matrix = {
3107 // column 1
3108 2.f / (State.m_ScreenBR.x - State.m_ScreenTL.x),
3109 0,
3110 // column 2
3111 0,
3112 2.f / (State.m_ScreenBR.y - State.m_ScreenTL.y),
3113 // column 3
3114 0,
3115 0,
3116 // column 4
3117 -((State.m_ScreenTL.x + State.m_ScreenBR.x) / (State.m_ScreenBR.x - State.m_ScreenTL.x)),
3118 -((State.m_ScreenTL.y + State.m_ScreenBR.y) / (State.m_ScreenBR.y - State.m_ScreenTL.y)),
3119 };
3120 }
3121
3122 [[nodiscard]] bool GetIsTextured(const CCommandBuffer::SState &State)
3123 {
3124 return State.m_Texture != -1;
3125 }
3126
3127 size_t GetAddressModeIndex(const CCommandBuffer::SState &State)
3128 {
3129 switch(State.m_WrapMode)
3130 {
3131 case EWrapMode::REPEAT:
3132 return VULKAN_BACKEND_ADDRESS_MODE_REPEAT;
3133 case EWrapMode::CLAMP:
3134 return VULKAN_BACKEND_ADDRESS_MODE_CLAMP_EDGES;
3135 default:
3136 dbg_assert_failed("Invalid wrap mode: %d", (int)State.m_WrapMode);
3137 };
3138 }
3139
3140 size_t GetBlendModeIndex(const CCommandBuffer::SState &State)
3141 {
3142 switch(State.m_BlendMode)
3143 {
3144 case EBlendMode::NONE:
3145 return VULKAN_BACKEND_BLEND_MODE_NONE;
3146 case EBlendMode::ALPHA:
3147 return VULKAN_BACKEND_BLEND_MODE_ALPHA;
3148 case EBlendMode::ADDITIVE:
3149 return VULKAN_BACKEND_BLEND_MODE_ADDITATIVE;
3150 default:
3151 dbg_assert_failed("Invalid blend mode: %d", (int)State.m_BlendMode);
3152 };
3153 }
3154
3155 size_t GetDynamicModeIndexFromState(const CCommandBuffer::SState &State) const
3156 {
3157 return (State.m_ClipEnable || m_HasDynamicViewport || m_VKSwapImgAndViewportExtent.m_HasForcedViewport) ? VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT : VULKAN_BACKEND_CLIP_MODE_NONE;
3158 }
3159
3160 size_t GetDynamicModeIndexFromExecBuffer(const SRenderCommandExecuteBuffer &ExecBuffer)
3161 {
3162 return (ExecBuffer.m_HasDynamicState) ? VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT : VULKAN_BACKEND_CLIP_MODE_NONE;
3163 }
3164
3165 VkPipeline &GetPipeline(SPipelineContainer &Container, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3166 {
3167 return Container.m_aaaPipelines[BlendModeIndex][DynamicIndex][(size_t)IsTextured];
3168 }
3169
3170 VkPipelineLayout &GetPipeLayout(SPipelineContainer &Container, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3171 {
3172 return Container.m_aaaPipelineLayouts[BlendModeIndex][DynamicIndex][(size_t)IsTextured];
3173 }
3174
3175 VkPipelineLayout &GetStandardPipeLayout(bool IsLineGeometry, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3176 {
3177 if(IsLineGeometry)
3178 return GetPipeLayout(Container&: m_StandardLinePipeline, IsTextured, BlendModeIndex, DynamicIndex);
3179 else
3180 return GetPipeLayout(Container&: m_StandardPipeline, IsTextured, BlendModeIndex, DynamicIndex);
3181 }
3182
3183 VkPipeline &GetStandardPipe(bool IsLineGeometry, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3184 {
3185 if(IsLineGeometry)
3186 return GetPipeline(Container&: m_StandardLinePipeline, IsTextured, BlendModeIndex, DynamicIndex);
3187 else
3188 return GetPipeline(Container&: m_StandardPipeline, IsTextured, BlendModeIndex, DynamicIndex);
3189 }
3190
3191 VkPipelineLayout &GetTileLayerPipeLayout(bool IsBorder, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3192 {
3193 if(!IsBorder)
3194 return GetPipeLayout(Container&: m_TilePipeline, IsTextured, BlendModeIndex, DynamicIndex);
3195 else
3196 return GetPipeLayout(Container&: m_TileBorderPipeline, IsTextured, BlendModeIndex, DynamicIndex);
3197 }
3198
3199 VkPipeline &GetTileLayerPipe(bool IsBorder, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3200 {
3201 if(!IsBorder)
3202 return GetPipeline(Container&: m_TilePipeline, IsTextured, BlendModeIndex, DynamicIndex);
3203 else
3204 return GetPipeline(Container&: m_TileBorderPipeline, IsTextured, BlendModeIndex, DynamicIndex);
3205 }
3206
	// Resolves all pipeline-selection indices for a render state in one call:
	// textured flag, sampler address mode, blend mode and dynamic clip mode
	// (the latter taken from the already-filled execute buffer).
	void GetStateIndices(const SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SState &State, bool &IsTextured, size_t &BlendModeIndex, size_t &DynamicIndex, size_t &AddressModeIndex)
	{
		IsTextured = GetIsTextured(State);
		AddressModeIndex = GetAddressModeIndex(State);
		BlendModeIndex = GetBlendModeIndex(State);
		DynamicIndex = GetDynamicModeIndexFromExecBuffer(ExecBuffer);
	}
3214
	// Fills the execute buffer's dynamic viewport/scissor state for the given
	// render state, or sets m_HasDynamicState = false when the fixed pipeline
	// state suffices. Scissor rects are converted from OpenGL's bottom-left
	// origin to Vulkan's top-left origin and rescaled when a dynamic viewport
	// is active.
	void ExecBufferFillDynamicStates(const CCommandBuffer::SState &State, SRenderCommandExecuteBuffer &ExecBuffer)
	{
		// Workaround for a bug in molten-vk: https://github.com/KhronosGroup/MoltenVK/issues/2304
#ifdef CONF_PLATFORM_MACOS
		auto HasDynamicState = true;
#else
		size_t DynamicStateIndex = GetDynamicModeIndexFromState(State);
		auto HasDynamicState = DynamicStateIndex == VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT;
#endif

		if(HasDynamicState)
		{
			VkViewport Viewport;
			// an explicitly set dynamic viewport takes precedence
			if(m_HasDynamicViewport)
			{
				Viewport.x = (float)m_DynamicViewportOffset.x;
				Viewport.y = (float)m_DynamicViewportOffset.y;
				Viewport.width = (float)m_DynamicViewportSize.width;
				Viewport.height = (float)m_DynamicViewportSize.height;
				Viewport.minDepth = 0.0f;
				Viewport.maxDepth = 1.0f;
			}
			// else check if there is a forced viewport
			else if(m_VKSwapImgAndViewportExtent.m_HasForcedViewport)
			{
				Viewport.x = 0.0f;
				Viewport.y = 0.0f;
				Viewport.width = (float)m_VKSwapImgAndViewportExtent.m_ForcedViewport.width;
				Viewport.height = (float)m_VKSwapImgAndViewportExtent.m_ForcedViewport.height;
				Viewport.minDepth = 0.0f;
				Viewport.maxDepth = 1.0f;
			}
			// default: cover the whole swap chain image
			else
			{
				Viewport.x = 0.0f;
				Viewport.y = 0.0f;
				Viewport.width = (float)m_VKSwapImgAndViewportExtent.m_SwapImageViewport.width;
				Viewport.height = (float)m_VKSwapImgAndViewportExtent.m_SwapImageViewport.height;
				Viewport.minDepth = 0.0f;
				Viewport.maxDepth = 1.0f;
			}

			VkRect2D Scissor;
			// convert from OGL to vulkan clip

			// the scissor always assumes the presented viewport, because the front-end keeps the calculation
			// for the forced viewport in sync
			auto ScissorViewport = m_VKSwapImgAndViewportExtent.GetPresentedImageViewport();
			if(State.m_ClipEnable)
			{
				// flip the y origin: the clip rect is specified from the bottom edge
				int32_t ScissorY = (int32_t)ScissorViewport.height - ((int32_t)State.m_ClipY + (int32_t)State.m_ClipH);
				uint32_t ScissorH = (int32_t)State.m_ClipH;
				Scissor.offset = {.x: (int32_t)State.m_ClipX, .y: ScissorY};
				Scissor.extent = {.width: (uint32_t)State.m_ClipW, .height: ScissorH};
			}
			else
			{
				Scissor.offset = {.x: 0, .y: 0};
				Scissor.extent = {.width: ScissorViewport.width, .height: ScissorViewport.height};
			}

			// if there is a dynamic viewport make sure the scissor data is scaled down to that
			if(m_HasDynamicViewport)
			{
				Scissor.offset.x = (int32_t)(((float)Scissor.offset.x / (float)ScissorViewport.width) * (float)m_DynamicViewportSize.width) + m_DynamicViewportOffset.x;
				Scissor.offset.y = (int32_t)(((float)Scissor.offset.y / (float)ScissorViewport.height) * (float)m_DynamicViewportSize.height) + m_DynamicViewportOffset.y;
				Scissor.extent.width = (uint32_t)(((float)Scissor.extent.width / (float)ScissorViewport.width) * (float)m_DynamicViewportSize.width);
				Scissor.extent.height = (uint32_t)(((float)Scissor.extent.height / (float)ScissorViewport.height) * (float)m_DynamicViewportSize.height);
			}

			// clamp to non-negative: the upper bound is the type's maximum, so
			// these are effectively max(0, value)
			Viewport.x = std::clamp(val: Viewport.x, lo: 0.0f, hi: std::numeric_limits<decltype(Viewport.x)>::max());
			Viewport.y = std::clamp(val: Viewport.y, lo: 0.0f, hi: std::numeric_limits<decltype(Viewport.y)>::max());

			Scissor.offset.x = std::clamp(val: Scissor.offset.x, lo: 0, hi: std::numeric_limits<decltype(Scissor.offset.x)>::max());
			Scissor.offset.y = std::clamp(val: Scissor.offset.y, lo: 0, hi: std::numeric_limits<decltype(Scissor.offset.y)>::max());

			ExecBuffer.m_HasDynamicState = true;
			ExecBuffer.m_Viewport = Viewport;
			ExecBuffer.m_Scissor = Scissor;
		}
		else
		{
			ExecBuffer.m_HasDynamicState = false;
		}
	}
3300
3301 void BindPipeline(size_t RenderThreadIndex, VkCommandBuffer &CommandBuffer, SRenderCommandExecuteBuffer &ExecBuffer, VkPipeline &BindingPipe, const CCommandBuffer::SState &State)
3302 {
3303 if(m_vLastPipeline[RenderThreadIndex] != BindingPipe)
3304 {
3305 vkCmdBindPipeline(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline: BindingPipe);
3306 m_vLastPipeline[RenderThreadIndex] = BindingPipe;
3307 }
3308
3309 size_t DynamicStateIndex = GetDynamicModeIndexFromExecBuffer(ExecBuffer);
3310 if(DynamicStateIndex == VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT)
3311 {
3312 vkCmdSetViewport(commandBuffer: CommandBuffer, firstViewport: 0, viewportCount: 1, pViewports: &ExecBuffer.m_Viewport);
3313 vkCmdSetScissor(commandBuffer: CommandBuffer, firstScissor: 0, scissorCount: 1, pScissors: &ExecBuffer.m_Scissor);
3314 }
3315 }
3316
3317 /**************************
3318 * RENDERING IMPLEMENTATION
3319 ***************************/
3320
3321 void RenderTileLayer_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, size_t DrawCalls, const CCommandBuffer::SState &State, size_t BufferContainerIndex)
3322 {
3323 size_t BufferObjectIndex = (size_t)m_vBufferContainers[BufferContainerIndex].m_BufferObjectIndex;
3324 const auto &BufferObject = m_vBufferObjects[BufferObjectIndex];
3325
3326 ExecBuffer.m_Buffer = BufferObject.m_CurBuffer;
3327 ExecBuffer.m_BufferOff = BufferObject.m_CurBufferOffset;
3328
3329 bool IsTextured = GetIsTextured(State);
3330 if(IsTextured)
3331 {
3332 ExecBuffer.m_aDescriptors[0] = m_vTextures[State.m_Texture].m_VKStandard3DTexturedDescrSet;
3333 }
3334
3335 ExecBuffer.m_IndexBuffer = m_RenderIndexBuffer;
3336
3337 ExecBuffer.m_EstimatedRenderCallCount = DrawCalls;
3338
3339 ExecBufferFillDynamicStates(State, ExecBuffer);
3340 }
3341
	// Draws a tile layer (or its border) with the tile pipeline: binds pipeline,
	// vertex/index buffers and, when textured, the descriptor set; pushes the
	// projection matrix (plus Scale/Off for borders) and the vertex color as
	// push constants, then issues one indexed draw per entry of the visibility
	// lists (pIndicesOffsets/pDrawCount). Returns false when no graphics
	// command buffer could be acquired.
	[[nodiscard]] bool RenderTileLayer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SState &State, bool IsBorder, const GL_SColorf &Color, const vec2 &Scale, const vec2 &Off, size_t IndicesDrawNum, char *const *pIndicesOffsets, const unsigned int *pDrawCount)
	{
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(State, Matrix&: m);

		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		auto &PipeLayout = GetTileLayerPipeLayout(IsBorder, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = GetTileLayerPipe(IsBorder, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State);

		std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
		vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());

		if(IsTextured)
		{
			vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
		}

		// vertex stage gets the projection (and border transform), the fragment
		// stage gets the color; the fragment offset skips the full vertex
		// constants plus alignment padding
		SUniformTileGPosBorder VertexPushConstants;
		size_t VertexPushConstantSize = sizeof(SUniformTileGPos);
		SUniformTileGVertColor FragPushConstants;
		size_t FragPushConstantSize = sizeof(SUniformTileGVertColor);

		mem_copy(dest: VertexPushConstants.m_aPos, source: m.data(), size: m.size() * sizeof(float));
		FragPushConstants = Color;

		// only borders use the scale/offset part of the vertex push constants
		if(IsBorder)
		{
			VertexPushConstants.m_Scale = Scale;
			VertexPushConstants.m_Offset = Off;
			VertexPushConstantSize = sizeof(SUniformTileGPosBorder);
		}

		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: VertexPushConstantSize, pValues: &VertexPushConstants);
		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, offset: sizeof(SUniformTileGPosBorder) + sizeof(SUniformTileGVertColorAlign), size: FragPushConstantSize, pValues: &FragPushConstants);

		size_t DrawCount = IndicesDrawNum;
		vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: 0, indexType: VK_INDEX_TYPE_UINT32);
		for(size_t i = 0; i < DrawCount; ++i)
		{
			// the visibility list stores byte offsets into the index buffer;
			// convert them to index units for firstIndex
			VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)pIndicesOffsets[i] / sizeof(uint32_t));

			vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(pDrawCount[i]), instanceCount: 1, firstIndex: IndexOffset, vertexOffset: 0, firstInstance: 0);
		}

		return true;
	}
3400
	// Generic draw path for streamed vertices. Uploads PrimitiveCount primitives
	// of vertex type TName into the calling thread's stream vertex buffer and
	// draws them with the standard pipeline (or the standard 3D-texture pipeline
	// when Is3DTextured). Quads are drawn indexed (6 indices per quad), lines
	// and triangles non-indexed. Returns false when no command buffer or stream
	// buffer space could be acquired.
	template<typename TName, bool Is3DTextured>
	[[nodiscard]] bool RenderStandard(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SState &State, EPrimitiveType PrimType, const TName *pVertices, int PrimitiveCount)
	{
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(State, Matrix&: m);

		bool IsLineGeometry = PrimType == EPrimitiveType::LINES;

		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		auto &PipeLayout = Is3DTextured ? GetPipeLayout(Container&: m_Standard3DPipeline, IsTextured, BlendModeIndex, DynamicIndex) : GetStandardPipeLayout(IsLineGeometry, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = Is3DTextured ? GetPipeline(Container&: m_Standard3DPipeline, IsTextured, BlendModeIndex, DynamicIndex) : GetStandardPipe(IsLineGeometry, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State);

		// lines: 2 vertices per primitive, triangles: 3, quads: 4 (drawn indexed)
		size_t VertPerPrim = 2;
		bool IsIndexed = false;
		if(PrimType == EPrimitiveType::QUADS)
		{
			VertPerPrim = 4;
			IsIndexed = true;
		}
		else if(PrimType == EPrimitiveType::TRIANGLES)
		{
			VertPerPrim = 3;
		}

		// upload the vertex data into this thread's stream buffer
		VkBuffer VKBuffer;
		SDeviceMemoryBlock VKBufferMem;
		size_t BufferOff = 0;
		if(!CreateStreamVertexBuffer(RenderThreadIndex: ExecBuffer.m_ThreadIndex, NewBuffer&: VKBuffer, NewBufferMem&: VKBufferMem, BufferOffset&: BufferOff, pData: pVertices, DataSize: VertPerPrim * sizeof(TName) * PrimitiveCount))
			return false;

		std::array<VkBuffer, 1> aVertexBuffers = {VKBuffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)BufferOff};
		vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());

		if(IsIndexed)
			vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: 0, indexType: VK_INDEX_TYPE_UINT32);

		if(IsTextured)
		{
			vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
		}

		// the projection matrix is the only push constant of this pipeline
		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(SUniformGPos), pValues: m.data());

		// quads draw 6 indices (two triangles) per primitive
		if(IsIndexed)
			vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(PrimitiveCount * 6), instanceCount: 1, firstIndex: 0, vertexOffset: 0, firstInstance: 0);
		else
			vkCmdDraw(commandBuffer: CommandBuffer, vertexCount: static_cast<uint32_t>(PrimitiveCount * VertPerPrim), instanceCount: 1, firstVertex: 0, firstInstance: 0);

		return true;
	}
3463
3464public:
	CCommandProcessorFragment_Vulkan()
	{
		// reserve the maximum number of texture slots upfront so the vector does
		// not reallocate while texture indices are handed out
		m_vTextures.reserve(n: CCommandBuffer::MAX_TEXTURES);
	}
3469
3470 /************************
3471 * VULKAN SETUP CODE
3472 ************************/
3473
3474 [[nodiscard]] bool GetVulkanExtensions(SDL_Window *pWindow, std::vector<std::string> &vVKExtensions)
3475 {
3476 unsigned int ExtCount = 0;
3477 if(!SDL_Vulkan_GetInstanceExtensions(window: pWindow, pCount: &ExtCount, pNames: nullptr))
3478 {
3479 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get instance extensions from SDL.");
3480 return false;
3481 }
3482
3483 std::vector<const char *> vExtensionList(ExtCount);
3484 if(!SDL_Vulkan_GetInstanceExtensions(window: pWindow, pCount: &ExtCount, pNames: vExtensionList.data()))
3485 {
3486 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get instance extensions from SDL.");
3487 return false;
3488 }
3489
3490 vVKExtensions.reserve(n: ExtCount);
3491 for(uint32_t i = 0; i < ExtCount; i++)
3492 {
3493 vVKExtensions.emplace_back(args&: vExtensionList[i]);
3494 }
3495
3496 return true;
3497 }
3498
3499 std::set<std::string> OurVKLayers()
3500 {
3501 std::set<std::string> OurLayers;
3502
3503 if(g_Config.m_DbgGfx == DEBUG_GFX_MODE_MINIMUM || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL)
3504 {
3505 OurLayers.emplace(args: "VK_LAYER_KHRONOS_validation");
3506 // deprecated, but VK_LAYER_KHRONOS_validation was released after vulkan 1.1
3507 OurLayers.emplace(args: "VK_LAYER_LUNARG_standard_validation");
3508 }
3509
3510 return OurLayers;
3511 }
3512
3513 std::set<std::string> OurDeviceExtensions()
3514 {
3515 std::set<std::string> OurExt;
3516 OurExt.emplace(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
3517 return OurExt;
3518 }
3519
3520 std::vector<VkImageUsageFlags> OurImageUsages()
3521 {
3522 std::vector<VkImageUsageFlags> vImgUsages;
3523
3524 vImgUsages.emplace_back(args: VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT);
3525 vImgUsages.emplace_back(args: VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
3526
3527 return vImgUsages;
3528 }
3529
3530 [[nodiscard]] bool GetVulkanLayers(std::vector<std::string> &vVKLayers)
3531 {
3532 uint32_t LayerCount = 0;
3533 VkResult Res = vkEnumerateInstanceLayerProperties(pPropertyCount: &LayerCount, NULL);
3534 if(Res != VK_SUCCESS)
3535 {
3536 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get Vulkan layers.");
3537 return false;
3538 }
3539
3540 std::vector<VkLayerProperties> vVKInstanceLayers(LayerCount);
3541 Res = vkEnumerateInstanceLayerProperties(pPropertyCount: &LayerCount, pProperties: vVKInstanceLayers.data());
3542 if(Res != VK_SUCCESS)
3543 {
3544 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get Vulkan layers.");
3545 return false;
3546 }
3547
3548 std::set<std::string> ReqLayerNames = OurVKLayers();
3549 vVKLayers.clear();
3550 for(const auto &LayerName : vVKInstanceLayers)
3551 {
3552 if(ReqLayerNames.contains(x: std::string(LayerName.layerName)))
3553 {
3554 vVKLayers.emplace_back(args: LayerName.layerName);
3555 }
3556 }
3557
3558 return true;
3559 }
3560
3561 bool IsGpuDenied(uint32_t Vendor, uint32_t DriverVersion, uint32_t ApiMajor, uint32_t ApiMinor, uint32_t ApiPatch)
3562 {
3563#ifdef CONF_FAMILY_WINDOWS
3564 // AMD
3565 if(0x1002 == Vendor)
3566 {
3567 auto Major = (DriverVersion >> 22);
3568 auto Minor = (DriverVersion >> 12) & 0x3ff;
3569 auto Patch = DriverVersion & 0xfff;
3570
3571 return Major == 2 && Minor == 0 && Patch > 137 && Patch < 220 && ((ApiMajor <= 1 && ApiMinor < 3) || (ApiMajor <= 1 && ApiMinor == 3 && ApiPatch < 206));
3572 }
3573#endif
3574 return false;
3575 }
3576
	// Creates the VkInstance with the given layers and extensions. When
	// TryDebugExtensions is set (and the corresponding debug config is active),
	// the debug-utils extension and extra validation features are added; if the
	// driver then reports a missing layer or extension, instance creation is
	// retried once without the debug additions. Returns false on critical
	// creation errors.
	[[nodiscard]] bool CreateVulkanInstance(const std::vector<std::string> &vVKLayers, const std::vector<std::string> &vVKExtensions, bool TryDebugExtensions)
	{
		std::vector<const char *> vLayersCStr;
		vLayersCStr.reserve(n: vVKLayers.size());
		for(const auto &Layer : vVKLayers)
			vLayersCStr.emplace_back(args: Layer.c_str());

		std::vector<const char *> vExtCStr;
		vExtCStr.reserve(n: vVKExtensions.size() + 1);
		for(const auto &Ext : vVKExtensions)
			vExtCStr.emplace_back(args: Ext.c_str());

#ifdef VK_EXT_debug_utils
		if(TryDebugExtensions && (g_Config.m_DbgGfx == DEBUG_GFX_MODE_MINIMUM || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL))
		{
			// debug message support
			vExtCStr.emplace_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
		}
#endif

		VkApplicationInfo VKAppInfo = {};
		VKAppInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
		VKAppInfo.pNext = NULL;
		VKAppInfo.pApplicationName = "DDNet";
		VKAppInfo.applicationVersion = 1;
		VKAppInfo.pEngineName = "DDNet-Vulkan";
		VKAppInfo.engineVersion = 1;
		VKAppInfo.apiVersion = VK_API_VERSION_1_1;

		// optional pNext chain entry: enable synchronization/best-practices
		// validation when the (performance-affecting) debug mode is requested
		void *pExt = nullptr;
#if defined(VK_EXT_validation_features) && VK_EXT_VALIDATION_FEATURES_SPEC_VERSION >= 5
		VkValidationFeaturesEXT Features = {};
		std::array<VkValidationFeatureEnableEXT, 2> aEnables = {VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT, VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT};
		if(TryDebugExtensions && (g_Config.m_DbgGfx == DEBUG_GFX_MODE_AFFECTS_PERFORMANCE || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL))
		{
			Features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
			Features.enabledValidationFeatureCount = aEnables.size();
			Features.pEnabledValidationFeatures = aEnables.data();

			pExt = &Features;
		}
#endif

		VkInstanceCreateInfo VKInstanceInfo = {};
		VKInstanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
		VKInstanceInfo.pNext = pExt;
		VKInstanceInfo.flags = 0;
		VKInstanceInfo.pApplicationInfo = &VKAppInfo;
		VKInstanceInfo.enabledExtensionCount = static_cast<uint32_t>(vExtCStr.size());
		VKInstanceInfo.ppEnabledExtensionNames = vExtCStr.data();
		VKInstanceInfo.enabledLayerCount = static_cast<uint32_t>(vLayersCStr.size());
		VKInstanceInfo.ppEnabledLayerNames = vLayersCStr.data();

		bool TryAgain = false;

		VkResult Res = vkCreateInstance(pCreateInfo: &VKInstanceInfo, NULL, pInstance: &m_VKInstance);
		// NOTE(review): assumes CheckVulkanCriticalError treats missing
		// layers/extensions as non-critical (returns nullptr) so the retry
		// below can run — confirm against its implementation
		const char *pCritErrorMsg = CheckVulkanCriticalError(CallResult: Res);
		if(pCritErrorMsg != nullptr)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating instance failed.", pErrStrExtra: pCritErrorMsg);
			return false;
		}
		else if(Res == VK_ERROR_LAYER_NOT_PRESENT || Res == VK_ERROR_EXTENSION_NOT_PRESENT)
			TryAgain = true;

		// NOTE(review): when the second (non-debug) attempt also reports a
		// missing layer/extension, this still returns true — verify callers
		// handle an unusable m_VKInstance or that this cannot happen
		if(TryAgain && TryDebugExtensions)
			return CreateVulkanInstance(vVKLayers, vVKExtensions, TryDebugExtensions: false);

		return true;
	}
3647
3648 STWGraphicGpu::ETWGraphicsGpuType VKGPUTypeToGraphicsGpuType(VkPhysicalDeviceType VKGPUType)
3649 {
3650 if(VKGPUType == VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU)
3651 return STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_DISCRETE;
3652 else if(VKGPUType == VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU)
3653 return STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_INTEGRATED;
3654 else if(VKGPUType == VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU)
3655 return STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_VIRTUAL;
3656 else if(VKGPUType == VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_CPU)
3657 return STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_CPU;
3658
3659 return STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_CPU;
3660 }
3661
3662 static void GetVendorString(uint32_t VendorId, char *pVendorStr, size_t Size)
3663 {
3664 switch(VendorId)
3665 {
3666 case 0x1002:
3667 case 0x1022:
3668 str_copy(dst: pVendorStr, src: "AMD", dst_size: Size);
3669 break;
3670 case 0x1010:
3671 str_copy(dst: pVendorStr, src: "ImgTec", dst_size: Size);
3672 break;
3673 case 0x106B:
3674 str_copy(dst: pVendorStr, src: "Apple", dst_size: Size);
3675 break;
3676 case 0x10DE:
3677 str_copy(dst: pVendorStr, src: "NVIDIA", dst_size: Size);
3678 break;
3679 case 0x13B5:
3680 str_copy(dst: pVendorStr, src: "ARM", dst_size: Size);
3681 break;
3682 case 0x5143:
3683 str_copy(dst: pVendorStr, src: "Qualcomm", dst_size: Size);
3684 break;
3685 case 0x8086:
3686 str_copy(dst: pVendorStr, src: "Intel", dst_size: Size);
3687 break;
3688 case 0x10005:
3689 str_copy(dst: pVendorStr, src: "Mesa", dst_size: Size);
3690 break;
3691 default:
3692 log_warn("gfx/vulkan", "Unknown GPU vendor ID %08X.", VendorId);
3693 str_format(buffer: pVendorStr, buffer_size: Size, format: "Unknown (%08X)", VendorId);
3694 break;
3695 }
3696 }
3697
	// from: https://github.com/SaschaWillems/vulkan.gpuinfo.org/blob/5c3986798afc39d736b825bf8a5fbf92b8d9ed49/includes/functions.php#L364
	// Formats a driver version number into aDriverVersion using the vendor's
	// own bit layout where known (NVIDIA everywhere, Intel on Windows only),
	// otherwise falling back to the standard Vulkan major.minor.patch packing.
	void FormatDriverVersion(char (&aDriverVersion)[256], uint32_t DriverVersion, uint32_t VendorId)
	{
		if(VendorId == 0x10DE) // NVIDIA
		{
			// NVIDIA packs major(10).minor(8).secondary(8).tertiary(6)
			str_format(buffer: aDriverVersion, buffer_size: std::size(aDriverVersion), format: "%d.%d.%d.%d",
				(DriverVersion >> 22) & 0x3ff,
				(DriverVersion >> 14) & 0x0ff,
				(DriverVersion >> 6) & 0x0ff,
				(DriverVersion) & 0x003f);
		}
#ifdef CONF_FAMILY_WINDOWS
		else if(VendorId == 0x8086) // Windows with Intel only
		{
			// Intel on Windows packs major(18).minor(14)
			str_format(aDriverVersion, std::size(aDriverVersion),
				"%d.%d",
				(DriverVersion >> 14),
				(DriverVersion) & 0x3fff);
		}
#endif
		else
		{
			// Use Vulkan version conventions if vendor mapping is not available
			str_format(buffer: aDriverVersion, buffer_size: std::size(aDriverVersion),
				format: "%d.%d.%d",
				(DriverVersion >> 22),
				(DriverVersion >> 12) & 0x3ff,
				DriverVersion & 0xfff);
		}
	}
3728
	// Enumerates all Vulkan physical devices, populates m_pGpuList for the
	// settings UI, then selects either the user-configured GPU (matched by name
	// against g_Config.m_GfxGpuName) or the automatic best choice, caches the
	// selected device's limits, and finds a graphics-capable queue family on it.
	// pRendererName/pVendorName/pVersionName receive display strings; each
	// buffer is written with at most GPU_INFO_STRING_SIZE bytes.
	// Returns false (after setting an error or warning) if no usable device exists.
	[[nodiscard]] bool SelectGpu(char *pRendererName, char *pVendorName, char *pVersionName)
	{
		// standard Vulkan two-call pattern: query the count first, then the handles
		uint32_t DevicesCount = 0;
		auto Res = vkEnumeratePhysicalDevices(instance: m_VKInstance, pPhysicalDeviceCount: &DevicesCount, pPhysicalDevices: nullptr);
		if(Res != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: CheckVulkanCriticalError(CallResult: Res));
			return false;
		}
		if(DevicesCount == 0)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "No Vulkan compatible devices found.");
			return false;
		}

		std::vector<VkPhysicalDevice> vDeviceList(DevicesCount);
		Res = vkEnumeratePhysicalDevices(instance: m_VKInstance, pPhysicalDeviceCount: &DevicesCount, pPhysicalDevices: vDeviceList.data());
		// VK_INCOMPLETE is tolerated: it only means fewer devices were written than exist
		if(Res != VK_SUCCESS && Res != VK_INCOMPLETE)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: CheckVulkanCriticalError(CallResult: Res));
			return false;
		}
		if(DevicesCount == 0)
		{
			SetWarning(WarningType: EGfxWarningType::GFX_WARNING_TYPE_INIT_FAILED_MISSING_INTEGRATED_GPU_DRIVER, pWarning: "No Vulkan compatible devices found.");
			return false;
		}
		// make sure to use the correct amount of devices available
		// the amount of physical devices can be smaller than the amount of devices reported
		// see vkEnumeratePhysicalDevices for details
		vDeviceList.resize(new_size: DevicesCount);

		size_t Index = 0;
		std::vector<VkPhysicalDeviceProperties> vDevicePropList(vDeviceList.size());
		m_pGpuList->m_vGpus.reserve(n: vDeviceList.size());

		// index into vDeviceList of the GPU that will actually be used;
		// defaults to the first device if neither auto nor the config name picks one
		size_t FoundDeviceIndex = 0;

		STWGraphicGpu::ETWGraphicsGpuType AutoGpuType = STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_INVALID;

		bool IsAutoGpu = str_comp(a: g_Config.m_GfxGpuName, b: "auto") == 0;

		bool UserSelectedGpuChosen = false;
		for(auto &CurDevice : vDeviceList)
		{
			vkGetPhysicalDeviceProperties(physicalDevice: CurDevice, pProperties: &(vDevicePropList[Index]));

			auto &DeviceProp = vDevicePropList[Index];

			STWGraphicGpu::ETWGraphicsGpuType GPUType = VKGPUTypeToGraphicsGpuType(VKGPUType: DeviceProp.deviceType);

			int DevApiMajor = (int)VK_API_VERSION_MAJOR(DeviceProp.apiVersion);
			int DevApiMinor = (int)VK_API_VERSION_MINOR(DeviceProp.apiVersion);
			int DevApiPatch = (int)VK_API_VERSION_PATCH(DeviceProp.apiVersion);

			// devices below the required Vulkan version or on the deny list are excluded entirely
			auto IsDenied = CCommandProcessorFragment_Vulkan::IsGpuDenied(Vendor: DeviceProp.vendorID, DriverVersion: DeviceProp.driverVersion, ApiMajor: DevApiMajor, ApiMinor: DevApiMinor, ApiPatch: DevApiPatch);
			if((DevApiMajor > BACKEND_VULKAN_VERSION_MAJOR || (DevApiMajor == BACKEND_VULKAN_VERSION_MAJOR && DevApiMinor >= BACKEND_VULKAN_VERSION_MINOR)) && !IsDenied)
			{
				STWGraphicGpu::STWGraphicGpuItem NewGpu;
				str_copy(dst&: NewGpu.m_aName, src: DeviceProp.deviceName);
				NewGpu.m_GpuType = GPUType;
				m_pGpuList->m_vGpus.push_back(x: NewGpu);

				// We always decide what the 'auto' GPU would be, even if user is forcing a GPU by name in config
				// Reminder: A worse GPU enumeration has a higher value than a better GPU enumeration, thus the '>'
				if(AutoGpuType > STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_INTEGRATED)
				{
					str_copy(dst&: m_pGpuList->m_AutoGpu.m_aName, src: DeviceProp.deviceName);
					m_pGpuList->m_AutoGpu.m_GpuType = GPUType;

					AutoGpuType = GPUType;

					if(IsAutoGpu)
						FoundDeviceIndex = Index;
				}
				// We only select the first GPU that matches, because it comes first in the enumeration array, it's preferred by the system
				// Reminder: We can't break the cycle here if the name matches because we need to choose the best GPU for 'auto' mode
				if(!IsAutoGpu && !UserSelectedGpuChosen && str_comp(a: DeviceProp.deviceName, b: g_Config.m_GfxGpuName) == 0)
				{
					FoundDeviceIndex = Index;
					UserSelectedGpuChosen = true;
				}
			}
			Index++;
		}

		if(m_pGpuList->m_vGpus.empty())
		{
			SetWarning(WarningType: EGfxWarningType::GFX_WARNING_TYPE_INIT_FAILED_NO_DEVICE_WITH_REQUIRED_VERSION, pWarning: "No devices with required Vulkan version found.");
			return false;
		}

		{
			auto &DeviceProp = vDevicePropList[FoundDeviceIndex];

			int DevApiMajor = (int)VK_API_VERSION_MAJOR(DeviceProp.apiVersion);
			int DevApiMinor = (int)VK_API_VERSION_MINOR(DeviceProp.apiVersion);
			int DevApiPatch = (int)VK_API_VERSION_PATCH(DeviceProp.apiVersion);

			// build the human-readable renderer/vendor/version strings for the chosen device
			str_copy(dst: pRendererName, src: DeviceProp.deviceName, dst_size: GPU_INFO_STRING_SIZE);
			GetVendorString(VendorId: DeviceProp.vendorID, pVendorStr: pVendorName, Size: GPU_INFO_STRING_SIZE);
			char aDriverVersion[256];
			FormatDriverVersion(aDriverVersion, DriverVersion: DeviceProp.driverVersion, VendorId: DeviceProp.vendorID);
			str_format(buffer: pVersionName, buffer_size: GPU_INFO_STRING_SIZE, format: "Vulkan %d.%d.%d (driver: %s)",
				DevApiMajor, DevApiMinor, DevApiPatch, aDriverVersion);

			// get important device limits
			m_NonCoherentMemAlignment = DeviceProp.limits.nonCoherentAtomSize;
			m_OptimalImageCopyMemAlignment = DeviceProp.limits.optimalBufferCopyOffsetAlignment;
			m_MaxTextureSize = DeviceProp.limits.maxImageDimension2D;
			m_MaxSamplerAnisotropy = DeviceProp.limits.maxSamplerAnisotropy;

			m_MinUniformAlign = DeviceProp.limits.minUniformBufferOffsetAlignment;
			m_MaxMultiSample = DeviceProp.limits.framebufferColorSampleCounts;

			if(IsVerbose())
			{
				log_debug("gfx/vulkan", "Device prop: non-coherent align: %" PRIzu ", optimal image copy align: %" PRIzu ", max texture size: %u, max sampler anisotropy: %u",
					(size_t)m_NonCoherentMemAlignment, (size_t)m_OptimalImageCopyMemAlignment, m_MaxTextureSize, m_MaxSamplerAnisotropy);
				log_debug("gfx/vulkan", "Device prop: min uniform align: %u, multi sample: %u",
					m_MinUniformAlign, (uint32_t)m_MaxMultiSample);
			}
		}

		VkPhysicalDevice CurDevice = vDeviceList[FoundDeviceIndex];

		// find a queue family on the chosen device that can run graphics commands
		uint32_t FamQueueCount = 0;
		vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice: CurDevice, pQueueFamilyPropertyCount: &FamQueueCount, pQueueFamilyProperties: nullptr);
		if(FamQueueCount == 0)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "No Vulkan queue family properties found.");
			return false;
		}

		std::vector<VkQueueFamilyProperties> vQueuePropList(FamQueueCount);
		vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice: CurDevice, pQueueFamilyPropertyCount: &FamQueueCount, pQueueFamilyProperties: vQueuePropList.data());

		// NOTE(review): this loop keeps the LAST graphics-capable family it sees,
		// not the first — confirm whether that is intentional
		uint32_t QueueNodeIndex = std::numeric_limits<uint32_t>::max();
		for(uint32_t i = 0; i < FamQueueCount; i++)
		{
			if(vQueuePropList[i].queueCount > 0 && (vQueuePropList[i].queueFlags & VK_QUEUE_GRAPHICS_BIT))
			{
				QueueNodeIndex = i;
			}
			/*if(vQueuePropList[i].queueCount > 0 && (vQueuePropList[i].queueFlags & VK_QUEUE_COMPUTE_BIT))
			{
				QueueNodeIndex = i;
			}*/
		}

		if(QueueNodeIndex == std::numeric_limits<uint32_t>::max())
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "No Vulkan queue found that matches the requirements: graphics queue.");
			return false;
		}

		m_VKGPU = CurDevice;
		m_VKGraphicsQueueIndex = QueueNodeIndex;
		return true;
	}
3889
3890 [[nodiscard]] bool CreateLogicalDevice(const std::vector<std::string> &vVKLayers)
3891 {
3892 std::vector<const char *> vLayerCNames;
3893 vLayerCNames.reserve(n: vVKLayers.size());
3894 for(const auto &Layer : vVKLayers)
3895 vLayerCNames.emplace_back(args: Layer.c_str());
3896
3897 uint32_t DevPropCount = 0;
3898 if(vkEnumerateDeviceExtensionProperties(physicalDevice: m_VKGPU, NULL, pPropertyCount: &DevPropCount, NULL) != VK_SUCCESS)
3899 {
3900 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Querying logical device extension properties failed.");
3901 return false;
3902 }
3903
3904 std::vector<VkExtensionProperties> vDevPropList(DevPropCount);
3905 if(vkEnumerateDeviceExtensionProperties(physicalDevice: m_VKGPU, NULL, pPropertyCount: &DevPropCount, pProperties: vDevPropList.data()) != VK_SUCCESS)
3906 {
3907 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Querying logical device extension properties failed.");
3908 return false;
3909 }
3910
3911 std::vector<const char *> vDevPropCNames;
3912 std::set<std::string> OurDevExt = OurDeviceExtensions();
3913
3914 for(const auto &CurExtProp : vDevPropList)
3915 {
3916 if(OurDevExt.contains(x: std::string(CurExtProp.extensionName)))
3917 {
3918 vDevPropCNames.emplace_back(args: CurExtProp.extensionName);
3919 }
3920 }
3921
3922 VkDeviceQueueCreateInfo VKQueueCreateInfo;
3923 VKQueueCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
3924 VKQueueCreateInfo.queueFamilyIndex = m_VKGraphicsQueueIndex;
3925 VKQueueCreateInfo.queueCount = 1;
3926 float QueuePrio = 1.0f;
3927 VKQueueCreateInfo.pQueuePriorities = &QueuePrio;
3928 VKQueueCreateInfo.pNext = NULL;
3929 VKQueueCreateInfo.flags = 0;
3930
3931 VkDeviceCreateInfo VKCreateInfo;
3932 VKCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
3933 VKCreateInfo.queueCreateInfoCount = 1;
3934 VKCreateInfo.pQueueCreateInfos = &VKQueueCreateInfo;
3935 VKCreateInfo.ppEnabledLayerNames = vLayerCNames.data();
3936 VKCreateInfo.enabledLayerCount = static_cast<uint32_t>(vLayerCNames.size());
3937 VKCreateInfo.ppEnabledExtensionNames = vDevPropCNames.data();
3938 VKCreateInfo.enabledExtensionCount = static_cast<uint32_t>(vDevPropCNames.size());
3939 VKCreateInfo.pNext = NULL;
3940 VKCreateInfo.pEnabledFeatures = NULL;
3941 VKCreateInfo.flags = 0;
3942
3943 if(vkCreateDevice(physicalDevice: m_VKGPU, pCreateInfo: &VKCreateInfo, pAllocator: nullptr, pDevice: &m_VKDevice) != VK_SUCCESS)
3944 {
3945 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Logical device could not be created.");
3946 return false;
3947 }
3948
3949 return true;
3950 }
3951
3952 [[nodiscard]] bool CreateSurface(SDL_Window *pWindow)
3953 {
3954 if(!SDL_Vulkan_CreateSurface(window: pWindow, instance: m_VKInstance, surface: &m_VKPresentSurface))
3955 {
3956 log_error("gfx/vulkan", "Failed to create surface. SDL error: %s", SDL_GetError());
3957 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating a Vulkan surface for the SDL window failed.");
3958 return false;
3959 }
3960
3961 VkBool32 IsSupported = false;
3962 vkGetPhysicalDeviceSurfaceSupportKHR(physicalDevice: m_VKGPU, queueFamilyIndex: m_VKGraphicsQueueIndex, surface: m_VKPresentSurface, pSupported: &IsSupported);
3963 if(!IsSupported)
3964 {
3965 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface does not support presenting the framebuffer to a screen. Maybe the wrong GPU was selected?");
3966 return false;
3967 }
3968
3969 return true;
3970 }
3971
	// Destroys the window surface created by CreateSurface.
	void DestroySurface()
	{
		vkDestroySurfaceKHR(instance: m_VKInstance, surface: m_VKPresentSurface, pAllocator: nullptr);
	}
3976
3977 [[nodiscard]] bool GetPresentationMode(VkPresentModeKHR &VKIOMode)
3978 {
3979 uint32_t PresentModeCount = 0;
3980 if(vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice: m_VKGPU, surface: m_VKPresentSurface, pPresentModeCount: &PresentModeCount, NULL) != VK_SUCCESS)
3981 {
3982 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface presentation modes could not be fetched.");
3983 return false;
3984 }
3985
3986 std::vector<VkPresentModeKHR> vPresentModeList(PresentModeCount);
3987 if(vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice: m_VKGPU, surface: m_VKPresentSurface, pPresentModeCount: &PresentModeCount, pPresentModes: vPresentModeList.data()) != VK_SUCCESS)
3988 {
3989 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface presentation modes could not be fetched.");
3990 return false;
3991 }
3992
3993 VKIOMode = g_Config.m_GfxVsync ? VK_PRESENT_MODE_FIFO_KHR : VK_PRESENT_MODE_IMMEDIATE_KHR;
3994 for(const auto &Mode : vPresentModeList)
3995 {
3996 if(Mode == VKIOMode)
3997 return true;
3998 }
3999
4000 log_warn("gfx/vulkan", "Requested presentation mode was not available. Falling back to mailbox / FIFO relaxed.");
4001 VKIOMode = g_Config.m_GfxVsync ? VK_PRESENT_MODE_FIFO_RELAXED_KHR : VK_PRESENT_MODE_MAILBOX_KHR;
4002 for(const auto &Mode : vPresentModeList)
4003 {
4004 if(Mode == VKIOMode)
4005 return true;
4006 }
4007
4008 log_warn("gfx/vulkan", "Requested presentation mode was not available. Using first available.");
4009 if(PresentModeCount > 0)
4010 VKIOMode = vPresentModeList[0];
4011
4012 return true;
4013 }
4014
	// Fetches the surface capabilities (image count limits, extents, supported
	// usage flags and transforms) for the selected GPU/surface pair.
	[[nodiscard]] bool GetSurfaceProperties(VkSurfaceCapabilitiesKHR &VKSurfCapabilities)
	{
		if(vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice: m_VKGPU, surface: m_VKPresentSurface, pSurfaceCapabilities: &VKSurfCapabilities) != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface capabilities could not be fetched.");
			return false;
		}
		return true;
	}
4024
4025 uint32_t GetNumberOfSwapImages(const VkSurfaceCapabilitiesKHR &VKCapabilities)
4026 {
4027 uint32_t ImgNumber = VKCapabilities.minImageCount + 1;
4028 if(IsVerbose())
4029 {
4030 log_debug("gfx/vulkan", "Minimal swap image count: %u", VKCapabilities.minImageCount);
4031 }
4032 return (VKCapabilities.maxImageCount > 0 && ImgNumber > VKCapabilities.maxImageCount) ? VKCapabilities.maxImageCount : ImgNumber;
4033 }
4034
4035 SSwapImgViewportExtent GetSwapImageSize(const VkSurfaceCapabilitiesKHR &VKCapabilities)
4036 {
4037 VkExtent2D RetSize = {.width: m_CanvasWidth, .height: m_CanvasHeight};
4038
4039 if(VKCapabilities.currentExtent.width == std::numeric_limits<uint32_t>::max())
4040 {
4041 RetSize.width = std::clamp<uint32_t>(val: RetSize.width, lo: VKCapabilities.minImageExtent.width, hi: VKCapabilities.maxImageExtent.width);
4042 RetSize.height = std::clamp<uint32_t>(val: RetSize.height, lo: VKCapabilities.minImageExtent.height, hi: VKCapabilities.maxImageExtent.height);
4043 }
4044 else
4045 {
4046 RetSize = VKCapabilities.currentExtent;
4047 }
4048
4049 VkExtent2D AutoViewportExtent = RetSize;
4050 bool UsesForcedViewport = false;
4051 // keep this in sync with graphics_threaded AdjustViewport's check
4052 if(AutoViewportExtent.height > 4 * AutoViewportExtent.width / 5)
4053 {
4054 AutoViewportExtent.height = 4 * AutoViewportExtent.width / 5;
4055 UsesForcedViewport = true;
4056 }
4057
4058 SSwapImgViewportExtent Ext;
4059 Ext.m_SwapImageViewport = RetSize;
4060 Ext.m_ForcedViewport = AutoViewportExtent;
4061 Ext.m_HasForcedViewport = UsesForcedViewport;
4062
4063 return Ext;
4064 }
4065
4066 [[nodiscard]] bool GetImageUsage(const VkSurfaceCapabilitiesKHR &VKCapabilities, VkImageUsageFlags &VKOutUsage)
4067 {
4068 std::vector<VkImageUsageFlags> vOurImgUsages = OurImageUsages();
4069 if(vOurImgUsages.empty())
4070 {
4071 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Framebuffer image attachment types not supported.");
4072 return false;
4073 }
4074
4075 VKOutUsage = vOurImgUsages[0];
4076
4077 for(const auto &ImgUsage : vOurImgUsages)
4078 {
4079 VkImageUsageFlags ImgUsageFlags = ImgUsage & VKCapabilities.supportedUsageFlags;
4080 if(ImgUsageFlags != ImgUsage)
4081 {
4082 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Framebuffer image attachment types not supported.");
4083 return false;
4084 }
4085
4086 VKOutUsage = (VKOutUsage | ImgUsage);
4087 }
4088
4089 return true;
4090 }
4091
4092 VkSurfaceTransformFlagBitsKHR GetTransform(const VkSurfaceCapabilitiesKHR &VKCapabilities)
4093 {
4094 if(VKCapabilities.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR)
4095 return VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
4096 return VKCapabilities.currentTransform;
4097 }
4098
	// Chooses the surface (swap chain) color format into m_VKSurfFormat.
	// Picks the first listed B8G8R8A8_UNORM or R8G8B8A8_UNORM format with the
	// sRGB non-linear color space; if neither is offered, the first reported
	// format is used with a warning. Returns false only if the query fails.
	[[nodiscard]] bool GetFormat()
	{
		uint32_t SurfFormats = 0;
		VkResult Res = vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice: m_VKGPU, surface: m_VKPresentSurface, pSurfaceFormatCount: &SurfFormats, pSurfaceFormats: nullptr);
		if(Res != VK_SUCCESS && Res != VK_INCOMPLETE)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface format fetching failed.");
			return false;
		}

		std::vector<VkSurfaceFormatKHR> vSurfFormatList(SurfFormats);
		Res = vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice: m_VKGPU, surface: m_VKPresentSurface, pSurfaceFormatCount: &SurfFormats, pSurfaceFormats: vSurfFormatList.data());
		if(Res != VK_SUCCESS && Res != VK_INCOMPLETE)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface format fetching failed.");
			return false;
		}

		if(Res == VK_INCOMPLETE)
		{
			log_warn("gfx/vulkan", "Not all surface formats are requestable with your current settings.");
		}

		// a single VK_FORMAT_UNDEFINED entry means the surface imposes no format
		if(vSurfFormatList.size() == 1 && vSurfFormatList[0].format == VK_FORMAT_UNDEFINED)
		{
			m_VKSurfFormat.format = VK_FORMAT_B8G8R8A8_UNORM;
			m_VKSurfFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
			log_warn("gfx/vulkan", "Surface format was undefined. This can potentially cause bugs.");
			return true;
		}

		// take the first BGRA or RGBA UNORM + sRGB-nonlinear entry in enumeration order
		for(const auto &FindFormat : vSurfFormatList)
		{
			if(FindFormat.format == VK_FORMAT_B8G8R8A8_UNORM && FindFormat.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR)
			{
				m_VKSurfFormat = FindFormat;
				return true;
			}
			else if(FindFormat.format == VK_FORMAT_R8G8B8A8_UNORM && FindFormat.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR)
			{
				m_VKSurfFormat = FindFormat;
				return true;
			}
		}

		log_warn("gfx/vulkan", "Surface format was not RGBA (or variants of it). This can potentially cause weird looking images (too bright etc.).");
		m_VKSurfFormat = vSurfFormatList[0];
		return true;
	}
4148
	// (Re)creates the swap chain from the current surface capabilities.
	// The previous swap chain handle is returned via OldSwapChain (and passed
	// as oldSwapchain so the driver can reuse resources); the caller is
	// responsible for destroying it afterwards.
	[[nodiscard]] bool CreateSwapChain(VkSwapchainKHR &OldSwapChain)
	{
		VkSurfaceCapabilitiesKHR VKSurfCap;
		if(!GetSurfaceProperties(VKSurfCapabilities&: VKSurfCap))
			return false;

		VkPresentModeKHR PresentMode = VK_PRESENT_MODE_IMMEDIATE_KHR;
		if(!GetPresentationMode(VKIOMode&: PresentMode))
			return false;

		uint32_t SwapImgCount = GetNumberOfSwapImages(VKCapabilities: VKSurfCap);

		m_VKSwapImgAndViewportExtent = GetSwapImageSize(VKCapabilities: VKSurfCap);

		VkImageUsageFlags UsageFlags;
		if(!GetImageUsage(VKCapabilities: VKSurfCap, VKOutUsage&: UsageFlags))
			return false;

		VkSurfaceTransformFlagBitsKHR TransformFlagBits = GetTransform(VKCapabilities: VKSurfCap);

		if(!GetFormat())
			return false;

		OldSwapChain = m_VKSwapChain;

		VkSwapchainCreateInfoKHR SwapInfo;
		SwapInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
		SwapInfo.pNext = nullptr;
		SwapInfo.flags = 0;
		SwapInfo.surface = m_VKPresentSurface;
		SwapInfo.minImageCount = SwapImgCount;
		SwapInfo.imageFormat = m_VKSurfFormat.format;
		SwapInfo.imageColorSpace = m_VKSurfFormat.colorSpace;
		SwapInfo.imageExtent = m_VKSwapImgAndViewportExtent.m_SwapImageViewport;
		SwapInfo.imageArrayLayers = 1;
		SwapInfo.imageUsage = UsageFlags;
		// exclusive: only one queue family touches the images, so no family list needed
		SwapInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
		SwapInfo.queueFamilyIndexCount = 0;
		SwapInfo.pQueueFamilyIndices = nullptr;
		SwapInfo.preTransform = TransformFlagBits;
		SwapInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
		SwapInfo.presentMode = PresentMode;
		SwapInfo.clipped = true;
		SwapInfo.oldSwapchain = OldSwapChain;

		m_VKSwapChain = VK_NULL_HANDLE;
		VkResult SwapchainCreateRes = vkCreateSwapchainKHR(device: m_VKDevice, pCreateInfo: &SwapInfo, pAllocator: nullptr, pSwapchain: &m_VKSwapChain);
		const char *pCritErrorMsg = CheckVulkanCriticalError(CallResult: SwapchainCreateRes);
		if(pCritErrorMsg != nullptr)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the swap chain failed.", pErrStrExtra: pCritErrorMsg);
			return false;
		}
		// NOTE(review): this branch only triggers if CheckVulkanCriticalError does
		// not already classify VK_ERROR_NATIVE_WINDOW_IN_USE_KHR as critical — verify
		else if(SwapchainCreateRes == VK_ERROR_NATIVE_WINDOW_IN_USE_KHR)
			return false;

		return true;
	}
4207
4208 void DestroySwapChain(bool ForceDestroy)
4209 {
4210 if(ForceDestroy)
4211 {
4212 vkDestroySwapchainKHR(device: m_VKDevice, swapchain: m_VKSwapChain, pAllocator: nullptr);
4213 m_VKSwapChain = VK_NULL_HANDLE;
4214 }
4215 }
4216
4217 [[nodiscard]] bool GetSwapChainImageHandles()
4218 {
4219 uint32_t ImgCount = 0;
4220 if(vkGetSwapchainImagesKHR(device: m_VKDevice, swapchain: m_VKSwapChain, pSwapchainImageCount: &ImgCount, pSwapchainImages: nullptr) != VK_SUCCESS)
4221 {
4222 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get swap chain images.");
4223 return false;
4224 }
4225
4226 m_SwapChainImageCount = ImgCount;
4227
4228 m_vSwapChainImages.resize(new_size: ImgCount);
4229 if(vkGetSwapchainImagesKHR(device: m_VKDevice, swapchain: m_VKSwapChain, pSwapchainImageCount: &ImgCount, pSwapchainImages: m_vSwapChainImages.data()) != VK_SUCCESS)
4230 {
4231 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get swap chain images.");
4232 return false;
4233 }
4234
4235 return true;
4236 }
4237
	// Drops the cached swap chain image handles; no destruction happens here
	// (the images belong to the swap chain).
	void ClearSwapChainImageHandles()
	{
		m_vSwapChainImages.clear();
	}
4242
	// Fetches the graphics and present queue handles. Both use the same family
	// (m_VKGraphicsQueueIndex) and queue index 0, so they refer to the same queue.
	void GetDeviceQueue()
	{
		vkGetDeviceQueue(device: m_VKDevice, queueFamilyIndex: m_VKGraphicsQueueIndex, queueIndex: 0, pQueue: &m_VKGraphicsQueue);
		vkGetDeviceQueue(device: m_VKDevice, queueFamilyIndex: m_VKGraphicsQueueIndex, queueIndex: 0, pQueue: &m_VKPresentQueue);
	}
4248
4249#ifdef VK_EXT_debug_utils
4250 static VKAPI_ATTR VkBool32 VKAPI_CALL VKDebugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT MessageSeverity, VkDebugUtilsMessageTypeFlagsEXT MessageType, const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, void *pUserData)
4251 {
4252 if((MessageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) != 0)
4253 {
4254 log_error("gfx/vulkan", "Validation error: %s", pCallbackData->pMessage);
4255 }
4256 else
4257 {
4258 log_info("gfx/vulkan", "Validation info: %s", pCallbackData->pMessage);
4259 }
4260
4261 return VK_FALSE;
4262 }
4263
4264 VkResult CreateDebugUtilsMessengerEXT(const VkDebugUtilsMessengerCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDebugUtilsMessengerEXT *pDebugMessenger)
4265 {
4266 auto pfnVulkanCreateDebugUtilsFunction = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance: m_VKInstance, pName: "vkCreateDebugUtilsMessengerEXT");
4267 if(pfnVulkanCreateDebugUtilsFunction != nullptr)
4268 {
4269 return pfnVulkanCreateDebugUtilsFunction(m_VKInstance, pCreateInfo, pAllocator, pDebugMessenger);
4270 }
4271 else
4272 {
4273 return VK_ERROR_EXTENSION_NOT_PRESENT;
4274 }
4275 }
4276
4277 void DestroyDebugUtilsMessengerEXT(VkDebugUtilsMessengerEXT &DebugMessenger)
4278 {
4279 auto pfnVulkanDestroyDebugUtilsFunction = (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance: m_VKInstance, pName: "vkDestroyDebugUtilsMessengerEXT");
4280 if(pfnVulkanDestroyDebugUtilsFunction != nullptr)
4281 {
4282 pfnVulkanDestroyDebugUtilsFunction(m_VKInstance, DebugMessenger, nullptr);
4283 }
4284 }
4285#endif
4286
	// Registers the validation-layer debug messenger when the debug-utils
	// extension is compiled in. Failure is non-fatal: the messenger handle is
	// reset and only a warning is logged.
	void SetupDebugCallback()
	{
#ifdef VK_EXT_debug_utils
		VkDebugUtilsMessengerCreateInfoEXT CreateInfo = {};
		CreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
		CreateInfo.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
		CreateInfo.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT; // | VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT <- too annoying
		CreateInfo.pfnUserCallback = VKDebugCallback;

		if(CreateDebugUtilsMessengerEXT(pCreateInfo: &CreateInfo, pAllocator: nullptr, pDebugMessenger: &m_DebugMessenger) != VK_SUCCESS)
		{
			m_DebugMessenger = VK_NULL_HANDLE;
			log_warn("gfx/vulkan", "Could not find Vulkan debug layer.");
		}
		else
		{
			log_info("gfx/vulkan", "Enabled Vulkan debug context.");
		}
#endif
	}
4307
	// Destroys the debug messenger created by SetupDebugCallback, if any.
	void UnregisterDebugCallback()
	{
#ifdef VK_EXT_debug_utils
		if(m_DebugMessenger != VK_NULL_HANDLE)
			DestroyDebugUtilsMessengerEXT(DebugMessenger&: m_DebugMessenger);
#endif
	}
4315
4316 [[nodiscard]] bool CreateImageViews()
4317 {
4318 m_vSwapChainImageViewList.resize(new_size: m_SwapChainImageCount);
4319
4320 for(size_t i = 0; i < m_SwapChainImageCount; i++)
4321 {
4322 VkImageViewCreateInfo CreateInfo{};
4323 CreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
4324 CreateInfo.image = m_vSwapChainImages[i];
4325 CreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
4326 CreateInfo.format = m_VKSurfFormat.format;
4327 CreateInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
4328 CreateInfo.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
4329 CreateInfo.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
4330 CreateInfo.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
4331 CreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
4332 CreateInfo.subresourceRange.baseMipLevel = 0;
4333 CreateInfo.subresourceRange.levelCount = 1;
4334 CreateInfo.subresourceRange.baseArrayLayer = 0;
4335 CreateInfo.subresourceRange.layerCount = 1;
4336
4337 if(vkCreateImageView(device: m_VKDevice, pCreateInfo: &CreateInfo, pAllocator: nullptr, pView: &m_vSwapChainImageViewList[i]) != VK_SUCCESS)
4338 {
4339 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not create image views for the swap chain framebuffers.");
4340 return false;
4341 }
4342 }
4343
4344 return true;
4345 }
4346
4347 void DestroyImageViews()
4348 {
4349 for(auto &ImageView : m_vSwapChainImageViewList)
4350 {
4351 vkDestroyImageView(device: m_VKDevice, imageView: ImageView, pAllocator: nullptr);
4352 }
4353
4354 m_vSwapChainImageViewList.clear();
4355 }
4356
	// Creates one multi-sampled color image (plus view and memory) per swap
	// chain image when multi-sampling is enabled; otherwise only sizes the
	// (unused) list so the indices stay valid.
	[[nodiscard]] bool CreateMultiSamplerImageAttachments()
	{
		m_vSwapChainMultiSamplingImages.resize(new_size: m_SwapChainImageCount);
		if(HasMultiSampling())
		{
			for(size_t i = 0; i < m_SwapChainImageCount; ++i)
			{
				// transient attachment: the MSAA target only lives inside the render pass
				if(!CreateImage(Width: m_VKSwapImgAndViewportExtent.m_SwapImageViewport.width, Height: m_VKSwapImgAndViewportExtent.m_SwapImageViewport.height, Depth: 1, MipMapLevelCount: 1, Format: m_VKSurfFormat.format, Tiling: VK_IMAGE_TILING_OPTIMAL, Image&: m_vSwapChainMultiSamplingImages[i].m_Image, ImageMemory&: m_vSwapChainMultiSamplingImages[i].m_ImgMem, ImageUsage: VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
					return false;
				m_vSwapChainMultiSamplingImages[i].m_ImgView = CreateImageView(Image: m_vSwapChainMultiSamplingImages[i].m_Image, Format: m_VKSurfFormat.format, ViewType: VK_IMAGE_VIEW_TYPE_2D, Depth: 1, MipMapLevelCount: 1);
			}
		}

		return true;
	}
4372
	// Destroys the multi-sampling color targets (when multi-sampling is active)
	// and releases their memory blocks; always clears the list afterwards.
	void DestroyMultiSamplerImageAttachments()
	{
		if(HasMultiSampling())
		{
			// NOTE(review): this resize looks like a no-op if the list already has
			// m_SwapChainImageCount entries; if it grows the list, the new entries
			// would hold default (null) handles — verify the intent
			m_vSwapChainMultiSamplingImages.resize(new_size: m_SwapChainImageCount);
			for(size_t i = 0; i < m_SwapChainImageCount; ++i)
			{
				vkDestroyImage(device: m_VKDevice, image: m_vSwapChainMultiSamplingImages[i].m_Image, pAllocator: nullptr);
				vkDestroyImageView(device: m_VKDevice, imageView: m_vSwapChainMultiSamplingImages[i].m_ImgView, pAllocator: nullptr);
				FreeImageMemBlock(Block&: m_vSwapChainMultiSamplingImages[i].m_ImgMem);
			}
		}
		m_vSwapChainMultiSamplingImages.clear();
	}
4387
	// Builds the single render pass used for drawing into the swap chain.
	// With multi-sampling: attachment 0 is the MSAA color target (not stored;
	// resolved via pResolveAttachments) and attachment 1 is the swap chain
	// image acting as resolve target. Without multi-sampling only the swap
	// chain attachment is used (the pAttachments pointer skips entry 0).
	// ClearAttachments selects whether the rendered-into attachment is cleared
	// on load or left undefined.
	[[nodiscard]] bool CreateRenderPass(bool ClearAttachments)
	{
		bool HasMultiSamplingTargets = HasMultiSampling();
		VkAttachmentDescription MultiSamplingColorAttachment{};
		MultiSamplingColorAttachment.format = m_VKSurfFormat.format;
		MultiSamplingColorAttachment.samples = GetSampleCount();
		MultiSamplingColorAttachment.loadOp = ClearAttachments ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		// MSAA content is resolved into the swap chain image, so it is never stored
		MultiSamplingColorAttachment.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
		MultiSamplingColorAttachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		MultiSamplingColorAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
		MultiSamplingColorAttachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
		MultiSamplingColorAttachment.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

		VkAttachmentDescription ColorAttachment{};
		ColorAttachment.format = m_VKSurfFormat.format;
		ColorAttachment.samples = VK_SAMPLE_COUNT_1_BIT;
		// when MSAA is on, this attachment is fully overwritten by the resolve,
		// so it only needs clearing when it is rendered into directly
		ColorAttachment.loadOp = ClearAttachments && !HasMultiSamplingTargets ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		ColorAttachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
		ColorAttachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		ColorAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
		ColorAttachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
		ColorAttachment.finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;

		VkAttachmentReference MultiSamplingColorAttachmentRef{};
		MultiSamplingColorAttachmentRef.attachment = 0;
		MultiSamplingColorAttachmentRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

		VkAttachmentReference ColorAttachmentRef{};
		// index must match the position inside the pAttachments array chosen below
		ColorAttachmentRef.attachment = HasMultiSamplingTargets ? 1 : 0;
		ColorAttachmentRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

		VkSubpassDescription Subpass{};
		Subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
		Subpass.colorAttachmentCount = 1;
		Subpass.pColorAttachments = HasMultiSamplingTargets ? &MultiSamplingColorAttachmentRef : &ColorAttachmentRef;
		Subpass.pResolveAttachments = HasMultiSamplingTargets ? &ColorAttachmentRef : nullptr;

		std::array<VkAttachmentDescription, 2> aAttachments;
		aAttachments[0] = MultiSamplingColorAttachment;
		aAttachments[1] = ColorAttachment;

		// order color writes of this pass after previous color output
		VkSubpassDependency Dependency{};
		Dependency.srcSubpass = VK_SUBPASS_EXTERNAL;
		Dependency.dstSubpass = 0;
		Dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
		Dependency.srcAccessMask = 0;
		Dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
		Dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;

		VkRenderPassCreateInfo CreateRenderPassInfo{};
		CreateRenderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
		CreateRenderPassInfo.attachmentCount = HasMultiSamplingTargets ? 2 : 1;
		// without MSAA the first (multi-sampling) attachment description is skipped
		CreateRenderPassInfo.pAttachments = HasMultiSamplingTargets ? aAttachments.data() : aAttachments.data() + 1;
		CreateRenderPassInfo.subpassCount = 1;
		CreateRenderPassInfo.pSubpasses = &Subpass;
		CreateRenderPassInfo.dependencyCount = 1;
		CreateRenderPassInfo.pDependencies = &Dependency;

		if(vkCreateRenderPass(device: m_VKDevice, pCreateInfo: &CreateRenderPassInfo, pAllocator: nullptr, pRenderPass: &m_VKRenderPass) != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the render pass failed.");
			return false;
		}

		return true;
	}
4454
	// Destroys the render pass created by CreateRenderPass.
	void DestroyRenderPass()
	{
		vkDestroyRenderPass(device: m_VKDevice, renderPass: m_VKRenderPass, pAllocator: nullptr);
	}
4459
4460 [[nodiscard]] bool CreateFramebuffers()
4461 {
4462 m_vFramebufferList.resize(new_size: m_SwapChainImageCount);
4463
4464 for(size_t i = 0; i < m_SwapChainImageCount; i++)
4465 {
4466 std::array<VkImageView, 2> aAttachments = {
4467 m_vSwapChainMultiSamplingImages[i].m_ImgView,
4468 m_vSwapChainImageViewList[i]};
4469
4470 bool HasMultiSamplingTargets = HasMultiSampling();
4471
4472 VkFramebufferCreateInfo FramebufferInfo{};
4473 FramebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
4474 FramebufferInfo.renderPass = m_VKRenderPass;
4475 FramebufferInfo.attachmentCount = HasMultiSamplingTargets ? aAttachments.size() : aAttachments.size() - 1;
4476 FramebufferInfo.pAttachments = HasMultiSamplingTargets ? aAttachments.data() : aAttachments.data() + 1;
4477 FramebufferInfo.width = m_VKSwapImgAndViewportExtent.m_SwapImageViewport.width;
4478 FramebufferInfo.height = m_VKSwapImgAndViewportExtent.m_SwapImageViewport.height;
4479 FramebufferInfo.layers = 1;
4480
4481 if(vkCreateFramebuffer(device: m_VKDevice, pCreateInfo: &FramebufferInfo, pAllocator: nullptr, pFramebuffer: &m_vFramebufferList[i]) != VK_SUCCESS)
4482 {
4483 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the framebuffers failed.");
4484 return false;
4485 }
4486 }
4487
4488 return true;
4489 }
4490
4491 void DestroyFramebuffers()
4492 {
4493 for(auto &FrameBuffer : m_vFramebufferList)
4494 {
4495 vkDestroyFramebuffer(device: m_VKDevice, framebuffer: FrameBuffer, pAllocator: nullptr);
4496 }
4497
4498 m_vFramebufferList.clear();
4499 }
4500
4501 [[nodiscard]] bool CreateShaderModule(const std::vector<uint8_t> &vCode, VkShaderModule &ShaderModule)
4502 {
4503 VkShaderModuleCreateInfo CreateInfo{};
4504 CreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
4505 CreateInfo.codeSize = vCode.size();
4506 CreateInfo.pCode = (const uint32_t *)(vCode.data());
4507
4508 if(vkCreateShaderModule(device: m_VKDevice, pCreateInfo: &CreateInfo, pAllocator: nullptr, pShaderModule: &ShaderModule) != VK_SUCCESS)
4509 {
4510 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Shader module was not created.");
4511 return false;
4512 }
4513
4514 return true;
4515 }
4516
4517 [[nodiscard]] bool CreateDescriptorSetLayouts()
4518 {
4519 VkDescriptorSetLayoutBinding SamplerLayoutBinding{};
4520 SamplerLayoutBinding.binding = 0;
4521 SamplerLayoutBinding.descriptorCount = 1;
4522 SamplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
4523 SamplerLayoutBinding.pImmutableSamplers = nullptr;
4524 SamplerLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
4525
4526 std::array<VkDescriptorSetLayoutBinding, 1> aBindings = {SamplerLayoutBinding};
4527 VkDescriptorSetLayoutCreateInfo LayoutInfo{};
4528 LayoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
4529 LayoutInfo.bindingCount = aBindings.size();
4530 LayoutInfo.pBindings = aBindings.data();
4531
4532 if(vkCreateDescriptorSetLayout(device: m_VKDevice, pCreateInfo: &LayoutInfo, pAllocator: nullptr, pSetLayout: &m_StandardTexturedDescriptorSetLayout) != VK_SUCCESS)
4533 {
4534 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating descriptor layout failed.");
4535 return false;
4536 }
4537
4538 if(vkCreateDescriptorSetLayout(device: m_VKDevice, pCreateInfo: &LayoutInfo, pAllocator: nullptr, pSetLayout: &m_Standard3DTexturedDescriptorSetLayout) != VK_SUCCESS)
4539 {
4540 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating descriptor layout failed.");
4541 return false;
4542 }
4543 return true;
4544 }
4545
4546 void DestroyDescriptorSetLayouts()
4547 {
4548 vkDestroyDescriptorSetLayout(device: m_VKDevice, descriptorSetLayout: m_StandardTexturedDescriptorSetLayout, pAllocator: nullptr);
4549 vkDestroyDescriptorSetLayout(device: m_VKDevice, descriptorSetLayout: m_Standard3DTexturedDescriptorSetLayout, pAllocator: nullptr);
4550 }
4551
4552 [[nodiscard]] bool LoadShader(const char *pFilename, std::vector<uint8_t> *&pvShaderData)
4553 {
4554 auto ShaderFileIterator = m_ShaderFiles.find(x: pFilename);
4555 if(ShaderFileIterator == m_ShaderFiles.end())
4556 {
4557 void *pShaderBuff;
4558 unsigned FileSize;
4559 if(!m_pStorage->ReadFile(pFilename, Type: IStorage::TYPE_ALL, ppResult: &pShaderBuff, pResultLen: &FileSize))
4560 return false;
4561
4562 std::vector<uint8_t> vShaderBuff;
4563 vShaderBuff.resize(new_size: FileSize);
4564 mem_copy(dest: vShaderBuff.data(), source: pShaderBuff, size: FileSize);
4565 free(ptr: pShaderBuff);
4566
4567 ShaderFileIterator = m_ShaderFiles.insert(x: {pFilename, {.m_vBinary: std::move(vShaderBuff)}}).first;
4568 }
4569
4570 pvShaderData = &ShaderFileIterator->second.m_vBinary;
4571
4572 return true;
4573 }
4574
4575 [[nodiscard]] bool CreateShaders(const char *pVertName, const char *pFragName, VkPipelineShaderStageCreateInfo (&aShaderStages)[2], SShaderModule &ShaderModule)
4576 {
4577 bool ShaderLoaded = true;
4578
4579 std::vector<uint8_t> *pvVertBuff;
4580 std::vector<uint8_t> *pvFragBuff;
4581 ShaderLoaded &= LoadShader(pFilename: pVertName, pvShaderData&: pvVertBuff);
4582 ShaderLoaded &= LoadShader(pFilename: pFragName, pvShaderData&: pvFragBuff);
4583
4584 ShaderModule.m_VKDevice = m_VKDevice;
4585
4586 if(!ShaderLoaded)
4587 {
4588 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "A shader file could not load correctly.");
4589 return false;
4590 }
4591
4592 if(!CreateShaderModule(vCode: *pvVertBuff, ShaderModule&: ShaderModule.m_VertShaderModule))
4593 return false;
4594
4595 if(!CreateShaderModule(vCode: *pvFragBuff, ShaderModule&: ShaderModule.m_FragShaderModule))
4596 return false;
4597
4598 VkPipelineShaderStageCreateInfo &VertShaderStageInfo = aShaderStages[0];
4599 VertShaderStageInfo = {};
4600 VertShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
4601 VertShaderStageInfo.stage = VK_SHADER_STAGE_VERTEX_BIT;
4602 VertShaderStageInfo.module = ShaderModule.m_VertShaderModule;
4603 VertShaderStageInfo.pName = "main";
4604
4605 VkPipelineShaderStageCreateInfo &FragShaderStageInfo = aShaderStages[1];
4606 FragShaderStageInfo = {};
4607 FragShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
4608 FragShaderStageInfo.stage = VK_SHADER_STAGE_FRAGMENT_BIT;
4609 FragShaderStageInfo.module = ShaderModule.m_FragShaderModule;
4610 FragShaderStageInfo.pName = "main";
4611 return true;
4612 }
4613
	// Fills the caller-provided pipeline-state structs with the defaults
	// shared by every graphics pipeline of this backend: triangle-list input
	// assembly, a viewport/scissor covering the whole swap image, fill-mode
	// rasterization without culling, the backend's current MSAA sample count,
	// and standard alpha blending. Callers may override individual fields
	// afterwards (e.g. the topology for line pipelines). Always returns true.
	// NOTE: ViewportState/ColorBlending keep pointers to Viewport/Scissor/
	// ColorBlendAttachment, so those must outlive the pipeline creation call.
	bool GetStandardPipelineInfo(VkPipelineInputAssemblyStateCreateInfo &InputAssembly,
		VkViewport &Viewport,
		VkRect2D &Scissor,
		VkPipelineViewportStateCreateInfo &ViewportState,
		VkPipelineRasterizationStateCreateInfo &Rasterizer,
		VkPipelineMultisampleStateCreateInfo &Multisampling,
		VkPipelineColorBlendAttachmentState &ColorBlendAttachment,
		VkPipelineColorBlendStateCreateInfo &ColorBlending) const
	{
		InputAssembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
		InputAssembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
		InputAssembly.primitiveRestartEnable = VK_FALSE;

		// Viewport and scissor both cover the full swap image.
		Viewport.x = 0.0f;
		Viewport.y = 0.0f;
		Viewport.width = (float)m_VKSwapImgAndViewportExtent.m_SwapImageViewport.width;
		Viewport.height = (float)m_VKSwapImgAndViewportExtent.m_SwapImageViewport.height;
		Viewport.minDepth = 0.0f;
		Viewport.maxDepth = 1.0f;

		Scissor.offset = {.x: 0, .y: 0};
		Scissor.extent = m_VKSwapImgAndViewportExtent.m_SwapImageViewport;

		ViewportState.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
		ViewportState.viewportCount = 1;
		ViewportState.pViewports = &Viewport;
		ViewportState.scissorCount = 1;
		ViewportState.pScissors = &Scissor;

		// Plain fill rasterization; no culling so winding order never matters.
		Rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
		Rasterizer.depthClampEnable = VK_FALSE;
		Rasterizer.rasterizerDiscardEnable = VK_FALSE;
		Rasterizer.polygonMode = VK_POLYGON_MODE_FILL;
		Rasterizer.lineWidth = 1.0f;
		Rasterizer.cullMode = VK_CULL_MODE_NONE;
		Rasterizer.frontFace = VK_FRONT_FACE_CLOCKWISE;
		Rasterizer.depthBiasEnable = VK_FALSE;

		Multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
		Multisampling.sampleShadingEnable = VK_FALSE;
		Multisampling.rasterizationSamples = GetSampleCount();

		ColorBlendAttachment.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
		ColorBlendAttachment.blendEnable = VK_TRUE;

		// Standard alpha blending: out = src * a + dst * (1 - a),
		// applied identically to the color and alpha channels.
		ColorBlendAttachment.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
		ColorBlendAttachment.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
		ColorBlendAttachment.colorBlendOp = VK_BLEND_OP_ADD;
		ColorBlendAttachment.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
		ColorBlendAttachment.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
		ColorBlendAttachment.alphaBlendOp = VK_BLEND_OP_ADD;

		ColorBlending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
		ColorBlending.logicOpEnable = VK_FALSE;
		ColorBlending.logicOp = VK_LOGIC_OP_COPY;
		ColorBlending.attachmentCount = 1;
		ColorBlending.pAttachments = &ColorBlendAttachment;
		ColorBlending.blendConstants[0] = 0.0f;
		ColorBlending.blendConstants[1] = 0.0f;
		ColorBlending.blendConstants[2] = 0.0f;
		ColorBlending.blendConstants[3] = 0.0f;

		return true;
	}
4678
	// Core pipeline factory used by all Create*GraphicsPipeline*Impl helpers.
	// Builds one pipeline (and its layout) for the given shader pair, vertex
	// layout, descriptor set layouts, push-constant ranges and permutation
	// (texture/blend/clip mode). The created layout and pipeline are stored
	// into PipeContainer via GetPipeLayout/GetPipeline.
	// ForceRequireDescriptors: bind the set layouts even for untextured mode.
	// IsLinePrim: use line-list topology instead of triangle-list.
	// Returns false and sets an init error on any Vulkan failure.
	template<bool ForceRequireDescriptors, size_t ArraySize, size_t DescrArraySize, size_t PushArraySize>
	[[nodiscard]] bool CreateGraphicsPipeline(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, uint32_t Stride, std::array<VkVertexInputAttributeDescription, ArraySize> &aInputAttr,
		std::array<VkDescriptorSetLayout, DescrArraySize> &aSetLayouts, std::array<VkPushConstantRange, PushArraySize> &aPushConstants, EVulkanBackendTextureModes TexMode,
		EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode, bool IsLinePrim = false)
	{
		// Module's destructor owns the shader modules; they only need to live
		// until pipeline creation finishes.
		VkPipelineShaderStageCreateInfo aShaderStages[2];
		SShaderModule Module;
		if(!CreateShaders(pVertName, pFragName, aShaderStages, ShaderModule&: Module))
			return false;

		bool HasSampler = TexMode == VULKAN_BACKEND_TEXTURE_MODE_TEXTURED;

		// Single vertex buffer binding (binding 0) with the caller's stride
		// and attribute layout.
		VkPipelineVertexInputStateCreateInfo VertexInputInfo{};
		VertexInputInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
		VkVertexInputBindingDescription BindingDescription{};
		BindingDescription.binding = 0;
		BindingDescription.stride = Stride;
		BindingDescription.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;

		VertexInputInfo.vertexBindingDescriptionCount = 1;
		VertexInputInfo.vertexAttributeDescriptionCount = aInputAttr.size();
		VertexInputInfo.pVertexBindingDescriptions = &BindingDescription;
		VertexInputInfo.pVertexAttributeDescriptions = aInputAttr.data();

		// These locals are referenced by pointer from PipelineInfo below, so
		// they must stay alive until vkCreateGraphicsPipelines returns.
		VkPipelineInputAssemblyStateCreateInfo InputAssembly{};
		VkViewport Viewport{};
		VkRect2D Scissor{};
		VkPipelineViewportStateCreateInfo ViewportState{};
		VkPipelineRasterizationStateCreateInfo Rasterizer{};
		VkPipelineMultisampleStateCreateInfo Multisampling{};
		VkPipelineColorBlendAttachmentState ColorBlendAttachment{};
		VkPipelineColorBlendStateCreateInfo ColorBlending{};

		GetStandardPipelineInfo(InputAssembly, Viewport, Scissor, ViewportState, Rasterizer, Multisampling, ColorBlendAttachment, ColorBlending);
		InputAssembly.topology = IsLinePrim ? VK_PRIMITIVE_TOPOLOGY_LINE_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;

		// Untextured pipelines skip the descriptor sets entirely unless the
		// caller forces them (e.g. for uniform-buffer-only pipelines).
		VkPipelineLayoutCreateInfo PipelineLayoutInfo{};
		PipelineLayoutInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
		PipelineLayoutInfo.setLayoutCount = (HasSampler || ForceRequireDescriptors) ? aSetLayouts.size() : 0;
		PipelineLayoutInfo.pSetLayouts = (HasSampler || ForceRequireDescriptors) && !aSetLayouts.empty() ? aSetLayouts.data() : nullptr;

		PipelineLayoutInfo.pushConstantRangeCount = aPushConstants.size();
		PipelineLayoutInfo.pPushConstantRanges = !aPushConstants.empty() ? aPushConstants.data() : nullptr;

		// Slots inside the container where this permutation is stored.
		VkPipelineLayout &PipeLayout = GetPipeLayout(Container&: PipeContainer, IsTextured: HasSampler, BlendModeIndex: size_t(BlendMode), DynamicIndex: size_t(DynamicMode));
		VkPipeline &Pipeline = GetPipeline(Container&: PipeContainer, IsTextured: HasSampler, BlendModeIndex: size_t(BlendMode), DynamicIndex: size_t(DynamicMode));

		if(vkCreatePipelineLayout(device: m_VKDevice, pCreateInfo: &PipelineLayoutInfo, pAllocator: nullptr, pPipelineLayout: &PipeLayout) != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating pipeline layout failed.");
			return false;
		}

		VkGraphicsPipelineCreateInfo PipelineInfo{};
		PipelineInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
		PipelineInfo.stageCount = 2;
		PipelineInfo.pStages = aShaderStages;
		PipelineInfo.pVertexInputState = &VertexInputInfo;
		PipelineInfo.pInputAssemblyState = &InputAssembly;
		PipelineInfo.pViewportState = &ViewportState;
		PipelineInfo.pRasterizationState = &Rasterizer;
		PipelineInfo.pMultisampleState = &Multisampling;
		PipelineInfo.pColorBlendState = &ColorBlending;
		PipelineInfo.layout = PipeLayout;
		PipelineInfo.renderPass = m_VKRenderPass;
		PipelineInfo.subpass = 0;
		PipelineInfo.basePipelineHandle = VK_NULL_HANDLE;

		// Viewport and scissor become dynamic state only for the dynamic clip
		// mode; the other modes bake the full-swap-image values in statically.
		std::array<VkDynamicState, 2> aDynamicStates = {
			VK_DYNAMIC_STATE_VIEWPORT,
			VK_DYNAMIC_STATE_SCISSOR,
		};

		VkPipelineDynamicStateCreateInfo DynamicStateCreate{};
		DynamicStateCreate.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
		DynamicStateCreate.dynamicStateCount = aDynamicStates.size();
		DynamicStateCreate.pDynamicStates = aDynamicStates.data();

		if(DynamicMode == VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT)
		{
			PipelineInfo.pDynamicState = &DynamicStateCreate;
		}

		if(vkCreateGraphicsPipelines(device: m_VKDevice, VK_NULL_HANDLE, createInfoCount: 1, pCreateInfos: &PipelineInfo, pAllocator: nullptr, pPipelines: &Pipeline) != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the graphic pipeline failed.");
			return false;
		}

		return true;
	}
4770
4771 [[nodiscard]] bool CreateStandardGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode, bool IsLinePrim)
4772 {
4773 std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
4774
4775 aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
4776 aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: sizeof(float) * 2};
4777 aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * (2 + 2)};
4778
4779 std::array<VkDescriptorSetLayout, 1> aSetLayouts = {m_StandardTexturedDescriptorSetLayout};
4780
4781 std::array<VkPushConstantRange, 1> aPushConstants{};
4782 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: sizeof(SUniformGPos)};
4783
4784 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * (2 + 2) + sizeof(uint8_t) * 4, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode, IsLinePrim);
4785 }
4786
4787 [[nodiscard]] bool CreateStandardGraphicsPipeline(const char *pVertName, const char *pFragName, bool HasSampler, bool IsLinePipe)
4788 {
4789 bool Ret = true;
4790
4791 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
4792
4793 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
4794 {
4795 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
4796 {
4797 Ret &= CreateStandardGraphicsPipelineImpl(pVertName, pFragName, PipeContainer&: IsLinePipe ? m_StandardLinePipeline : m_StandardPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j), IsLinePrim: IsLinePipe);
4798 }
4799 }
4800
4801 return Ret;
4802 }
4803
4804 [[nodiscard]] bool CreateStandard3DGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
4805 {
4806 std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
4807
4808 aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
4809 aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * 2};
4810 aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R32G32B32_SFLOAT, .offset: sizeof(float) * 2 + sizeof(uint8_t) * 4};
4811
4812 std::array<VkDescriptorSetLayout, 1> aSetLayouts = {m_Standard3DTexturedDescriptorSetLayout};
4813
4814 std::array<VkPushConstantRange, 1> aPushConstants{};
4815 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: sizeof(SUniformGPos)};
4816
4817 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * 2 + sizeof(uint8_t) * 4 + sizeof(float) * 3, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
4818 }
4819
4820 [[nodiscard]] bool CreateStandard3DGraphicsPipeline(const char *pVertName, const char *pFragName, bool HasSampler)
4821 {
4822 bool Ret = true;
4823
4824 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
4825
4826 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
4827 {
4828 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
4829 {
4830 Ret &= CreateStandard3DGraphicsPipelineImpl(pVertName, pFragName, PipeContainer&: m_Standard3DPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j));
4831 }
4832 }
4833
4834 return Ret;
4835 }
4836
4837 [[nodiscard]] bool CreateTextDescriptorSetLayout()
4838 {
4839 VkDescriptorSetLayoutBinding SamplerLayoutBinding{};
4840 SamplerLayoutBinding.binding = 0;
4841 SamplerLayoutBinding.descriptorCount = 1;
4842 SamplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
4843 SamplerLayoutBinding.pImmutableSamplers = nullptr;
4844 SamplerLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
4845
4846 auto SamplerLayoutBinding2 = SamplerLayoutBinding;
4847 SamplerLayoutBinding2.binding = 1;
4848
4849 std::array<VkDescriptorSetLayoutBinding, 2> aBindings = {SamplerLayoutBinding, SamplerLayoutBinding2};
4850 VkDescriptorSetLayoutCreateInfo LayoutInfo{};
4851 LayoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
4852 LayoutInfo.bindingCount = aBindings.size();
4853 LayoutInfo.pBindings = aBindings.data();
4854
4855 if(vkCreateDescriptorSetLayout(device: m_VKDevice, pCreateInfo: &LayoutInfo, pAllocator: nullptr, pSetLayout: &m_TextDescriptorSetLayout) != VK_SUCCESS)
4856 {
4857 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating descriptor layout failed.");
4858 return false;
4859 }
4860
4861 return true;
4862 }
4863
4864 void DestroyTextDescriptorSetLayout()
4865 {
4866 vkDestroyDescriptorSetLayout(device: m_VKDevice, descriptorSetLayout: m_TextDescriptorSetLayout, pAllocator: nullptr);
4867 }
4868
4869 [[nodiscard]] bool CreateTextGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
4870 {
4871 std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
4872 aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
4873 aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: sizeof(float) * 2};
4874 aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * (2 + 2)};
4875
4876 std::array<VkDescriptorSetLayout, 1> aSetLayouts = {m_TextDescriptorSetLayout};
4877
4878 std::array<VkPushConstantRange, 2> aPushConstants{};
4879 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: sizeof(SUniformGTextPos)};
4880 aPushConstants[1] = {.stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, .offset: sizeof(SUniformGTextPos) + sizeof(SUniformTextGFragmentOffset), .size: sizeof(SUniformTextGFragmentConstants)};
4881
4882 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * (2 + 2) + sizeof(uint8_t) * 4, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
4883 }
4884
4885 [[nodiscard]] bool CreateTextGraphicsPipeline(const char *pVertName, const char *pFragName)
4886 {
4887 bool Ret = true;
4888
4889 EVulkanBackendTextureModes TexMode = VULKAN_BACKEND_TEXTURE_MODE_TEXTURED;
4890
4891 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
4892 {
4893 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
4894 {
4895 Ret &= CreateTextGraphicsPipelineImpl(pVertName, pFragName, PipeContainer&: m_TextPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j));
4896 }
4897 }
4898
4899 return Ret;
4900 }
4901
4902 template<bool HasSampler>
4903 [[nodiscard]] bool CreateTileGraphicsPipelineImpl(const char *pVertName, const char *pFragName, bool IsBorder, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
4904 {
4905 std::array<VkVertexInputAttributeDescription, HasSampler ? 2 : 1> aAttributeDescriptions = {};
4906 aAttributeDescriptions[0] = {0, 0, VK_FORMAT_R32G32_SFLOAT, 0};
4907 if(HasSampler)
4908 aAttributeDescriptions[1] = {1, 0, VK_FORMAT_R8G8B8A8_UINT, sizeof(float) * 2};
4909
4910 std::array<VkDescriptorSetLayout, 1> aSetLayouts;
4911 aSetLayouts[0] = m_Standard3DTexturedDescriptorSetLayout;
4912
4913 uint32_t VertPushConstantSize = sizeof(SUniformTileGPos);
4914 if(IsBorder)
4915 VertPushConstantSize = sizeof(SUniformTileGPosBorder);
4916
4917 uint32_t FragPushConstantSize = sizeof(SUniformTileGVertColor);
4918
4919 std::array<VkPushConstantRange, 2> aPushConstants{};
4920 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: VertPushConstantSize};
4921 aPushConstants[1] = {.stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, .offset: sizeof(SUniformTileGPosBorder) + sizeof(SUniformTileGVertColorAlign), .size: FragPushConstantSize};
4922
4923 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, HasSampler ? (sizeof(float) * 2 + sizeof(uint8_t) * 4) : (sizeof(float) * 2), aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
4924 }
4925
4926 template<bool HasSampler>
4927 [[nodiscard]] bool CreateTileGraphicsPipeline(const char *pVertName, const char *pFragName, bool IsBorder)
4928 {
4929 bool Ret = true;
4930
4931 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
4932
4933 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
4934 {
4935 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
4936 {
4937 Ret &= CreateTileGraphicsPipelineImpl<HasSampler>(pVertName, pFragName, IsBorder, !IsBorder ? m_TilePipeline : m_TileBorderPipeline, TexMode, EVulkanBackendBlendModes(i), EVulkanBackendClipModes(j));
4938 }
4939 }
4940
4941 return Ret;
4942 }
4943
4944 [[nodiscard]] bool CreatePrimExGraphicsPipelineImpl(const char *pVertName, const char *pFragName, bool Rotationless, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
4945 {
4946 std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
4947 aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
4948 aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: sizeof(float) * 2};
4949 aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * (2 + 2)};
4950
4951 std::array<VkDescriptorSetLayout, 1> aSetLayouts;
4952 aSetLayouts[0] = m_StandardTexturedDescriptorSetLayout;
4953 uint32_t VertPushConstantSize = sizeof(SUniformPrimExGPos);
4954 if(Rotationless)
4955 VertPushConstantSize = sizeof(SUniformPrimExGPosRotationless);
4956
4957 uint32_t FragPushConstantSize = sizeof(SUniformPrimExGVertColor);
4958
4959 std::array<VkPushConstantRange, 2> aPushConstants{};
4960 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: VertPushConstantSize};
4961 aPushConstants[1] = {.stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, .offset: sizeof(SUniformPrimExGPos) + sizeof(SUniformPrimExGVertColorAlign), .size: FragPushConstantSize};
4962
4963 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * (2 + 2) + sizeof(uint8_t) * 4, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
4964 }
4965
4966 [[nodiscard]] bool CreatePrimExGraphicsPipeline(const char *pVertName, const char *pFragName, bool HasSampler, bool Rotationless)
4967 {
4968 bool Ret = true;
4969
4970 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
4971
4972 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
4973 {
4974 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
4975 {
4976 Ret &= CreatePrimExGraphicsPipelineImpl(pVertName, pFragName, Rotationless, PipeContainer&: Rotationless ? m_PrimExRotationlessPipeline : m_PrimExPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j));
4977 }
4978 }
4979
4980 return Ret;
4981 }
4982
4983 [[nodiscard]] bool CreateUniformDescriptorSetLayout(VkDescriptorSetLayout &SetLayout, VkShaderStageFlags StageFlags)
4984 {
4985 VkDescriptorSetLayoutBinding SamplerLayoutBinding{};
4986 SamplerLayoutBinding.binding = 1;
4987 SamplerLayoutBinding.descriptorCount = 1;
4988 SamplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
4989 SamplerLayoutBinding.pImmutableSamplers = nullptr;
4990 SamplerLayoutBinding.stageFlags = StageFlags;
4991
4992 std::array<VkDescriptorSetLayoutBinding, 1> aBindings = {SamplerLayoutBinding};
4993 VkDescriptorSetLayoutCreateInfo LayoutInfo{};
4994 LayoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
4995 LayoutInfo.bindingCount = aBindings.size();
4996 LayoutInfo.pBindings = aBindings.data();
4997
4998 if(vkCreateDescriptorSetLayout(device: m_VKDevice, pCreateInfo: &LayoutInfo, pAllocator: nullptr, pSetLayout: &SetLayout) != VK_SUCCESS)
4999 {
5000 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating descriptor layout failed.");
5001 return false;
5002 }
5003 return true;
5004 }
5005
5006 [[nodiscard]] bool CreateSpriteMultiUniformDescriptorSetLayout()
5007 {
5008 return CreateUniformDescriptorSetLayout(SetLayout&: m_SpriteMultiUniformDescriptorSetLayout, StageFlags: VK_SHADER_STAGE_VERTEX_BIT);
5009 }
5010
5011 [[nodiscard]] bool CreateQuadUniformDescriptorSetLayout()
5012 {
5013 return CreateUniformDescriptorSetLayout(SetLayout&: m_QuadUniformDescriptorSetLayout, StageFlags: VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT);
5014 }
5015
5016 void DestroyUniformDescriptorSetLayouts()
5017 {
5018 vkDestroyDescriptorSetLayout(device: m_VKDevice, descriptorSetLayout: m_QuadUniformDescriptorSetLayout, pAllocator: nullptr);
5019 vkDestroyDescriptorSetLayout(device: m_VKDevice, descriptorSetLayout: m_SpriteMultiUniformDescriptorSetLayout, pAllocator: nullptr);
5020 }
5021
	// Allocates SetCount uniform-buffer descriptor sets from the per-render-
	// thread descriptor pools and points each one at its own slice of
	// BindBuffer: set i covers [MemoryOffset + i * SingleBufferInstanceSize,
	// +SingleBufferInstanceSize). Returns false if pool lookup or descriptor
	// allocation fails.
	[[nodiscard]] bool CreateUniformDescriptorSets(size_t RenderThreadIndex, VkDescriptorSetLayout &SetLayout, SDeviceDescriptorSet *pSets, size_t SetCount, VkBuffer BindBuffer, size_t SingleBufferInstanceSize, VkDeviceSize MemoryOffset)
	{
		// GetDescriptorPoolForAlloc assigns a pool to every set in pSets;
		// RetDescr itself is not used beyond the call.
		VkDescriptorPool RetDescr;
		if(!GetDescriptorPoolForAlloc(RetDescr, DescriptorPools&: m_vUniformBufferDescrPools[RenderThreadIndex], pSets, AllocNum: SetCount))
			return false;
		VkDescriptorSetAllocateInfo DesAllocInfo{};
		DesAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
		DesAllocInfo.descriptorSetCount = 1;
		DesAllocInfo.pSetLayouts = &SetLayout;
		for(size_t i = 0; i < SetCount; ++i)
		{
			// Allocate from the pool that was assigned to this set above.
			DesAllocInfo.descriptorPool = pSets[i].m_pPools->m_vPools[pSets[i].m_PoolIndex].m_Pool;
			if(vkAllocateDescriptorSets(device: m_VKDevice, pAllocateInfo: &DesAllocInfo, pDescriptorSets: &pSets[i].m_Descriptor) != VK_SUCCESS)
			{
				return false;
			}

			// Bind this set to its slice of the uniform buffer.
			VkDescriptorBufferInfo BufferInfo{};
			BufferInfo.buffer = BindBuffer;
			BufferInfo.offset = MemoryOffset + SingleBufferInstanceSize * i;
			BufferInfo.range = SingleBufferInstanceSize;

			std::array<VkWriteDescriptorSet, 1> aDescriptorWrites{};

			// binding 1 matches CreateUniformDescriptorSetLayout().
			aDescriptorWrites[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
			aDescriptorWrites[0].dstSet = pSets[i].m_Descriptor;
			aDescriptorWrites[0].dstBinding = 1;
			aDescriptorWrites[0].dstArrayElement = 0;
			aDescriptorWrites[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
			aDescriptorWrites[0].descriptorCount = 1;
			aDescriptorWrites[0].pBufferInfo = &BufferInfo;

			vkUpdateDescriptorSets(device: m_VKDevice, descriptorWriteCount: static_cast<uint32_t>(aDescriptorWrites.size()), pDescriptorWrites: aDescriptorWrites.data(), descriptorCopyCount: 0, pDescriptorCopies: nullptr);
		}

		return true;
	}
5059
5060 void DestroyUniformDescriptorSets(SDeviceDescriptorSet *pSets, size_t SetCount)
5061 {
5062 for(size_t i = 0; i < SetCount; ++i)
5063 {
5064 vkFreeDescriptorSets(device: m_VKDevice, descriptorPool: pSets[i].m_pPools->m_vPools[pSets[i].m_PoolIndex].m_Pool, descriptorSetCount: 1, pDescriptorSets: &pSets[i].m_Descriptor);
5065 pSets[i].m_Descriptor = VK_NULL_HANDLE;
5066 }
5067 }
5068
5069 [[nodiscard]] bool CreateSpriteMultiGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
5070 {
5071 std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
5072 aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
5073 aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: sizeof(float) * 2};
5074 aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * (2 + 2)};
5075
5076 std::array<VkDescriptorSetLayout, 2> aSetLayouts;
5077 aSetLayouts[0] = m_StandardTexturedDescriptorSetLayout;
5078 aSetLayouts[1] = m_SpriteMultiUniformDescriptorSetLayout;
5079
5080 uint32_t VertPushConstantSize = sizeof(SUniformSpriteMultiGPos);
5081 uint32_t FragPushConstantSize = sizeof(SUniformSpriteMultiGVertColor);
5082
5083 std::array<VkPushConstantRange, 2> aPushConstants{};
5084 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: VertPushConstantSize};
5085 aPushConstants[1] = {.stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, .offset: sizeof(SUniformSpriteMultiGPos) + sizeof(SUniformSpriteMultiGVertColorAlign), .size: FragPushConstantSize};
5086
5087 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * (2 + 2) + sizeof(uint8_t) * 4, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
5088 }
5089
5090 [[nodiscard]] bool CreateSpriteMultiGraphicsPipeline(const char *pVertName, const char *pFragName)
5091 {
5092 bool Ret = true;
5093
5094 EVulkanBackendTextureModes TexMode = VULKAN_BACKEND_TEXTURE_MODE_TEXTURED;
5095
5096 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
5097 {
5098 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
5099 {
5100 Ret &= CreateSpriteMultiGraphicsPipelineImpl(pVertName, pFragName, PipeContainer&: m_SpriteMultiPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j));
5101 }
5102 }
5103
5104 return Ret;
5105 }
5106
5107 [[nodiscard]] bool CreateSpriteMultiPushGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
5108 {
5109 std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
5110 aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
5111 aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: sizeof(float) * 2};
5112 aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * (2 + 2)};
5113
5114 std::array<VkDescriptorSetLayout, 1> aSetLayouts;
5115 aSetLayouts[0] = m_StandardTexturedDescriptorSetLayout;
5116
5117 uint32_t VertPushConstantSize = sizeof(SUniformSpriteMultiPushGPos);
5118 uint32_t FragPushConstantSize = sizeof(SUniformSpriteMultiPushGVertColor);
5119
5120 std::array<VkPushConstantRange, 2> aPushConstants{};
5121 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: VertPushConstantSize};
5122 aPushConstants[1] = {.stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, .offset: sizeof(SUniformSpriteMultiPushGPos), .size: FragPushConstantSize};
5123
5124 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * (2 + 2) + sizeof(uint8_t) * 4, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
5125 }
5126
5127 [[nodiscard]] bool CreateSpriteMultiPushGraphicsPipeline(const char *pVertName, const char *pFragName)
5128 {
5129 bool Ret = true;
5130
5131 EVulkanBackendTextureModes TexMode = VULKAN_BACKEND_TEXTURE_MODE_TEXTURED;
5132
5133 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
5134 {
5135 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
5136 {
5137 Ret &= CreateSpriteMultiPushGraphicsPipelineImpl(pVertName, pFragName, PipeContainer&: m_SpriteMultiPushPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j));
5138 }
5139 }
5140
5141 return Ret;
5142 }
5143
5144 template<bool IsTextured>
5145 [[nodiscard]] bool CreateQuadGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
5146 {
5147 std::array<VkVertexInputAttributeDescription, IsTextured ? 3 : 2> aAttributeDescriptions = {};
5148 aAttributeDescriptions[0] = {0, 0, VK_FORMAT_R32G32B32A32_SFLOAT, 0};
5149 aAttributeDescriptions[1] = {1, 0, VK_FORMAT_R8G8B8A8_UNORM, sizeof(float) * 4};
5150 if(IsTextured)
5151 aAttributeDescriptions[2] = {2, 0, VK_FORMAT_R32G32_SFLOAT, sizeof(float) * 4 + sizeof(uint8_t) * 4};
5152
5153 std::array<VkDescriptorSetLayout, IsTextured ? 2 : 1> aSetLayouts;
5154 if(IsTextured)
5155 {
5156 aSetLayouts[0] = m_StandardTexturedDescriptorSetLayout;
5157 aSetLayouts[1] = m_QuadUniformDescriptorSetLayout;
5158 }
5159 else
5160 {
5161 aSetLayouts[0] = m_QuadUniformDescriptorSetLayout;
5162 }
5163
5164 uint32_t PushConstantSize = sizeof(SUniformQuadGPos);
5165
5166 std::array<VkPushConstantRange, 1> aPushConstants{};
5167 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: PushConstantSize};
5168
5169 return CreateGraphicsPipeline<true>(pVertName, pFragName, PipeContainer, sizeof(float) * 4 + sizeof(uint8_t) * 4 + (IsTextured ? (sizeof(float) * 2) : 0), aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
5170 }
5171
5172 template<bool HasSampler>
5173 [[nodiscard]] bool CreateQuadGraphicsPipeline(const char *pVertName, const char *pFragName)
5174 {
5175 bool Ret = true;
5176
5177 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
5178
5179 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
5180 {
5181 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
5182 {
5183 Ret &= CreateQuadGraphicsPipelineImpl<HasSampler>(pVertName, pFragName, m_QuadPipeline, TexMode, EVulkanBackendBlendModes(i), EVulkanBackendClipModes(j));
5184 }
5185 }
5186
5187 return Ret;
5188 }
5189
5190 template<bool IsTextured>
5191 [[nodiscard]] bool CreateQuadGroupedGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
5192 {
5193 std::array<VkVertexInputAttributeDescription, IsTextured ? 3 : 2> aAttributeDescriptions = {};
5194 aAttributeDescriptions[0] = {0, 0, VK_FORMAT_R32G32B32A32_SFLOAT, 0};
5195 aAttributeDescriptions[1] = {1, 0, VK_FORMAT_R8G8B8A8_UNORM, sizeof(float) * 4};
5196 if(IsTextured)
5197 aAttributeDescriptions[2] = {2, 0, VK_FORMAT_R32G32_SFLOAT, sizeof(float) * 4 + sizeof(uint8_t) * 4};
5198
5199 std::array<VkDescriptorSetLayout, 1> aSetLayouts;
5200 aSetLayouts[0] = m_StandardTexturedDescriptorSetLayout;
5201
5202 uint32_t PushConstantSize = sizeof(SUniformQuadGroupedGPos);
5203
5204 std::array<VkPushConstantRange, 1> aPushConstants{};
5205 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT, .offset: 0, .size: PushConstantSize};
5206
5207 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, sizeof(float) * 4 + sizeof(uint8_t) * 4 + (IsTextured ? (sizeof(float) * 2) : 0), aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
5208 }
5209
5210 template<bool HasSampler>
5211 [[nodiscard]] bool CreateQuadGroupedGraphicsPipeline(const char *pVertName, const char *pFragName)
5212 {
5213 bool Ret = true;
5214
5215 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
5216
5217 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
5218 {
5219 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
5220 {
5221 Ret &= CreateQuadGroupedGraphicsPipelineImpl<HasSampler>(pVertName, pFragName, m_QuadGroupedPipeline, TexMode, EVulkanBackendBlendModes(i), EVulkanBackendClipModes(j));
5222 }
5223 }
5224
5225 return Ret;
5226 }
5227
5228 [[nodiscard]] bool CreateCommandPool()
5229 {
5230 VkCommandPoolCreateInfo CreatePoolInfo{};
5231 CreatePoolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
5232 CreatePoolInfo.queueFamilyIndex = m_VKGraphicsQueueIndex;
5233 CreatePoolInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
5234
5235 m_vCommandPools.resize(new_size: m_ThreadCount);
5236 for(size_t i = 0; i < m_ThreadCount; ++i)
5237 {
5238 if(vkCreateCommandPool(device: m_VKDevice, pCreateInfo: &CreatePoolInfo, pAllocator: nullptr, pCommandPool: &m_vCommandPools[i]) != VK_SUCCESS)
5239 {
5240 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the command pool failed.");
5241 return false;
5242 }
5243 }
5244 return true;
5245 }
5246
5247 void DestroyCommandPool()
5248 {
5249 for(size_t i = 0; i < m_ThreadCount; ++i)
5250 {
5251 vkDestroyCommandPool(device: m_VKDevice, commandPool: m_vCommandPools[i], pAllocator: nullptr);
5252 }
5253 }
5254
5255 [[nodiscard]] bool CreateCommandBuffers()
5256 {
5257 m_vMainDrawCommandBuffers.resize(new_size: m_SwapChainImageCount);
5258 if(m_ThreadCount > 1)
5259 {
5260 m_vvThreadDrawCommandBuffers.resize(new_size: m_ThreadCount);
5261 m_vvUsedThreadDrawCommandBuffer.resize(new_size: m_ThreadCount);
5262 m_vHelperThreadDrawCommandBuffers.resize(new_size: m_ThreadCount);
5263 for(auto &ThreadDrawCommandBuffers : m_vvThreadDrawCommandBuffers)
5264 {
5265 ThreadDrawCommandBuffers.resize(new_size: m_SwapChainImageCount);
5266 }
5267 for(auto &UsedThreadDrawCommandBuffer : m_vvUsedThreadDrawCommandBuffer)
5268 {
5269 UsedThreadDrawCommandBuffer.resize(new_size: m_SwapChainImageCount, x: false);
5270 }
5271 }
5272 m_vMemoryCommandBuffers.resize(new_size: m_SwapChainImageCount);
5273 m_vUsedMemoryCommandBuffer.resize(new_size: m_SwapChainImageCount, x: false);
5274
5275 VkCommandBufferAllocateInfo AllocInfo{};
5276 AllocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
5277 AllocInfo.commandPool = m_vCommandPools[0];
5278 AllocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
5279 AllocInfo.commandBufferCount = (uint32_t)m_vMainDrawCommandBuffers.size();
5280
5281 if(vkAllocateCommandBuffers(device: m_VKDevice, pAllocateInfo: &AllocInfo, pCommandBuffers: m_vMainDrawCommandBuffers.data()) != VK_SUCCESS)
5282 {
5283 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Allocating command buffers failed.");
5284 return false;
5285 }
5286
5287 AllocInfo.commandBufferCount = (uint32_t)m_vMemoryCommandBuffers.size();
5288
5289 if(vkAllocateCommandBuffers(device: m_VKDevice, pAllocateInfo: &AllocInfo, pCommandBuffers: m_vMemoryCommandBuffers.data()) != VK_SUCCESS)
5290 {
5291 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Allocating memory command buffers failed.");
5292 return false;
5293 }
5294
5295 if(m_ThreadCount > 1)
5296 {
5297 size_t Count = 0;
5298 for(auto &ThreadDrawCommandBuffers : m_vvThreadDrawCommandBuffers)
5299 {
5300 AllocInfo.commandPool = m_vCommandPools[Count];
5301 ++Count;
5302 AllocInfo.commandBufferCount = (uint32_t)ThreadDrawCommandBuffers.size();
5303 AllocInfo.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
5304 if(vkAllocateCommandBuffers(device: m_VKDevice, pAllocateInfo: &AllocInfo, pCommandBuffers: ThreadDrawCommandBuffers.data()) != VK_SUCCESS)
5305 {
5306 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Allocating thread command buffers failed.");
5307 return false;
5308 }
5309 }
5310 }
5311
5312 return true;
5313 }
5314
5315 void DestroyCommandBuffer()
5316 {
5317 if(m_ThreadCount > 1)
5318 {
5319 size_t Count = 0;
5320 for(auto &ThreadDrawCommandBuffers : m_vvThreadDrawCommandBuffers)
5321 {
5322 vkFreeCommandBuffers(device: m_VKDevice, commandPool: m_vCommandPools[Count], commandBufferCount: static_cast<uint32_t>(ThreadDrawCommandBuffers.size()), pCommandBuffers: ThreadDrawCommandBuffers.data());
5323 ++Count;
5324 }
5325 }
5326
5327 vkFreeCommandBuffers(device: m_VKDevice, commandPool: m_vCommandPools[0], commandBufferCount: static_cast<uint32_t>(m_vMemoryCommandBuffers.size()), pCommandBuffers: m_vMemoryCommandBuffers.data());
5328 vkFreeCommandBuffers(device: m_VKDevice, commandPool: m_vCommandPools[0], commandBufferCount: static_cast<uint32_t>(m_vMainDrawCommandBuffers.size()), pCommandBuffers: m_vMainDrawCommandBuffers.data());
5329
5330 m_vvThreadDrawCommandBuffers.clear();
5331 m_vvUsedThreadDrawCommandBuffer.clear();
5332 m_vHelperThreadDrawCommandBuffers.clear();
5333
5334 m_vMainDrawCommandBuffers.clear();
5335 m_vMemoryCommandBuffers.clear();
5336 m_vUsedMemoryCommandBuffer.clear();
5337 }
5338
5339 [[nodiscard]] bool CreateSyncObjects()
5340 {
5341 auto SyncObjectCount = m_SwapChainImageCount;
5342 m_vQueueSubmitSemaphores.resize(new_size: SyncObjectCount);
5343 m_vBusyAcquireImageSemaphores.resize(new_size: SyncObjectCount);
5344
5345 m_vQueueSubmitFences.resize(new_size: SyncObjectCount);
5346
5347 VkSemaphoreCreateInfo CreateSemaphoreInfo{};
5348 CreateSemaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
5349
5350 VkFenceCreateInfo FenceInfo{};
5351 FenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
5352 FenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
5353
5354 if(vkCreateSemaphore(device: m_VKDevice, pCreateInfo: &CreateSemaphoreInfo, pAllocator: nullptr, pSemaphore: &m_AcquireImageSemaphore) != VK_SUCCESS)
5355 {
5356 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating acquire next image semaphore failed.");
5357 return false;
5358 }
5359 for(size_t i = 0; i < SyncObjectCount; i++)
5360 {
5361 if(vkCreateSemaphore(device: m_VKDevice, pCreateInfo: &CreateSemaphoreInfo, pAllocator: nullptr, pSemaphore: &m_vQueueSubmitSemaphores[i]) != VK_SUCCESS ||
5362 vkCreateSemaphore(device: m_VKDevice, pCreateInfo: &CreateSemaphoreInfo, pAllocator: nullptr, pSemaphore: &m_vBusyAcquireImageSemaphores[i]) != VK_SUCCESS ||
5363 vkCreateFence(device: m_VKDevice, pCreateInfo: &FenceInfo, pAllocator: nullptr, pFence: &m_vQueueSubmitFences[i]) != VK_SUCCESS)
5364 {
5365 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating swap chain sync objects(fences, semaphores) failed.");
5366 return false;
5367 }
5368 }
5369
5370 return true;
5371 }
5372
5373 void DestroySyncObjects()
5374 {
5375 for(size_t i = 0; i < m_vBusyAcquireImageSemaphores.size(); i++)
5376 {
5377 vkDestroySemaphore(device: m_VKDevice, semaphore: m_vBusyAcquireImageSemaphores[i], pAllocator: nullptr);
5378 vkDestroySemaphore(device: m_VKDevice, semaphore: m_vQueueSubmitSemaphores[i], pAllocator: nullptr);
5379 vkDestroyFence(device: m_VKDevice, fence: m_vQueueSubmitFences[i], pAllocator: nullptr);
5380 }
5381 vkDestroySemaphore(device: m_VKDevice, semaphore: m_AcquireImageSemaphore, pAllocator: nullptr);
5382
5383 m_vBusyAcquireImageSemaphores.clear();
5384 m_vQueueSubmitSemaphores.clear();
5385
5386 m_vQueueSubmitFences.clear();
5387 }
5388
	// Releases a streamed vertex buffer of the given frame image by handing its
	// buffer/memory pair to CleanBufferPair.
	void DestroyBufferOfFrame(size_t ImageIndex, SFrameBuffers &Buffer)
	{
		CleanBufferPair(ImageIndex, Buffer&: Buffer.m_Buffer, BufferMem&: Buffer.m_BufferMem);
	}
5393
5394 void DestroyUniBufferOfFrame(size_t ImageIndex, SFrameUniformBuffers &Buffer)
5395 {
5396 CleanBufferPair(ImageIndex, Buffer&: Buffer.m_Buffer, BufferMem&: Buffer.m_BufferMem);
5397 for(auto &DescrSet : Buffer.m_aUniformSets)
5398 {
5399 if(DescrSet.m_Descriptor != VK_NULL_HANDLE)
5400 {
5401 DestroyUniformDescriptorSets(pSets: &DescrSet, SetCount: 1);
5402 }
5403 }
5404 }
5405
5406 /*************
5407 * SWAP CHAIN
5408 **************/
5409
	// Destroys everything that depends on the swap chain: all graphics
	// pipelines, framebuffers, the render pass, multi-sampling attachments and
	// the swap chain image views. The swap chain itself is only force-destroyed
	// on full shutdown; during recreation it can be handed to Vulkan as the
	// "old swap chain" (see DestroySwapChain / RecreateSwapChain).
	void CleanupVulkanSwapChain(bool ForceSwapChainDestruct)
	{
		m_StandardPipeline.Destroy(Device&: m_VKDevice);
		m_StandardLinePipeline.Destroy(Device&: m_VKDevice);
		m_Standard3DPipeline.Destroy(Device&: m_VKDevice);
		m_TextPipeline.Destroy(Device&: m_VKDevice);
		m_TilePipeline.Destroy(Device&: m_VKDevice);
		m_TileBorderPipeline.Destroy(Device&: m_VKDevice);
		m_PrimExPipeline.Destroy(Device&: m_VKDevice);
		m_PrimExRotationlessPipeline.Destroy(Device&: m_VKDevice);
		m_SpriteMultiPipeline.Destroy(Device&: m_VKDevice);
		m_SpriteMultiPushPipeline.Destroy(Device&: m_VKDevice);
		m_QuadPipeline.Destroy(Device&: m_VKDevice);
		m_QuadGroupedPipeline.Destroy(Device&: m_VKDevice);

		DestroyFramebuffers();

		DestroyRenderPass();

		DestroyMultiSamplerImageAttachments();

		// image views first, the handles belong to the swap chain
		DestroyImageViews();
		ClearSwapChainImageHandles();

		DestroySwapChain(ForceDestroy: ForceSwapChainDestruct);

		m_SwapchainCreated = false;
	}
5438
	// Cleans up resources that are (partially) tied to the swap chain image
	// count. SwapchainCount is the number of frame-data slots to clear (the OLD
	// image count when called during swap chain recreation).
	// With IsLastCleanup == true this is the full shutdown path that
	// additionally destroys textures, buffer objects, buffer caches, samplers,
	// descriptor pools, command pools and descriptor set layouts.
	template<bool IsLastCleanup>
	void CleanupVulkan(size_t SwapchainCount)
	{
		if(IsLastCleanup)
		{
			if(m_SwapchainCreated)
				CleanupVulkanSwapChain(ForceSwapChainDestruct: true);

			// clean all images, buffers, buffer containers
			for(auto &Texture : m_vTextures)
			{
				// a still-set text descriptor means the client missed a destroy command
				if(Texture.m_VKTextDescrSet.m_Descriptor != VK_NULL_HANDLE && IsVerbose())
				{
					log_warn("gfx/vulkan", "Text textures were not cleared over command.");
				}
				DestroyTexture(Texture);
			}

			for(auto &BufferObject : m_vBufferObjects)
			{
				// only non-streamed buffer objects own their memory block
				if(!BufferObject.m_IsStreamedBuffer)
					FreeVertexMemBlock(Block&: BufferObject.m_BufferObject.m_Mem);
			}

			m_vBufferContainers.clear();
		}

		m_vImageLastFrameCheck.clear();

		m_vLastPipeline.clear();

		// per-thread streamed buffers hold one entry per swap chain image
		for(size_t i = 0; i < m_ThreadCount; ++i)
		{
			m_vStreamedVertexBuffers[i].Destroy(DestroyBuffer: [&](size_t ImageIndex, SFrameBuffers &Buffer) { DestroyBufferOfFrame(ImageIndex, Buffer); });
			m_vStreamedUniformBuffers[i].Destroy(DestroyBuffer: [&](size_t ImageIndex, SFrameUniformBuffers &Buffer) { DestroyUniBufferOfFrame(ImageIndex, Buffer); });
		}
		m_vStreamedVertexBuffers.clear();
		m_vStreamedUniformBuffers.clear();

		// flush the delayed-cleanup queues of every (old) frame slot
		for(size_t i = 0; i < SwapchainCount; ++i)
		{
			ClearFrameData(FrameImageIndex: i);
		}

		m_vvFrameDelayedBufferCleanup.clear();
		m_vvFrameDelayedTextureCleanup.clear();
		m_vvFrameDelayedTextTexturesCleanup.clear();

		m_StagingBufferCache.DestroyFrameData(ImageCount: SwapchainCount);
		m_StagingBufferCacheImage.DestroyFrameData(ImageCount: SwapchainCount);
		m_VertexBufferCache.DestroyFrameData(ImageCount: SwapchainCount);
		for(auto &ImageBufferCache : m_ImageBufferCaches)
			ImageBufferCache.second.DestroyFrameData(ImageCount: SwapchainCount);

		if(IsLastCleanup)
		{
			m_StagingBufferCache.Destroy(Device&: m_VKDevice);
			m_StagingBufferCacheImage.Destroy(Device&: m_VKDevice);
			m_VertexBufferCache.Destroy(Device&: m_VKDevice);
			for(auto &ImageBufferCache : m_ImageBufferCaches)
				ImageBufferCache.second.Destroy(Device&: m_VKDevice);

			m_ImageBufferCaches.clear();

			DestroyTextureSamplers();
			DestroyDescriptorPools();

			DeletePresentedImageDataImage();
		}

		// sync objects and command buffers are per swap chain image and are
		// recreated by InitVulkan on the recreation path
		DestroySyncObjects();
		DestroyCommandBuffer();

		if(IsLastCleanup)
		{
			DestroyCommandPool();
		}

		if(IsLastCleanup)
		{
			DestroyUniformDescriptorSetLayouts();
			DestroyTextDescriptorSetLayout();
			DestroyDescriptorSetLayouts();
		}
	}
5524
	// Tears down the swap-chain-independent Vulkan objects in dependency order:
	// surface, logical device, debug callback (when graphics debugging is
	// enabled) and finally the instance. No-op when the instance was never
	// created.
	void CleanupVulkanSDL()
	{
		if(m_VKInstance != VK_NULL_HANDLE)
		{
			DestroySurface();
			vkDestroyDevice(device: m_VKDevice, pAllocator: nullptr);

			if(g_Config.m_DbgGfx == DEBUG_GFX_MODE_MINIMUM || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL)
			{
				UnregisterDebugCallback();
			}
			vkDestroyInstance(instance: m_VKInstance, pAllocator: nullptr);
			m_VKInstance = VK_NULL_HANDLE;
		}
	}
5540
	// Rebuilds the swap chain and everything depending on it, e.g. after a
	// resize or a multi-sampling change. Returns 0 on success, non-zero on
	// failure of InitVulkanSwapChain.
	int RecreateSwapChain()
	{
		int Ret = 0;
		// no resources tied to in-flight frames may be destroyed while the GPU
		// still uses them
		vkDeviceWaitIdle(device: m_VKDevice);

		if(IsVerbose())
		{
			log_info("gfx/vulkan", "Recreating swap chain.");
		}

		VkSwapchainKHR OldSwapChain = VK_NULL_HANDLE;
		uint32_t OldSwapChainImageCount = m_SwapChainImageCount;

		if(m_SwapchainCreated)
			CleanupVulkanSwapChain(ForceSwapChainDestruct: false);

		// set new multi sampling if it was requested
		if(m_NextMultiSamplingCount != std::numeric_limits<uint32_t>::max())
		{
			m_MultiSamplingCount = m_NextMultiSamplingCount;
			m_NextMultiSamplingCount = std::numeric_limits<uint32_t>::max();
		}

		if(!m_SwapchainCreated)
			Ret = InitVulkanSwapChain(OldSwapChain);

		// the per-image resources (command buffers, sync objects, frame data)
		// must be rebuilt when the image count changed
		if(OldSwapChainImageCount != m_SwapChainImageCount)
		{
			CleanupVulkan<false>(SwapchainCount: OldSwapChainImageCount);
			InitVulkan<false>();
		}

		// the old swap chain could be reused during creation; destroy it now
		if(OldSwapChain != VK_NULL_HANDLE)
		{
			vkDestroySwapchainKHR(device: m_VKDevice, swapchain: OldSwapChain, pAllocator: nullptr);
		}

		if(Ret != 0 && IsVerbose())
		{
			log_warn("gfx/vulkan", "Recreating swap chain failed.");
		}

		return Ret;
	}
5585
	// Initializes the window-level Vulkan objects: extensions/layers query,
	// instance (with optional debug callback), GPU selection, logical device,
	// queue and window surface. Fills the renderer/vendor/version strings for
	// display. Returns 0 on success, -1 on any failure (SetError was called by
	// the failing helper).
	int InitVulkanSDL(SDL_Window *pWindow, uint32_t CanvasWidth, uint32_t CanvasHeight, char *pRendererString, char *pVendorString, char *pVersionString)
	{
		std::vector<std::string> vVKExtensions;
		std::vector<std::string> vVKLayers;

		m_CanvasWidth = CanvasWidth;
		m_CanvasHeight = CanvasHeight;

		if(!GetVulkanExtensions(pWindow, vVKExtensions))
			return -1;

		if(!GetVulkanLayers(vVKLayers))
			return -1;

		if(!CreateVulkanInstance(vVKLayers, vVKExtensions, TryDebugExtensions: true))
			return -1;

		if(g_Config.m_DbgGfx == DEBUG_GFX_MODE_MINIMUM || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL)
		{
			SetupDebugCallback();

			for(auto &VKLayer : vVKLayers)
			{
				log_info("gfx/vulkan", "Validation layer: %s", VKLayer.c_str());
			}
		}

		if(!SelectGpu(pRendererName: pRendererString, pVendorName: pVendorString, pVersionName: pVersionString))
			return -1;

		if(!CreateLogicalDevice(vVKLayers))
			return -1;

		GetDeviceQueue();

		if(!CreateSurface(pWindow))
			return -1;

		return 0;
	}
5626
5627 /************************
5628 * MEMORY MANAGEMENT
5629 ************************/
5630
5631 uint32_t FindMemoryType(VkPhysicalDevice PhyDevice, uint32_t TypeFilter, VkMemoryPropertyFlags Properties)
5632 {
5633 VkPhysicalDeviceMemoryProperties MemProperties;
5634 vkGetPhysicalDeviceMemoryProperties(physicalDevice: PhyDevice, pMemoryProperties: &MemProperties);
5635
5636 for(uint32_t i = 0; i < MemProperties.memoryTypeCount; i++)
5637 {
5638 if((TypeFilter & (1 << i)) && (MemProperties.memoryTypes[i].propertyFlags & Properties) == Properties)
5639 {
5640 return i;
5641 }
5642 }
5643
5644 return 0;
5645 }
5646
5647 [[nodiscard]] bool CreateBuffer(VkDeviceSize BufferSize, EMemoryBlockUsage MemUsage, VkBufferUsageFlags BufferUsage, VkMemoryPropertyFlags MemoryProperties, VkBuffer &VKBuffer, SDeviceMemoryBlock &VKBufferMemory)
5648 {
5649 VkBufferCreateInfo BufferInfo{};
5650 BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
5651 BufferInfo.size = BufferSize;
5652 BufferInfo.usage = BufferUsage;
5653 BufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
5654
5655 if(vkCreateBuffer(device: m_VKDevice, pCreateInfo: &BufferInfo, pAllocator: nullptr, pBuffer: &VKBuffer) != VK_SUCCESS)
5656 {
5657 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Buffer creation failed.");
5658 return false;
5659 }
5660
5661 VkMemoryRequirements MemRequirements;
5662 vkGetBufferMemoryRequirements(device: m_VKDevice, buffer: VKBuffer, pMemoryRequirements: &MemRequirements);
5663
5664 VkMemoryAllocateInfo MemAllocInfo{};
5665 MemAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
5666 MemAllocInfo.allocationSize = MemRequirements.size;
5667 MemAllocInfo.memoryTypeIndex = FindMemoryType(PhyDevice: m_VKGPU, TypeFilter: MemRequirements.memoryTypeBits, Properties: MemoryProperties);
5668
5669 VKBufferMemory.m_Size = MemRequirements.size;
5670
5671 if(MemUsage == MEMORY_BLOCK_USAGE_BUFFER)
5672 m_pBufferMemoryUsage->store(i: m_pBufferMemoryUsage->load(m: std::memory_order_relaxed) + MemRequirements.size, m: std::memory_order_relaxed);
5673 else if(MemUsage == MEMORY_BLOCK_USAGE_STAGING)
5674 m_pStagingMemoryUsage->store(i: m_pStagingMemoryUsage->load(m: std::memory_order_relaxed) + MemRequirements.size, m: std::memory_order_relaxed);
5675 else if(MemUsage == MEMORY_BLOCK_USAGE_STREAM)
5676 m_pStreamMemoryUsage->store(i: m_pStreamMemoryUsage->load(m: std::memory_order_relaxed) + MemRequirements.size, m: std::memory_order_relaxed);
5677
5678 if(IsVerbose())
5679 {
5680 VerboseAllocatedMemory(Size: MemRequirements.size, FrameImageIndex: m_CurImageIndex, MemUsage);
5681 }
5682
5683 if(!AllocateVulkanMemory(pAllocateInfo: &MemAllocInfo, pMemory: &VKBufferMemory.m_Mem))
5684 {
5685 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Allocation for buffer object failed.");
5686 return false;
5687 }
5688
5689 VKBufferMemory.m_UsageType = MemUsage;
5690
5691 if(vkBindBufferMemory(device: m_VKDevice, buffer: VKBuffer, memory: VKBufferMemory.m_Mem, memoryOffset: 0) != VK_SUCCESS)
5692 {
5693 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Binding memory to buffer failed.");
5694 return false;
5695 }
5696
5697 return true;
5698 }
5699
5700 [[nodiscard]] bool AllocateDescriptorPool(SDeviceDescriptorPools &DescriptorPools, size_t AllocPoolSize)
5701 {
5702 SDeviceDescriptorPool NewPool;
5703 NewPool.m_Size = AllocPoolSize;
5704
5705 VkDescriptorPoolSize PoolSize{};
5706 if(DescriptorPools.m_IsUniformPool)
5707 PoolSize.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
5708 else
5709 PoolSize.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
5710 PoolSize.descriptorCount = AllocPoolSize;
5711
5712 VkDescriptorPoolCreateInfo PoolInfo{};
5713 PoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
5714 PoolInfo.poolSizeCount = 1;
5715 PoolInfo.pPoolSizes = &PoolSize;
5716 PoolInfo.maxSets = AllocPoolSize;
5717 PoolInfo.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
5718
5719 if(vkCreateDescriptorPool(device: m_VKDevice, pCreateInfo: &PoolInfo, pAllocator: nullptr, pDescriptorPool: &NewPool.m_Pool) != VK_SUCCESS)
5720 {
5721 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the descriptor pool failed.");
5722 return false;
5723 }
5724
5725 DescriptorPools.m_vPools.push_back(x: NewPool);
5726
5727 return true;
5728 }
5729
5730 [[nodiscard]] bool CreateDescriptorPools()
5731 {
5732 m_StandardTextureDescrPool.m_IsUniformPool = false;
5733 m_StandardTextureDescrPool.m_DefaultAllocSize = 1024;
5734 m_TextTextureDescrPool.m_IsUniformPool = false;
5735 m_TextTextureDescrPool.m_DefaultAllocSize = 8;
5736
5737 m_vUniformBufferDescrPools.resize(new_size: m_ThreadCount);
5738 for(auto &UniformBufferDescrPool : m_vUniformBufferDescrPools)
5739 {
5740 UniformBufferDescrPool.m_IsUniformPool = true;
5741 UniformBufferDescrPool.m_DefaultAllocSize = 512;
5742 }
5743
5744 bool Ret = AllocateDescriptorPool(DescriptorPools&: m_StandardTextureDescrPool, AllocPoolSize: CCommandBuffer::MAX_TEXTURES);
5745 Ret |= AllocateDescriptorPool(DescriptorPools&: m_TextTextureDescrPool, AllocPoolSize: 8);
5746
5747 for(auto &UniformBufferDescrPool : m_vUniformBufferDescrPools)
5748 {
5749 Ret |= AllocateDescriptorPool(DescriptorPools&: UniformBufferDescrPool, AllocPoolSize: 64);
5750 }
5751
5752 return Ret;
5753 }
5754
5755 void DestroyDescriptorPools()
5756 {
5757 for(auto &DescrPool : m_StandardTextureDescrPool.m_vPools)
5758 vkDestroyDescriptorPool(device: m_VKDevice, descriptorPool: DescrPool.m_Pool, pAllocator: nullptr);
5759 for(auto &DescrPool : m_TextTextureDescrPool.m_vPools)
5760 vkDestroyDescriptorPool(device: m_VKDevice, descriptorPool: DescrPool.m_Pool, pAllocator: nullptr);
5761
5762 for(auto &UniformBufferDescrPool : m_vUniformBufferDescrPools)
5763 {
5764 for(auto &DescrPool : UniformBufferDescrPool.m_vPools)
5765 vkDestroyDescriptorPool(device: m_VKDevice, descriptorPool: DescrPool.m_Pool, pAllocator: nullptr);
5766 }
5767 m_vUniformBufferDescrPools.clear();
5768 }
5769
	// Reserves capacity for AllocNum descriptor sets across the given pool
	// list, growing it with new pools on demand. RetDescr receives the pool of
	// the FIRST reserved chunk; every entry in pSets is tagged with the pool
	// list and the index of the pool its slot was counted against, so
	// FreeDescriptorSetFromPool can return it later.
	// NOTE(review): when the request is split over multiple pools, RetDescr
	// only names the first pool — presumably callers allocate one set at a
	// time; verify at call sites.
	[[nodiscard]] bool GetDescriptorPoolForAlloc(VkDescriptorPool &RetDescr, SDeviceDescriptorPools &DescriptorPools, SDeviceDescriptorSet *pSets, size_t AllocNum)
	{
		size_t CurAllocNum = AllocNum;
		size_t CurAllocOffset = 0;
		RetDescr = VK_NULL_HANDLE;

		while(CurAllocNum > 0)
		{
			size_t AllocatedInThisRun = 0;

			bool Found = false;
			size_t DescriptorPoolIndex = std::numeric_limits<size_t>::max();
			for(size_t i = 0; i < DescriptorPools.m_vPools.size(); ++i)
			{
				auto &Pool = DescriptorPools.m_vPools[i];
				if(Pool.m_CurSize + CurAllocNum < Pool.m_Size)
				{
					// the whole remaining request fits into this pool
					AllocatedInThisRun = CurAllocNum;
					Pool.m_CurSize += CurAllocNum;
					Found = true;
					if(RetDescr == VK_NULL_HANDLE)
						RetDescr = Pool.m_Pool;
					DescriptorPoolIndex = i;
					break;
				}
				else
				{
					// take whatever free slots this pool still has and continue
					// with the remainder in the next iteration of the outer loop
					size_t RemainingPoolCount = Pool.m_Size - Pool.m_CurSize;
					if(RemainingPoolCount > 0)
					{
						AllocatedInThisRun = RemainingPoolCount;
						Pool.m_CurSize += RemainingPoolCount;
						Found = true;
						if(RetDescr == VK_NULL_HANDLE)
							RetDescr = Pool.m_Pool;
						DescriptorPoolIndex = i;
						break;
					}
				}
			}

			if(!Found)
			{
				// all pools are full: append a fresh pool of the default size
				DescriptorPoolIndex = DescriptorPools.m_vPools.size();

				if(!AllocateDescriptorPool(DescriptorPools, AllocPoolSize: DescriptorPools.m_DefaultAllocSize))
					return false;

				AllocatedInThisRun = minimum(a: (size_t)DescriptorPools.m_DefaultAllocSize, b: CurAllocNum);

				auto &Pool = DescriptorPools.m_vPools.back();
				Pool.m_CurSize += AllocatedInThisRun;
				if(RetDescr == VK_NULL_HANDLE)
					RetDescr = Pool.m_Pool;
			}

			// tag the sets covered by this chunk with their owning pool
			for(size_t i = CurAllocOffset; i < CurAllocOffset + AllocatedInThisRun; ++i)
			{
				pSets[i].m_pPools = &DescriptorPools;
				pSets[i].m_PoolIndex = DescriptorPoolIndex;
			}
			CurAllocOffset += AllocatedInThisRun;
			CurAllocNum -= AllocatedInThisRun;
		}

		return true;
	}
5837
5838 void FreeDescriptorSetFromPool(SDeviceDescriptorSet &DescrSet)
5839 {
5840 if(DescrSet.m_PoolIndex != std::numeric_limits<size_t>::max())
5841 {
5842 vkFreeDescriptorSets(device: m_VKDevice, descriptorPool: DescrSet.m_pPools->m_vPools[DescrSet.m_PoolIndex].m_Pool, descriptorSetCount: 1, pDescriptorSets: &DescrSet.m_Descriptor);
5843 DescrSet.m_pPools->m_vPools[DescrSet.m_PoolIndex].m_CurSize -= 1;
5844 }
5845 }
5846
5847 [[nodiscard]] bool CreateNewTexturedStandardDescriptorSets(size_t TextureSlot, size_t DescrIndex)
5848 {
5849 auto &Texture = m_vTextures[TextureSlot];
5850
5851 auto &DescrSet = Texture.m_aVKStandardTexturedDescrSets[DescrIndex];
5852
5853 VkDescriptorSetAllocateInfo DesAllocInfo{};
5854 DesAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
5855 if(!GetDescriptorPoolForAlloc(RetDescr&: DesAllocInfo.descriptorPool, DescriptorPools&: m_StandardTextureDescrPool, pSets: &DescrSet, AllocNum: 1))
5856 return false;
5857 DesAllocInfo.descriptorSetCount = 1;
5858 DesAllocInfo.pSetLayouts = &m_StandardTexturedDescriptorSetLayout;
5859
5860 if(vkAllocateDescriptorSets(device: m_VKDevice, pAllocateInfo: &DesAllocInfo, pDescriptorSets: &DescrSet.m_Descriptor) != VK_SUCCESS)
5861 {
5862 return false;
5863 }
5864
5865 VkDescriptorImageInfo ImageInfo{};
5866 ImageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
5867 ImageInfo.imageView = Texture.m_ImgView;
5868 ImageInfo.sampler = Texture.m_aSamplers[DescrIndex];
5869
5870 std::array<VkWriteDescriptorSet, 1> aDescriptorWrites{};
5871
5872 aDescriptorWrites[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
5873 aDescriptorWrites[0].dstSet = DescrSet.m_Descriptor;
5874 aDescriptorWrites[0].dstBinding = 0;
5875 aDescriptorWrites[0].dstArrayElement = 0;
5876 aDescriptorWrites[0].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
5877 aDescriptorWrites[0].descriptorCount = 1;
5878 aDescriptorWrites[0].pImageInfo = &ImageInfo;
5879
5880 vkUpdateDescriptorSets(device: m_VKDevice, descriptorWriteCount: static_cast<uint32_t>(aDescriptorWrites.size()), pDescriptorWrites: aDescriptorWrites.data(), descriptorCopyCount: 0, pDescriptorCopies: nullptr);
5881
5882 return true;
5883 }
5884
5885 void DestroyTexturedStandardDescriptorSets(CTexture &Texture, size_t DescrIndex)
5886 {
5887 auto &DescrSet = Texture.m_aVKStandardTexturedDescrSets[DescrIndex];
5888 FreeDescriptorSetFromPool(DescrSet);
5889 DescrSet = {};
5890 }
5891
5892 [[nodiscard]] bool CreateNew3DTexturedStandardDescriptorSets(size_t TextureSlot)
5893 {
5894 auto &Texture = m_vTextures[TextureSlot];
5895
5896 auto &DescrSet = Texture.m_VKStandard3DTexturedDescrSet;
5897
5898 VkDescriptorSetAllocateInfo DesAllocInfo{};
5899 DesAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
5900 if(!GetDescriptorPoolForAlloc(RetDescr&: DesAllocInfo.descriptorPool, DescriptorPools&: m_StandardTextureDescrPool, pSets: &DescrSet, AllocNum: 1))
5901 return false;
5902 DesAllocInfo.descriptorSetCount = 1;
5903 DesAllocInfo.pSetLayouts = &m_Standard3DTexturedDescriptorSetLayout;
5904
5905 if(vkAllocateDescriptorSets(device: m_VKDevice, pAllocateInfo: &DesAllocInfo, pDescriptorSets: &DescrSet.m_Descriptor) != VK_SUCCESS)
5906 {
5907 return false;
5908 }
5909
5910 VkDescriptorImageInfo ImageInfo{};
5911 ImageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
5912 ImageInfo.imageView = Texture.m_Img3DView;
5913 ImageInfo.sampler = Texture.m_Sampler3D;
5914
5915 std::array<VkWriteDescriptorSet, 1> aDescriptorWrites{};
5916
5917 aDescriptorWrites[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
5918 aDescriptorWrites[0].dstSet = DescrSet.m_Descriptor;
5919 aDescriptorWrites[0].dstBinding = 0;
5920 aDescriptorWrites[0].dstArrayElement = 0;
5921 aDescriptorWrites[0].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
5922 aDescriptorWrites[0].descriptorCount = 1;
5923 aDescriptorWrites[0].pImageInfo = &ImageInfo;
5924
5925 vkUpdateDescriptorSets(device: m_VKDevice, descriptorWriteCount: static_cast<uint32_t>(aDescriptorWrites.size()), pDescriptorWrites: aDescriptorWrites.data(), descriptorCopyCount: 0, pDescriptorCopies: nullptr);
5926
5927 return true;
5928 }
5929
5930 void DestroyTextured3DStandardDescriptorSets(CTexture &Texture)
5931 {
5932 auto &DescrSet = Texture.m_VKStandard3DTexturedDescrSet;
5933 FreeDescriptorSetFromPool(DescrSet);
5934 }
5935
	// Allocates the text-rendering descriptor set, which binds the glyph
	// texture at binding 0 and its outline texture at binding 1. The set is
	// stored on the text texture (m_VKTextDescrSet); the outline texture does
	// not get its own set.
	[[nodiscard]] bool CreateNewTextDescriptorSets(size_t Texture, size_t TextureOutline)
	{
		auto &TextureText = m_vTextures[Texture];
		auto &TextureTextOutline = m_vTextures[TextureOutline];
		auto &DescrSetText = TextureText.m_VKTextDescrSet;

		VkDescriptorSetAllocateInfo DesAllocInfo{};
		DesAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
		if(!GetDescriptorPoolForAlloc(RetDescr&: DesAllocInfo.descriptorPool, DescriptorPools&: m_TextTextureDescrPool, pSets: &DescrSetText, AllocNum: 1))
			return false;
		DesAllocInfo.descriptorSetCount = 1;
		DesAllocInfo.pSetLayouts = &m_TextDescriptorSetLayout;

		if(vkAllocateDescriptorSets(device: m_VKDevice, pAllocateInfo: &DesAllocInfo, pDescriptorSets: &DescrSetText.m_Descriptor) != VK_SUCCESS)
		{
			return false;
		}

		std::array<VkDescriptorImageInfo, 2> aImageInfo{};
		aImageInfo[0].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
		aImageInfo[0].imageView = TextureText.m_ImgView;
		aImageInfo[0].sampler = TextureText.m_aSamplers[0];
		aImageInfo[1].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
		aImageInfo[1].imageView = TextureTextOutline.m_ImgView;
		aImageInfo[1].sampler = TextureTextOutline.m_aSamplers[0];

		std::array<VkWriteDescriptorSet, 2> aDescriptorWrites{};

		aDescriptorWrites[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		aDescriptorWrites[0].dstSet = DescrSetText.m_Descriptor;
		aDescriptorWrites[0].dstBinding = 0;
		aDescriptorWrites[0].dstArrayElement = 0;
		aDescriptorWrites[0].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
		aDescriptorWrites[0].descriptorCount = 1;
		aDescriptorWrites[0].pImageInfo = aImageInfo.data();
		// the second write copies the first (same set, same type), then retargets
		// binding 1 and the outline image info
		aDescriptorWrites[1] = aDescriptorWrites[0];
		aDescriptorWrites[1].dstBinding = 1;
		aDescriptorWrites[1].pImageInfo = &aImageInfo[1];

		vkUpdateDescriptorSets(device: m_VKDevice, descriptorWriteCount: static_cast<uint32_t>(aDescriptorWrites.size()), pDescriptorWrites: aDescriptorWrites.data(), descriptorCopyCount: 0, pDescriptorCopies: nullptr);

		return true;
	}
5979
5980 void DestroyTextDescriptorSets(CTexture &Texture, CTexture &TextureOutline)
5981 {
5982 auto &DescrSet = Texture.m_VKTextDescrSet;
5983 FreeDescriptorSetFromPool(DescrSet);
5984 }
5985
5986 [[nodiscard]] bool HasMultiSampling() const
5987 {
5988 return GetSampleCount() != VK_SAMPLE_COUNT_1_BIT;
5989 }
5990
5991 VkSampleCountFlagBits GetMaxSampleCount() const
5992 {
5993 if(m_MaxMultiSample & VK_SAMPLE_COUNT_64_BIT)
5994 return VK_SAMPLE_COUNT_64_BIT;
5995 else if(m_MaxMultiSample & VK_SAMPLE_COUNT_32_BIT)
5996 return VK_SAMPLE_COUNT_32_BIT;
5997 else if(m_MaxMultiSample & VK_SAMPLE_COUNT_16_BIT)
5998 return VK_SAMPLE_COUNT_16_BIT;
5999 else if(m_MaxMultiSample & VK_SAMPLE_COUNT_8_BIT)
6000 return VK_SAMPLE_COUNT_8_BIT;
6001 else if(m_MaxMultiSample & VK_SAMPLE_COUNT_4_BIT)
6002 return VK_SAMPLE_COUNT_4_BIT;
6003 else if(m_MaxMultiSample & VK_SAMPLE_COUNT_2_BIT)
6004 return VK_SAMPLE_COUNT_2_BIT;
6005
6006 return VK_SAMPLE_COUNT_1_BIT;
6007 }
6008
6009 VkSampleCountFlagBits GetSampleCount() const
6010 {
6011 auto MaxSampleCount = GetMaxSampleCount();
6012 if(m_MultiSamplingCount >= 64 && MaxSampleCount >= VK_SAMPLE_COUNT_64_BIT)
6013 return VK_SAMPLE_COUNT_64_BIT;
6014 else if(m_MultiSamplingCount >= 32 && MaxSampleCount >= VK_SAMPLE_COUNT_32_BIT)
6015 return VK_SAMPLE_COUNT_32_BIT;
6016 else if(m_MultiSamplingCount >= 16 && MaxSampleCount >= VK_SAMPLE_COUNT_16_BIT)
6017 return VK_SAMPLE_COUNT_16_BIT;
6018 else if(m_MultiSamplingCount >= 8 && MaxSampleCount >= VK_SAMPLE_COUNT_8_BIT)
6019 return VK_SAMPLE_COUNT_8_BIT;
6020 else if(m_MultiSamplingCount >= 4 && MaxSampleCount >= VK_SAMPLE_COUNT_4_BIT)
6021 return VK_SAMPLE_COUNT_4_BIT;
6022 else if(m_MultiSamplingCount >= 2 && MaxSampleCount >= VK_SAMPLE_COUNT_2_BIT)
6023 return VK_SAMPLE_COUNT_2_BIT;
6024
6025 return VK_SAMPLE_COUNT_1_BIT;
6026 }
6027
6028 int InitVulkanSwapChain(VkSwapchainKHR &OldSwapChain)
6029 {
6030 OldSwapChain = VK_NULL_HANDLE;
6031 if(!CreateSwapChain(OldSwapChain))
6032 return -1;
6033
6034 if(!GetSwapChainImageHandles())
6035 return -1;
6036
6037 if(!CreateImageViews())
6038 return -1;
6039
6040 if(!CreateMultiSamplerImageAttachments())
6041 {
6042 return -1;
6043 }
6044
6045 m_LastPresentedSwapChainImageIndex = std::numeric_limits<decltype(m_LastPresentedSwapChainImageIndex)>::max();
6046
6047 if(!CreateRenderPass(ClearAttachments: true))
6048 return -1;
6049
6050 if(!CreateFramebuffers())
6051 return -1;
6052
6053 if(!CreateStandardGraphicsPipeline(pVertName: "shader/vulkan/prim.vert.spv", pFragName: "shader/vulkan/prim.frag.spv", HasSampler: false, IsLinePipe: false))
6054 return -1;
6055
6056 if(!CreateStandardGraphicsPipeline(pVertName: "shader/vulkan/prim_textured.vert.spv", pFragName: "shader/vulkan/prim_textured.frag.spv", HasSampler: true, IsLinePipe: false))
6057 return -1;
6058
6059 if(!CreateStandardGraphicsPipeline(pVertName: "shader/vulkan/prim.vert.spv", pFragName: "shader/vulkan/prim.frag.spv", HasSampler: false, IsLinePipe: true))
6060 return -1;
6061
6062 if(!CreateStandard3DGraphicsPipeline(pVertName: "shader/vulkan/prim3d.vert.spv", pFragName: "shader/vulkan/prim3d.frag.spv", HasSampler: false))
6063 return -1;
6064
6065 if(!CreateStandard3DGraphicsPipeline(pVertName: "shader/vulkan/prim3d_textured.vert.spv", pFragName: "shader/vulkan/prim3d_textured.frag.spv", HasSampler: true))
6066 return -1;
6067
6068 if(!CreateTextGraphicsPipeline(pVertName: "shader/vulkan/text.vert.spv", pFragName: "shader/vulkan/text.frag.spv"))
6069 return -1;
6070
6071 if(!CreateTileGraphicsPipeline<false>(pVertName: "shader/vulkan/tile.vert.spv", pFragName: "shader/vulkan/tile.frag.spv", IsBorder: false))
6072 return -1;
6073
6074 if(!CreateTileGraphicsPipeline<true>(pVertName: "shader/vulkan/tile_textured.vert.spv", pFragName: "shader/vulkan/tile_textured.frag.spv", IsBorder: false))
6075 return -1;
6076
6077 if(!CreateTileGraphicsPipeline<false>(pVertName: "shader/vulkan/tile_border.vert.spv", pFragName: "shader/vulkan/tile_border.frag.spv", IsBorder: true))
6078 return -1;
6079
6080 if(!CreateTileGraphicsPipeline<true>(pVertName: "shader/vulkan/tile_border_textured.vert.spv", pFragName: "shader/vulkan/tile_border_textured.frag.spv", IsBorder: true))
6081 return -1;
6082
6083 if(!CreatePrimExGraphicsPipeline(pVertName: "shader/vulkan/primex_rotationless.vert.spv", pFragName: "shader/vulkan/primex_rotationless.frag.spv", HasSampler: false, Rotationless: true))
6084 return -1;
6085
6086 if(!CreatePrimExGraphicsPipeline(pVertName: "shader/vulkan/primex_tex_rotationless.vert.spv", pFragName: "shader/vulkan/primex_tex_rotationless.frag.spv", HasSampler: true, Rotationless: true))
6087 return -1;
6088
6089 if(!CreatePrimExGraphicsPipeline(pVertName: "shader/vulkan/primex.vert.spv", pFragName: "shader/vulkan/primex.frag.spv", HasSampler: false, Rotationless: false))
6090 return -1;
6091
6092 if(!CreatePrimExGraphicsPipeline(pVertName: "shader/vulkan/primex_tex.vert.spv", pFragName: "shader/vulkan/primex_tex.frag.spv", HasSampler: true, Rotationless: false))
6093 return -1;
6094
6095 if(!CreateSpriteMultiGraphicsPipeline(pVertName: "shader/vulkan/spritemulti.vert.spv", pFragName: "shader/vulkan/spritemulti.frag.spv"))
6096 return -1;
6097
6098 if(!CreateSpriteMultiPushGraphicsPipeline(pVertName: "shader/vulkan/spritemulti_push.vert.spv", pFragName: "shader/vulkan/spritemulti_push.frag.spv"))
6099 return -1;
6100
6101 if(!CreateQuadGraphicsPipeline<false>(pVertName: "shader/vulkan/quad.vert.spv", pFragName: "shader/vulkan/quad.frag.spv"))
6102 return -1;
6103
6104 if(!CreateQuadGraphicsPipeline<true>(pVertName: "shader/vulkan/quad_textured.vert.spv", pFragName: "shader/vulkan/quad_textured.frag.spv"))
6105 return -1;
6106
6107 if(!CreateQuadGroupedGraphicsPipeline<false>(pVertName: "shader/vulkan/quad_grouped.vert.spv", pFragName: "shader/vulkan/quad_grouped.frag.spv"))
6108 return -1;
6109
6110 if(!CreateQuadGroupedGraphicsPipeline<true>(pVertName: "shader/vulkan/quad_grouped_textured.vert.spv", pFragName: "shader/vulkan/quad_grouped_textured.frag.spv"))
6111 return -1;
6112
6113 m_SwapchainCreated = true;
6114 return 0;
6115 }
6116
6117 template<bool IsFirstInitialization>
6118 int InitVulkan()
6119 {
6120 if(IsFirstInitialization)
6121 {
6122 if(!CreateDescriptorSetLayouts())
6123 return -1;
6124
6125 if(!CreateTextDescriptorSetLayout())
6126 return -1;
6127
6128 if(!CreateSpriteMultiUniformDescriptorSetLayout())
6129 return -1;
6130
6131 if(!CreateQuadUniformDescriptorSetLayout())
6132 return -1;
6133
6134 VkSwapchainKHR OldSwapChain = VK_NULL_HANDLE;
6135 if(InitVulkanSwapChain(OldSwapChain) != 0)
6136 return -1;
6137 }
6138
6139 if(IsFirstInitialization)
6140 {
6141 if(!CreateCommandPool())
6142 return -1;
6143 }
6144
6145 if(!CreateCommandBuffers())
6146 return -1;
6147
6148 if(!CreateSyncObjects())
6149 return -1;
6150
6151 if(IsFirstInitialization)
6152 {
6153 if(!CreateDescriptorPools())
6154 return -1;
6155
6156 if(!CreateTextureSamplers())
6157 return -1;
6158 }
6159
6160 m_vStreamedVertexBuffers.resize(new_size: m_ThreadCount);
6161 m_vStreamedUniformBuffers.resize(new_size: m_ThreadCount);
6162 for(size_t i = 0; i < m_ThreadCount; ++i)
6163 {
6164 m_vStreamedVertexBuffers[i].Init(FrameImageCount: m_SwapChainImageCount);
6165 m_vStreamedUniformBuffers[i].Init(FrameImageCount: m_SwapChainImageCount);
6166 }
6167
6168 m_vLastPipeline.resize(new_size: m_ThreadCount, VK_NULL_HANDLE);
6169
6170 m_vvFrameDelayedBufferCleanup.resize(new_size: m_SwapChainImageCount);
6171 m_vvFrameDelayedTextureCleanup.resize(new_size: m_SwapChainImageCount);
6172 m_vvFrameDelayedTextTexturesCleanup.resize(new_size: m_SwapChainImageCount);
6173 m_StagingBufferCache.Init(SwapChainImageCount: m_SwapChainImageCount);
6174 m_StagingBufferCacheImage.Init(SwapChainImageCount: m_SwapChainImageCount);
6175 m_VertexBufferCache.Init(SwapChainImageCount: m_SwapChainImageCount);
6176 for(auto &ImageBufferCache : m_ImageBufferCaches)
6177 ImageBufferCache.second.Init(SwapChainImageCount: m_SwapChainImageCount);
6178
6179 m_vImageLastFrameCheck.resize(new_size: m_SwapChainImageCount, x: 0);
6180
6181 if(IsFirstInitialization)
6182 {
6183 // check if image format supports linear blitting
6184 VkFormatProperties FormatProperties;
6185 vkGetPhysicalDeviceFormatProperties(physicalDevice: m_VKGPU, format: VK_FORMAT_R8G8B8A8_UNORM, pFormatProperties: &FormatProperties);
6186 if((FormatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) != 0)
6187 {
6188 m_AllowsLinearBlitting = true;
6189 }
6190 if((FormatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_SRC_BIT) != 0 && (FormatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_DST_BIT) != 0)
6191 {
6192 m_OptimalRGBAImageBlitting = true;
6193 }
6194 // check if image format supports blitting to linear tiled images
6195 if((FormatProperties.linearTilingFeatures & VK_FORMAT_FEATURE_BLIT_DST_BIT) != 0)
6196 {
6197 m_LinearRGBAImageBlitting = true;
6198 }
6199
6200 vkGetPhysicalDeviceFormatProperties(physicalDevice: m_VKGPU, format: m_VKSurfFormat.format, pFormatProperties: &FormatProperties);
6201 if((FormatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_SRC_BIT) != 0)
6202 {
6203 m_OptimalSwapChainImageBlitting = true;
6204 }
6205 }
6206
6207 return 0;
6208 }
6209
6210 [[nodiscard]] bool GetMemoryCommandBuffer(VkCommandBuffer *&pMemCommandBuffer)
6211 {
6212 auto &MemCommandBuffer = m_vMemoryCommandBuffers[m_CurImageIndex];
6213 if(!m_vUsedMemoryCommandBuffer[m_CurImageIndex])
6214 {
6215 m_vUsedMemoryCommandBuffer[m_CurImageIndex] = true;
6216
6217 vkResetCommandBuffer(commandBuffer: MemCommandBuffer, flags: VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
6218
6219 VkCommandBufferBeginInfo BeginInfo{};
6220 BeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
6221 BeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
6222 if(vkBeginCommandBuffer(commandBuffer: MemCommandBuffer, pBeginInfo: &BeginInfo) != VK_SUCCESS)
6223 {
6224 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_RENDER_RECORDING, pErr: "Command buffer cannot be filled anymore.");
6225 return false;
6226 }
6227 }
6228 pMemCommandBuffer = &MemCommandBuffer;
6229 return true;
6230 }
6231
6232 [[nodiscard]] bool GetGraphicCommandBuffer(VkCommandBuffer *&pDrawCommandBuffer, size_t RenderThreadIndex)
6233 {
6234 if(m_ThreadCount < 2)
6235 {
6236 pDrawCommandBuffer = &m_vMainDrawCommandBuffers[m_CurImageIndex];
6237 return true;
6238 }
6239 else
6240 {
6241 auto &DrawCommandBuffer = m_vvThreadDrawCommandBuffers[RenderThreadIndex][m_CurImageIndex];
6242 if(!m_vvUsedThreadDrawCommandBuffer[RenderThreadIndex][m_CurImageIndex])
6243 {
6244 m_vvUsedThreadDrawCommandBuffer[RenderThreadIndex][m_CurImageIndex] = true;
6245
6246 vkResetCommandBuffer(commandBuffer: DrawCommandBuffer, flags: VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
6247
6248 VkCommandBufferBeginInfo BeginInfo{};
6249 BeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
6250 BeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
6251
6252 VkCommandBufferInheritanceInfo InheritanceInfo{};
6253 InheritanceInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
6254 InheritanceInfo.framebuffer = m_vFramebufferList[m_CurImageIndex];
6255 InheritanceInfo.occlusionQueryEnable = VK_FALSE;
6256 InheritanceInfo.renderPass = m_VKRenderPass;
6257 InheritanceInfo.subpass = 0;
6258
6259 BeginInfo.pInheritanceInfo = &InheritanceInfo;
6260
6261 if(vkBeginCommandBuffer(commandBuffer: DrawCommandBuffer, pBeginInfo: &BeginInfo) != VK_SUCCESS)
6262 {
6263 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_RENDER_RECORDING, pErr: "Thread draw command buffer cannot be filled anymore.");
6264 return false;
6265 }
6266 }
6267 pDrawCommandBuffer = &DrawCommandBuffer;
6268 return true;
6269 }
6270 }
6271
6272 VkCommandBuffer &GetMainGraphicCommandBuffer()
6273 {
6274 return m_vMainDrawCommandBuffers[m_CurImageIndex];
6275 }
6276
6277 /************************
6278 * STREAM BUFFERS SETUP
6279 ************************/
6280
6281 typedef std::function<bool(SFrameBuffers &, VkBuffer, VkDeviceSize)> TNewMemFunc;
6282
	// returns true on success; hands out a slice of mapped per-frame stream
	// memory, allocating a new buffer chunk when no existing one has room
	// Suballocates DataSize bytes of per-frame stream memory and copies pData
	// into it. First scans the current swap chain image's buffer list for one
	// with enough free room; otherwise allocates a fresh host-visible chunk of
	// BufferCreateCount sub-buffers (InstanceTypeCount instances each), maps it
	// persistently and calls NewMemFunc once per new sub-buffer (e.g. to create
	// descriptor sets). Outputs: pBufferMem (the chosen sub-buffer),
	// NewBuffer/NewBufferMem (Vulkan buffer + backing memory) and BufferOffset
	// (byte offset of the written data inside the buffer).
	// UsesCurrentCountOffset starts the scan at the first not-yet-full buffer
	// instead of from the beginning.
	template<typename TStreamMemName, typename TInstanceTypeName, size_t InstanceTypeCount, size_t BufferCreateCount, bool UsesCurrentCountOffset>
	[[nodiscard]] bool CreateStreamBuffer(TStreamMemName *&pBufferMem, TNewMemFunc &&NewMemFunc, SStreamMemory<TStreamMemName> &StreamUniformBuffer, VkBufferUsageFlagBits Usage, VkBuffer &NewBuffer, SDeviceMemoryBlock &NewBufferMem, size_t &BufferOffset, const void *pData, size_t DataSize)
	{
		VkBuffer Buffer = VK_NULL_HANDLE;
		SDeviceMemoryBlock BufferMem;
		size_t Offset = 0;

		uint8_t *pMem = nullptr;

		// try to reuse a buffer of the current frame that still has room
		size_t BufferCountOffset = 0;
		if(UsesCurrentCountOffset)
			BufferCountOffset = StreamUniformBuffer.GetUsedCount(m_CurImageIndex);
		for(; BufferCountOffset < StreamUniformBuffer.GetBuffers(m_CurImageIndex).size(); ++BufferCountOffset)
		{
			auto &BufferOfFrame = StreamUniformBuffer.GetBuffers(m_CurImageIndex)[BufferCountOffset];
			if(BufferOfFrame.m_Size >= DataSize + BufferOfFrame.m_UsedSize)
			{
				// first write into this buffer marks it as used for this frame
				if(BufferOfFrame.m_UsedSize == 0)
					StreamUniformBuffer.IncreaseUsedCount(m_CurImageIndex);
				Buffer = BufferOfFrame.m_Buffer;
				BufferMem = BufferOfFrame.m_BufferMem;
				Offset = BufferOfFrame.m_UsedSize;
				BufferOfFrame.m_UsedSize += DataSize;
				pMem = BufferOfFrame.m_pMappedBufferData;
				pBufferMem = &BufferOfFrame;
				break;
			}
		}

		if(BufferMem.m_Mem == VK_NULL_HANDLE)
		{
			// create memory: no existing buffer had room, allocate a fresh
			// chunk and split it into BufferCreateCount sub-buffers
			VkBuffer StreamBuffer;
			SDeviceMemoryBlock StreamBufferMemory;
			const VkDeviceSize NewBufferSingleSize = sizeof(TInstanceTypeName) * InstanceTypeCount;
			const VkDeviceSize NewBufferSize = NewBufferSingleSize * BufferCreateCount;
			if(!CreateBuffer(BufferSize: NewBufferSize, MemUsage: MEMORY_BLOCK_USAGE_STREAM, BufferUsage: Usage, MemoryProperties: VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT, VKBuffer&: StreamBuffer, VKBufferMemory&: StreamBufferMemory))
				return false;

			// persistently mapped for the lifetime of the chunk
			void *pMappedData = nullptr;
			if(vkMapMemory(device: m_VKDevice, memory: StreamBufferMemory.m_Mem, offset: 0, VK_WHOLE_SIZE, flags: 0, ppData: &pMappedData) != VK_SUCCESS)
				return false;

			size_t NewBufferIndex = StreamUniformBuffer.GetBuffers(m_CurImageIndex).size();
			for(size_t i = 0; i < BufferCreateCount; ++i)
			{
				StreamUniformBuffer.GetBuffers(m_CurImageIndex).push_back(TStreamMemName(StreamBuffer, StreamBufferMemory, NewBufferSingleSize * i, NewBufferSingleSize, 0, ((uint8_t *)pMappedData) + (NewBufferSingleSize * i)));
				StreamUniformBuffer.GetRanges(m_CurImageIndex).push_back({});
				// per-sub-buffer hook, e.g. descriptor set creation
				if(!NewMemFunc(StreamUniformBuffer.GetBuffers(m_CurImageIndex).back(), StreamBuffer, NewBufferSingleSize * i))
					return false;
			}
			auto &NewStreamBuffer = StreamUniformBuffer.GetBuffers(m_CurImageIndex)[NewBufferIndex];

			// hand out the first sub-buffer of the new chunk
			Buffer = StreamBuffer;
			BufferMem = StreamBufferMemory;

			pBufferMem = &NewStreamBuffer;
			pMem = NewStreamBuffer.m_pMappedBufferData;
			Offset = NewStreamBuffer.m_OffsetInBuffer;
			NewStreamBuffer.m_UsedSize += DataSize;

			StreamUniformBuffer.IncreaseUsedCount(m_CurImageIndex);
		}

		// Offset here is the offset in the buffer
		if(BufferMem.m_Size - Offset < DataSize)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Stream buffers are limited to CCommandBuffer::MAX_VERTICES. Exceeding it is a bug in the high level code.");
			return false;
		}

		{
			// copy into the persistently mapped memory at the claimed offset
			mem_copy(dest: pMem + Offset, source: pData, size: DataSize);
		}

		NewBuffer = Buffer;
		NewBufferMem = BufferMem;
		BufferOffset = Offset;

		return true;
	}
6365
6366 [[nodiscard]] bool CreateStreamVertexBuffer(size_t RenderThreadIndex, VkBuffer &NewBuffer, SDeviceMemoryBlock &NewBufferMem, size_t &BufferOffset, const void *pData, size_t DataSize)
6367 {
6368 SFrameBuffers *pStreamBuffer;
6369 return CreateStreamBuffer<SFrameBuffers, GL_SVertexTex3DStream, CCommandBuffer::MAX_VERTICES * 2, 1, false>(
6370 pBufferMem&: pStreamBuffer, NewMemFunc: [](SFrameBuffers &, VkBuffer, VkDeviceSize) { return true; }, StreamUniformBuffer&: m_vStreamedVertexBuffers[RenderThreadIndex], Usage: VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, NewBuffer, NewBufferMem, BufferOffset, pData, DataSize);
6371 }
6372
	// Uploads uniform data into per-frame stream memory and returns the
	// descriptor set bound to the claimed sub-buffer. Slot 0 of m_aUniformSets
	// is created with the sprite-multi layout, slot 1 with the quad layout;
	// RequiresSharedStagesDescriptor selects which one the caller gets.
	// NOTE(review): the callback receives an SFrameBuffers& but treats it as
	// SFrameUniformBuffers — this holds as long as the stream memory only ever
	// contains SFrameUniformBuffers entries; confirm before reusing elsewhere.
	template<typename TName, size_t InstanceMaxParticleCount, size_t MaxInstances>
	[[nodiscard]] bool GetUniformBufferObjectImpl(size_t RenderThreadIndex, bool RequiresSharedStagesDescriptor, SStreamMemory<SFrameUniformBuffers> &StreamUniformBuffer, SDeviceDescriptorSet &DescrSet, const void *pData, size_t DataSize)
	{
		VkBuffer NewBuffer;
		SDeviceMemoryBlock NewBufferMem;
		size_t BufferOffset;
		SFrameUniformBuffers *pMem;
		if(!CreateStreamBuffer<SFrameUniformBuffers, TName, InstanceMaxParticleCount, MaxInstances, true>(
			   pMem,
			   // runs once per newly created sub-buffer: create both descriptor sets
			   [this, RenderThreadIndex](SFrameBuffers &Mem, VkBuffer Buffer, VkDeviceSize MemOffset) {
				   if(!CreateUniformDescriptorSets(RenderThreadIndex, SetLayout&: m_SpriteMultiUniformDescriptorSetLayout, pSets: ((SFrameUniformBuffers *)(&Mem))->m_aUniformSets.data(), SetCount: 1, BindBuffer: Buffer, SingleBufferInstanceSize: InstanceMaxParticleCount * sizeof(TName), MemoryOffset: MemOffset))
					   return false;
				   if(!CreateUniformDescriptorSets(RenderThreadIndex, SetLayout&: m_QuadUniformDescriptorSetLayout, pSets: &((SFrameUniformBuffers *)(&Mem))->m_aUniformSets[1], SetCount: 1, BindBuffer: Buffer, SingleBufferInstanceSize: InstanceMaxParticleCount * sizeof(TName), MemoryOffset: MemOffset))
					   return false;
				   return true;
			   },
			   StreamUniformBuffer, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, NewBuffer, NewBufferMem, BufferOffset, pData, DataSize))
			return false;

		DescrSet = pMem->m_aUniformSets[RequiresSharedStagesDescriptor ? 1 : 0];
		return true;
	}
6395
	// Uploads sprite-render uniform data into this thread's per-frame stream
	// memory and returns the matching descriptor set (quad layout when
	// RequiresSharedStagesDescriptor is set, sprite-multi layout otherwise).
	// NOTE(review): ParticleCount is not used here — the capacity is fixed by
	// the 512/128 template arguments; confirm callers expect that.
	[[nodiscard]] bool GetUniformBufferObject(size_t RenderThreadIndex, bool RequiresSharedStagesDescriptor, SDeviceDescriptorSet &DescrSet, size_t ParticleCount, const void *pData, size_t DataSize)
	{
		return GetUniformBufferObjectImpl<IGraphics::SRenderSpriteInfo, 512, 128>(RenderThreadIndex, RequiresSharedStagesDescriptor, StreamUniformBuffer&: m_vStreamedUniformBuffers[RenderThreadIndex], DescrSet, pData, DataSize);
	}
6400
6401 [[nodiscard]] bool CreateIndexBuffer(void *pData, size_t DataSize, VkBuffer &Buffer, SDeviceMemoryBlock &Memory)
6402 {
6403 VkDeviceSize BufferDataSize = DataSize;
6404
6405 SMemoryBlock<STAGING_BUFFER_CACHE_ID> StagingBuffer;
6406 if(!GetStagingBuffer(ResBlock&: StagingBuffer, pBufferData: pData, RequiredSize: DataSize))
6407 return false;
6408
6409 SDeviceMemoryBlock VertexBufferMemory;
6410 VkBuffer VertexBuffer;
6411 if(!CreateBuffer(BufferSize: BufferDataSize, MemUsage: MEMORY_BLOCK_USAGE_BUFFER, BufferUsage: VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT, MemoryProperties: VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, VKBuffer&: VertexBuffer, VKBufferMemory&: VertexBufferMemory))
6412 return false;
6413
6414 if(!MemoryBarrier(Buffer: VertexBuffer, Offset: 0, Size: BufferDataSize, BufferAccessType: VK_ACCESS_INDEX_READ_BIT, BeforeCommand: true))
6415 return false;
6416 if(!CopyBuffer(SrcBuffer: StagingBuffer.m_Buffer, DstBuffer: VertexBuffer, SrcOffset: StagingBuffer.m_HeapData.m_OffsetToAlign, DstOffset: 0, CopySize: BufferDataSize))
6417 return false;
6418 if(!MemoryBarrier(Buffer: VertexBuffer, Offset: 0, Size: BufferDataSize, BufferAccessType: VK_ACCESS_INDEX_READ_BIT, BeforeCommand: false))
6419 return false;
6420
6421 UploadAndFreeStagingMemBlock(Block&: StagingBuffer);
6422
6423 Buffer = VertexBuffer;
6424 Memory = VertexBufferMemory;
6425 return true;
6426 }
6427
6428 void DestroyIndexBuffer(VkBuffer &Buffer, SDeviceMemoryBlock &Memory)
6429 {
6430 CleanBufferPair(ImageIndex: 0, Buffer, BufferMem&: Memory);
6431 }
6432
6433 /************************
6434 * COMMAND IMPLEMENTATION
6435 ************************/
6436 template<typename TName>
6437 [[nodiscard]] static bool IsInCommandRange(TName CMD, TName Min, TName Max)
6438 {
6439 return CMD >= Min && CMD < Max;
6440 }
6441
	// Dispatches one command from the command buffer. Render commands in the
	// CMD_FIRST..CMD_COUNT range go through the callback table and may be
	// forwarded to a render thread; the remaining lifecycle commands
	// (init/shutdown) are handled inline in the switch below.
	[[nodiscard]] ERunCommandReturnTypes RunCommand(const CCommandBuffer::SCommand *pBaseCommand) override
	{
		if(m_HasError)
		{
			// ignore all further commands
			return ERunCommandReturnTypes::RUN_COMMAND_COMMAND_ERROR;
		}

		if(IsInCommandRange<decltype(pBaseCommand->m_Cmd)>(CMD: pBaseCommand->m_Cmd, Min: CCommandBuffer::CMD_FIRST, Max: CCommandBuffer::CMD_COUNT))
		{
			auto &CallbackObj = m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::ECommandBufferCMD(pBaseCommand->m_Cmd))];
			SRenderCommandExecuteBuffer Buffer;
			Buffer.m_Command = (CCommandBuffer::ECommandBufferCMD)pBaseCommand->m_Cmd;
			Buffer.m_pRawCommand = pBaseCommand;
			Buffer.m_ThreadIndex = 0;

			// last command of the pipe: reset the thread distribution state
			if(m_CurCommandInPipe + 1 == m_CommandsInPipe)
			{
				m_LastCommandsInPipeThreadIndex = std::numeric_limits<decltype(m_LastCommandsInPipeThreadIndex)>::max();
			}

			bool CanStartThread = false;
			if(CallbackObj.m_IsRenderCommand)
			{
				// max() sentinel means no thread has been chosen yet this pipe
				bool ForceSingleThread = m_LastCommandsInPipeThreadIndex == std::numeric_limits<decltype(m_LastCommandsInPipeThreadIndex)>::max();

				// spread the pipe's commands evenly over the worker threads;
				// only advance (never go back) to a later thread index
				size_t PotentiallyNextThread = (((m_CurCommandInPipe * (m_ThreadCount - 1)) / m_CommandsInPipe) + 1);
				if(PotentiallyNextThread - 1 > m_LastCommandsInPipeThreadIndex)
				{
					CanStartThread = true;
					m_LastCommandsInPipeThreadIndex = PotentiallyNextThread - 1;
				}
				Buffer.m_ThreadIndex = m_ThreadCount > 1 && !ForceSingleThread ? (m_LastCommandsInPipeThreadIndex + 1) : 0;
				CallbackObj.m_FillExecuteBuffer(Buffer, pBaseCommand);
				m_CurRenderCallCountInPipe += Buffer.m_EstimatedRenderCallCount;
			}
			bool Ret = true;
			// thread index 0 (or a non-render command) executes on this thread
			if(!CallbackObj.m_IsRenderCommand || (Buffer.m_ThreadIndex == 0 && !m_RenderingPaused))
			{
				Ret = CallbackObj.m_CMDIsHandled;
				if(!CallbackObj.m_CommandCB(pBaseCommand, Buffer))
				{
					// an error occurred, stop this command and ignore all further commands
					return ERunCommandReturnTypes::RUN_COMMAND_COMMAND_ERROR;
				}
			}
			else if(!m_RenderingPaused)
			{
				// queue the command for its render thread, waking the thread
				// the first time it receives work this pipe
				if(CanStartThread)
				{
					StartRenderThread(ThreadIndex: m_LastCommandsInPipeThreadIndex - 1);
				}
				m_vvThreadCommandLists[Buffer.m_ThreadIndex - 1].push_back(x: Buffer);
			}

			++m_CurCommandInPipe;
			return Ret ? ERunCommandReturnTypes::RUN_COMMAND_COMMAND_HANDLED : ERunCommandReturnTypes::RUN_COMMAND_COMMAND_UNHANDLED;
		}

		// non-render commands also advance the pipe counters
		if(m_CurCommandInPipe + 1 == m_CommandsInPipe)
		{
			m_LastCommandsInPipeThreadIndex = std::numeric_limits<decltype(m_LastCommandsInPipeThreadIndex)>::max();
		}
		++m_CurCommandInPipe;

		switch(pBaseCommand->m_Cmd)
		{
		case CCommandProcessorFragment_GLBase::CMD_INIT:
			if(!Cmd_Init(pCommand: static_cast<const SCommand_Init *>(pBaseCommand)))
			{
				SetWarningPreMsg("Could not initialize Vulkan: ");
				return RUN_COMMAND_COMMAND_WARNING;
			}
			break;
		case CCommandProcessorFragment_GLBase::CMD_SHUTDOWN:
			if(!Cmd_Shutdown(pCommand: static_cast<const SCommand_Shutdown *>(pBaseCommand)))
			{
				SetWarningPreMsg("Could not shutdown Vulkan: ");
				return RUN_COMMAND_COMMAND_WARNING;
			}
			break;

		case CCommandProcessorFragment_GLBase::CMD_PRE_INIT:
			if(!Cmd_PreInit(pCommand: static_cast<const CCommandProcessorFragment_GLBase::SCommand_PreInit *>(pBaseCommand)))
			{
				SetWarningPreMsg("Could not initialize Vulkan: ");
				return RUN_COMMAND_COMMAND_WARNING;
			}
			break;
		case CCommandProcessorFragment_GLBase::CMD_POST_SHUTDOWN:
			if(!Cmd_PostShutdown(pCommand: static_cast<const CCommandProcessorFragment_GLBase::SCommand_PostShutdown *>(pBaseCommand)))
			{
				SetWarningPreMsg("Could not shutdown Vulkan: ");
				return RUN_COMMAND_COMMAND_WARNING;
			}
			break;
		default:
			return ERunCommandReturnTypes::RUN_COMMAND_COMMAND_UNHANDLED;
		}

		return ERunCommandReturnTypes::RUN_COMMAND_COMMAND_HANDLED;
	}
6544
6545 [[nodiscard]] bool Cmd_Init(const SCommand_Init *pCommand)
6546 {
6547 pCommand->m_pCapabilities->m_TileBuffering = true;
6548 pCommand->m_pCapabilities->m_QuadBuffering = true;
6549 pCommand->m_pCapabilities->m_TextBuffering = true;
6550 pCommand->m_pCapabilities->m_QuadContainerBuffering = true;
6551 pCommand->m_pCapabilities->m_ShaderSupport = true;
6552
6553 pCommand->m_pCapabilities->m_MipMapping = true;
6554 pCommand->m_pCapabilities->m_3DTextures = false;
6555 pCommand->m_pCapabilities->m_2DArrayTextures = true;
6556 pCommand->m_pCapabilities->m_NPOTTextures = true;
6557
6558 pCommand->m_pCapabilities->m_ContextMajor = 1;
6559 pCommand->m_pCapabilities->m_ContextMinor = 1;
6560 pCommand->m_pCapabilities->m_ContextPatch = 0;
6561
6562 pCommand->m_pCapabilities->m_TrianglesAsQuads = true;
6563
6564 m_GlobalTextureLodBIAS = g_Config.m_GfxGLTextureLODBIAS;
6565 m_pTextureMemoryUsage = pCommand->m_pTextureMemoryUsage;
6566 m_pBufferMemoryUsage = pCommand->m_pBufferMemoryUsage;
6567 m_pStreamMemoryUsage = pCommand->m_pStreamMemoryUsage;
6568 m_pStagingMemoryUsage = pCommand->m_pStagingMemoryUsage;
6569
6570 m_MultiSamplingCount = (g_Config.m_GfxFsaaSamples & 0xFFFFFFFE); // ignore the uneven bit, only even multi sampling works
6571
6572 *pCommand->m_pReadPresentedImageDataFunc = [this](uint32_t &Width, uint32_t &Height, CImageInfo::EImageFormat &Format, std::vector<uint8_t> &vDstData) {
6573 return GetPresentedImageData(Width, Height, Format, vDstData);
6574 };
6575
6576 m_pWindow = pCommand->m_pWindow;
6577
6578 *pCommand->m_pInitError = m_VKInstance != VK_NULL_HANDLE ? 0 : -1;
6579
6580 if(m_VKInstance == VK_NULL_HANDLE)
6581 {
6582 *pCommand->m_pInitError = -2;
6583 return false;
6584 }
6585
6586 m_pStorage = pCommand->m_pStorage;
6587 if(InitVulkan<true>() != 0)
6588 {
6589 *pCommand->m_pInitError = -2;
6590 return false;
6591 }
6592
6593 std::array<uint32_t, (size_t)CCommandBuffer::MAX_VERTICES / 4 * 6> aIndices;
6594 int Primq = 0;
6595 for(int i = 0; i < CCommandBuffer::MAX_VERTICES / 4 * 6; i += 6)
6596 {
6597 aIndices[i] = Primq;
6598 aIndices[i + 1] = Primq + 1;
6599 aIndices[i + 2] = Primq + 2;
6600 aIndices[i + 3] = Primq;
6601 aIndices[i + 4] = Primq + 2;
6602 aIndices[i + 5] = Primq + 3;
6603 Primq += 4;
6604 }
6605
6606 if(!PrepareFrame())
6607 return false;
6608 if(m_HasError)
6609 {
6610 *pCommand->m_pInitError = -2;
6611 return false;
6612 }
6613
6614 if(!CreateIndexBuffer(pData: aIndices.data(), DataSize: sizeof(uint32_t) * aIndices.size(), Buffer&: m_IndexBuffer, Memory&: m_IndexBufferMemory))
6615 {
6616 *pCommand->m_pInitError = -2;
6617 return false;
6618 }
6619 if(!CreateIndexBuffer(pData: aIndices.data(), DataSize: sizeof(uint32_t) * aIndices.size(), Buffer&: m_RenderIndexBuffer, Memory&: m_RenderIndexBufferMemory))
6620 {
6621 *pCommand->m_pInitError = -2;
6622 return false;
6623 }
6624 m_CurRenderIndexPrimitiveCount = CCommandBuffer::MAX_VERTICES / 4;
6625
6626 m_CanAssert = true;
6627
6628 return true;
6629 }
6630
6631 [[nodiscard]] bool Cmd_Shutdown(const SCommand_Shutdown *pCommand)
6632 {
6633 vkDeviceWaitIdle(device: m_VKDevice);
6634
6635 DestroyIndexBuffer(Buffer&: m_IndexBuffer, Memory&: m_IndexBufferMemory);
6636 DestroyIndexBuffer(Buffer&: m_RenderIndexBuffer, Memory&: m_RenderIndexBufferMemory);
6637
6638 CleanupVulkan<true>(SwapchainCount: m_SwapChainImageCount);
6639
6640 return true;
6641 }
6642
6643 [[nodiscard]] bool Cmd_Texture_Destroy(const CCommandBuffer::SCommand_Texture_Destroy *pCommand)
6644 {
6645 size_t ImageIndex = (size_t)pCommand->m_Slot;
6646 auto &Texture = m_vTextures[ImageIndex];
6647
6648 m_vvFrameDelayedTextureCleanup[m_CurImageIndex].push_back(x: Texture);
6649
6650 Texture = CTexture{};
6651
6652 return true;
6653 }
6654
6655 [[nodiscard]] bool Cmd_Texture_Create(const CCommandBuffer::SCommand_Texture_Create *pCommand)
6656 {
6657 int Slot = pCommand->m_Slot;
6658 int Width = pCommand->m_Width;
6659 int Height = pCommand->m_Height;
6660 int Flags = pCommand->m_Flags;
6661 uint8_t *pData = pCommand->m_pData;
6662
6663 if(!CreateTextureCMD(Slot, Width, Height, Format: VK_FORMAT_R8G8B8A8_UNORM, StoreFormat: VK_FORMAT_R8G8B8A8_UNORM, Flags, pData))
6664 return false;
6665
6666 free(ptr: pData);
6667
6668 return true;
6669 }
6670
6671 [[nodiscard]] bool Cmd_TextTextures_Create(const CCommandBuffer::SCommand_TextTextures_Create *pCommand)
6672 {
6673 int Slot = pCommand->m_Slot;
6674 int SlotOutline = pCommand->m_SlotOutline;
6675 int Width = pCommand->m_Width;
6676 int Height = pCommand->m_Height;
6677
6678 uint8_t *pTmpData = pCommand->m_pTextData;
6679 uint8_t *pTmpData2 = pCommand->m_pTextOutlineData;
6680
6681 if(!CreateTextureCMD(Slot, Width, Height, Format: VK_FORMAT_R8_UNORM, StoreFormat: VK_FORMAT_R8_UNORM, Flags: TextureFlag::NO_MIPMAPS, pData&: pTmpData))
6682 return false;
6683 if(!CreateTextureCMD(Slot: SlotOutline, Width, Height, Format: VK_FORMAT_R8_UNORM, StoreFormat: VK_FORMAT_R8_UNORM, Flags: TextureFlag::NO_MIPMAPS, pData&: pTmpData2))
6684 return false;
6685
6686 if(!CreateNewTextDescriptorSets(Texture: Slot, TextureOutline: SlotOutline))
6687 return false;
6688
6689 free(ptr: pTmpData);
6690 free(ptr: pTmpData2);
6691
6692 return true;
6693 }
6694
6695 [[nodiscard]] bool Cmd_TextTextures_Destroy(const CCommandBuffer::SCommand_TextTextures_Destroy *pCommand)
6696 {
6697 size_t ImageIndex = (size_t)pCommand->m_Slot;
6698 size_t ImageIndexOutline = (size_t)pCommand->m_SlotOutline;
6699 auto &Texture = m_vTextures[ImageIndex];
6700 auto &TextureOutline = m_vTextures[ImageIndexOutline];
6701
6702 m_vvFrameDelayedTextTexturesCleanup[m_CurImageIndex].emplace_back(args&: Texture, args&: TextureOutline);
6703
6704 Texture = {};
6705 TextureOutline = {};
6706
6707 return true;
6708 }
6709
6710 [[nodiscard]] bool Cmd_TextTexture_Update(const CCommandBuffer::SCommand_TextTexture_Update *pCommand)
6711 {
6712 size_t IndexTex = pCommand->m_Slot;
6713 uint8_t *pData = pCommand->m_pData;
6714
6715 if(!UpdateTexture(TextureSlot: IndexTex, Format: VK_FORMAT_R8_UNORM, pData, XOff: pCommand->m_X, YOff: pCommand->m_Y, Width: pCommand->m_Width, Height: pCommand->m_Height))
6716 return false;
6717
6718 free(ptr: pData);
6719
6720 return true;
6721 }
6722
6723 void Cmd_Clear_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_Clear *pCommand)
6724 {
6725 if(!pCommand->m_ForceClear)
6726 {
6727 bool ColorChanged = m_aClearColor[0] != pCommand->m_Color.r || m_aClearColor[1] != pCommand->m_Color.g ||
6728 m_aClearColor[2] != pCommand->m_Color.b || m_aClearColor[3] != pCommand->m_Color.a;
6729 m_aClearColor[0] = pCommand->m_Color.r;
6730 m_aClearColor[1] = pCommand->m_Color.g;
6731 m_aClearColor[2] = pCommand->m_Color.b;
6732 m_aClearColor[3] = pCommand->m_Color.a;
6733 if(ColorChanged)
6734 ExecBuffer.m_ClearColorInRenderThread = true;
6735 }
6736 else
6737 {
6738 ExecBuffer.m_ClearColorInRenderThread = true;
6739 }
6740 ExecBuffer.m_EstimatedRenderCallCount = 0;
6741 }
6742
6743 [[nodiscard]] bool Cmd_Clear(const SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_Clear *pCommand)
6744 {
6745 if(ExecBuffer.m_ClearColorInRenderThread)
6746 {
6747 std::array<VkClearAttachment, 1> aAttachments = {VkClearAttachment{.aspectMask: VK_IMAGE_ASPECT_COLOR_BIT, .colorAttachment: 0, .clearValue: VkClearValue{.color: VkClearColorValue{.float32: {pCommand->m_Color.r, pCommand->m_Color.g, pCommand->m_Color.b, pCommand->m_Color.a}}}}};
6748 std::array<VkClearRect, 1> aClearRects = {VkClearRect{.rect: {.offset: {.x: 0, .y: 0}, .extent: m_VKSwapImgAndViewportExtent.m_SwapImageViewport}, .baseArrayLayer: 0, .layerCount: 1}};
6749
6750 VkCommandBuffer *pCommandBuffer;
6751 if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
6752 return false;
6753 auto &CommandBuffer = *pCommandBuffer;
6754 vkCmdClearAttachments(commandBuffer: CommandBuffer, attachmentCount: aAttachments.size(), pAttachments: aAttachments.data(), rectCount: aClearRects.size(), pRects: aClearRects.data());
6755 }
6756
6757 return true;
6758 }
6759
6760 void Cmd_Render_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_Render *pCommand)
6761 {
6762 bool IsTextured = GetIsTextured(State: pCommand->m_State);
6763 if(IsTextured)
6764 {
6765 size_t AddressModeIndex = GetAddressModeIndex(State: pCommand->m_State);
6766 ExecBuffer.m_aDescriptors[0] = m_vTextures[pCommand->m_State.m_Texture].m_aVKStandardTexturedDescrSets[AddressModeIndex];
6767 }
6768
6769 ExecBuffer.m_IndexBuffer = m_IndexBuffer;
6770
6771 ExecBuffer.m_EstimatedRenderCallCount = 1;
6772
6773 ExecBufferFillDynamicStates(State: pCommand->m_State, ExecBuffer);
6774 }
6775
6776 [[nodiscard]] bool Cmd_Render(const CCommandBuffer::SCommand_Render *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
6777 {
6778 return RenderStandard<CCommandBuffer::SVertex, false>(ExecBuffer, State: pCommand->m_State, PrimType: pCommand->m_PrimType, pVertices: pCommand->m_pVertices, PrimitiveCount: pCommand->m_PrimCount);
6779 }
6780
6781 [[nodiscard]] bool Cmd_ReadPixel(const CCommandBuffer::SCommand_TrySwapAndReadPixel *pCommand)
6782 {
6783 if(!*pCommand->m_pSwapped && !NextFrame())
6784 return false;
6785 *pCommand->m_pSwapped = true;
6786
6787 uint32_t Width;
6788 uint32_t Height;
6789 CImageInfo::EImageFormat Format;
6790 if(GetPresentedImageDataImpl(Width, Height, Format, vDstData&: m_vReadPixelHelper, ResetAlpha: false, PixelOffset: pCommand->m_Position))
6791 {
6792 *pCommand->m_pColor = ColorRGBA(m_vReadPixelHelper[0] / 255.0f, m_vReadPixelHelper[1] / 255.0f, m_vReadPixelHelper[2] / 255.0f, 1.0f);
6793 }
6794 else
6795 {
6796 *pCommand->m_pColor = ColorRGBA(1.0f, 1.0f, 1.0f, 1.0f);
6797 }
6798
6799 return true;
6800 }
6801
6802 [[nodiscard]] bool Cmd_Screenshot(const CCommandBuffer::SCommand_TrySwapAndScreenshot *pCommand)
6803 {
6804 if(!*pCommand->m_pSwapped && !NextFrame())
6805 return false;
6806 *pCommand->m_pSwapped = true;
6807
6808 uint32_t Width;
6809 uint32_t Height;
6810 CImageInfo::EImageFormat Format;
6811 if(GetPresentedImageDataImpl(Width, Height, Format, vDstData&: m_vScreenshotHelper, ResetAlpha: true, PixelOffset: {}))
6812 {
6813 const size_t ImgSize = (size_t)Width * (size_t)Height * CImageInfo::PixelSize(Format);
6814 pCommand->m_pImage->m_pData = static_cast<uint8_t *>(malloc(size: ImgSize));
6815 mem_copy(dest: pCommand->m_pImage->m_pData, source: m_vScreenshotHelper.data(), size: ImgSize);
6816 }
6817 else
6818 {
6819 pCommand->m_pImage->m_pData = nullptr;
6820 }
6821 pCommand->m_pImage->m_Width = (int)Width;
6822 pCommand->m_pImage->m_Height = (int)Height;
6823 pCommand->m_pImage->m_Format = Format;
6824
6825 return true;
6826 }
6827
6828 void Cmd_RenderTex3D_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderTex3D *pCommand)
6829 {
6830 bool IsTextured = GetIsTextured(State: pCommand->m_State);
6831 if(IsTextured)
6832 {
6833 ExecBuffer.m_aDescriptors[0] = m_vTextures[pCommand->m_State.m_Texture].m_VKStandard3DTexturedDescrSet;
6834 }
6835
6836 ExecBuffer.m_IndexBuffer = m_IndexBuffer;
6837
6838 ExecBuffer.m_EstimatedRenderCallCount = 1;
6839
6840 ExecBufferFillDynamicStates(State: pCommand->m_State, ExecBuffer);
6841 }
6842
6843 [[nodiscard]] bool Cmd_RenderTex3D(const CCommandBuffer::SCommand_RenderTex3D *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
6844 {
6845 return RenderStandard<CCommandBuffer::SVertexTex3DStream, true>(ExecBuffer, State: pCommand->m_State, PrimType: pCommand->m_PrimType, pVertices: pCommand->m_pVertices, PrimitiveCount: pCommand->m_PrimCount);
6846 }
6847
	// A viewport update only changes render state and records no draw calls,
	// so the estimated call count is zero.
	void Cmd_Update_Viewport_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_Update_Viewport *pCommand)
	{
		ExecBuffer.m_EstimatedRenderCallCount = 0;
	}
6852
6853 [[nodiscard]] bool Cmd_Update_Viewport(const CCommandBuffer::SCommand_Update_Viewport *pCommand)
6854 {
6855 if(pCommand->m_ByResize)
6856 {
6857 if(IsVerbose())
6858 {
6859 log_debug("gfx/vulkan", "Got resize event.");
6860 }
6861 m_CanvasWidth = (uint32_t)pCommand->m_Width;
6862 m_CanvasHeight = (uint32_t)pCommand->m_Height;
6863#ifndef CONF_PLATFORM_MACOS
6864 m_RecreateSwapChain = true;
6865#endif
6866 }
6867 else
6868 {
6869 auto Viewport = m_VKSwapImgAndViewportExtent.GetPresentedImageViewport();
6870 if(pCommand->m_X != 0 || pCommand->m_Y != 0 || (uint32_t)pCommand->m_Width != Viewport.width || (uint32_t)pCommand->m_Height != Viewport.height)
6871 {
6872 m_HasDynamicViewport = true;
6873
6874 // convert viewport from OGL to vulkan
6875 int32_t ViewportY = (int32_t)Viewport.height - ((int32_t)pCommand->m_Y + (int32_t)pCommand->m_Height);
6876 uint32_t ViewportH = (int32_t)pCommand->m_Height;
6877 m_DynamicViewportOffset = {.x: (int32_t)pCommand->m_X, .y: ViewportY};
6878 m_DynamicViewportSize = {.width: (uint32_t)pCommand->m_Width, .height: ViewportH};
6879 }
6880 else
6881 {
6882 m_HasDynamicViewport = false;
6883 }
6884 }
6885
6886 return true;
6887 }
6888
6889 [[nodiscard]] bool Cmd_VSync(const CCommandBuffer::SCommand_VSync *pCommand)
6890 {
6891 if(IsVerbose())
6892 {
6893 log_info("gfx/vulkan", "Queueing swap chain recreation because V-Sync was changed.");
6894 }
6895 m_RecreateSwapChain = true;
6896 *pCommand->m_pRetOk = true;
6897
6898 return true;
6899 }
6900
6901 [[nodiscard]] bool Cmd_MultiSampling(const CCommandBuffer::SCommand_MultiSampling *pCommand)
6902 {
6903 if(IsVerbose())
6904 {
6905 log_info("gfx/vulkan", "Queueing swap chain recreation because multi sampling was changed.");
6906 }
6907 m_RecreateSwapChain = true;
6908
6909 uint32_t MSCount = (std::min(a: pCommand->m_RequestedMultiSamplingCount, b: (uint32_t)GetMaxSampleCount()) & 0xFFFFFFFE); // ignore the uneven bits
6910 m_NextMultiSamplingCount = MSCount;
6911
6912 *pCommand->m_pRetMultiSamplingCount = MSCount;
6913 *pCommand->m_pRetOk = true;
6914
6915 return true;
6916 }
6917
	// Swap just advances to the next frame; submission and presentation are
	// handled inside NextFrame().
	[[nodiscard]] bool Cmd_Swap(const CCommandBuffer::SCommand_Swap *pCommand)
	{
		return NextFrame();
	}
6922
6923 [[nodiscard]] bool Cmd_CreateBufferObject(const CCommandBuffer::SCommand_CreateBufferObject *pCommand)
6924 {
6925 bool IsOneFrameBuffer = (pCommand->m_Flags & IGraphics::EBufferObjectCreateFlags::BUFFER_OBJECT_CREATE_FLAGS_ONE_TIME_USE_BIT) != 0;
6926 if(!CreateBufferObject(BufferIndex: (size_t)pCommand->m_BufferIndex, pUploadData: pCommand->m_pUploadData, BufferDataSize: (VkDeviceSize)pCommand->m_DataSize, IsOneFrameBuffer))
6927 return false;
6928 if(pCommand->m_DeletePointer)
6929 free(ptr: pCommand->m_pUploadData);
6930
6931 return true;
6932 }
6933
6934 [[nodiscard]] bool Cmd_UpdateBufferObject(const CCommandBuffer::SCommand_UpdateBufferObject *pCommand)
6935 {
6936 size_t BufferIndex = (size_t)pCommand->m_BufferIndex;
6937 bool DeletePointer = pCommand->m_DeletePointer;
6938 VkDeviceSize Offset = (VkDeviceSize)((intptr_t)pCommand->m_pOffset);
6939 void *pUploadData = pCommand->m_pUploadData;
6940 VkDeviceSize DataSize = (VkDeviceSize)pCommand->m_DataSize;
6941
6942 SMemoryBlock<STAGING_BUFFER_CACHE_ID> StagingBuffer;
6943 if(!GetStagingBuffer(ResBlock&: StagingBuffer, pBufferData: pUploadData, RequiredSize: DataSize))
6944 return false;
6945
6946 const auto &MemBlock = m_vBufferObjects[BufferIndex].m_BufferObject.m_Mem;
6947 VkBuffer VertexBuffer = MemBlock.m_Buffer;
6948 if(!MemoryBarrier(Buffer: VertexBuffer, Offset: Offset + MemBlock.m_HeapData.m_OffsetToAlign, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: true))
6949 return false;
6950 if(!CopyBuffer(SrcBuffer: StagingBuffer.m_Buffer, DstBuffer: VertexBuffer, SrcOffset: StagingBuffer.m_HeapData.m_OffsetToAlign, DstOffset: Offset + MemBlock.m_HeapData.m_OffsetToAlign, CopySize: DataSize))
6951 return false;
6952 if(!MemoryBarrier(Buffer: VertexBuffer, Offset: Offset + MemBlock.m_HeapData.m_OffsetToAlign, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: false))
6953 return false;
6954
6955 UploadAndFreeStagingMemBlock(Block&: StagingBuffer);
6956
6957 if(DeletePointer)
6958 free(ptr: pUploadData);
6959
6960 return true;
6961 }
6962
6963 [[nodiscard]] bool Cmd_RecreateBufferObject(const CCommandBuffer::SCommand_RecreateBufferObject *pCommand)
6964 {
6965 DeleteBufferObject(BufferIndex: (size_t)pCommand->m_BufferIndex);
6966 bool IsOneFrameBuffer = (pCommand->m_Flags & IGraphics::EBufferObjectCreateFlags::BUFFER_OBJECT_CREATE_FLAGS_ONE_TIME_USE_BIT) != 0;
6967 return CreateBufferObject(BufferIndex: (size_t)pCommand->m_BufferIndex, pUploadData: pCommand->m_pUploadData, BufferDataSize: (VkDeviceSize)pCommand->m_DataSize, IsOneFrameBuffer);
6968 }
6969
6970 [[nodiscard]] bool Cmd_CopyBufferObject(const CCommandBuffer::SCommand_CopyBufferObject *pCommand)
6971 {
6972 size_t ReadBufferIndex = (size_t)pCommand->m_ReadBufferIndex;
6973 size_t WriteBufferIndex = (size_t)pCommand->m_WriteBufferIndex;
6974 auto &ReadMemBlock = m_vBufferObjects[ReadBufferIndex].m_BufferObject.m_Mem;
6975 auto &WriteMemBlock = m_vBufferObjects[WriteBufferIndex].m_BufferObject.m_Mem;
6976 VkBuffer ReadBuffer = ReadMemBlock.m_Buffer;
6977 VkBuffer WriteBuffer = WriteMemBlock.m_Buffer;
6978
6979 VkDeviceSize DataSize = (VkDeviceSize)pCommand->m_CopySize;
6980 VkDeviceSize ReadOffset = (VkDeviceSize)pCommand->m_ReadOffset + ReadMemBlock.m_HeapData.m_OffsetToAlign;
6981 VkDeviceSize WriteOffset = (VkDeviceSize)pCommand->m_WriteOffset + WriteMemBlock.m_HeapData.m_OffsetToAlign;
6982
6983 if(!MemoryBarrier(Buffer: ReadBuffer, Offset: ReadOffset, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: true))
6984 return false;
6985 if(!MemoryBarrier(Buffer: WriteBuffer, Offset: WriteOffset, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: true))
6986 return false;
6987 if(!CopyBuffer(SrcBuffer: ReadBuffer, DstBuffer: WriteBuffer, SrcOffset: ReadOffset, DstOffset: WriteOffset, CopySize: DataSize))
6988 return false;
6989 if(!MemoryBarrier(Buffer: WriteBuffer, Offset: WriteOffset, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: false))
6990 return false;
6991 if(!MemoryBarrier(Buffer: ReadBuffer, Offset: ReadOffset, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: false))
6992 return false;
6993
6994 return true;
6995 }
6996
	// Deletes a single buffer object by slot index; the actual resource
	// cleanup is handled by DeleteBufferObject().
	[[nodiscard]] bool Cmd_DeleteBufferObject(const CCommandBuffer::SCommand_DeleteBufferObject *pCommand)
	{
		size_t BufferIndex = (size_t)pCommand->m_BufferIndex;
		DeleteBufferObject(BufferIndex);

		return true;
	}
7004
7005 [[nodiscard]] bool Cmd_CreateBufferContainer(const CCommandBuffer::SCommand_CreateBufferContainer *pCommand)
7006 {
7007 size_t ContainerIndex = (size_t)pCommand->m_BufferContainerIndex;
7008 while(ContainerIndex >= m_vBufferContainers.size())
7009 m_vBufferContainers.resize(new_size: (m_vBufferContainers.size() * 2) + 1);
7010
7011 m_vBufferContainers[ContainerIndex].m_BufferObjectIndex = pCommand->m_VertBufferBindingIndex;
7012
7013 return true;
7014 }
7015
	// Rebinds an existing buffer container to a (possibly different) vertex
	// buffer object.
	[[nodiscard]] bool Cmd_UpdateBufferContainer(const CCommandBuffer::SCommand_UpdateBufferContainer *pCommand)
	{
		size_t ContainerIndex = (size_t)pCommand->m_BufferContainerIndex;
		m_vBufferContainers[ContainerIndex].m_BufferObjectIndex = pCommand->m_VertBufferBindingIndex;

		return true;
	}
7023
7024 [[nodiscard]] bool Cmd_DeleteBufferContainer(const CCommandBuffer::SCommand_DeleteBufferContainer *pCommand)
7025 {
7026 size_t ContainerIndex = (size_t)pCommand->m_BufferContainerIndex;
7027 bool DeleteAllBO = pCommand->m_DestroyAllBO;
7028 if(DeleteAllBO)
7029 {
7030 size_t BufferIndex = (size_t)m_vBufferContainers[ContainerIndex].m_BufferObjectIndex;
7031 DeleteBufferObject(BufferIndex);
7032 }
7033
7034 return true;
7035 }
7036
7037 [[nodiscard]] bool Cmd_IndicesRequiredNumNotify(const CCommandBuffer::SCommand_IndicesRequiredNumNotify *pCommand)
7038 {
7039 size_t IndicesCount = pCommand->m_RequiredIndicesNum;
7040 if(m_CurRenderIndexPrimitiveCount < IndicesCount / 6)
7041 {
7042 m_vvFrameDelayedBufferCleanup[m_CurImageIndex].push_back(x: {.m_Buffer: m_RenderIndexBuffer, .m_Mem: m_RenderIndexBufferMemory});
7043 std::vector<uint32_t> vIndices(IndicesCount);
7044 uint32_t Primq = 0;
7045 for(size_t i = 0; i < IndicesCount; i += 6)
7046 {
7047 vIndices[i] = Primq;
7048 vIndices[i + 1] = Primq + 1;
7049 vIndices[i + 2] = Primq + 2;
7050 vIndices[i + 3] = Primq;
7051 vIndices[i + 4] = Primq + 2;
7052 vIndices[i + 5] = Primq + 3;
7053 Primq += 4;
7054 }
7055 if(!CreateIndexBuffer(pData: vIndices.data(), DataSize: vIndices.size() * sizeof(uint32_t), Buffer&: m_RenderIndexBuffer, Memory&: m_RenderIndexBufferMemory))
7056 return false;
7057 m_CurRenderIndexPrimitiveCount = IndicesCount / 6;
7058 }
7059
7060 return true;
7061 }
7062
7063 void Cmd_RenderTileLayer_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderTileLayer *pCommand)
7064 {
7065 RenderTileLayer_FillExecuteBuffer(ExecBuffer, DrawCalls: pCommand->m_IndicesDrawNum, State: pCommand->m_State, BufferContainerIndex: pCommand->m_BufferContainerIndex);
7066 }
7067
7068 [[nodiscard]] bool Cmd_RenderTileLayer(const CCommandBuffer::SCommand_RenderTileLayer *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
7069 {
7070 vec2 Scale{};
7071 vec2 Off{};
7072 return RenderTileLayer(ExecBuffer, State: pCommand->m_State, IsBorder: false, Color: pCommand->m_Color, Scale, Off, IndicesDrawNum: (size_t)pCommand->m_IndicesDrawNum, pIndicesOffsets: pCommand->m_pIndicesOffsets, pDrawCount: pCommand->m_pDrawCount);
7073 }
7074
7075 void Cmd_RenderBorderTile_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderBorderTile *pCommand)
7076 {
7077 RenderTileLayer_FillExecuteBuffer(ExecBuffer, DrawCalls: 1, State: pCommand->m_State, BufferContainerIndex: pCommand->m_BufferContainerIndex);
7078 }
7079
7080 [[nodiscard]] bool Cmd_RenderBorderTile(const CCommandBuffer::SCommand_RenderBorderTile *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
7081 {
7082 vec2 Scale = pCommand->m_Scale;
7083 vec2 Off = pCommand->m_Offset;
7084 unsigned int DrawNum = pCommand->m_DrawNum * 6;
7085 return RenderTileLayer(ExecBuffer, State: pCommand->m_State, IsBorder: true, Color: pCommand->m_Color, Scale, Off, IndicesDrawNum: 1, pIndicesOffsets: &pCommand->m_pIndicesOffset, pDrawCount: &DrawNum);
7086 }
7087
7088 void Cmd_RenderQuadLayer_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderQuadLayer *pCommand)
7089 {
7090 size_t BufferContainerIndex = (size_t)pCommand->m_BufferContainerIndex;
7091 size_t BufferObjectIndex = (size_t)m_vBufferContainers[BufferContainerIndex].m_BufferObjectIndex;
7092 const auto &BufferObject = m_vBufferObjects[BufferObjectIndex];
7093
7094 ExecBuffer.m_Buffer = BufferObject.m_CurBuffer;
7095 ExecBuffer.m_BufferOff = BufferObject.m_CurBufferOffset;
7096
7097 bool IsTextured = GetIsTextured(State: pCommand->m_State);
7098 if(IsTextured)
7099 {
7100 size_t AddressModeIndex = GetAddressModeIndex(State: pCommand->m_State);
7101 ExecBuffer.m_aDescriptors[0] = m_vTextures[pCommand->m_State.m_Texture].m_aVKStandardTexturedDescrSets[AddressModeIndex];
7102 }
7103
7104 ExecBuffer.m_IndexBuffer = m_RenderIndexBuffer;
7105
7106 ExecBuffer.m_EstimatedRenderCallCount = ((pCommand->m_QuadNum - 1) / GRAPHICS_MAX_QUADS_RENDER_COUNT) + 1;
7107
7108 ExecBufferFillDynamicStates(State: pCommand->m_State, ExecBuffer);
7109 }
7110
7111 [[nodiscard]] bool Cmd_RenderQuadLayer(const CCommandBuffer::SCommand_RenderQuadLayer *pCommand, SRenderCommandExecuteBuffer &ExecBuffer, bool Grouped)
7112 {
7113 std::array<float, (size_t)4 * 2> m;
7114 GetStateMatrix(State: pCommand->m_State, Matrix&: m);
7115
7116 bool CanBeGrouped = Grouped || pCommand->m_QuadNum == 1;
7117
7118 bool IsTextured;
7119 size_t BlendModeIndex;
7120 size_t DynamicIndex;
7121 size_t AddressModeIndex;
7122 GetStateIndices(ExecBuffer, State: pCommand->m_State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
7123 auto &PipeLayout = GetPipeLayout(Container&: CanBeGrouped ? m_QuadGroupedPipeline : m_QuadPipeline, IsTextured, BlendModeIndex, DynamicIndex);
7124 auto &PipeLine = GetPipeline(Container&: CanBeGrouped ? m_QuadGroupedPipeline : m_QuadPipeline, IsTextured, BlendModeIndex, DynamicIndex);
7125
7126 VkCommandBuffer *pCommandBuffer;
7127 if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
7128 return false;
7129 auto &CommandBuffer = *pCommandBuffer;
7130
7131 BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State: pCommand->m_State);
7132
7133 std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
7134 std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
7135 vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());
7136
7137 vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: 0, indexType: VK_INDEX_TYPE_UINT32);
7138
7139 if(IsTextured)
7140 {
7141 vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
7142 }
7143
7144 uint32_t DrawCount = (uint32_t)pCommand->m_QuadNum;
7145
7146 if(CanBeGrouped)
7147 {
7148 SUniformQuadGroupedGPos PushConstantVertex;
7149 mem_copy(dest: &PushConstantVertex.m_BOPush, source: &pCommand->m_pQuadInfo[0], size: sizeof(PushConstantVertex.m_BOPush));
7150
7151 mem_copy(dest: PushConstantVertex.m_aPos, source: m.data(), size: sizeof(PushConstantVertex.m_aPos));
7152 vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT, offset: 0, size: sizeof(SUniformQuadGroupedGPos), pValues: &PushConstantVertex);
7153
7154 VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)(pCommand->m_QuadOffset) * 6);
7155 vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(DrawCount * 6), instanceCount: 1, firstIndex: IndexOffset, vertexOffset: 0, firstInstance: 0);
7156 }
7157 else
7158 {
7159 SUniformQuadGPos PushConstantVertex;
7160 mem_copy(dest: PushConstantVertex.m_aPos, source: m.data(), size: sizeof(PushConstantVertex.m_aPos));
7161 PushConstantVertex.m_QuadOffset = pCommand->m_QuadOffset;
7162
7163 vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(PushConstantVertex), pValues: &PushConstantVertex);
7164
7165 size_t RenderOffset = 0;
7166 while(DrawCount > 0)
7167 {
7168 uint32_t RealDrawCount = (DrawCount > GRAPHICS_MAX_QUADS_RENDER_COUNT ? GRAPHICS_MAX_QUADS_RENDER_COUNT : DrawCount);
7169 VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)(pCommand->m_QuadOffset + RenderOffset) * 6);
7170
7171 // create uniform buffer
7172 SDeviceDescriptorSet UniDescrSet;
7173 if(!GetUniformBufferObject(RenderThreadIndex: ExecBuffer.m_ThreadIndex, RequiresSharedStagesDescriptor: true, DescrSet&: UniDescrSet, ParticleCount: RealDrawCount, pData: (const float *)(pCommand->m_pQuadInfo + RenderOffset), DataSize: RealDrawCount * sizeof(SQuadRenderInfo)))
7174 return false;
7175
7176 vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: IsTextured ? 1 : 0, descriptorSetCount: 1, pDescriptorSets: &UniDescrSet.m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
7177 if(RenderOffset > 0)
7178 {
7179 int32_t QuadOffset = pCommand->m_QuadOffset + RenderOffset;
7180 vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: sizeof(SUniformQuadGPos) - sizeof(int32_t), size: sizeof(int32_t), pValues: &QuadOffset);
7181 }
7182
7183 vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(RealDrawCount * 6), instanceCount: 1, firstIndex: IndexOffset, vertexOffset: 0, firstInstance: 0);
7184 RenderOffset += RealDrawCount;
7185 DrawCount -= RealDrawCount;
7186 }
7187 }
7188
7189 return true;
7190 }
7191
7192 void Cmd_RenderText_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderText *pCommand)
7193 {
7194 size_t BufferContainerIndex = (size_t)pCommand->m_BufferContainerIndex;
7195 size_t BufferObjectIndex = (size_t)m_vBufferContainers[BufferContainerIndex].m_BufferObjectIndex;
7196 const auto &BufferObject = m_vBufferObjects[BufferObjectIndex];
7197
7198 ExecBuffer.m_Buffer = BufferObject.m_CurBuffer;
7199 ExecBuffer.m_BufferOff = BufferObject.m_CurBufferOffset;
7200
7201 ExecBuffer.m_aDescriptors[0] = m_vTextures[pCommand->m_TextTextureIndex].m_VKTextDescrSet;
7202
7203 ExecBuffer.m_IndexBuffer = m_RenderIndexBuffer;
7204
7205 ExecBuffer.m_EstimatedRenderCallCount = 1;
7206
7207 ExecBufferFillDynamicStates(State: pCommand->m_State, ExecBuffer);
7208 }
7209
7210 [[nodiscard]] bool Cmd_RenderText(const CCommandBuffer::SCommand_RenderText *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
7211 {
7212 std::array<float, (size_t)4 * 2> m;
7213 GetStateMatrix(State: pCommand->m_State, Matrix&: m);
7214
7215 bool IsTextured;
7216 size_t BlendModeIndex;
7217 size_t DynamicIndex;
7218 size_t AddressModeIndex;
7219 GetStateIndices(ExecBuffer, State: pCommand->m_State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
7220 IsTextured = true; // text is always textured
7221 auto &PipeLayout = GetPipeLayout(Container&: m_TextPipeline, IsTextured, BlendModeIndex, DynamicIndex);
7222 auto &PipeLine = GetPipeline(Container&: m_TextPipeline, IsTextured, BlendModeIndex, DynamicIndex);
7223
7224 VkCommandBuffer *pCommandBuffer;
7225 if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
7226 return false;
7227 auto &CommandBuffer = *pCommandBuffer;
7228
7229 BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State: pCommand->m_State);
7230
7231 std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
7232 std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
7233 vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());
7234
7235 vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: 0, indexType: VK_INDEX_TYPE_UINT32);
7236
7237 vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
7238
7239 SUniformGTextPos PosTexSizeConstant;
7240 mem_copy(dest: PosTexSizeConstant.m_aPos, source: m.data(), size: m.size() * sizeof(float));
7241 PosTexSizeConstant.m_TextureSize = pCommand->m_TextureSize;
7242
7243 vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(SUniformGTextPos), pValues: &PosTexSizeConstant);
7244
7245 SUniformTextFragment FragmentConstants;
7246
7247 FragmentConstants.m_Constants.m_TextColor = pCommand->m_TextColor;
7248 FragmentConstants.m_Constants.m_TextOutlineColor = pCommand->m_TextOutlineColor;
7249 vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, offset: sizeof(SUniformGTextPos) + sizeof(SUniformTextGFragmentOffset), size: sizeof(SUniformTextFragment), pValues: &FragmentConstants);
7250
7251 vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(pCommand->m_DrawNum), instanceCount: 1, firstIndex: 0, vertexOffset: 0, firstInstance: 0);
7252
7253 return true;
7254 }
7255
7256 void BufferContainer_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SState &State, size_t BufferContainerIndex, size_t DrawCalls)
7257 {
7258 size_t BufferObjectIndex = (size_t)m_vBufferContainers[BufferContainerIndex].m_BufferObjectIndex;
7259 const auto &BufferObject = m_vBufferObjects[BufferObjectIndex];
7260
7261 ExecBuffer.m_Buffer = BufferObject.m_CurBuffer;
7262 ExecBuffer.m_BufferOff = BufferObject.m_CurBufferOffset;
7263
7264 bool IsTextured = GetIsTextured(State);
7265 if(IsTextured)
7266 {
7267 size_t AddressModeIndex = GetAddressModeIndex(State);
7268 ExecBuffer.m_aDescriptors[0] = m_vTextures[State.m_Texture].m_aVKStandardTexturedDescrSets[AddressModeIndex];
7269 }
7270
7271 ExecBuffer.m_IndexBuffer = m_RenderIndexBuffer;
7272
7273 ExecBuffer.m_EstimatedRenderCallCount = DrawCalls;
7274
7275 ExecBufferFillDynamicStates(State, ExecBuffer);
7276 }
7277
7278 void Cmd_RenderQuadContainer_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderQuadContainer *pCommand)
7279 {
7280 BufferContainer_FillExecuteBuffer(ExecBuffer, State: pCommand->m_State, BufferContainerIndex: (size_t)pCommand->m_BufferContainerIndex, DrawCalls: 1);
7281 }
7282
7283 [[nodiscard]] bool Cmd_RenderQuadContainer(const CCommandBuffer::SCommand_RenderQuadContainer *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
7284 {
7285 std::array<float, (size_t)4 * 2> m;
7286 GetStateMatrix(State: pCommand->m_State, Matrix&: m);
7287
7288 bool IsTextured;
7289 size_t BlendModeIndex;
7290 size_t DynamicIndex;
7291 size_t AddressModeIndex;
7292 GetStateIndices(ExecBuffer, State: pCommand->m_State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
7293 auto &PipeLayout = GetStandardPipeLayout(IsLineGeometry: false, IsTextured, BlendModeIndex, DynamicIndex);
7294 auto &PipeLine = GetStandardPipe(IsLineGeometry: false, IsTextured, BlendModeIndex, DynamicIndex);
7295
7296 VkCommandBuffer *pCommandBuffer;
7297 if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
7298 return false;
7299 auto &CommandBuffer = *pCommandBuffer;
7300
7301 BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State: pCommand->m_State);
7302
7303 std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
7304 std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
7305 vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());
7306
7307 VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)pCommand->m_pOffset);
7308
7309 vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: IndexOffset, indexType: VK_INDEX_TYPE_UINT32);
7310
7311 if(IsTextured)
7312 {
7313 vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
7314 }
7315
7316 vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(SUniformGPos), pValues: m.data());
7317
7318 vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(pCommand->m_DrawNum), instanceCount: 1, firstIndex: 0, vertexOffset: 0, firstInstance: 0);
7319
7320 return true;
7321 }
7322
7323 void Cmd_RenderQuadContainerEx_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderQuadContainerEx *pCommand)
7324 {
7325 BufferContainer_FillExecuteBuffer(ExecBuffer, State: pCommand->m_State, BufferContainerIndex: (size_t)pCommand->m_BufferContainerIndex, DrawCalls: 1);
7326 }
7327
	// Renders a quad container with an explicit vertex color, rotation and center.
	// When no rotation is requested, a "rotationless" pipeline variant is chosen so
	// the vertex shader can skip the rotation math and a smaller push-constant
	// range is uploaded. Returns false if no graphics command buffer is available.
	[[nodiscard]] bool Cmd_RenderQuadContainerEx(const CCommandBuffer::SCommand_RenderQuadContainerEx *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
	{
		// projection matrix for the current render state (4x2 floats)
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(pCommand->m_State, m);

		bool IsRotationless = !(pCommand->m_Rotation != 0);
		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, pCommand->m_State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		auto &PipeLayout = GetPipeLayout(IsRotationless ? m_PrimExRotationlessPipeline : m_PrimExPipeline, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = GetPipeline(IsRotationless ? m_PrimExRotationlessPipeline : m_PrimExPipeline, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pCommandBuffer, ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, PipeLine, pCommand->m_State);

		std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
		vkCmdBindVertexBuffers(CommandBuffer, 0, 1, aVertexBuffers.data(), aOffsets.data());

		// m_pOffset carries a byte offset into the index buffer through a pointer field
		VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)pCommand->m_pOffset);

		vkCmdBindIndexBuffer(CommandBuffer, ExecBuffer.m_IndexBuffer, IndexOffset, VK_INDEX_TYPE_UINT32);

		if(IsTextured)
		{
			vkCmdBindDescriptorSets(CommandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, PipeLayout, 0, 1, &ExecBuffer.m_aDescriptors[0].m_Descriptor, 0, nullptr);
		}

		SUniformPrimExGVertColor PushConstantColor;
		SUniformPrimExGPos PushConstantVertex;
		size_t VertexPushConstantSize = sizeof(PushConstantVertex);

		PushConstantColor = pCommand->m_VertexColor;
		mem_copy(PushConstantVertex.m_aPos, m.data(), sizeof(PushConstantVertex.m_aPos));

		if(!IsRotationless)
		{
			PushConstantVertex.m_Rotation = pCommand->m_Rotation;
			PushConstantVertex.m_Center = {pCommand->m_Center.x, pCommand->m_Center.y};
		}
		else
		{
			// the rotationless shader variant only consumes the matrix part
			VertexPushConstantSize = sizeof(SUniformPrimExGPosRotationless);
		}

		// fragment-stage color constant sits behind the full (aligned) vertex range;
		// this offset must match the pipeline layout's push-constant ranges
		vkCmdPushConstants(CommandBuffer, PipeLayout, VK_SHADER_STAGE_VERTEX_BIT, 0, VertexPushConstantSize, &PushConstantVertex);
		vkCmdPushConstants(CommandBuffer, PipeLayout, VK_SHADER_STAGE_FRAGMENT_BIT, sizeof(SUniformPrimExGPos) + sizeof(SUniformPrimExGVertColorAlign), sizeof(PushConstantColor), &PushConstantColor);

		vkCmdDrawIndexed(CommandBuffer, static_cast<uint32_t>(pCommand->m_DrawNum), 1, 0, 0, 0);

		return true;
	}
7386
7387 void Cmd_RenderQuadContainerAsSpriteMultiple_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderQuadContainerAsSpriteMultiple *pCommand)
7388 {
7389 BufferContainer_FillExecuteBuffer(ExecBuffer, State: pCommand->m_State, BufferContainerIndex: (size_t)pCommand->m_BufferContainerIndex, DrawCalls: ((pCommand->m_DrawCount - 1) / GRAPHICS_MAX_PARTICLES_RENDER_COUNT) + 1);
7390 }
7391
	// Renders the same quad container many times (sprites/particles) in one
	// command, using instanced draws. When only a single sprite is drawn, the
	// per-sprite pos/scale/rotation data fits into push constants ("push"
	// pipeline); otherwise a uniform buffer is created per batch and bound at
	// descriptor set index 1. Returns false on command buffer / uniform buffer
	// acquisition failure.
	[[nodiscard]] bool Cmd_RenderQuadContainerAsSpriteMultiple(const CCommandBuffer::SCommand_RenderQuadContainerAsSpriteMultiple *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
	{
		// projection matrix for the current render state (4x2 floats)
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(pCommand->m_State, m);

		// only a single sprite fits into the push-constant budget
		bool CanBePushed = pCommand->m_DrawCount <= 1;

		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, pCommand->m_State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		auto &PipeLayout = GetPipeLayout(CanBePushed ? m_SpriteMultiPushPipeline : m_SpriteMultiPipeline, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = GetPipeline(CanBePushed ? m_SpriteMultiPushPipeline : m_SpriteMultiPipeline, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pCommandBuffer, ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, PipeLine, pCommand->m_State);

		std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
		vkCmdBindVertexBuffers(CommandBuffer, 0, 1, aVertexBuffers.data(), aOffsets.data());

		// m_pOffset carries a byte offset into the index buffer through a pointer field
		VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)pCommand->m_pOffset);
		vkCmdBindIndexBuffer(CommandBuffer, ExecBuffer.m_IndexBuffer, IndexOffset, VK_INDEX_TYPE_UINT32);

		vkCmdBindDescriptorSets(CommandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, PipeLayout, 0, 1, &ExecBuffer.m_aDescriptors[0].m_Descriptor, 0, nullptr);

		if(CanBePushed)
		{
			SUniformSpriteMultiPushGVertColor PushConstantColor;
			SUniformSpriteMultiPushGPos PushConstantVertex;

			PushConstantColor = pCommand->m_VertexColor;

			mem_copy(PushConstantVertex.m_aPos, m.data(), sizeof(PushConstantVertex.m_aPos));
			PushConstantVertex.m_Center = pCommand->m_Center;

			// pack each sprite's pos/scale/rotation into one vec4 (x, y, scale, rot)
			for(size_t i = 0; i < pCommand->m_DrawCount; ++i)
				PushConstantVertex.m_aPSR[i] = vec4(pCommand->m_pRenderInfo[i].m_Pos.x, pCommand->m_pRenderInfo[i].m_Pos.y, pCommand->m_pRenderInfo[i].m_Scale, pCommand->m_pRenderInfo[i].m_Rotation);

			// upload only the used prefix of the PSR array
			vkCmdPushConstants(CommandBuffer, PipeLayout, VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(SUniformSpriteMultiPushGPosBase) + sizeof(vec4) * pCommand->m_DrawCount, &PushConstantVertex);
			vkCmdPushConstants(CommandBuffer, PipeLayout, VK_SHADER_STAGE_FRAGMENT_BIT, sizeof(SUniformSpriteMultiPushGPos), sizeof(PushConstantColor), &PushConstantColor);
		}
		else
		{
			SUniformSpriteMultiGVertColor PushConstantColor;
			SUniformSpriteMultiGPos PushConstantVertex;

			PushConstantColor = pCommand->m_VertexColor;

			mem_copy(PushConstantVertex.m_aPos, m.data(), sizeof(PushConstantVertex.m_aPos));
			PushConstantVertex.m_Center = pCommand->m_Center;

			vkCmdPushConstants(CommandBuffer, PipeLayout, VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(PushConstantVertex), &PushConstantVertex);
			vkCmdPushConstants(CommandBuffer, PipeLayout, VK_SHADER_STAGE_FRAGMENT_BIT, sizeof(SUniformSpriteMultiGPos) + sizeof(SUniformSpriteMultiGVertColorAlign), sizeof(PushConstantColor), &PushConstantColor);
		}

		// batch size per instanced draw; presumably must equal
		// GRAPHICS_MAX_PARTICLES_RENDER_COUNT used in the fill pass — TODO confirm
		const int RSPCount = 512;
		int DrawCount = pCommand->m_DrawCount;
		size_t RenderOffset = 0;

		while(DrawCount > 0)
		{
			int UniformCount = (DrawCount > RSPCount ? RSPCount : DrawCount);

			if(!CanBePushed)
			{
				// create uniform buffer
				SDeviceDescriptorSet UniDescrSet;
				if(!GetUniformBufferObject(ExecBuffer.m_ThreadIndex, false, UniDescrSet, UniformCount, (const float *)(pCommand->m_pRenderInfo + RenderOffset), UniformCount * sizeof(IGraphics::SRenderSpriteInfo)))
					return false;

				vkCmdBindDescriptorSets(CommandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, PipeLayout, 1, 1, &UniDescrSet.m_Descriptor, 0, nullptr);
			}

			// instanced draw: one instance per sprite in this batch
			vkCmdDrawIndexed(CommandBuffer, static_cast<uint32_t>(pCommand->m_DrawNum), UniformCount, 0, 0, 0);

			RenderOffset += RSPCount;
			DrawCount -= RSPCount;
		}

		return true;
	}
7479
	// Notification that the OS window (and thus its surface) was (re)created.
	// If rendering was paused by a previous destroy notification, resumes it;
	// on Android the surface itself must be recreated first.
	[[nodiscard]] bool Cmd_WindowCreateNtf(const CCommandBuffer::SCommand_WindowCreateNtf *pCommand)
	{
		if(IsVerbose())
		{
			log_debug("gfx/vulkan", "Creating new surface.");
		}
		m_pWindow = SDL_GetWindowFromID(pCommand->m_WindowId);
		if(m_RenderingPaused)
		{
#ifdef CONF_PLATFORM_ANDROID
			// on Android the surface is destroyed together with the window,
			// so recreate it and force a swap chain rebuild before resuming
			if(!CreateSurface(m_pWindow))
				return false;
			m_RecreateSwapChain = true;
#endif
			m_RenderingPaused = false;
			// flush pending memory work, then start a fresh frame
			if(!PureMemoryFrame())
				return false;
			if(!PrepareFrame())
				return false;
		}

		return true;
	}
7503
	// Notification that the OS window surface is going away. Finishes the
	// in-flight frame, pauses rendering and waits for the device to go idle so
	// surface-dependent resources can be torn down safely.
	[[nodiscard]] bool Cmd_WindowDestroyNtf(const CCommandBuffer::SCommand_WindowDestroyNtf *pCommand)
	{
		if(IsVerbose())
		{
			log_debug("gfx/vulkan", "Surface got destroyed.");
		}
		if(!m_RenderingPaused)
		{
			// complete the current frame before pausing
			if(!WaitFrame())
				return false;
			m_RenderingPaused = true;
			// ensure no GPU work references the swap chain anymore
			vkDeviceWaitIdle(m_VKDevice);
#ifdef CONF_PLATFORM_ANDROID
			// on Android the swap chain dies with the surface — clean it up now
			CleanupVulkanSwapChain(true);
#endif
		}

		return true;
	}
7523
	// First command after backend startup: initializes Vulkan via SDL and
	// spawns the helper render threads according to the configured thread count.
	// Always returns true; an init failure is recorded by nulling m_VKInstance
	// (presumably checked later by the caller — confirm against Cmd_Init path).
	[[nodiscard]] bool Cmd_PreInit(const CCommandProcessorFragment_GLBase::SCommand_PreInit *pCommand)
	{
		m_pGpuList = pCommand->m_pGpuList;
		if(InitVulkanSDL(pCommand->m_pWindow, pCommand->m_Width, pCommand->m_Height, pCommand->m_pRendererString, pCommand->m_pVendorString, pCommand->m_pVersionString) != 0)
		{
			// leave the instance null so the failure is detectable later
			m_VKInstance = VK_NULL_HANDLE;
		}

		RegisterCommands();

		m_ThreadCount = g_Config.m_GfxRenderThreadCount;
		if(m_ThreadCount <= 1)
			m_ThreadCount = 1;
		else
		{
			// with helpers enabled: at least 3 total (1 main + 2 helpers),
			// capped by (at least 3, else) the hardware concurrency
			m_ThreadCount = std::clamp<decltype(m_ThreadCount)>(m_ThreadCount, 3, std::max<decltype(m_ThreadCount)>(3, std::thread::hardware_concurrency()));
		}

		// start threads
		dbg_assert(m_ThreadCount != 2, "Either use 1 main thread or at least 2 extra rendering threads.");
		if(m_ThreadCount > 1)
		{
			m_vvThreadCommandLists.resize(m_ThreadCount - 1);
			m_vThreadHelperHadCommands.resize(m_ThreadCount - 1, false);
			for(auto &ThreadCommandList : m_vvThreadCommandLists)
			{
				ThreadCommandList.reserve(256);
			}

			m_vpRenderThreads.reserve(m_ThreadCount - 1);
			for(size_t i = 0; i < m_ThreadCount - 1; ++i)
			{
				auto *pRenderThread = new SRenderThread();
				// take the lock BEFORE starting the thread so the "started"
				// notification cannot be missed
				std::unique_lock<std::mutex> Lock(pRenderThread->m_Mutex);
				m_vpRenderThreads.emplace_back(pRenderThread);
				pRenderThread->m_Thread = std::thread([this, i]() { RunThread(i); });
				// wait until thread started
				pRenderThread->m_Cond.wait(Lock, [pRenderThread]() -> bool { return pRenderThread->m_Started; });
			}
		}

		return true;
	}
7567
7568 [[nodiscard]] bool Cmd_PostShutdown(const CCommandProcessorFragment_GLBase::SCommand_PostShutdown *pCommand)
7569 {
7570 for(size_t i = 0; i < m_ThreadCount - 1; ++i)
7571 {
7572 auto *pThread = m_vpRenderThreads[i].get();
7573 {
7574 std::unique_lock<std::mutex> Lock(pThread->m_Mutex);
7575 pThread->m_Finished = true;
7576 pThread->m_Cond.notify_one();
7577 }
7578 pThread->m_Thread.join();
7579 }
7580 m_vpRenderThreads.clear();
7581 m_vvThreadCommandLists.clear();
7582 m_vThreadHelperHadCommands.clear();
7583
7584 m_ThreadCount = 1;
7585
7586 CleanupVulkanSDL();
7587
7588 return true;
7589 }
7590
7591 void StartCommands(size_t CommandCount, size_t EstimatedRenderCallCount) override
7592 {
7593 m_CommandsInPipe = CommandCount;
7594 m_RenderCallsInPipe = EstimatedRenderCallCount;
7595 m_CurCommandInPipe = 0;
7596 m_CurRenderCallCountInPipe = 0;
7597 }
7598
7599 void EndCommands() override
7600 {
7601 FinishRenderThreads();
7602 m_CommandsInPipe = 0;
7603 m_RenderCallsInPipe = 0;
7604 }
7605
7606 /****************
7607 * RENDER THREADS
7608 *****************/
7609
	// Entry point of a helper render thread. Signals startup to the spawning
	// thread (Cmd_PreInit), then loops: wait for work (or shutdown), execute
	// the handed-over command list, and end this thread's secondary command
	// buffer if anything was recorded into it. The mutex is held for the whole
	// loop except while blocked in the condition-variable wait.
	void RunThread(size_t ThreadIndex)
	{
		auto *pThread = m_vpRenderThreads[ThreadIndex].get();
		std::unique_lock<std::mutex> Lock(pThread->m_Mutex);
		// tell Cmd_PreInit this thread is up and running
		pThread->m_Started = true;
		pThread->m_Cond.notify_one();

		while(!pThread->m_Finished)
		{
			// sleep until there is rendering work or a shutdown request
			pThread->m_Cond.wait(Lock, [pThread]() -> bool { return pThread->m_IsRendering || pThread->m_Finished; });
			pThread->m_Cond.notify_one();

			// set this to true, if you want to benchmark the render thread times
			static constexpr bool BENCHMARK_RENDER_THREADS = false;
			std::chrono::nanoseconds ThreadRenderTime = 0ns;
			if(IsVerbose() && BENCHMARK_RENDER_THREADS)
			{
				ThreadRenderTime = time_get_nanoseconds();
			}

			if(!pThread->m_Finished)
			{
				bool HasErrorFromCmd = false;
				// execute every queued command via its registered callback
				for(auto &NextCmd : m_vvThreadCommandLists[ThreadIndex])
				{
					if(!m_aCommandCallbacks[CommandBufferCMDOff(NextCmd.m_Command)].m_CommandCB(NextCmd.m_pRawCommand, NextCmd))
					{
						// an error occurred, the thread will not continue execution
						HasErrorFromCmd = true;
						break;
					}
				}
				m_vvThreadCommandLists[ThreadIndex].clear();

				// index + 1: slot 0 presumably belongs to the main thread — confirm
				if(!HasErrorFromCmd && m_vvUsedThreadDrawCommandBuffer[ThreadIndex + 1][m_CurImageIndex])
				{
					auto &GraphicThreadCommandBuffer = m_vvThreadDrawCommandBuffers[ThreadIndex + 1][m_CurImageIndex];
					vkEndCommandBuffer(GraphicThreadCommandBuffer);
				}
			}

			if(IsVerbose() && BENCHMARK_RENDER_THREADS)
			{
				log_debug("gfx/vulkan", "Render thread %" PRIzu " took %" PRId64 " ns to finish.", ThreadIndex, (int64_t)(time_get_nanoseconds() - ThreadRenderTime).count());
			}

			// mark this thread's work as done (observed while the mutex is held)
			pThread->m_IsRendering = false;
		}
	}
7659};
7660
7661CCommandProcessorFragment_GLBase *CreateVulkanCommandProcessorFragment()
7662{
7663 return new CCommandProcessorFragment_Vulkan();
7664}
7665
7666#endif
7667