1#if defined(CONF_BACKEND_VULKAN)
2
3#include <base/log.h>
4#include <base/math.h>
5#include <base/system.h>
6
7#include <engine/client/backend/backend_base.h>
8#include <engine/client/backend/vulkan/backend_vulkan.h>
9#include <engine/client/backend_sdl.h>
10#include <engine/client/graphics_threaded.h>
11#include <engine/gfx/image_manipulation.h>
12#include <engine/graphics.h>
13#include <engine/shared/config.h>
14#include <engine/shared/localization.h>
15#include <engine/storage.h>
16
17#include <SDL_video.h>
18#include <SDL_vulkan.h>
19#include <vulkan/vk_platform.h>
20#include <vulkan/vulkan_core.h>
21
22#include <algorithm>
23#include <array>
24#include <condition_variable>
25#include <cstddef>
26#include <cstdlib>
27#include <functional>
28#include <limits>
29#include <map>
30#include <memory>
31#include <mutex>
32#include <optional>
33#include <set>
34#include <string>
35#include <thread>
36#include <unordered_map>
37#include <utility>
38#include <vector>
39
40#ifndef VK_API_VERSION_MAJOR
41#define VK_API_VERSION_MAJOR VK_VERSION_MAJOR
42#define VK_API_VERSION_MINOR VK_VERSION_MINOR
43#define VK_API_VERSION_PATCH VK_VERSION_PATCH
44#endif
45
46using namespace std::chrono_literals;
47
48class CCommandProcessorFragment_Vulkan : public CCommandProcessorFragment_GLBase
49{
	// Classifies what a chunk of device memory is used for; used by the
	// Verbose(De)AllocatedMemory logging helpers to label log output.
	enum EMemoryBlockUsage
	{
		MEMORY_BLOCK_USAGE_TEXTURE = 0,
		MEMORY_BLOCK_USAGE_BUFFER,
		MEMORY_BLOCK_USAGE_STREAM,
		MEMORY_BLOCK_USAGE_STAGING,

		// whenever dummy is used, make sure to deallocate all memory
		MEMORY_BLOCK_USAGE_DUMMY,
	};
60
61 [[nodiscard]] bool IsVerbose()
62 {
63 return g_Config.m_DbgGfx == DEBUG_GFX_MODE_VERBOSE || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL;
64 }
65
66 void VerboseAllocatedMemory(VkDeviceSize Size, size_t FrameImageIndex, EMemoryBlockUsage MemUsage) const
67 {
68 const char *pUsage = "unknown";
69 switch(MemUsage)
70 {
71 case MEMORY_BLOCK_USAGE_TEXTURE:
72 pUsage = "texture";
73 break;
74 case MEMORY_BLOCK_USAGE_BUFFER:
75 pUsage = "buffer";
76 break;
77 case MEMORY_BLOCK_USAGE_STREAM:
78 pUsage = "stream";
79 break;
80 case MEMORY_BLOCK_USAGE_STAGING:
81 pUsage = "staging buffer";
82 break;
83 default: break;
84 }
85 dbg_msg(sys: "vulkan", fmt: "allocated chunk of memory with size: %" PRIzu " for frame %" PRIzu " (%s)", (size_t)Size, (size_t)m_CurImageIndex, pUsage);
86 }
87
88 void VerboseDeallocatedMemory(VkDeviceSize Size, size_t FrameImageIndex, EMemoryBlockUsage MemUsage) const
89 {
90 const char *pUsage = "unknown";
91 switch(MemUsage)
92 {
93 case MEMORY_BLOCK_USAGE_TEXTURE:
94 pUsage = "texture";
95 break;
96 case MEMORY_BLOCK_USAGE_BUFFER:
97 pUsage = "buffer";
98 break;
99 case MEMORY_BLOCK_USAGE_STREAM:
100 pUsage = "stream";
101 break;
102 case MEMORY_BLOCK_USAGE_STAGING:
103 pUsage = "staging buffer";
104 break;
105 default: break;
106 }
107 dbg_msg(sys: "vulkan", fmt: "deallocated chunk of memory with size: %" PRIzu " for frame %" PRIzu " (%s)", (size_t)Size, (size_t)m_CurImageIndex, pUsage);
108 }
109
110 /************************
111 * STRUCT DEFINITIONS
112 ************************/
113
	// Distinct ids so each SMemoryBlock/SMemoryBlockCache use gets its own
	// template instantiation — and therefore its own independent cache type.
	static constexpr size_t STAGING_BUFFER_CACHE_ID = 0;
	static constexpr size_t STAGING_BUFFER_IMAGE_CACHE_ID = 1;
	static constexpr size_t VERTEX_BUFFER_CACHE_ID = 2;
	static constexpr size_t IMAGE_BUFFER_CACHE_ID = 3;
118
	// A device memory allocation together with its size and usage tag.
	struct SDeviceMemoryBlock
	{
		VkDeviceMemory m_Mem = VK_NULL_HANDLE;
		VkDeviceSize m_Size = 0;
		// NOTE(review): deliberately(?) left uninitialized — must be set
		// before the block is used; confirm at allocation sites
		EMemoryBlockUsage m_UsageType;
	};
125
	struct SDeviceDescriptorPools;

	// A descriptor set plus a back reference to the pool collection (and the
	// index of the pool within it) it was allocated from.
	struct SDeviceDescriptorSet
	{
		VkDescriptorSet m_Descriptor = VK_NULL_HANDLE;
		SDeviceDescriptorPools *m_pPools = nullptr;
		// max() is the default "not allocated from any pool" marker
		size_t m_PoolIndex = std::numeric_limits<size_t>::max();
	};
134
135 struct SDeviceDescriptorPool
136 {
137 VkDescriptorPool m_Pool;
138 VkDeviceSize m_Size = 0;
139 VkDeviceSize m_CurSize = 0;
140 };
141
	// A growable list of descriptor pools sharing one allocation granularity.
	struct SDeviceDescriptorPools
	{
		std::vector<SDeviceDescriptorPool> m_vPools;
		// size used when a new pool has to be created
		VkDeviceSize m_DefaultAllocSize = 0;
		// whether these pools serve uniform buffer descriptor sets
		bool m_IsUniformPool = false;
	};
148
149 // some mix of queue and binary tree
150 struct SMemoryHeap
151 {
152 struct SMemoryHeapElement;
153 struct SMemoryHeapQueueElement
154 {
155 size_t m_AllocationSize;
156 // only useful information for the heap
157 size_t m_OffsetInHeap;
158 // useful for the user of this element
159 size_t m_OffsetToAlign;
160 SMemoryHeapElement *m_pElementInHeap;
161 [[nodiscard]] bool operator>(const SMemoryHeapQueueElement &Other) const { return m_AllocationSize > Other.m_AllocationSize; }
162 struct SMemoryHeapQueueElementFind
163 {
164 // respects alignment requirements
165 constexpr bool operator()(const SMemoryHeapQueueElement &Val, const std::pair<size_t, size_t> &Other) const
166 {
167 auto AllocSize = Other.first;
168 auto AllocAlignment = Other.second;
169 size_t ExtraSizeAlign = Val.m_OffsetInHeap % AllocAlignment;
170 if(ExtraSizeAlign != 0)
171 ExtraSizeAlign = AllocAlignment - ExtraSizeAlign;
172 size_t RealAllocSize = AllocSize + ExtraSizeAlign;
173 return Val.m_AllocationSize < RealAllocSize;
174 }
175 };
176 };
177
178 typedef std::multiset<SMemoryHeapQueueElement, std::greater<>> TMemoryHeapQueue;
179
180 struct SMemoryHeapElement
181 {
182 size_t m_AllocationSize;
183 size_t m_Offset;
184 SMemoryHeapElement *m_pParent;
185 std::unique_ptr<SMemoryHeapElement> m_pLeft;
186 std::unique_ptr<SMemoryHeapElement> m_pRight;
187
188 bool m_InUse;
189 TMemoryHeapQueue::iterator m_InQueue;
190 };
191
192 SMemoryHeapElement m_Root;
193 TMemoryHeapQueue m_Elements;
194
195 void Init(size_t Size, size_t Offset)
196 {
197 m_Root.m_AllocationSize = Size;
198 m_Root.m_Offset = Offset;
199 m_Root.m_pParent = nullptr;
200 m_Root.m_InUse = false;
201
202 SMemoryHeapQueueElement QueueEl;
203 QueueEl.m_AllocationSize = Size;
204 QueueEl.m_OffsetInHeap = Offset;
205 QueueEl.m_OffsetToAlign = Offset;
206 QueueEl.m_pElementInHeap = &m_Root;
207 m_Root.m_InQueue = m_Elements.insert(x: QueueEl);
208 }
209
210 [[nodiscard]] bool Allocate(size_t AllocSize, size_t AllocAlignment, SMemoryHeapQueueElement &AllocatedMemory)
211 {
212 if(m_Elements.empty())
213 {
214 return false;
215 }
216 else
217 {
218 // check if there is enough space in this instance
219 if(SMemoryHeapQueueElement::SMemoryHeapQueueElementFind{}(*m_Elements.begin(), std::make_pair(x&: AllocSize, y&: AllocAlignment)))
220 {
221 return false;
222 }
223 else
224 {
225 // see SMemoryHeapQueueElement::operator>
226 SMemoryHeapQueueElement FindAllocSize;
227 FindAllocSize.m_AllocationSize = AllocSize;
228 // find upper bound for a allocation size
229 auto Upper = m_Elements.upper_bound(x: FindAllocSize);
230 // then find the first entry that respects alignment, this is a linear search!
231 auto FoundEl = std::lower_bound(first: std::make_reverse_iterator(i: Upper), last: m_Elements.rend(), val: std::make_pair(x&: AllocSize, y&: AllocAlignment), comp: SMemoryHeapQueueElement::SMemoryHeapQueueElementFind{});
232
233 auto TopEl = *FoundEl;
234 m_Elements.erase(position: TopEl.m_pElementInHeap->m_InQueue);
235
236 TopEl.m_pElementInHeap->m_InUse = true;
237
238 // calculate the real alloc size + alignment offset
239 size_t ExtraSizeAlign = TopEl.m_OffsetInHeap % AllocAlignment;
240 if(ExtraSizeAlign != 0)
241 ExtraSizeAlign = AllocAlignment - ExtraSizeAlign;
242 size_t RealAllocSize = AllocSize + ExtraSizeAlign;
243
244 // the heap element gets children
245 TopEl.m_pElementInHeap->m_pLeft = std::make_unique<SMemoryHeapElement>();
246 TopEl.m_pElementInHeap->m_pLeft->m_AllocationSize = RealAllocSize;
247 TopEl.m_pElementInHeap->m_pLeft->m_Offset = TopEl.m_OffsetInHeap;
248 TopEl.m_pElementInHeap->m_pLeft->m_pParent = TopEl.m_pElementInHeap;
249 TopEl.m_pElementInHeap->m_pLeft->m_InUse = true;
250
251 if(RealAllocSize < TopEl.m_AllocationSize)
252 {
253 SMemoryHeapQueueElement RemainingEl;
254 RemainingEl.m_OffsetInHeap = TopEl.m_OffsetInHeap + RealAllocSize;
255 RemainingEl.m_AllocationSize = TopEl.m_AllocationSize - RealAllocSize;
256
257 TopEl.m_pElementInHeap->m_pRight = std::make_unique<SMemoryHeapElement>();
258 TopEl.m_pElementInHeap->m_pRight->m_AllocationSize = RemainingEl.m_AllocationSize;
259 TopEl.m_pElementInHeap->m_pRight->m_Offset = RemainingEl.m_OffsetInHeap;
260 TopEl.m_pElementInHeap->m_pRight->m_pParent = TopEl.m_pElementInHeap;
261 TopEl.m_pElementInHeap->m_pRight->m_InUse = false;
262
263 RemainingEl.m_pElementInHeap = TopEl.m_pElementInHeap->m_pRight.get();
264 RemainingEl.m_pElementInHeap->m_InQueue = m_Elements.insert(x: RemainingEl);
265 }
266
267 AllocatedMemory.m_pElementInHeap = TopEl.m_pElementInHeap->m_pLeft.get();
268 AllocatedMemory.m_AllocationSize = RealAllocSize;
269 AllocatedMemory.m_OffsetInHeap = TopEl.m_OffsetInHeap;
270 AllocatedMemory.m_OffsetToAlign = TopEl.m_OffsetInHeap + ExtraSizeAlign;
271 return true;
272 }
273 }
274 }
275
276 void Free(const SMemoryHeapQueueElement &AllocatedMemory)
277 {
278 bool ContinueFree = true;
279 SMemoryHeapQueueElement ThisEl = AllocatedMemory;
280 while(ContinueFree)
281 {
282 // first check if the other block is in use, if not merge them again
283 SMemoryHeapElement *pThisHeapObj = ThisEl.m_pElementInHeap;
284 SMemoryHeapElement *pThisParent = pThisHeapObj->m_pParent;
285 pThisHeapObj->m_InUse = false;
286 SMemoryHeapElement *pOtherHeapObj = nullptr;
287 if(pThisParent != nullptr && pThisHeapObj == pThisParent->m_pLeft.get())
288 pOtherHeapObj = pThisHeapObj->m_pParent->m_pRight.get();
289 else if(pThisParent != nullptr)
290 pOtherHeapObj = pThisHeapObj->m_pParent->m_pLeft.get();
291
292 if((pThisParent != nullptr && pOtherHeapObj == nullptr) || (pOtherHeapObj != nullptr && !pOtherHeapObj->m_InUse))
293 {
294 // merge them
295 if(pOtherHeapObj != nullptr)
296 {
297 m_Elements.erase(position: pOtherHeapObj->m_InQueue);
298 pOtherHeapObj->m_InUse = false;
299 }
300
301 SMemoryHeapQueueElement ParentEl;
302 ParentEl.m_OffsetInHeap = pThisParent->m_Offset;
303 ParentEl.m_AllocationSize = pThisParent->m_AllocationSize;
304 ParentEl.m_pElementInHeap = pThisParent;
305
306 pThisParent->m_pLeft = nullptr;
307 pThisParent->m_pRight = nullptr;
308
309 ThisEl = ParentEl;
310 }
311 else
312 {
313 // else just put this back into queue
314 ThisEl.m_pElementInHeap->m_InQueue = m_Elements.insert(x: ThisEl);
315 ContinueFree = false;
316 }
317 }
318 }
319
320 [[nodiscard]] bool IsUnused() const
321 {
322 return !m_Root.m_InUse;
323 }
324 };
325
	// A sub-allocation out of an SMemoryHeap, optionally together with the
	// VkBuffer it lives in and the mapped pointer of that buffer.
	template<size_t Id>
	struct SMemoryBlock
	{
		SMemoryHeap::SMemoryHeapQueueElement m_HeapData;

		// bytes the user of the block currently occupies — presumably
		// <= m_HeapData.m_AllocationSize; confirm at call sites
		VkDeviceSize m_UsedSize;

		// optional
		VkBuffer m_Buffer;

		SDeviceMemoryBlock m_BufferMem;
		void *m_pMappedBuffer;

		// whether the block came from a cached heap (then m_pHeap is set)
		bool m_IsCached;
		SMemoryHeap *m_pHeap;
	};
342
	// Memory block for images; additionally remembers the memory bits
	// (NOTE(review): presumably the Vulkan memory type bits the image was
	// allocated with — confirm at allocation sites).
	template<size_t Id>
	struct SMemoryImageBlock : public SMemoryBlock<Id>
	{
		uint32_t m_ImageMemoryBits;
	};
348
349 template<size_t Id>
350 struct SMemoryBlockCache
351 {
352 struct SMemoryCacheType
353 {
354 struct SMemoryCacheHeap
355 {
356 SMemoryHeap m_Heap;
357 VkBuffer m_Buffer;
358
359 SDeviceMemoryBlock m_BufferMem;
360 void *m_pMappedBuffer;
361 };
362 std::vector<SMemoryCacheHeap *> m_vpMemoryHeaps;
363 };
364 SMemoryCacheType m_MemoryCaches;
365 std::vector<std::vector<SMemoryBlock<Id>>> m_vvFrameDelayedCachedBufferCleanup;
366
367 bool m_CanShrink = false;
368
369 void Init(size_t SwapChainImageCount)
370 {
371 m_vvFrameDelayedCachedBufferCleanup.resize(SwapChainImageCount);
372 }
373
374 void DestroyFrameData(size_t ImageCount)
375 {
376 for(size_t i = 0; i < ImageCount; ++i)
377 Cleanup(ImgIndex: i);
378 m_vvFrameDelayedCachedBufferCleanup.clear();
379 }
380
381 void Destroy(VkDevice &Device)
382 {
383 for(auto HeapIterator = m_MemoryCaches.m_vpMemoryHeaps.begin(); HeapIterator != m_MemoryCaches.m_vpMemoryHeaps.end();)
384 {
385 auto *pHeap = *HeapIterator;
386 if(pHeap->m_pMappedBuffer != nullptr)
387 vkUnmapMemory(Device, pHeap->m_BufferMem.m_Mem);
388 if(pHeap->m_Buffer != VK_NULL_HANDLE)
389 vkDestroyBuffer(Device, pHeap->m_Buffer, nullptr);
390 vkFreeMemory(Device, pHeap->m_BufferMem.m_Mem, nullptr);
391
392 delete pHeap;
393 HeapIterator = m_MemoryCaches.m_vpMemoryHeaps.erase(HeapIterator);
394 }
395
396 m_MemoryCaches.m_vpMemoryHeaps.clear();
397 m_vvFrameDelayedCachedBufferCleanup.clear();
398 }
399
400 void Cleanup(size_t ImgIndex)
401 {
402 for(auto &MemBlock : m_vvFrameDelayedCachedBufferCleanup[ImgIndex])
403 {
404 MemBlock.m_UsedSize = 0;
405 MemBlock.m_pHeap->Free(MemBlock.m_HeapData);
406
407 m_CanShrink = true;
408 }
409 m_vvFrameDelayedCachedBufferCleanup[ImgIndex].clear();
410 }
411
412 void FreeMemBlock(SMemoryBlock<Id> &Block, size_t ImgIndex)
413 {
414 m_vvFrameDelayedCachedBufferCleanup[ImgIndex].push_back(Block);
415 }
416
417 // returns the total free'd memory
418 size_t Shrink(VkDevice &Device)
419 {
420 size_t FreedMemory = 0;
421 if(m_CanShrink)
422 {
423 m_CanShrink = false;
424 if(m_MemoryCaches.m_vpMemoryHeaps.size() > 1)
425 {
426 for(auto HeapIterator = m_MemoryCaches.m_vpMemoryHeaps.begin(); HeapIterator != m_MemoryCaches.m_vpMemoryHeaps.end();)
427 {
428 auto *pHeap = *HeapIterator;
429 if(pHeap->m_Heap.IsUnused())
430 {
431 if(pHeap->m_pMappedBuffer != nullptr)
432 vkUnmapMemory(Device, pHeap->m_BufferMem.m_Mem);
433 if(pHeap->m_Buffer != VK_NULL_HANDLE)
434 vkDestroyBuffer(Device, pHeap->m_Buffer, nullptr);
435 vkFreeMemory(Device, pHeap->m_BufferMem.m_Mem, nullptr);
436 FreedMemory += pHeap->m_BufferMem.m_Size;
437
438 delete pHeap;
439 HeapIterator = m_MemoryCaches.m_vpMemoryHeaps.erase(HeapIterator);
440 if(m_MemoryCaches.m_vpMemoryHeaps.size() == 1)
441 break;
442 }
443 else
444 ++HeapIterator;
445 }
446 }
447 }
448
449 return FreedMemory;
450 }
451 };
452
	// All GPU objects belonging to one texture: the standard 2D image, an
	// optional second image (the "3D" variant), and the descriptor sets
	// referencing them.
	struct CTexture
	{
		VkImage m_Img = VK_NULL_HANDLE;
		SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> m_ImgMem;
		VkImageView m_ImgView = VK_NULL_HANDLE;
		// presumably one sampler per address mode (repeat / clamp-to-edge,
		// matching VULKAN_BACKEND_ADDRESS_MODE_COUNT == 2) — confirm
		VkSampler m_aSamplers[2] = {VK_NULL_HANDLE, VK_NULL_HANDLE};

		VkImage m_Img3D = VK_NULL_HANDLE;
		SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> m_Img3DMem;
		VkImageView m_Img3DView = VK_NULL_HANDLE;
		VkSampler m_Sampler3D = VK_NULL_HANDLE;

		uint32_t m_Width = 0;
		uint32_t m_Height = 0;
		// how often the source image was rescaled — TODO(review): confirm
		uint32_t m_RescaleCount = 0;

		uint32_t m_MipMapCount = 1;

		std::array<SDeviceDescriptorSet, 2> m_aVKStandardTexturedDescrSets;
		SDeviceDescriptorSet m_VKStandard3DTexturedDescrSet;
		SDeviceDescriptorSet m_VKTextDescrSet;
	};
475
	// Owning memory block of a vertex buffer object.
	struct SBufferObject
	{
		SMemoryBlock<VERTEX_BUFFER_CACHE_ID> m_Mem;
	};
480
	// Buffer object as exposed to the engine; may point at a streamed buffer
	// (m_CurBuffer/m_CurBufferOffset) instead of its own memory.
	struct SBufferObjectFrame
	{
		SBufferObject m_BufferObject;

		// since stream buffers can be used the cur buffer should always be used for rendering
		bool m_IsStreamedBuffer = false;
		VkBuffer m_CurBuffer = VK_NULL_HANDLE;
		size_t m_CurBufferOffset = 0;
	};
490
	// Maps a buffer container handle to the buffer object it wraps
	// (index into m_vBufferObjects).
	struct SBufferContainer
	{
		int m_BufferObjectIndex;
	};
495
	// One streamed buffer of a frame: a Vulkan buffer plus the write window
	// (offset/size/used size) and the CPU pointer it is mapped at.
	struct SFrameBuffers
	{
		VkBuffer m_Buffer;
		SDeviceMemoryBlock m_BufferMem;
		size_t m_OffsetInBuffer = 0;
		size_t m_Size;
		size_t m_UsedSize;
		// CPU-visible pointer into the mapped buffer memory
		uint8_t *m_pMappedBufferData;

		SFrameBuffers(VkBuffer Buffer, SDeviceMemoryBlock BufferMem, size_t OffsetInBuffer, size_t Size, size_t UsedSize, uint8_t *pMappedBufferData) :
			m_Buffer(Buffer), m_BufferMem(BufferMem), m_OffsetInBuffer(OffsetInBuffer), m_Size(Size), m_UsedSize(UsedSize), m_pMappedBufferData(pMappedBufferData)
		{
		}
	};
510
	// Streamed uniform buffer; additionally carries the two descriptor sets
	// bound for this buffer.
	struct SFrameUniformBuffers : public SFrameBuffers
	{
		std::array<SDeviceDescriptorSet, 2> m_aUniformSets;

		SFrameUniformBuffers(VkBuffer Buffer, SDeviceMemoryBlock BufferMem, size_t OffsetInBuffer, size_t Size, size_t UsedSize, uint8_t *pMappedBufferData) :
			SFrameBuffers(Buffer, BufferMem, OffsetInBuffer, Size, UsedSize, pMappedBufferData) {}
	};
518
519 template<typename TName>
520 struct SStreamMemory
521 {
522 typedef std::vector<std::vector<TName>> TBufferObjectsOfFrame;
523 typedef std::vector<std::vector<VkMappedMemoryRange>> TMemoryMapRangesOfFrame;
524 typedef std::vector<size_t> TStreamUseCount;
525 TBufferObjectsOfFrame m_vvBufferObjectsOfFrame;
526 TMemoryMapRangesOfFrame m_vvBufferObjectsOfFrameRangeData;
527 TStreamUseCount m_vCurrentUsedCount;
528
529 std::vector<TName> &GetBuffers(size_t FrameImageIndex)
530 {
531 return m_vvBufferObjectsOfFrame[FrameImageIndex];
532 }
533
534 std::vector<VkMappedMemoryRange> &GetRanges(size_t FrameImageIndex)
535 {
536 return m_vvBufferObjectsOfFrameRangeData[FrameImageIndex];
537 }
538
539 size_t GetUsedCount(size_t FrameImageIndex)
540 {
541 return m_vCurrentUsedCount[FrameImageIndex];
542 }
543
544 void IncreaseUsedCount(size_t FrameImageIndex)
545 {
546 ++m_vCurrentUsedCount[FrameImageIndex];
547 }
548
549 [[nodiscard]] bool IsUsed(size_t FrameImageIndex)
550 {
551 return GetUsedCount(FrameImageIndex) > 0;
552 }
553
554 void ResetFrame(size_t FrameImageIndex)
555 {
556 m_vCurrentUsedCount[FrameImageIndex] = 0;
557 }
558
559 void Init(size_t FrameImageCount)
560 {
561 m_vvBufferObjectsOfFrame.resize(FrameImageCount);
562 m_vvBufferObjectsOfFrameRangeData.resize(new_size: FrameImageCount);
563 m_vCurrentUsedCount.resize(new_size: FrameImageCount);
564 }
565
566 typedef std::function<void(size_t, TName &)> TDestroyBufferFunc;
567
568 void Destroy(TDestroyBufferFunc &&DestroyBuffer)
569 {
570 size_t ImageIndex = 0;
571 for(auto &vBuffersOfFrame : m_vvBufferObjectsOfFrame)
572 {
573 for(auto &BufferOfFrame : vBuffersOfFrame)
574 {
575 VkDeviceMemory BufferMem = BufferOfFrame.m_BufferMem.m_Mem;
576 DestroyBuffer(ImageIndex, BufferOfFrame);
577
578 // delete similar buffers
579 for(auto &BufferOfFrameDel : vBuffersOfFrame)
580 {
581 if(BufferOfFrameDel.m_BufferMem.m_Mem == BufferMem)
582 {
583 BufferOfFrameDel.m_Buffer = VK_NULL_HANDLE;
584 BufferOfFrameDel.m_BufferMem.m_Mem = VK_NULL_HANDLE;
585 }
586 }
587 }
588 ++ImageIndex;
589 }
590 m_vvBufferObjectsOfFrame.clear();
591 m_vvBufferObjectsOfFrameRangeData.clear();
592 m_vCurrentUsedCount.clear();
593 }
594 };
595
596 struct SShaderModule
597 {
598 VkShaderModule m_VertShaderModule = VK_NULL_HANDLE;
599 VkShaderModule m_FragShaderModule = VK_NULL_HANDLE;
600
601 VkDevice m_VKDevice = VK_NULL_HANDLE;
602
603 ~SShaderModule()
604 {
605 if(m_VKDevice != VK_NULL_HANDLE)
606 {
607 if(m_VertShaderModule != VK_NULL_HANDLE)
608 vkDestroyShaderModule(device: m_VKDevice, shaderModule: m_VertShaderModule, pAllocator: nullptr);
609
610 if(m_FragShaderModule != VK_NULL_HANDLE)
611 vkDestroyShaderModule(device: m_VKDevice, shaderModule: m_FragShaderModule, pAllocator: nullptr);
612 }
613 }
614 };
615
	// Texture address modes the pipelines are specialized for.
	enum EVulkanBackendAddressModes
	{
		VULKAN_BACKEND_ADDRESS_MODE_REPEAT = 0,
		VULKAN_BACKEND_ADDRESS_MODE_CLAMP_EDGES,

		VULKAN_BACKEND_ADDRESS_MODE_COUNT,
	};

	// Blend modes the pipelines are specialized for.
	// ("ADDITATIVE" [sic] — the identifier is kept for compatibility.)
	enum EVulkanBackendBlendModes
	{
		VULKAN_BACKEND_BLEND_MODE_ALPHA = 0,
		VULKAN_BACKEND_BLEND_MODE_NONE,
		VULKAN_BACKEND_BLEND_MODE_ADDITATIVE,

		VULKAN_BACKEND_BLEND_MODE_COUNT,
	};

	// Whether a pipeline uses dynamic scissor/viewport state.
	enum EVulkanBackendClipModes
	{
		VULKAN_BACKEND_CLIP_MODE_NONE = 0,
		VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT,

		VULKAN_BACKEND_CLIP_MODE_COUNT,
	};

	// Whether a pipeline samples a texture.
	enum EVulkanBackendTextureModes
	{
		VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED = 0,
		VULKAN_BACKEND_TEXTURE_MODE_TEXTURED,

		VULKAN_BACKEND_TEXTURE_MODE_COUNT,
	};
648
649 struct SPipelineContainer
650 {
651 // 3 blend modes - 2 viewport & scissor modes - 2 texture modes
652 std::array<std::array<std::array<VkPipelineLayout, VULKAN_BACKEND_TEXTURE_MODE_COUNT>, VULKAN_BACKEND_CLIP_MODE_COUNT>, VULKAN_BACKEND_BLEND_MODE_COUNT> m_aaaPipelineLayouts;
653 std::array<std::array<std::array<VkPipeline, VULKAN_BACKEND_TEXTURE_MODE_COUNT>, VULKAN_BACKEND_CLIP_MODE_COUNT>, VULKAN_BACKEND_BLEND_MODE_COUNT> m_aaaPipelines;
654
655 SPipelineContainer()
656 {
657 for(auto &aaPipeLayouts : m_aaaPipelineLayouts)
658 {
659 for(auto &aPipeLayouts : aaPipeLayouts)
660 {
661 for(auto &PipeLayout : aPipeLayouts)
662 {
663 PipeLayout = VK_NULL_HANDLE;
664 }
665 }
666 }
667 for(auto &aaPipe : m_aaaPipelines)
668 {
669 for(auto &aPipe : aaPipe)
670 {
671 for(auto &Pipe : aPipe)
672 {
673 Pipe = VK_NULL_HANDLE;
674 }
675 }
676 }
677 }
678
679 void Destroy(VkDevice &Device)
680 {
681 for(auto &aaPipeLayouts : m_aaaPipelineLayouts)
682 {
683 for(auto &aPipeLayouts : aaPipeLayouts)
684 {
685 for(auto &PipeLayout : aPipeLayouts)
686 {
687 if(PipeLayout != VK_NULL_HANDLE)
688 vkDestroyPipelineLayout(device: Device, pipelineLayout: PipeLayout, pAllocator: nullptr);
689 PipeLayout = VK_NULL_HANDLE;
690 }
691 }
692 }
693 for(auto &aaPipe : m_aaaPipelines)
694 {
695 for(auto &aPipe : aaPipe)
696 {
697 for(auto &Pipe : aPipe)
698 {
699 if(Pipe != VK_NULL_HANDLE)
700 vkDestroyPipeline(device: Device, pipeline: Pipe, pAllocator: nullptr);
701 Pipe = VK_NULL_HANDLE;
702 }
703 }
704 }
705 }
706 };
707
	/*******************************
	 * UNIFORM PUSH CONSTANT LAYOUTS
	 ********************************/
	// NOTE(review): member order and padding of these structs must match the
	// corresponding shader-side declarations — confirm against the shaders.

	// 4x2 float position transform shared by most vertex shaders
	struct SUniformGPos
	{
		float m_aPos[4 * 2];
	};

	// text shader: position transform plus the texture size
	struct SUniformGTextPos
	{
		float m_aPos[4 * 2];
		float m_TextureSize;
	};

	typedef vec3 SUniformTextGFragmentOffset;

	// fill and outline colors of the rendered text
	struct SUniformTextGFragmentConstants
	{
		ColorRGBA m_TextColor;
		ColorRGBA m_TextOutlineColor;
	};

	struct SUniformTextFragment
	{
		SUniformTextGFragmentConstants m_Constants;
	};
735
	struct SUniformTileGPos
	{
		float m_aPos[4 * 2];
	};

	// border tiles additionally get a per-draw offset and scale
	struct SUniformTileGPosBorder : public SUniformTileGPos
	{
		vec2 m_Offset;
		vec2 m_Scale;
	};

	typedef ColorRGBA SUniformTileGVertColor;

	// pads the tile uniform data from 48 up to 64 bytes
	struct SUniformTileGVertColorAlign
	{
		float m_aPad[(64 - 48) / 4];
	};
753
	struct SUniformPrimExGPosRotationless
	{
		float m_aPos[4 * 2];
	};

	// rotated variant: rotation happens around m_Center
	struct SUniformPrimExGPos : public SUniformPrimExGPosRotationless
	{
		vec2 m_Center;
		float m_Rotation;
	};

	typedef ColorRGBA SUniformPrimExGVertColor;

	// pads the primex uniform data from 44 up to 48 bytes
	struct SUniformPrimExGVertColorAlign
	{
		float m_aPad[(48 - 44) / 4];
	};
771
	struct SUniformSpriteMultiGPos
	{
		float m_aPos[4 * 2];
		vec2 m_Center;
	};

	typedef ColorRGBA SUniformSpriteMultiGVertColor;

	// pads the sprite-multi uniform data from 40 up to 48 bytes
	struct SUniformSpriteMultiGVertColorAlign
	{
		float m_aPad[(48 - 40) / 4];
	};

	struct SUniformSpriteMultiPushGPosBase
	{
		float m_aPos[4 * 2];
		vec2 m_Center;
		vec2 m_Padding;
	};

	struct SUniformSpriteMultiPushGPos : public SUniformSpriteMultiPushGPosBase
	{
		// NOTE(review): presumably a variable-length tail (position/scale/
		// rotation per sprite) sized at push time — confirm at call sites
		vec4 m_aPSR[1];
	};

	typedef ColorRGBA SUniformSpriteMultiPushGVertColor;
798
	struct SUniformQuadGPosBase
	{
		float m_aPos[4 * 2];
		int32_t m_QuadOffset;
	};

	struct SUniformQuadPushGBufferObject
	{
		ColorRGBA m_VertColor;
		vec2 m_Offset;
		float m_Rotation;
		// keeps the struct size a multiple of 16 bytes
		float m_Padding;
	};

	struct SUniformQuadGroupedGPos
	{
		float m_aPos[4 * 2];
		SUniformQuadPushGBufferObject m_BOPush;
	};

	// NOTE(review): identical layout to SUniformQuadGPosBase — possibly kept
	// separate deliberately for the distinct shader; confirm before merging
	struct SUniformQuadGPos
	{
		float m_aPos[4 * 2];
		int32_t m_QuadOffset;
	};
824
	// Sampler variants kept by the backend; used to index m_aSamplers.
	enum ESupportedSamplerTypes
	{
		SUPPORTED_SAMPLER_TYPE_REPEAT = 0,
		SUPPORTED_SAMPLER_TYPE_CLAMP_TO_EDGE,
		SUPPORTED_SAMPLER_TYPE_2D_TEXTURE_ARRAY,

		SUPPORTED_SAMPLER_TYPE_COUNT,
	};
833
	// Cached binary of one shader file (presumably SPIR-V); stored in
	// m_ShaderFiles keyed by file name.
	struct SShaderFileCache
	{
		std::vector<uint8_t> m_vBinary;
	};
838
839 struct SSwapImgViewportExtent
840 {
841 VkExtent2D m_SwapImageViewport;
842 bool m_HasForcedViewport = false;
843 VkExtent2D m_ForcedViewport;
844
845 // the viewport of the resulting presented image on the screen
846 // if there is a forced viewport the resulting image is smaller
847 // than the full swap image size
848 VkExtent2D GetPresentedImageViewport() const
849 {
850 uint32_t ViewportWidth = m_SwapImageViewport.width;
851 uint32_t ViewportHeight = m_SwapImageViewport.height;
852 if(m_HasForcedViewport)
853 {
854 ViewportWidth = m_ForcedViewport.width;
855 ViewportHeight = m_ForcedViewport.height;
856 }
857
858 return {.width: ViewportWidth, .height: ViewportHeight};
859 }
860 };
861
	// Multisample color image (plus its memory and view) belonging to one
	// swap chain image.
	struct SSwapChainMultiSampleImage
	{
		VkImage m_Image = VK_NULL_HANDLE;
		SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> m_ImgMem;
		VkImageView m_ImgView = VK_NULL_HANDLE;
	};
868
869 /************************
870 * MEMBER VARIABLES
871 ************************/
872
	// cached shader binaries, keyed by file name
	std::unordered_map<std::string, SShaderFileCache> m_ShaderFiles;

	// memory caches per usage; image caches are additionally split by a
	// uint32_t key (presumably the image memory type bits — confirm)
	SMemoryBlockCache<STAGING_BUFFER_CACHE_ID> m_StagingBufferCache;
	SMemoryBlockCache<STAGING_BUFFER_IMAGE_CACHE_ID> m_StagingBufferCacheImage;
	SMemoryBlockCache<VERTEX_BUFFER_CACHE_ID> m_VertexBufferCache;
	std::map<uint32_t, SMemoryBlockCache<IMAGE_BUFFER_CACHE_ID>> m_ImageBufferCaches;

	// staging memory ranges written but not yet flushed
	std::vector<VkMappedMemoryRange> m_vNonFlushedStagingBufferRange;

	std::vector<CTexture> m_vTextures;

	// memory usage counters; owned outside of this class
	std::atomic<uint64_t> *m_pTextureMemoryUsage;
	std::atomic<uint64_t> *m_pBufferMemoryUsage;
	std::atomic<uint64_t> *m_pStreamMemoryUsage;
	std::atomic<uint64_t> *m_pStagingMemoryUsage;

	TTwGraphicsGpuList *m_pGpuList;

	int m_GlobalTextureLodBIAS;
	uint32_t m_MultiSamplingCount = 1;

	// multi sampling count requested for the next swap chain recreation;
	// max() means no change requested
	uint32_t m_NextMultiSamplingCount = std::numeric_limits<uint32_t>::max();

	bool m_RecreateSwapChain = false;
	bool m_SwapchainCreated = false;
	bool m_RenderingPaused = false;
	bool m_HasDynamicViewport = false;
	VkOffset2D m_DynamicViewportOffset;
	VkExtent2D m_DynamicViewportSize;

	// blitting capabilities of the device for the involved image formats
	bool m_AllowsLinearBlitting = false;
	bool m_OptimalSwapChainImageBlitting = false;
	bool m_OptimalRGBAImageBlitting = false;
	bool m_LinearRGBAImageBlitting = false;

	VkBuffer m_IndexBuffer;
	SDeviceMemoryBlock m_IndexBufferMemory;

	// index buffer for rendering, sized by primitive count
	VkBuffer m_RenderIndexBuffer;
	SDeviceMemoryBlock m_RenderIndexBufferMemory;
	size_t m_CurRenderIndexPrimitiveCount;

	// device limits/properties queried at initialization
	VkDeviceSize m_NonCoherentMemAlignment;
	VkDeviceSize m_OptimalImageCopyMemAlignment;
	uint32_t m_MaxTextureSize;
	uint32_t m_MaxSamplerAnisotropy;
	VkSampleCountFlags m_MaxMultiSample;

	uint32_t m_MinUniformAlign;

	// scratch buffers for pixel readback and screenshots
	std::vector<uint8_t> m_vReadPixelHelper;
	std::vector<uint8_t> m_vScreenshotHelper;

	// helper image with mapped memory used to read back the presented frame
	// (presumably for screenshots/pixel reads — confirm at usage sites)
	SDeviceMemoryBlock m_GetPresentedImgDataHelperMem;
	VkImage m_GetPresentedImgDataHelperImage = VK_NULL_HANDLE;
	uint8_t *m_pGetPresentedImgDataHelperMappedMemory = nullptr;
	VkDeviceSize m_GetPresentedImgDataHelperMappedLayoutOffset = 0;
	VkDeviceSize m_GetPresentedImgDataHelperMappedLayoutPitch = 0;
	uint32_t m_GetPresentedImgDataHelperWidth = 0;
	uint32_t m_GetPresentedImgDataHelperHeight = 0;
	VkFence m_GetPresentedImgDataHelperFence = VK_NULL_HANDLE;

	// one sampler per ESupportedSamplerTypes entry
	std::array<VkSampler, SUPPORTED_SAMPLER_TYPE_COUNT> m_aSamplers;
936
	class IStorage *m_pStorage;

	// buffer + memory (+ optional mapping) whose destruction is deferred
	struct SDelayedBufferCleanupItem
	{
		VkBuffer m_Buffer;
		SDeviceMemoryBlock m_Mem;
		void *m_pMappedData = nullptr;
	};

	// deferred-destruction lists, one per swap chain image
	std::vector<std::vector<SDelayedBufferCleanupItem>> m_vvFrameDelayedBufferCleanup;
	std::vector<std::vector<CTexture>> m_vvFrameDelayedTextureCleanup;
	std::vector<std::vector<std::pair<CTexture, CTexture>>> m_vvFrameDelayedTextTexturesCleanup;

	size_t m_ThreadCount = 1;
	static constexpr size_t MAIN_THREAD_INDEX = 0;
	// progress of distributing the current command pipe over render threads
	size_t m_CurCommandInPipe = 0;
	size_t m_CurRenderCallCountInPipe = 0;
	size_t m_CommandsInPipe = 0;
	size_t m_RenderCallsInPipe = 0;
	size_t m_LastCommandsInPipeThreadIndex = 0;

	// state of one render worker; the mutex/condition pair synchronizes
	// hand-off between the main thread and the worker
	struct SRenderThread
	{
		bool m_IsRendering = false;
		std::thread m_Thread;
		std::mutex m_Mutex;
		std::condition_variable m_Cond;
		bool m_Finished = false;
		bool m_Started = false;
	};
	std::vector<std::unique_ptr<SRenderThread>> m_vpRenderThreads;
968
private:
	// per swap chain image views, framebuffers and primary command buffers
	std::vector<VkImageView> m_vSwapChainImageViewList;
	std::vector<SSwapChainMultiSampleImage> m_vSwapChainMultiSamplingImages;
	std::vector<VkFramebuffer> m_vFramebufferList;
	std::vector<VkCommandBuffer> m_vMainDrawCommandBuffers;

	// per render thread, per image draw command buffers (outer index
	// presumably the thread — confirm against usage)
	std::vector<std::vector<VkCommandBuffer>> m_vvThreadDrawCommandBuffers;
	std::vector<VkCommandBuffer> m_vHelperThreadDrawCommandBuffers;
	std::vector<std::vector<bool>> m_vvUsedThreadDrawCommandBuffer;

	// command buffers for memory transfers, one per image, used lazily
	std::vector<VkCommandBuffer> m_vMemoryCommandBuffers;
	std::vector<bool> m_vUsedMemoryCommandBuffer;

	// swap chain synchronization primitives
	std::vector<VkSemaphore> m_vQueueSubmitSemaphores;
	std::vector<VkSemaphore> m_vBusyAcquireImageSemaphores;
	VkSemaphore m_AcquireImageSemaphore;

	std::vector<VkFence> m_vQueueSubmitFences;

	uint64_t m_CurFrame = 0;
	// last frame number each swap chain image was seen in
	std::vector<uint64_t> m_vImageLastFrameCheck;

	uint32_t m_LastPresentedSwapChainImageIndex;

	std::vector<SBufferObjectFrame> m_vBufferObjects;

	std::vector<SBufferContainer> m_vBufferContainers;

	// core Vulkan objects
	VkInstance m_VKInstance;
	VkPhysicalDevice m_VKGPU;
	uint32_t m_VKGraphicsQueueIndex = std::numeric_limits<uint32_t>::max();
	VkDevice m_VKDevice;
	VkQueue m_VKGraphicsQueue, m_VKPresentQueue;
	VkSurfaceKHR m_VKPresentSurface;
	SSwapImgViewportExtent m_VKSwapImgAndViewportExtent;

#ifdef VK_EXT_debug_utils
	VkDebugUtilsMessengerEXT m_DebugMessenger;
#endif

	// descriptor set layouts shared by the pipelines
	VkDescriptorSetLayout m_StandardTexturedDescriptorSetLayout;
	VkDescriptorSetLayout m_Standard3DTexturedDescriptorSetLayout;

	VkDescriptorSetLayout m_TextDescriptorSetLayout;

	VkDescriptorSetLayout m_SpriteMultiUniformDescriptorSetLayout;
	VkDescriptorSetLayout m_QuadUniformDescriptorSetLayout;

	// one pipeline container per shader program kind
	SPipelineContainer m_StandardPipeline;
	SPipelineContainer m_StandardLinePipeline;
	SPipelineContainer m_Standard3DPipeline;
	SPipelineContainer m_TextPipeline;
	SPipelineContainer m_TilePipeline;
	SPipelineContainer m_TileBorderPipeline;
	SPipelineContainer m_PrimExPipeline;
	SPipelineContainer m_PrimExRotationlessPipeline;
	SPipelineContainer m_SpriteMultiPipeline;
	SPipelineContainer m_SpriteMultiPushPipeline;
	SPipelineContainer m_QuadPipeline;
	SPipelineContainer m_QuadGroupedPipeline;

	// last bound pipeline (presumably per thread, to skip redundant binds)
	std::vector<VkPipeline> m_vLastPipeline;

	std::vector<VkCommandPool> m_vCommandPools;

	VkRenderPass m_VKRenderPass;

	VkSurfaceFormatKHR m_VKSurfFormat;

	SDeviceDescriptorPools m_StandardTextureDescrPool;
	SDeviceDescriptorPools m_TextTextureDescrPool;

	std::vector<SDeviceDescriptorPools> m_vUniformBufferDescrPools;

	VkSwapchainKHR m_VKSwapChain = VK_NULL_HANDLE;
	std::vector<VkImage> m_vSwapChainImages;
	uint32_t m_SwapChainImageCount = 0;

	// streamed vertex/uniform memory pools
	std::vector<SStreamMemory<SFrameBuffers>> m_vStreamedVertexBuffers;
	std::vector<SStreamMemory<SFrameUniformBuffers>> m_vStreamedUniformBuffers;

	// index of the swap chain image currently being rendered to
	uint32_t m_CurImageIndex = 0;

	uint32_t m_CanvasWidth;
	uint32_t m_CanvasHeight;

	SDL_Window *m_pWindow;

	std::array<float, 4> m_aClearColor = {0, 0, 0, 0};
1058
	// Pre-processed render command: everything a render thread needs to
	// execute one command.
	struct SRenderCommandExecuteBuffer
	{
		CCommandBuffer::ECommandBufferCMD m_Command;
		const CCommandBuffer::SCommand *m_pRawCommand;
		uint32_t m_ThreadIndex;

		// must be calculated when the buffer gets filled
		size_t m_EstimatedRenderCallCount = 0;

		// useful data
		VkBuffer m_Buffer;
		size_t m_BufferOff;
		std::array<SDeviceDescriptorSet, 2> m_aDescriptors;

		VkBuffer m_IndexBuffer;

		bool m_ClearColorInRenderThread = false;

		bool m_HasDynamicState = false;
		VkViewport m_Viewport;
		VkRect2D m_Scissor;
	};

	typedef std::vector<SRenderCommandExecuteBuffer> TCommandList;
	typedef std::vector<TCommandList> TThreadCommandList;

	// one command list per render thread
	TThreadCommandList m_vvThreadCommandLists;
	std::vector<bool> m_vThreadHelperHadCommands;

	// executes one command; returns bool (presumably false on failure)
	typedef std::function<bool(const CCommandBuffer::SCommand *, SRenderCommandExecuteBuffer &)> TCommandBufferCommandCallback;
	// fills the execute buffer from the raw command before execution
	typedef std::function<void(SRenderCommandExecuteBuffer &, const CCommandBuffer::SCommand *)> TCommandBufferFillExecuteBufferFunc;

	// dispatch entry for one command id
	struct SCommandCallback
	{
		bool m_IsRenderCommand;
		TCommandBufferFillExecuteBufferFunc m_FillExecuteBuffer;
		TCommandBufferCommandCallback m_CommandCB;
		// command should be considered handled after it executed
		bool m_CMDIsHandled = true;
	};
	std::array<SCommandCallback, static_cast<int>(CCommandBuffer::CMD_COUNT) - static_cast<int>(CCommandBuffer::CMD_FIRST)> m_aCommandCallbacks;
1100
1101protected:
1102 /************************
1103 * ERROR MANAGEMENT
1104 ************************/
	// guards m_Error/m_Warning and m_ErrorHelper
	std::mutex m_ErrWarnMutex;
	// backing storage for the string returned by CheckVulkanCriticalError
	std::string m_ErrorHelper;

	bool m_HasError = false;
	// whether errors are fatal at this point; false during initialization, where
	// errors are downgraded to warnings (see SetError)
	bool m_CanAssert = false;
1110
1111 /**
1112 * After an error occurred, the rendering stop as soon as possible
1113 * Always stop the current code execution after a call to this function (e.g. return false)
1114 */
1115 void SetError(EGfxErrorType ErrType, const char *pErr, const char *pErrStrExtra = nullptr)
1116 {
1117 std::unique_lock<std::mutex> Lock(m_ErrWarnMutex);
1118 SGfxErrorContainer::SError Err = {.m_RequiresTranslation: false, .m_Err: pErr};
1119 if(std::find(first: m_Error.m_vErrors.begin(), last: m_Error.m_vErrors.end(), val: Err) == m_Error.m_vErrors.end())
1120 m_Error.m_vErrors.emplace_back(args&: Err);
1121 if(pErrStrExtra != nullptr)
1122 {
1123 SGfxErrorContainer::SError ErrExtra = {.m_RequiresTranslation: false, .m_Err: pErrStrExtra};
1124 if(std::find(first: m_Error.m_vErrors.begin(), last: m_Error.m_vErrors.end(), val: ErrExtra) == m_Error.m_vErrors.end())
1125 m_Error.m_vErrors.emplace_back(args&: ErrExtra);
1126 }
1127 if(m_CanAssert)
1128 {
1129 if(pErrStrExtra != nullptr)
1130 dbg_msg(sys: "vulkan", fmt: "vulkan error: %s: %s", pErr, pErrStrExtra);
1131 else
1132 dbg_msg(sys: "vulkan", fmt: "vulkan error: %s", pErr);
1133 m_HasError = true;
1134 m_Error.m_ErrorType = ErrType;
1135 }
1136 else
1137 {
1138 Lock.unlock();
1139 // during initialization vulkan should not throw any errors but warnings instead
1140 // since most code in the swapchain is shared with runtime code, add this extra code path
1141 SetWarning(WarningType: EGfxWarningType::GFX_WARNING_TYPE_INIT_FAILED, pWarning: pErr);
1142 }
1143 }
1144
	// Prepends a warning message (deduplicated) in front of the already collected warnings.
	void SetWarningPreMsg(const char *pWarningPre)
	{
		std::unique_lock<std::mutex> Lock(m_ErrWarnMutex);
		if(std::find(first: m_Warning.m_vWarnings.begin(), last: m_Warning.m_vWarnings.end(), val: pWarningPre) == m_Warning.m_vWarnings.end())
			m_Warning.m_vWarnings.emplace(position: m_Warning.m_vWarnings.begin(), args&: pWarningPre);
	}
1151
	// Records a warning (deduplicated) and remembers the latest warning type.
	void SetWarning(EGfxWarningType WarningType, const char *pWarning)
	{
		std::unique_lock<std::mutex> Lock(m_ErrWarnMutex);
		dbg_msg(sys: "vulkan", fmt: "vulkan warning: %s", pWarning);
		if(std::find(first: m_Warning.m_vWarnings.begin(), last: m_Warning.m_vWarnings.end(), val: pWarning) == m_Warning.m_vWarnings.end())
			m_Warning.m_vWarnings.emplace_back(args&: pWarning);
		m_Warning.m_WarningType = WarningType;
	}
1160
1161 const char *CheckVulkanCriticalError(VkResult CallResult)
1162 {
1163 const char *pCriticalError = nullptr;
1164 switch(CallResult)
1165 {
1166 case VK_ERROR_OUT_OF_HOST_MEMORY:
1167 pCriticalError = "host ran out of memory";
1168 dbg_msg(sys: "vulkan", fmt: "%s", pCriticalError);
1169 break;
1170 case VK_ERROR_OUT_OF_DEVICE_MEMORY:
1171 pCriticalError = "device ran out of memory";
1172 dbg_msg(sys: "vulkan", fmt: "%s", pCriticalError);
1173 break;
1174 case VK_ERROR_DEVICE_LOST:
1175 pCriticalError = "device lost";
1176 dbg_msg(sys: "vulkan", fmt: "%s", pCriticalError);
1177 break;
1178 case VK_ERROR_OUT_OF_DATE_KHR:
1179 {
1180 if(IsVerbose())
1181 {
1182 dbg_msg(sys: "vulkan", fmt: "queueing swap chain recreation because the current is out of date");
1183 }
1184 m_RecreateSwapChain = true;
1185 break;
1186 }
1187 case VK_ERROR_SURFACE_LOST_KHR:
1188 dbg_msg(sys: "vulkan", fmt: "surface lost");
1189 break;
1190 /*case VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT:
1191 dbg_msg("vulkan", "fullscreen exclusive mode lost");
1192 break;*/
1193 case VK_ERROR_INCOMPATIBLE_DRIVER:
1194 pCriticalError = "no compatible driver found. Vulkan 1.1 is required.";
1195 dbg_msg(sys: "vulkan", fmt: "%s", pCriticalError);
1196 break;
1197 case VK_ERROR_INITIALIZATION_FAILED:
1198 pCriticalError = "initialization failed for unknown reason.";
1199 dbg_msg(sys: "vulkan", fmt: "%s", pCriticalError);
1200 break;
1201 case VK_ERROR_LAYER_NOT_PRESENT:
1202 SetWarning(WarningType: EGfxWarningType::GFX_WARNING_MISSING_EXTENSION, pWarning: "One Vulkan layer was not present. (try to disable them)");
1203 break;
1204 case VK_ERROR_EXTENSION_NOT_PRESENT:
1205 SetWarning(WarningType: EGfxWarningType::GFX_WARNING_MISSING_EXTENSION, pWarning: "One Vulkan extension was not present. (try to disable them)");
1206 break;
1207 case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
1208 dbg_msg(sys: "vulkan", fmt: "native window in use");
1209 break;
1210 case VK_SUCCESS:
1211 break;
1212 case VK_SUBOPTIMAL_KHR:
1213 if(IsVerbose())
1214 {
1215 dbg_msg(sys: "vulkan", fmt: "queueing swap chain recreation because the current is sub optimal");
1216 }
1217 m_RecreateSwapChain = true;
1218 break;
1219 default:
1220 m_ErrorHelper = "unknown error: ";
1221 m_ErrorHelper.append(str: std::to_string(val: CallResult));
1222 pCriticalError = m_ErrorHelper.c_str();
1223 break;
1224 }
1225
1226 return pCriticalError;
1227 }
1228
	// Cleanup entry point used by the base class when an error occurred.
	void ErroneousCleanup() override
	{
		CleanupVulkanSDL();
	}
1233
1234 /************************
1235 * COMMAND CALLBACKS
1236 ************************/
1237
1238 size_t CommandBufferCMDOff(CCommandBuffer::ECommandBufferCMD CommandBufferCMD)
1239 {
1240 return (size_t)CommandBufferCMD - CCommandBuffer::CMD_FIRST;
1241 }
1242
	// Fills m_aCommandCallbacks with one dispatch entry per command buffer command.
	// Render commands additionally get a fill-execute-buffer callback; commands
	// without a registered handler fall back to a no-op callback at the end.
	void RegisterCommands()
	{
		// texture create/destroy/update commands
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TEXTURE_CREATE)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Texture_Create(pCommand: static_cast<const CCommandBuffer::SCommand_Texture_Create *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TEXTURE_DESTROY)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Texture_Destroy(pCommand: static_cast<const CCommandBuffer::SCommand_Texture_Destroy *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TEXT_TEXTURES_CREATE)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_TextTextures_Create(pCommand: static_cast<const CCommandBuffer::SCommand_TextTextures_Create *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TEXT_TEXTURES_DESTROY)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_TextTextures_Destroy(pCommand: static_cast<const CCommandBuffer::SCommand_TextTextures_Destroy *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TEXT_TEXTURE_UPDATE)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_TextTexture_Update(pCommand: static_cast<const CCommandBuffer::SCommand_TextTexture_Update *>(pBaseCommand)); }};

		// basic render commands
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_CLEAR)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_Clear_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_Clear *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Clear(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_Clear *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_Render_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_Render *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Render(pCommand: static_cast<const CCommandBuffer::SCommand_Render *>(pBaseCommand), ExecBuffer); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_TEX3D)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderTex3D_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderTex3D *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderTex3D(pCommand: static_cast<const CCommandBuffer::SCommand_RenderTex3D *>(pBaseCommand), ExecBuffer); }};

		// buffer object commands
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_CREATE_BUFFER_OBJECT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_CreateBufferObject(pCommand: static_cast<const CCommandBuffer::SCommand_CreateBufferObject *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RECREATE_BUFFER_OBJECT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RecreateBufferObject(pCommand: static_cast<const CCommandBuffer::SCommand_RecreateBufferObject *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_UPDATE_BUFFER_OBJECT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_UpdateBufferObject(pCommand: static_cast<const CCommandBuffer::SCommand_UpdateBufferObject *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_COPY_BUFFER_OBJECT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_CopyBufferObject(pCommand: static_cast<const CCommandBuffer::SCommand_CopyBufferObject *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_DELETE_BUFFER_OBJECT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_DeleteBufferObject(pCommand: static_cast<const CCommandBuffer::SCommand_DeleteBufferObject *>(pBaseCommand)); }};

		// buffer container commands
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_CREATE_BUFFER_CONTAINER)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_CreateBufferContainer(pCommand: static_cast<const CCommandBuffer::SCommand_CreateBufferContainer *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_DELETE_BUFFER_CONTAINER)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_DeleteBufferContainer(pCommand: static_cast<const CCommandBuffer::SCommand_DeleteBufferContainer *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_UPDATE_BUFFER_CONTAINER)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_UpdateBufferContainer(pCommand: static_cast<const CCommandBuffer::SCommand_UpdateBufferContainer *>(pBaseCommand)); }};

		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_INDICES_REQUIRED_NUM_NOTIFY)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_IndicesRequiredNumNotify(pCommand: static_cast<const CCommandBuffer::SCommand_IndicesRequiredNumNotify *>(pBaseCommand)); }};

		// specialized render commands (tile layers, quads, text, quad containers)
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_TILE_LAYER)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderTileLayer_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderTileLayer *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderTileLayer(pCommand: static_cast<const CCommandBuffer::SCommand_RenderTileLayer *>(pBaseCommand), ExecBuffer); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_BORDER_TILE)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderBorderTile_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderBorderTile *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderBorderTile(pCommand: static_cast<const CCommandBuffer::SCommand_RenderBorderTile *>(pBaseCommand), ExecBuffer); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_QUAD_LAYER)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderQuadLayer_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadLayer *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderQuadLayer(pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadLayer *>(pBaseCommand), ExecBuffer, Grouped: false); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_QUAD_LAYER_GROUPED)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderQuadLayer_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadLayer *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderQuadLayer(pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadLayer *>(pBaseCommand), ExecBuffer, Grouped: true); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_TEXT)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderText_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderText *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderText(pCommand: static_cast<const CCommandBuffer::SCommand_RenderText *>(pBaseCommand), ExecBuffer); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_QUAD_CONTAINER)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderQuadContainer_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainer *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderQuadContainer(pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainer *>(pBaseCommand), ExecBuffer); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_QUAD_CONTAINER_EX)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderQuadContainerEx_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainerEx *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderQuadContainerEx(pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainerEx *>(pBaseCommand), ExecBuffer); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_QUAD_CONTAINER_SPRITE_MULTIPLE)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderQuadContainerAsSpriteMultiple_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainerAsSpriteMultiple *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderQuadContainerAsSpriteMultiple(pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainerAsSpriteMultiple *>(pBaseCommand), ExecBuffer); }};

		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_SWAP)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Swap(pCommand: static_cast<const CCommandBuffer::SCommand_Swap *>(pBaseCommand)); }};

		// misc commands (vsync, multisampling, read pixel, screenshot)
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_VSYNC)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_VSync(pCommand: static_cast<const CCommandBuffer::SCommand_VSync *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_MULTISAMPLING)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_MultiSampling(pCommand: static_cast<const CCommandBuffer::SCommand_MultiSampling *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TRY_SWAP_AND_READ_PIXEL)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_ReadPixel(pCommand: static_cast<const CCommandBuffer::SCommand_TrySwapAndReadPixel *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TRY_SWAP_AND_SCREENSHOT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Screenshot(pCommand: static_cast<const CCommandBuffer::SCommand_TrySwapAndScreenshot *>(pBaseCommand)); }};

		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_UPDATE_VIEWPORT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_Update_Viewport_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_Update_Viewport *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Update_Viewport(pCommand: static_cast<const CCommandBuffer::SCommand_Update_Viewport *>(pBaseCommand)); }};

		// window notifications are not considered handled here (m_CMDIsHandled: false)
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_WINDOW_CREATE_NTF)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_WindowCreateNtf(pCommand: static_cast<const CCommandBuffer::SCommand_WindowCreateNtf *>(pBaseCommand)); }, .m_CMDIsHandled: false};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_WINDOW_DESTROY_NTF)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_WindowDestroyNtf(pCommand: static_cast<const CCommandBuffer::SCommand_WindowDestroyNtf *>(pBaseCommand)); }, .m_CMDIsHandled: false};

		// every command without a registered callback gets a no-op handler
		for(auto &Callback : m_aCommandCallbacks)
		{
			if(!(bool)Callback.m_CommandCB)
				Callback = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return true; }};
		}
	}
1294
1295 /*****************************
1296 * VIDEO AND SCREENSHOT HELPER
1297 ******************************/
1298
1299 [[nodiscard]] bool PreparePresentedImageDataImage(uint8_t *&pResImageData, uint32_t Width, uint32_t Height)
1300 {
1301 bool NeedsNewImg = Width != m_GetPresentedImgDataHelperWidth || Height != m_GetPresentedImgDataHelperHeight;
1302 if(m_GetPresentedImgDataHelperImage == VK_NULL_HANDLE || NeedsNewImg)
1303 {
1304 if(m_GetPresentedImgDataHelperImage != VK_NULL_HANDLE)
1305 {
1306 DeletePresentedImageDataImage();
1307 }
1308 m_GetPresentedImgDataHelperWidth = Width;
1309 m_GetPresentedImgDataHelperHeight = Height;
1310
1311 VkImageCreateInfo ImageInfo{};
1312 ImageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
1313 ImageInfo.imageType = VK_IMAGE_TYPE_2D;
1314 ImageInfo.extent.width = Width;
1315 ImageInfo.extent.height = Height;
1316 ImageInfo.extent.depth = 1;
1317 ImageInfo.mipLevels = 1;
1318 ImageInfo.arrayLayers = 1;
1319 ImageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1320 ImageInfo.tiling = VK_IMAGE_TILING_LINEAR;
1321 ImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
1322 ImageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1323 ImageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1324 ImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
1325
1326 vkCreateImage(device: m_VKDevice, pCreateInfo: &ImageInfo, pAllocator: nullptr, pImage: &m_GetPresentedImgDataHelperImage);
1327 // Create memory to back up the image
1328 VkMemoryRequirements MemRequirements;
1329 vkGetImageMemoryRequirements(device: m_VKDevice, image: m_GetPresentedImgDataHelperImage, pMemoryRequirements: &MemRequirements);
1330
1331 VkMemoryAllocateInfo MemAllocInfo{};
1332 MemAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1333 MemAllocInfo.allocationSize = MemRequirements.size;
1334 MemAllocInfo.memoryTypeIndex = FindMemoryType(PhyDevice: m_VKGPU, TypeFilter: MemRequirements.memoryTypeBits, Properties: VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT);
1335
1336 vkAllocateMemory(device: m_VKDevice, pAllocateInfo: &MemAllocInfo, pAllocator: nullptr, pMemory: &m_GetPresentedImgDataHelperMem.m_Mem);
1337 vkBindImageMemory(device: m_VKDevice, image: m_GetPresentedImgDataHelperImage, memory: m_GetPresentedImgDataHelperMem.m_Mem, memoryOffset: 0);
1338
1339 if(!ImageBarrier(Image: m_GetPresentedImgDataHelperImage, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format: VK_FORMAT_R8G8B8A8_UNORM, OldLayout: VK_IMAGE_LAYOUT_UNDEFINED, NewLayout: VK_IMAGE_LAYOUT_GENERAL))
1340 return false;
1341
1342 VkImageSubresource SubResource{.aspectMask: VK_IMAGE_ASPECT_COLOR_BIT, .mipLevel: 0, .arrayLayer: 0};
1343 VkSubresourceLayout SubResourceLayout;
1344 vkGetImageSubresourceLayout(device: m_VKDevice, image: m_GetPresentedImgDataHelperImage, pSubresource: &SubResource, pLayout: &SubResourceLayout);
1345
1346 if(vkMapMemory(device: m_VKDevice, memory: m_GetPresentedImgDataHelperMem.m_Mem, offset: 0, VK_WHOLE_SIZE, flags: 0, ppData: (void **)&m_pGetPresentedImgDataHelperMappedMemory) != VK_SUCCESS)
1347 return false;
1348 m_GetPresentedImgDataHelperMappedLayoutOffset = SubResourceLayout.offset;
1349 m_GetPresentedImgDataHelperMappedLayoutPitch = SubResourceLayout.rowPitch;
1350 m_pGetPresentedImgDataHelperMappedMemory += m_GetPresentedImgDataHelperMappedLayoutOffset;
1351
1352 VkFenceCreateInfo FenceInfo{};
1353 FenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
1354 FenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
1355 vkCreateFence(device: m_VKDevice, pCreateInfo: &FenceInfo, pAllocator: nullptr, pFence: &m_GetPresentedImgDataHelperFence);
1356 }
1357 pResImageData = m_pGetPresentedImgDataHelperMappedMemory;
1358 return true;
1359 }
1360
1361 void DeletePresentedImageDataImage()
1362 {
1363 if(m_GetPresentedImgDataHelperImage != VK_NULL_HANDLE)
1364 {
1365 vkDestroyFence(device: m_VKDevice, fence: m_GetPresentedImgDataHelperFence, pAllocator: nullptr);
1366
1367 m_GetPresentedImgDataHelperFence = VK_NULL_HANDLE;
1368
1369 vkDestroyImage(device: m_VKDevice, image: m_GetPresentedImgDataHelperImage, pAllocator: nullptr);
1370 vkUnmapMemory(device: m_VKDevice, memory: m_GetPresentedImgDataHelperMem.m_Mem);
1371 vkFreeMemory(device: m_VKDevice, memory: m_GetPresentedImgDataHelperMem.m_Mem, pAllocator: nullptr);
1372
1373 m_GetPresentedImgDataHelperImage = VK_NULL_HANDLE;
1374 m_GetPresentedImgDataHelperMem = {};
1375 m_pGetPresentedImgDataHelperMappedMemory = nullptr;
1376
1377 m_GetPresentedImgDataHelperWidth = 0;
1378 m_GetPresentedImgDataHelperHeight = 0;
1379 }
1380 }
1381
1382 [[nodiscard]] bool GetPresentedImageDataImpl(uint32_t &Width, uint32_t &Height, CImageInfo::EImageFormat &Format, std::vector<uint8_t> &vDstData, bool ResetAlpha, std::optional<ivec2> PixelOffset)
1383 {
1384 bool IsB8G8R8A8 = m_VKSurfFormat.format == VK_FORMAT_B8G8R8A8_UNORM;
1385 bool UsesRGBALikeFormat = m_VKSurfFormat.format == VK_FORMAT_R8G8B8A8_UNORM || IsB8G8R8A8;
1386 if(UsesRGBALikeFormat && m_LastPresentedSwapChainImageIndex != std::numeric_limits<decltype(m_LastPresentedSwapChainImageIndex)>::max())
1387 {
1388 auto Viewport = m_VKSwapImgAndViewportExtent.GetPresentedImageViewport();
1389 VkOffset3D SrcOffset;
1390 if(PixelOffset.has_value())
1391 {
1392 SrcOffset.x = PixelOffset.value().x;
1393 SrcOffset.y = PixelOffset.value().y;
1394 Width = 1;
1395 Height = 1;
1396 }
1397 else
1398 {
1399 SrcOffset.x = 0;
1400 SrcOffset.y = 0;
1401 Width = Viewport.width;
1402 Height = Viewport.height;
1403 }
1404 SrcOffset.z = 0;
1405 Format = CImageInfo::FORMAT_RGBA;
1406
1407 const size_t ImageTotalSize = (size_t)Width * Height * CImageInfo::PixelSize(Format);
1408
1409 uint8_t *pResImageData;
1410 if(!PreparePresentedImageDataImage(pResImageData, Width, Height))
1411 return false;
1412
1413 VkCommandBuffer *pCommandBuffer;
1414 if(!GetMemoryCommandBuffer(pMemCommandBuffer&: pCommandBuffer))
1415 return false;
1416 VkCommandBuffer &CommandBuffer = *pCommandBuffer;
1417
1418 auto &SwapImg = m_vSwapChainImages[m_LastPresentedSwapChainImageIndex];
1419
1420 if(!ImageBarrier(Image: m_GetPresentedImgDataHelperImage, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format: VK_FORMAT_R8G8B8A8_UNORM, OldLayout: VK_IMAGE_LAYOUT_GENERAL, NewLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL))
1421 return false;
1422 if(!ImageBarrier(Image: SwapImg, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format: m_VKSurfFormat.format, OldLayout: VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, NewLayout: VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL))
1423 return false;
1424
1425 // If source and destination support blit we'll blit as this also does automatic format conversion (e.g. from BGR to RGB)
1426 if(m_OptimalSwapChainImageBlitting && m_LinearRGBAImageBlitting)
1427 {
1428 VkOffset3D BlitSize;
1429 BlitSize.x = Width;
1430 BlitSize.y = Height;
1431 BlitSize.z = 1;
1432
1433 VkImageBlit ImageBlitRegion{};
1434 ImageBlitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1435 ImageBlitRegion.srcSubresource.layerCount = 1;
1436 ImageBlitRegion.srcOffsets[0] = SrcOffset;
1437 ImageBlitRegion.srcOffsets[1] = {.x: SrcOffset.x + BlitSize.x, .y: SrcOffset.y + BlitSize.y, .z: SrcOffset.z + BlitSize.z};
1438 ImageBlitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1439 ImageBlitRegion.dstSubresource.layerCount = 1;
1440 ImageBlitRegion.dstOffsets[1] = BlitSize;
1441
1442 // Issue the blit command
1443 vkCmdBlitImage(commandBuffer: CommandBuffer, srcImage: SwapImg, srcImageLayout: VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1444 dstImage: m_GetPresentedImgDataHelperImage, dstImageLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1445 regionCount: 1, pRegions: &ImageBlitRegion, filter: VK_FILTER_NEAREST);
1446
1447 // transformed to RGBA
1448 IsB8G8R8A8 = false;
1449 }
1450 else
1451 {
1452 // Otherwise use image copy (requires us to manually flip components)
1453 VkImageCopy ImageCopyRegion{};
1454 ImageCopyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1455 ImageCopyRegion.srcSubresource.layerCount = 1;
1456 ImageCopyRegion.srcOffset = SrcOffset;
1457 ImageCopyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1458 ImageCopyRegion.dstSubresource.layerCount = 1;
1459 ImageCopyRegion.extent.width = Width;
1460 ImageCopyRegion.extent.height = Height;
1461 ImageCopyRegion.extent.depth = 1;
1462
1463 // Issue the copy command
1464 vkCmdCopyImage(commandBuffer: CommandBuffer, srcImage: SwapImg, srcImageLayout: VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1465 dstImage: m_GetPresentedImgDataHelperImage, dstImageLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1466 regionCount: 1, pRegions: &ImageCopyRegion);
1467 }
1468
1469 if(!ImageBarrier(Image: m_GetPresentedImgDataHelperImage, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format: VK_FORMAT_R8G8B8A8_UNORM, OldLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, NewLayout: VK_IMAGE_LAYOUT_GENERAL))
1470 return false;
1471 if(!ImageBarrier(Image: SwapImg, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format: m_VKSurfFormat.format, OldLayout: VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, NewLayout: VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
1472 return false;
1473
1474 vkEndCommandBuffer(commandBuffer: CommandBuffer);
1475 m_vUsedMemoryCommandBuffer[m_CurImageIndex] = false;
1476
1477 VkSubmitInfo SubmitInfo{};
1478 SubmitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
1479 SubmitInfo.commandBufferCount = 1;
1480 SubmitInfo.pCommandBuffers = &CommandBuffer;
1481
1482 vkResetFences(device: m_VKDevice, fenceCount: 1, pFences: &m_GetPresentedImgDataHelperFence);
1483 vkQueueSubmit(queue: m_VKGraphicsQueue, submitCount: 1, pSubmits: &SubmitInfo, fence: m_GetPresentedImgDataHelperFence);
1484 vkWaitForFences(device: m_VKDevice, fenceCount: 1, pFences: &m_GetPresentedImgDataHelperFence, VK_TRUE, timeout: std::numeric_limits<uint64_t>::max());
1485
1486 VkMappedMemoryRange MemRange{};
1487 MemRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
1488 MemRange.memory = m_GetPresentedImgDataHelperMem.m_Mem;
1489 MemRange.offset = m_GetPresentedImgDataHelperMappedLayoutOffset;
1490 MemRange.size = VK_WHOLE_SIZE;
1491 vkInvalidateMappedMemoryRanges(device: m_VKDevice, memoryRangeCount: 1, pMemoryRanges: &MemRange);
1492
1493 size_t RealFullImageSize = maximum(a: ImageTotalSize, b: (size_t)(Height * m_GetPresentedImgDataHelperMappedLayoutPitch));
1494 size_t ExtraRowSize = Width * 4;
1495 if(vDstData.size() < RealFullImageSize + ExtraRowSize)
1496 vDstData.resize(new_size: RealFullImageSize + ExtraRowSize);
1497
1498 mem_copy(dest: vDstData.data(), source: pResImageData, size: RealFullImageSize);
1499
1500 // pack image data together without any offset that the driver might require
1501 if(Width * 4 < m_GetPresentedImgDataHelperMappedLayoutPitch)
1502 {
1503 for(uint32_t Y = 0; Y < Height; ++Y)
1504 {
1505 size_t OffsetImagePacked = (Y * Width * 4);
1506 size_t OffsetImageUnpacked = (Y * m_GetPresentedImgDataHelperMappedLayoutPitch);
1507 mem_copy(dest: vDstData.data() + RealFullImageSize, source: vDstData.data() + OffsetImageUnpacked, size: Width * 4);
1508 mem_copy(dest: vDstData.data() + OffsetImagePacked, source: vDstData.data() + RealFullImageSize, size: Width * 4);
1509 }
1510 }
1511
1512 if(IsB8G8R8A8 || ResetAlpha)
1513 {
1514 // swizzle
1515 for(uint32_t Y = 0; Y < Height; ++Y)
1516 {
1517 for(uint32_t X = 0; X < Width; ++X)
1518 {
1519 size_t ImgOff = (Y * Width * 4) + (X * 4);
1520 if(IsB8G8R8A8)
1521 {
1522 std::swap(a&: vDstData[ImgOff], b&: vDstData[ImgOff + 2]);
1523 }
1524 vDstData[ImgOff + 3] = 255;
1525 }
1526 }
1527 }
1528
1529 return true;
1530 }
1531 else
1532 {
1533 if(!UsesRGBALikeFormat)
1534 {
1535 dbg_msg(sys: "vulkan", fmt: "swap chain image was not in a RGBA like format.");
1536 }
1537 else
1538 {
1539 dbg_msg(sys: "vulkan", fmt: "swap chain image was not ready to be copied.");
1540 }
1541 return false;
1542 }
1543 }
1544
1545 [[nodiscard]] bool GetPresentedImageData(uint32_t &Width, uint32_t &Height, CImageInfo::EImageFormat &Format, std::vector<uint8_t> &vDstData) override
1546 {
1547 return GetPresentedImageDataImpl(Width, Height, Format, vDstData, ResetAlpha: false, PixelOffset: {});
1548 }
1549
1550 /************************
1551 * MEMORY MANAGEMENT
1552 ************************/
1553
1554 [[nodiscard]] bool AllocateVulkanMemory(const VkMemoryAllocateInfo *pAllocateInfo, VkDeviceMemory *pMemory)
1555 {
1556 VkResult Res = vkAllocateMemory(device: m_VKDevice, pAllocateInfo, pAllocator: nullptr, pMemory);
1557 if(Res != VK_SUCCESS)
1558 {
1559 dbg_msg(sys: "vulkan", fmt: "vulkan memory allocation failed, trying to recover.");
1560 if(Res == VK_ERROR_OUT_OF_HOST_MEMORY || Res == VK_ERROR_OUT_OF_DEVICE_MEMORY)
1561 {
1562 // aggressively try to get more memory
1563 vkDeviceWaitIdle(device: m_VKDevice);
1564 for(size_t i = 0; i < m_SwapChainImageCount + 1; ++i)
1565 {
1566 if(!NextFrame())
1567 return false;
1568 }
1569 Res = vkAllocateMemory(device: m_VKDevice, pAllocateInfo, pAllocator: nullptr, pMemory);
1570 }
1571 if(Res != VK_SUCCESS)
1572 {
1573 dbg_msg(sys: "vulkan", fmt: "vulkan memory allocation failed.");
1574 return false;
1575 }
1576 }
1577 return true;
1578 }
1579
1580 [[nodiscard]] bool GetBufferImpl(VkDeviceSize RequiredSize, EMemoryBlockUsage MemUsage, VkBuffer &Buffer, SDeviceMemoryBlock &BufferMemory, VkBufferUsageFlags BufferUsage, VkMemoryPropertyFlags BufferProperties)
1581 {
1582 return CreateBuffer(BufferSize: RequiredSize, MemUsage, BufferUsage, MemoryProperties: BufferProperties, VKBuffer&: Buffer, VKBufferMemory&: BufferMemory);
1583 }
1584
// Acquires a buffer block of RequiredSize bytes (aligned to TargetAlignment).
// Requests smaller than MemoryBlockSize are sub-allocated from cached heaps
// of MemoryBlockSize * BlockCount bytes; larger requests get a dedicated
// buffer of their own (marked non-cached). If RequiresMapping is set, the
// memory is persistently mapped and pBufferData is copied into the block;
// otherwise pBufferData is ignored and no mapping is created.
// Returns false on allocation or mapping failure (SetError may be raised).
template<size_t Id,
	int64_t MemoryBlockSize, size_t BlockCount,
	bool RequiresMapping>
[[nodiscard]] bool GetBufferBlockImpl(SMemoryBlock<Id> &RetBlock, SMemoryBlockCache<Id> &MemoryCache, VkBufferUsageFlags BufferUsage, VkMemoryPropertyFlags BufferProperties, const void *pBufferData, VkDeviceSize RequiredSize, VkDeviceSize TargetAlignment)
{
	bool Res = true;

	auto &&CreateCacheBlock = [&]() -> bool {
		bool FoundAllocation = false;
		SMemoryHeap::SMemoryHeapQueueElement AllocatedMem;
		SDeviceMemoryBlock TmpBufferMemory;
		typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap *pCacheHeap = nullptr;
		// try to sub-allocate from one of the existing cache heaps first
		auto &Heaps = MemoryCache.m_MemoryCaches.m_vpMemoryHeaps;
		for(size_t i = 0; i < Heaps.size(); ++i)
		{
			auto *pHeap = Heaps[i];
			if(pHeap->m_Heap.Allocate(RequiredSize, TargetAlignment, AllocatedMem))
			{
				TmpBufferMemory = pHeap->m_BufferMem;
				FoundAllocation = true;
				pCacheHeap = pHeap;
				break;
			}
		}
		if(!FoundAllocation)
		{
			// no existing heap had room: create a fresh heap backed by one big buffer
			typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap *pNewHeap = new typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap();

			VkBuffer TmpBuffer;
			if(!GetBufferImpl(RequiredSize: MemoryBlockSize * BlockCount, MemUsage: RequiresMapping ? MEMORY_BLOCK_USAGE_STAGING : MEMORY_BLOCK_USAGE_BUFFER, Buffer&: TmpBuffer, BufferMemory&: TmpBufferMemory, BufferUsage, BufferProperties))
			{
				delete pNewHeap;
				return false;
			}

			void *pMapData = nullptr;

			if(RequiresMapping)
			{
				// persistently map the whole heap; blocks hand out offsets into it
				if(vkMapMemory(device: m_VKDevice, memory: TmpBufferMemory.m_Mem, offset: 0, VK_WHOLE_SIZE, flags: 0, ppData: &pMapData) != VK_SUCCESS)
				{
					// NOTE(review): TmpBuffer/TmpBufferMemory are not destroyed on
					// this path — looks like a leak of the freshly created buffer;
					// confirm whether device teardown covers it.
					SetError(ErrType: RequiresMapping ? EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_STAGING : EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Failed to map buffer block memory.");
					delete pNewHeap;
					return false;
				}
			}

			pNewHeap->m_Buffer = TmpBuffer;

			pNewHeap->m_BufferMem = TmpBufferMemory;
			pNewHeap->m_pMappedBuffer = pMapData;

			pCacheHeap = pNewHeap;
			Heaps.emplace_back(pNewHeap);
			Heaps.back()->m_Heap.Init(MemoryBlockSize * BlockCount, 0);
			// a fresh heap must be able to satisfy the request (RequiredSize < MemoryBlockSize)
			if(!Heaps.back()->m_Heap.Allocate(RequiredSize, TargetAlignment, AllocatedMem))
			{
				SetError(ErrType: RequiresMapping ? EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_STAGING : EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Heap allocation failed directly after creating fresh heap.");
				return false;
			}
		}

		// fill out the returned block as a cached sub-allocation
		RetBlock.m_Buffer = pCacheHeap->m_Buffer;
		RetBlock.m_BufferMem = TmpBufferMemory;
		if(RequiresMapping)
			RetBlock.m_pMappedBuffer = ((uint8_t *)pCacheHeap->m_pMappedBuffer) + AllocatedMem.m_OffsetToAlign;
		else
			RetBlock.m_pMappedBuffer = nullptr;
		RetBlock.m_IsCached = true;
		RetBlock.m_pHeap = &pCacheHeap->m_Heap;
		RetBlock.m_HeapData = AllocatedMem;
		RetBlock.m_UsedSize = RequiredSize;

		if(RequiresMapping)
			mem_copy(RetBlock.m_pMappedBuffer, pBufferData, RequiredSize);

		return true;
	};

	if(RequiredSize < (VkDeviceSize)MemoryBlockSize)
	{
		Res = CreateCacheBlock();
	}
	else
	{
		// too big for the cache: dedicated buffer, freed via the delayed cleanup list
		VkBuffer TmpBuffer;
		SDeviceMemoryBlock TmpBufferMemory;
		if(!GetBufferImpl(RequiredSize, MemUsage: RequiresMapping ? MEMORY_BLOCK_USAGE_STAGING : MEMORY_BLOCK_USAGE_BUFFER, Buffer&: TmpBuffer, BufferMemory&: TmpBufferMemory, BufferUsage, BufferProperties))
			return false;

		void *pMapData = nullptr;
		if(RequiresMapping)
		{
			if(vkMapMemory(device: m_VKDevice, memory: TmpBufferMemory.m_Mem, offset: 0, VK_WHOLE_SIZE, flags: 0, ppData: &pMapData) != VK_SUCCESS)
				return false;
			mem_copy(dest: pMapData, source: pBufferData, size: static_cast<size_t>(RequiredSize));
		}

		RetBlock.m_Buffer = TmpBuffer;
		RetBlock.m_BufferMem = TmpBufferMemory;
		RetBlock.m_pMappedBuffer = pMapData;
		RetBlock.m_pHeap = nullptr;
		RetBlock.m_IsCached = false;
		RetBlock.m_HeapData.m_OffsetToAlign = 0;
		RetBlock.m_HeapData.m_AllocationSize = RequiredSize;
		RetBlock.m_UsedSize = RequiredSize;
	}

	return Res;
}
1695
1696 [[nodiscard]] bool GetStagingBuffer(SMemoryBlock<STAGING_BUFFER_CACHE_ID> &ResBlock, const void *pBufferData, VkDeviceSize RequiredSize)
1697 {
1698 return GetBufferBlockImpl<STAGING_BUFFER_CACHE_ID, 8 * 1024 * 1024, 3, true>(RetBlock&: ResBlock, MemoryCache&: m_StagingBufferCache, BufferUsage: VK_BUFFER_USAGE_TRANSFER_SRC_BIT, BufferProperties: VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT, pBufferData, RequiredSize, TargetAlignment: maximum<VkDeviceSize>(a: m_NonCoherentMemAlignment, b: 16));
1699 }
1700
1701 [[nodiscard]] bool GetStagingBufferImage(SMemoryBlock<STAGING_BUFFER_IMAGE_CACHE_ID> &ResBlock, const void *pBufferData, VkDeviceSize RequiredSize)
1702 {
1703 return GetBufferBlockImpl<STAGING_BUFFER_IMAGE_CACHE_ID, 8 * 1024 * 1024, 3, true>(RetBlock&: ResBlock, MemoryCache&: m_StagingBufferCacheImage, BufferUsage: VK_BUFFER_USAGE_TRANSFER_SRC_BIT, BufferProperties: VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT, pBufferData, RequiredSize, TargetAlignment: maximum<VkDeviceSize>(a: m_OptimalImageCopyMemAlignment, b: maximum<VkDeviceSize>(a: m_NonCoherentMemAlignment, b: 16)));
1704 }
1705
// Records the mapped range of the given staging block into the list of
// not-yet-flushed ranges, so all staging writes can be flushed in one batch
// before the memory command buffer is submitted. The flushed size is rounded
// up to the non-coherent atom size; if that padded size would run past the
// end of the allocation, VK_WHOLE_SIZE is used instead.
template<size_t Id>
void PrepareStagingMemRange(SMemoryBlock<Id> &Block)
{
	VkMappedMemoryRange UploadRange{};
	UploadRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
	UploadRange.memory = Block.m_BufferMem.m_Mem;
	UploadRange.offset = Block.m_HeapData.m_OffsetToAlign;

	// round the size up to the next multiple of the non-coherent atom size
	auto AlignmentMod = ((VkDeviceSize)Block.m_HeapData.m_AllocationSize % m_NonCoherentMemAlignment);
	auto AlignmentReq = (m_NonCoherentMemAlignment - AlignmentMod);
	if(AlignmentMod == 0)
		AlignmentReq = 0;
	UploadRange.size = Block.m_HeapData.m_AllocationSize + AlignmentReq;

	// the padded range must not exceed the allocation
	if(UploadRange.offset + UploadRange.size > Block.m_BufferMem.m_Size)
		UploadRange.size = VK_WHOLE_SIZE;

	m_vNonFlushedStagingBufferRange.push_back(x: UploadRange);
}
1725
1726 void UploadAndFreeStagingMemBlock(SMemoryBlock<STAGING_BUFFER_CACHE_ID> &Block)
1727 {
1728 PrepareStagingMemRange(Block);
1729 if(!Block.m_IsCached)
1730 {
1731 m_vvFrameDelayedBufferCleanup[m_CurImageIndex].push_back(x: {.m_Buffer: Block.m_Buffer, .m_Mem: Block.m_BufferMem, .m_pMappedData: Block.m_pMappedBuffer});
1732 }
1733 else
1734 {
1735 m_StagingBufferCache.FreeMemBlock(Block, ImgIndex: m_CurImageIndex);
1736 }
1737 }
1738
1739 void UploadAndFreeStagingImageMemBlock(SMemoryBlock<STAGING_BUFFER_IMAGE_CACHE_ID> &Block)
1740 {
1741 PrepareStagingMemRange(Block);
1742 if(!Block.m_IsCached)
1743 {
1744 m_vvFrameDelayedBufferCleanup[m_CurImageIndex].push_back(x: {.m_Buffer: Block.m_Buffer, .m_Mem: Block.m_BufferMem, .m_pMappedData: Block.m_pMappedBuffer});
1745 }
1746 else
1747 {
1748 m_StagingBufferCacheImage.FreeMemBlock(Block, ImgIndex: m_CurImageIndex);
1749 }
1750 }
1751
1752 [[nodiscard]] bool GetVertexBuffer(SMemoryBlock<VERTEX_BUFFER_CACHE_ID> &ResBlock, VkDeviceSize RequiredSize)
1753 {
1754 return GetBufferBlockImpl<VERTEX_BUFFER_CACHE_ID, 8 * 1024 * 1024, 3, false>(RetBlock&: ResBlock, MemoryCache&: m_VertexBufferCache, BufferUsage: VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, BufferProperties: VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, pBufferData: nullptr, RequiredSize, TargetAlignment: 16);
1755 }
1756
1757 void FreeVertexMemBlock(SMemoryBlock<VERTEX_BUFFER_CACHE_ID> &Block)
1758 {
1759 if(!Block.m_IsCached)
1760 {
1761 m_vvFrameDelayedBufferCleanup[m_CurImageIndex].push_back(x: {.m_Buffer: Block.m_Buffer, .m_Mem: Block.m_BufferMem, .m_pMappedData: nullptr});
1762 }
1763 else
1764 {
1765 m_VertexBufferCache.FreeMemBlock(Block, ImgIndex: m_CurImageIndex);
1766 }
1767 }
1768
// Number of mip levels in a full mip chain down to 1x1x1 for the given
// extents: floor(log2(max extent)) + 1.
// Computed with integer shifts instead of std::floor(std::log2(...)) so the
// result is exact and independent of floating point rounding. Extents are
// expected to be >= 1 (a zero extent yields 1 here, whereas the previous
// float version was undefined for 0).
static size_t ImageMipLevelCount(size_t Width, size_t Height, size_t Depth)
{
	size_t MaxExtent = std::max(Width, std::max(Height, Depth));
	size_t MipLevels = 1;
	while(MaxExtent >>= 1)
		++MipLevels;
	return MipLevels;
}
1773
1774 static size_t ImageMipLevelCount(const VkExtent3D &ImgExtent)
1775 {
1776 return ImageMipLevelCount(Width: ImgExtent.width, Height: ImgExtent.height, Depth: ImgExtent.depth);
1777 }
1778
1779 // good approximation of 1024x1024 image with mipmaps
1780 static constexpr int64_t IMAGE_SIZE_1024X1024_APPROXIMATION = (1024 * 1024 * 4) * 2;
1781
1782 [[nodiscard]] bool GetImageMemoryImpl(VkDeviceSize RequiredSize, uint32_t RequiredMemoryTypeBits, SDeviceMemoryBlock &BufferMemory, VkMemoryPropertyFlags BufferProperties)
1783 {
1784 VkMemoryAllocateInfo MemAllocInfo{};
1785 MemAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1786 MemAllocInfo.allocationSize = RequiredSize;
1787 MemAllocInfo.memoryTypeIndex = FindMemoryType(PhyDevice: m_VKGPU, TypeFilter: RequiredMemoryTypeBits, Properties: BufferProperties);
1788
1789 BufferMemory.m_Size = RequiredSize;
1790 m_pTextureMemoryUsage->store(i: m_pTextureMemoryUsage->load(m: std::memory_order_relaxed) + RequiredSize, m: std::memory_order_relaxed);
1791
1792 if(IsVerbose())
1793 {
1794 VerboseAllocatedMemory(Size: RequiredSize, FrameImageIndex: m_CurImageIndex, MemUsage: MEMORY_BLOCK_USAGE_TEXTURE);
1795 }
1796
1797 if(!AllocateVulkanMemory(pAllocateInfo: &MemAllocInfo, pMemory: &BufferMemory.m_Mem))
1798 {
1799 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_IMAGE, pErr: "Allocation for image memory failed.");
1800 return false;
1801 }
1802
1803 BufferMemory.m_UsageType = MEMORY_BLOCK_USAGE_TEXTURE;
1804
1805 return true;
1806 }
1807
// Acquires device memory for an image of RequiredSize bytes (aligned to
// RequiredAlignment) from the cache keyed by Id. Requests smaller than
// MemoryBlockSize are sub-allocated from cached heaps of
// MemoryBlockSize * BlockCount bytes; larger requests get a dedicated
// allocation (marked non-cached). Image memory has no VkBuffer and no CPU
// mapping; RequiredMemoryTypeBits is stored on the block so it can be
// returned to the matching cache on free.
template<size_t Id,
	int64_t MemoryBlockSize, size_t BlockCount>
[[nodiscard]] bool GetImageMemoryBlockImpl(SMemoryImageBlock<Id> &RetBlock, SMemoryBlockCache<Id> &MemoryCache, VkMemoryPropertyFlags BufferProperties, VkDeviceSize RequiredSize, VkDeviceSize RequiredAlignment, uint32_t RequiredMemoryTypeBits)
{
	auto &&CreateCacheBlock = [&]() -> bool {
		bool FoundAllocation = false;
		SMemoryHeap::SMemoryHeapQueueElement AllocatedMem;
		SDeviceMemoryBlock TmpBufferMemory;
		typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap *pCacheHeap = nullptr;
		// try to sub-allocate from an existing cache heap first
		for(size_t i = 0; i < MemoryCache.m_MemoryCaches.m_vpMemoryHeaps.size(); ++i)
		{
			auto *pHeap = MemoryCache.m_MemoryCaches.m_vpMemoryHeaps[i];
			if(pHeap->m_Heap.Allocate(RequiredSize, RequiredAlignment, AllocatedMem))
			{
				TmpBufferMemory = pHeap->m_BufferMem;
				FoundAllocation = true;
				pCacheHeap = pHeap;
				break;
			}
		}
		if(!FoundAllocation)
		{
			// no heap had room: create a fresh heap backed by one big allocation
			typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap *pNewHeap = new typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap();

			if(!GetImageMemoryImpl(RequiredSize: MemoryBlockSize * BlockCount, RequiredMemoryTypeBits, BufferMemory&: TmpBufferMemory, BufferProperties))
			{
				delete pNewHeap;
				return false;
			}

			// image memory heaps have no buffer object and no CPU mapping
			pNewHeap->m_Buffer = VK_NULL_HANDLE;

			pNewHeap->m_BufferMem = TmpBufferMemory;
			pNewHeap->m_pMappedBuffer = nullptr;

			auto &Heaps = MemoryCache.m_MemoryCaches.m_vpMemoryHeaps;
			pCacheHeap = pNewHeap;
			Heaps.emplace_back(pNewHeap);
			Heaps.back()->m_Heap.Init(MemoryBlockSize * BlockCount, 0);
			// a fresh heap must be able to satisfy the request (RequiredSize < MemoryBlockSize)
			if(!Heaps.back()->m_Heap.Allocate(RequiredSize, RequiredAlignment, AllocatedMem))
			{
				dbg_assert_failed("Heap allocation failed directly after creating fresh heap for image");
			}
		}

		// fill out the returned block as a cached sub-allocation
		RetBlock.m_Buffer = VK_NULL_HANDLE;
		RetBlock.m_BufferMem = TmpBufferMemory;
		RetBlock.m_pMappedBuffer = nullptr;
		RetBlock.m_IsCached = true;
		RetBlock.m_pHeap = &pCacheHeap->m_Heap;
		RetBlock.m_HeapData = AllocatedMem;
		RetBlock.m_UsedSize = RequiredSize;

		return true;
	};

	if(RequiredSize < (VkDeviceSize)MemoryBlockSize)
	{
		if(!CreateCacheBlock())
			return false;
	}
	else
	{
		// too big for the cache: dedicated allocation, freed via delayed cleanup
		SDeviceMemoryBlock TmpBufferMemory;
		if(!GetImageMemoryImpl(RequiredSize, RequiredMemoryTypeBits, BufferMemory&: TmpBufferMemory, BufferProperties))
			return false;

		RetBlock.m_Buffer = VK_NULL_HANDLE;
		RetBlock.m_BufferMem = TmpBufferMemory;
		RetBlock.m_pMappedBuffer = nullptr;
		RetBlock.m_IsCached = false;
		RetBlock.m_pHeap = nullptr;
		RetBlock.m_HeapData.m_OffsetToAlign = 0;
		RetBlock.m_HeapData.m_AllocationSize = RequiredSize;
		RetBlock.m_UsedSize = RequiredSize;
	}

	// remember the memory type so FreeImageMemBlock finds the right cache
	RetBlock.m_ImageMemoryBits = RequiredMemoryTypeBits;

	return true;
}
1889
1890 [[nodiscard]] bool GetImageMemory(SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> &RetBlock, VkDeviceSize RequiredSize, VkDeviceSize RequiredAlignment, uint32_t RequiredMemoryTypeBits)
1891 {
1892 auto BufferCacheIterator = m_ImageBufferCaches.find(x: RequiredMemoryTypeBits);
1893 if(BufferCacheIterator == m_ImageBufferCaches.end())
1894 {
1895 BufferCacheIterator = m_ImageBufferCaches.insert(x: {RequiredMemoryTypeBits, {}}).first;
1896
1897 BufferCacheIterator->second.Init(SwapChainImageCount: m_SwapChainImageCount);
1898 }
1899 return GetImageMemoryBlockImpl<IMAGE_BUFFER_CACHE_ID, IMAGE_SIZE_1024X1024_APPROXIMATION, 2>(RetBlock, MemoryCache&: BufferCacheIterator->second, BufferProperties: VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, RequiredSize, RequiredAlignment, RequiredMemoryTypeBits);
1900 }
1901
1902 void FreeImageMemBlock(SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> &Block)
1903 {
1904 if(!Block.m_IsCached)
1905 {
1906 m_vvFrameDelayedBufferCleanup[m_CurImageIndex].push_back(x: {.m_Buffer: Block.m_Buffer, .m_Mem: Block.m_BufferMem, .m_pMappedData: nullptr});
1907 }
1908 else
1909 {
1910 m_ImageBufferCaches[Block.m_ImageMemoryBits].FreeMemBlock(Block, ImgIndex: m_CurImageIndex);
1911 }
1912 }
1913
// Builds VkMappedMemoryRange entries for every streamed buffer that was used
// in the current frame image (padding each range up to the non-coherent atom
// size) and, when FlushForRendering is set, flushes them in one call so the
// GPU sees the CPU-side writes. Per-buffer used sizes and the per-frame
// state of the streamed buffer are reset afterwards.
template<bool FlushForRendering, typename TName>
void UploadStreamedBuffer(SStreamMemory<TName> &StreamedBuffer)
{
	size_t RangeUpdateCount = 0;
	if(StreamedBuffer.IsUsed(m_CurImageIndex))
	{
		for(size_t i = 0; i < StreamedBuffer.GetUsedCount(m_CurImageIndex); ++i)
		{
			auto &BufferOfFrame = StreamedBuffer.GetBuffers(m_CurImageIndex)[i];
			auto &MemRange = StreamedBuffer.GetRanges(m_CurImageIndex)[RangeUpdateCount++];
			MemRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
			MemRange.memory = BufferOfFrame.m_BufferMem.m_Mem;
			MemRange.offset = BufferOfFrame.m_OffsetInBuffer;
			// pad the flushed size up to the next multiple of the non-coherent atom size
			auto AlignmentMod = ((VkDeviceSize)BufferOfFrame.m_UsedSize % m_NonCoherentMemAlignment);
			auto AlignmentReq = (m_NonCoherentMemAlignment - AlignmentMod);
			if(AlignmentMod == 0)
				AlignmentReq = 0;
			MemRange.size = BufferOfFrame.m_UsedSize + AlignmentReq;

			// the padded range must not exceed the allocation
			if(MemRange.offset + MemRange.size > BufferOfFrame.m_BufferMem.m_Size)
				MemRange.size = VK_WHOLE_SIZE;

			BufferOfFrame.m_UsedSize = 0;
		}
		if(RangeUpdateCount > 0 && FlushForRendering)
		{
			vkFlushMappedMemoryRanges(m_VKDevice, RangeUpdateCount, StreamedBuffer.GetRanges(m_CurImageIndex).data());
		}
	}
	StreamedBuffer.ResetFrame(m_CurImageIndex);
}
1945
1946 void CleanBufferPair(size_t ImageIndex, VkBuffer &Buffer, SDeviceMemoryBlock &BufferMem)
1947 {
1948 bool IsBuffer = Buffer != VK_NULL_HANDLE;
1949 if(IsBuffer)
1950 {
1951 vkDestroyBuffer(device: m_VKDevice, buffer: Buffer, pAllocator: nullptr);
1952
1953 Buffer = VK_NULL_HANDLE;
1954 }
1955 if(BufferMem.m_Mem != VK_NULL_HANDLE)
1956 {
1957 vkFreeMemory(device: m_VKDevice, memory: BufferMem.m_Mem, pAllocator: nullptr);
1958 if(BufferMem.m_UsageType == MEMORY_BLOCK_USAGE_BUFFER)
1959 m_pBufferMemoryUsage->store(i: m_pBufferMemoryUsage->load(m: std::memory_order_relaxed) - BufferMem.m_Size, m: std::memory_order_relaxed);
1960 else if(BufferMem.m_UsageType == MEMORY_BLOCK_USAGE_TEXTURE)
1961 m_pTextureMemoryUsage->store(i: m_pTextureMemoryUsage->load(m: std::memory_order_relaxed) - BufferMem.m_Size, m: std::memory_order_relaxed);
1962 else if(BufferMem.m_UsageType == MEMORY_BLOCK_USAGE_STREAM)
1963 m_pStreamMemoryUsage->store(i: m_pStreamMemoryUsage->load(m: std::memory_order_relaxed) - BufferMem.m_Size, m: std::memory_order_relaxed);
1964 else if(BufferMem.m_UsageType == MEMORY_BLOCK_USAGE_STAGING)
1965 m_pStagingMemoryUsage->store(i: m_pStagingMemoryUsage->load(m: std::memory_order_relaxed) - BufferMem.m_Size, m: std::memory_order_relaxed);
1966
1967 if(IsVerbose())
1968 {
1969 VerboseDeallocatedMemory(Size: BufferMem.m_Size, FrameImageIndex: ImageIndex, MemUsage: BufferMem.m_UsageType);
1970 }
1971
1972 BufferMem.m_Mem = VK_NULL_HANDLE;
1973 }
1974 }
1975
1976 void DestroyTexture(CTexture &Texture)
1977 {
1978 if(Texture.m_Img != VK_NULL_HANDLE)
1979 {
1980 FreeImageMemBlock(Block&: Texture.m_ImgMem);
1981 vkDestroyImage(device: m_VKDevice, image: Texture.m_Img, pAllocator: nullptr);
1982
1983 vkDestroyImageView(device: m_VKDevice, imageView: Texture.m_ImgView, pAllocator: nullptr);
1984 }
1985
1986 if(Texture.m_Img3D != VK_NULL_HANDLE)
1987 {
1988 FreeImageMemBlock(Block&: Texture.m_Img3DMem);
1989 vkDestroyImage(device: m_VKDevice, image: Texture.m_Img3D, pAllocator: nullptr);
1990
1991 vkDestroyImageView(device: m_VKDevice, imageView: Texture.m_Img3DView, pAllocator: nullptr);
1992 }
1993
1994 DestroyTexturedStandardDescriptorSets(Texture, DescrIndex: 0);
1995 DestroyTexturedStandardDescriptorSets(Texture, DescrIndex: 1);
1996
1997 DestroyTextured3DStandardDescriptorSets(Texture);
1998 }
1999
2000 void DestroyTextTexture(CTexture &Texture, CTexture &TextureOutline)
2001 {
2002 if(Texture.m_Img != VK_NULL_HANDLE)
2003 {
2004 FreeImageMemBlock(Block&: Texture.m_ImgMem);
2005 vkDestroyImage(device: m_VKDevice, image: Texture.m_Img, pAllocator: nullptr);
2006
2007 vkDestroyImageView(device: m_VKDevice, imageView: Texture.m_ImgView, pAllocator: nullptr);
2008 }
2009
2010 if(TextureOutline.m_Img != VK_NULL_HANDLE)
2011 {
2012 FreeImageMemBlock(Block&: TextureOutline.m_ImgMem);
2013 vkDestroyImage(device: m_VKDevice, image: TextureOutline.m_Img, pAllocator: nullptr);
2014
2015 vkDestroyImageView(device: m_VKDevice, imageView: TextureOutline.m_ImgView, pAllocator: nullptr);
2016 }
2017
2018 DestroyTextDescriptorSets(Texture, TextureOutline);
2019 }
2020
2021 void ClearFrameData(size_t FrameImageIndex)
2022 {
2023 UploadStagingBuffers();
2024
2025 // clear pending buffers, that require deletion
2026 for(auto &BufferPair : m_vvFrameDelayedBufferCleanup[FrameImageIndex])
2027 {
2028 if(BufferPair.m_pMappedData != nullptr)
2029 {
2030 vkUnmapMemory(device: m_VKDevice, memory: BufferPair.m_Mem.m_Mem);
2031 }
2032 CleanBufferPair(ImageIndex: FrameImageIndex, Buffer&: BufferPair.m_Buffer, BufferMem&: BufferPair.m_Mem);
2033 }
2034 m_vvFrameDelayedBufferCleanup[FrameImageIndex].clear();
2035
2036 // clear pending textures, that require deletion
2037 for(auto &Texture : m_vvFrameDelayedTextureCleanup[FrameImageIndex])
2038 {
2039 DestroyTexture(Texture);
2040 }
2041 m_vvFrameDelayedTextureCleanup[FrameImageIndex].clear();
2042
2043 for(auto &TexturePair : m_vvFrameDelayedTextTexturesCleanup[FrameImageIndex])
2044 {
2045 DestroyTextTexture(Texture&: TexturePair.first, TextureOutline&: TexturePair.second);
2046 }
2047 m_vvFrameDelayedTextTexturesCleanup[FrameImageIndex].clear();
2048
2049 m_StagingBufferCache.Cleanup(ImgIndex: FrameImageIndex);
2050 m_StagingBufferCacheImage.Cleanup(ImgIndex: FrameImageIndex);
2051 m_VertexBufferCache.Cleanup(ImgIndex: FrameImageIndex);
2052 for(auto &ImageBufferCache : m_ImageBufferCaches)
2053 ImageBufferCache.second.Cleanup(ImgIndex: FrameImageIndex);
2054 }
2055
// Releases unused chunks from every memory cache (staging, vertex, image)
// and subtracts the freed amount from the matching usage counter.
// Logs the freed sizes in verbose debug mode.
void ShrinkUnusedCaches()
{
	// staging caches share the staging usage counter
	size_t FreedMemory = 0;
	FreedMemory += m_StagingBufferCache.Shrink(Device&: m_VKDevice);
	FreedMemory += m_StagingBufferCacheImage.Shrink(Device&: m_VKDevice);
	if(FreedMemory > 0)
	{
		m_pStagingMemoryUsage->store(i: m_pStagingMemoryUsage->load(m: std::memory_order_relaxed) - FreedMemory, m: std::memory_order_relaxed);
		if(IsVerbose())
		{
			dbg_msg(sys: "vulkan", fmt: "deallocated chunks of memory with size: %" PRIzu " from all frames (staging buffer)", FreedMemory);
		}
	}
	// vertex buffers count towards the buffer usage counter
	FreedMemory = 0;
	FreedMemory += m_VertexBufferCache.Shrink(Device&: m_VKDevice);
	if(FreedMemory > 0)
	{
		m_pBufferMemoryUsage->store(i: m_pBufferMemoryUsage->load(m: std::memory_order_relaxed) - FreedMemory, m: std::memory_order_relaxed);
		if(IsVerbose())
		{
			dbg_msg(sys: "vulkan", fmt: "deallocated chunks of memory with size: %" PRIzu " from all frames (buffer)", FreedMemory);
		}
	}
	// image caches (one per memory type) count towards the texture usage counter
	FreedMemory = 0;
	for(auto &ImageBufferCache : m_ImageBufferCaches)
		FreedMemory += ImageBufferCache.second.Shrink(Device&: m_VKDevice);
	if(FreedMemory > 0)
	{
		m_pTextureMemoryUsage->store(i: m_pTextureMemoryUsage->load(m: std::memory_order_relaxed) - FreedMemory, m: std::memory_order_relaxed);
		if(IsVerbose())
		{
			dbg_msg(sys: "vulkan", fmt: "deallocated chunks of memory with size: %" PRIzu " from all frames (texture)", FreedMemory);
		}
	}
}
2091
2092 [[nodiscard]] bool MemoryBarrier(VkBuffer Buffer, VkDeviceSize Offset, VkDeviceSize Size, VkAccessFlags BufferAccessType, bool BeforeCommand)
2093 {
2094 VkCommandBuffer *pMemCommandBuffer;
2095 if(!GetMemoryCommandBuffer(pMemCommandBuffer))
2096 return false;
2097 auto &MemCommandBuffer = *pMemCommandBuffer;
2098
2099 VkBufferMemoryBarrier Barrier{};
2100 Barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
2101 Barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2102 Barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2103 Barrier.buffer = Buffer;
2104 Barrier.offset = Offset;
2105 Barrier.size = Size;
2106
2107 VkPipelineStageFlags SourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
2108 VkPipelineStageFlags DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
2109
2110 if(BeforeCommand)
2111 {
2112 Barrier.srcAccessMask = BufferAccessType;
2113 Barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
2114
2115 SourceStage = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
2116 DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
2117 }
2118 else
2119 {
2120 Barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
2121 Barrier.dstAccessMask = BufferAccessType;
2122
2123 SourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
2124 DestinationStage = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
2125 }
2126
2127 vkCmdPipelineBarrier(
2128 commandBuffer: MemCommandBuffer,
2129 srcStageMask: SourceStage, dstStageMask: DestinationStage,
2130 dependencyFlags: 0,
2131 memoryBarrierCount: 0, pMemoryBarriers: nullptr,
2132 bufferMemoryBarrierCount: 1, pBufferMemoryBarriers: &Barrier,
2133 imageMemoryBarrierCount: 0, pImageMemoryBarriers: nullptr);
2134
2135 return true;
2136 }
2137
2138 /************************
2139 * SWAPPING MECHANISM
2140 ************************/
2141
2142 void StartRenderThread(size_t ThreadIndex)
2143 {
2144 auto &List = m_vvThreadCommandLists[ThreadIndex];
2145 if(!List.empty())
2146 {
2147 m_vThreadHelperHadCommands[ThreadIndex] = true;
2148 auto *pThread = m_vpRenderThreads[ThreadIndex].get();
2149 std::unique_lock<std::mutex> Lock(pThread->m_Mutex);
2150 pThread->m_IsRendering = true;
2151 pThread->m_Cond.notify_one();
2152 }
2153 }
2154
// Ensures all render worker threads finished the current frame: first kicks
// off any worker that still has unprocessed commands, then blocks until each
// started worker has cleared its rendering flag. The cached pipeline handle
// of each finished worker is invalidated for the next frame.
void FinishRenderThreads()
{
	if(m_ThreadCount > 1)
	{
		// execute threads

		// start any workers that have commands queued but were not kicked off yet
		for(size_t ThreadIndex = 0; ThreadIndex < m_ThreadCount - 1; ++ThreadIndex)
		{
			if(!m_vThreadHelperHadCommands[ThreadIndex])
			{
				StartRenderThread(ThreadIndex);
			}
		}

		// wait for all started workers to finish
		for(size_t ThreadIndex = 0; ThreadIndex < m_ThreadCount - 1; ++ThreadIndex)
		{
			if(m_vThreadHelperHadCommands[ThreadIndex])
			{
				auto &pRenderThread = m_vpRenderThreads[ThreadIndex];
				m_vThreadHelperHadCommands[ThreadIndex] = false;
				std::unique_lock<std::mutex> Lock(pRenderThread->m_Mutex);
				// the worker clears m_IsRendering under its mutex when done
				pRenderThread->m_Cond.wait(lock&: Lock, p: [&pRenderThread] { return !pRenderThread->m_IsRendering; });
				// index 0 is the main thread; workers start at slot 1
				m_vLastPipeline[ThreadIndex + 1] = VK_NULL_HANDLE;
			}
		}
	}
}
2182
2183 void ExecuteMemoryCommandBuffer()
2184 {
2185 if(m_vUsedMemoryCommandBuffer[m_CurImageIndex])
2186 {
2187 auto &MemoryCommandBuffer = m_vMemoryCommandBuffers[m_CurImageIndex];
2188 vkEndCommandBuffer(commandBuffer: MemoryCommandBuffer);
2189
2190 VkSubmitInfo SubmitInfo{};
2191 SubmitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
2192
2193 SubmitInfo.commandBufferCount = 1;
2194 SubmitInfo.pCommandBuffers = &MemoryCommandBuffer;
2195 vkQueueSubmit(queue: m_VKGraphicsQueue, submitCount: 1, pSubmits: &SubmitInfo, VK_NULL_HANDLE);
2196 vkQueueWaitIdle(queue: m_VKGraphicsQueue);
2197
2198 m_vUsedMemoryCommandBuffer[m_CurImageIndex] = false;
2199 }
2200 }
2201
// Frees/recycles all memory tied to the current frame image, then releases
// cache chunks that went unused, returning them to the driver.
void ClearFrameMemoryUsage()
{
	ClearFrameData(FrameImageIndex: m_CurImageIndex);
	ShrinkUnusedCaches();
}
2207
	// Ends recording of the current frame and hands it to the GPU:
	// 1. joins the render threads and uploads streamed/staging data,
	// 2. executes the secondary command buffers recorded by the threads,
	// 3. ends the render pass and the primary command buffer,
	// 4. submits memory + graphics command buffers to the graphics queue
	//    (guarded by the per-image fence and the acquire semaphore),
	// 5. presents the swap chain image on the present queue.
	// Returns false on any unrecoverable recording/submit/present error.
	[[nodiscard]] bool WaitFrame()
	{
		FinishRenderThreads();
		m_LastCommandsInPipeThreadIndex = 0;

		UploadNonFlushedBuffers<true>();

		auto &CommandBuffer = GetMainGraphicCommandBuffer();

		// render threads
		if(m_ThreadCount > 1)
		{
			// collect all secondary command buffers the helper threads
			// recorded for this image (index 0 is the main thread's buffer)
			size_t ThreadedCommandsUsedCount = 0;
			size_t RenderThreadCount = m_ThreadCount - 1;
			for(size_t i = 0; i < RenderThreadCount; ++i)
			{
				if(m_vvUsedThreadDrawCommandBuffer[i + 1][m_CurImageIndex])
				{
					const auto &GraphicThreadCommandBuffer = m_vvThreadDrawCommandBuffers[i + 1][m_CurImageIndex];
					m_vHelperThreadDrawCommandBuffers[ThreadedCommandsUsedCount++] = GraphicThreadCommandBuffer;

					m_vvUsedThreadDrawCommandBuffer[i + 1][m_CurImageIndex] = false;
				}
			}
			if(ThreadedCommandsUsedCount > 0)
			{
				vkCmdExecuteCommands(commandBuffer: CommandBuffer, commandBufferCount: ThreadedCommandsUsedCount, pCommandBuffers: m_vHelperThreadDrawCommandBuffers.data());
			}

			// special case if swap chain was not completed in one runbuffer call

			if(m_vvUsedThreadDrawCommandBuffer[0][m_CurImageIndex])
			{
				auto &GraphicThreadCommandBuffer = m_vvThreadDrawCommandBuffers[0][m_CurImageIndex];
				vkEndCommandBuffer(commandBuffer: GraphicThreadCommandBuffer);

				vkCmdExecuteCommands(commandBuffer: CommandBuffer, commandBufferCount: 1, pCommandBuffers: &GraphicThreadCommandBuffer);

				m_vvUsedThreadDrawCommandBuffer[0][m_CurImageIndex] = false;
			}
		}

		vkCmdEndRenderPass(commandBuffer: CommandBuffer);

		if(vkEndCommandBuffer(commandBuffer: CommandBuffer) != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_RENDER_RECORDING, pErr: "Command buffer cannot be ended anymore.");
			return false;
		}

		VkSubmitInfo SubmitInfo{};
		SubmitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;

		SubmitInfo.commandBufferCount = 1;
		SubmitInfo.pCommandBuffers = &CommandBuffer;

		std::array<VkCommandBuffer, 2> aCommandBuffers = {};

		// if memory (transfer) commands were recorded for this image, submit
		// them in the same batch, ordered before the graphics commands
		if(m_vUsedMemoryCommandBuffer[m_CurImageIndex])
		{
			auto &MemoryCommandBuffer = m_vMemoryCommandBuffers[m_CurImageIndex];
			vkEndCommandBuffer(commandBuffer: MemoryCommandBuffer);

			aCommandBuffers[0] = MemoryCommandBuffer;
			aCommandBuffers[1] = CommandBuffer;
			SubmitInfo.commandBufferCount = 2;
			SubmitInfo.pCommandBuffers = aCommandBuffers.data();

			m_vUsedMemoryCommandBuffer[m_CurImageIndex] = false;
		}

		// wait on the acquire semaphore before color attachment output, so the
		// presentation engine is done with the image before it is written
		std::array<VkSemaphore, 1> aWaitSemaphores = {m_AcquireImageSemaphore};
		std::array<VkPipelineStageFlags, 1> aWaitStages = {(VkPipelineStageFlags)VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT};
		SubmitInfo.waitSemaphoreCount = aWaitSemaphores.size();
		SubmitInfo.pWaitSemaphores = aWaitSemaphores.data();
		SubmitInfo.pWaitDstStageMask = aWaitStages.data();

		// signaled when rendering finished; presentation waits on it below
		std::array<VkSemaphore, 1> aSignalSemaphores = {m_vQueueSubmitSemaphores[m_CurImageIndex]};
		SubmitInfo.signalSemaphoreCount = aSignalSemaphores.size();
		SubmitInfo.pSignalSemaphores = aSignalSemaphores.data();

		vkResetFences(device: m_VKDevice, fenceCount: 1, pFences: &m_vQueueSubmitFences[m_CurImageIndex]);

		VkResult QueueSubmitRes = vkQueueSubmit(queue: m_VKGraphicsQueue, submitCount: 1, pSubmits: &SubmitInfo, fence: m_vQueueSubmitFences[m_CurImageIndex]);
		if(QueueSubmitRes != VK_SUCCESS)
		{
			// only treat results classified as critical as fatal
			const char *pCritErrorMsg = CheckVulkanCriticalError(CallResult: QueueSubmitRes);
			if(pCritErrorMsg != nullptr)
			{
				SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_RENDER_SUBMIT_FAILED, pErr: "Submitting to graphics queue failed.", pErrStrExtra: pCritErrorMsg);
				return false;
			}
		}

		// the acquire semaphore is now in use by this submission; rotate in a
		// free one for the next vkAcquireNextImageKHR call
		std::swap(a&: m_vBusyAcquireImageSemaphores[m_CurImageIndex], b&: m_AcquireImageSemaphore);

		VkPresentInfoKHR PresentInfo{};
		PresentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;

		PresentInfo.waitSemaphoreCount = aSignalSemaphores.size();
		PresentInfo.pWaitSemaphores = aSignalSemaphores.data();

		std::array<VkSwapchainKHR, 1> aSwapChains = {m_VKSwapChain};
		PresentInfo.swapchainCount = aSwapChains.size();
		PresentInfo.pSwapchains = aSwapChains.data();

		PresentInfo.pImageIndices = &m_CurImageIndex;

		m_LastPresentedSwapChainImageIndex = m_CurImageIndex;

		// VK_SUBOPTIMAL_KHR still presented successfully, so it is not an error here
		VkResult QueuePresentRes = vkQueuePresentKHR(queue: m_VKPresentQueue, pPresentInfo: &PresentInfo);
		if(QueuePresentRes != VK_SUCCESS && QueuePresentRes != VK_SUBOPTIMAL_KHR)
		{
			const char *pCritErrorMsg = CheckVulkanCriticalError(CallResult: QueuePresentRes);
			if(pCritErrorMsg != nullptr)
			{
				SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_SWAP_FAILED, pErr: "Presenting graphics queue failed.", pErrStrExtra: pCritErrorMsg);
				return false;
			}
		}

		return true;
	}
2331
	// Acquires the next swap chain image and starts recording a new frame:
	// recreates the swap chain if requested or out of date, waits for the
	// fence of the image's previous submission, clears stale per-image data,
	// then begins the primary command buffer and the render pass.
	// Returns false on unrecoverable acquire/recording errors.
	[[nodiscard]] bool PrepareFrame()
	{
		if(m_RecreateSwapChain)
		{
			m_RecreateSwapChain = false;
			if(IsVerbose())
			{
				dbg_msg(sys: "vulkan", fmt: "recreating swap chain requested by user (prepare frame).");
			}
			RecreateSwapChain();
		}

		auto AcqResult = vkAcquireNextImageKHR(device: m_VKDevice, swapchain: m_VKSwapChain, timeout: std::numeric_limits<uint64_t>::max(), semaphore: m_AcquireImageSemaphore, VK_NULL_HANDLE, pImageIndex: &m_CurImageIndex);
		if(AcqResult != VK_SUCCESS)
		{
			// an out-of-date swap chain must be recreated before rendering can continue
			if(AcqResult == VK_ERROR_OUT_OF_DATE_KHR || m_RecreateSwapChain)
			{
				m_RecreateSwapChain = false;
				if(IsVerbose())
				{
					dbg_msg(sys: "vulkan", fmt: "recreating swap chain requested by acquire next image (prepare frame).");
				}
				RecreateSwapChain();
				// retry with the freshly created swap chain
				return PrepareFrame();
			}
			else
			{
				// VK_SUBOPTIMAL_KHR still acquired an image; only log other failures
				if(AcqResult != VK_SUBOPTIMAL_KHR)
					dbg_msg(sys: "vulkan", fmt: "acquire next image failed %d", (int)AcqResult);

				const char *pCritErrorMsg = CheckVulkanCriticalError(CallResult: AcqResult);
				if(pCritErrorMsg != nullptr)
				{
					SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_SWAP_FAILED, pErr: "Acquiring next image failed.", pErrStrExtra: pCritErrorMsg);
					return false;
				}
				// NOTE(review): this branch is only reachable if
				// CheckVulkanCriticalError returns nullptr for
				// VK_ERROR_SURFACE_LOST_KHR - confirm that it does,
				// otherwise this pause path is dead code
				else if(AcqResult == VK_ERROR_SURFACE_LOST_KHR)
				{
					m_RenderingPaused = true;
					return true;
				}
			}
		}

		// wait until the GPU finished the last frame that used this image so
		// its resources can be reused safely
		vkWaitForFences(device: m_VKDevice, fenceCount: 1, pFences: &m_vQueueSubmitFences[m_CurImageIndex], VK_TRUE, timeout: std::numeric_limits<uint64_t>::max());

		// next frame
		m_CurFrame++;
		m_vImageLastFrameCheck[m_CurImageIndex] = m_CurFrame;

		// check if older frames weren't used in a long time
		for(size_t FrameImageIndex = 0; FrameImageIndex < m_vImageLastFrameCheck.size(); ++FrameImageIndex)
		{
			auto LastFrame = m_vImageLastFrameCheck[FrameImageIndex];
			if(m_CurFrame - LastFrame > (uint64_t)m_SwapChainImageCount)
			{
				// the image was idle for a whole swap chain cycle: wait for
				// its pending work and release its frame data
				vkWaitForFences(device: m_VKDevice, fenceCount: 1, pFences: &m_vQueueSubmitFences[FrameImageIndex], VK_TRUE, timeout: std::numeric_limits<uint64_t>::max());
				ClearFrameData(FrameImageIndex);
				m_vImageLastFrameCheck[FrameImageIndex] = m_CurFrame;
			}
		}

		// clear frame's memory data
		ClearFrameMemoryUsage();

		// clear frame
		vkResetCommandBuffer(commandBuffer: GetMainGraphicCommandBuffer(), flags: VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);

		auto &CommandBuffer = GetMainGraphicCommandBuffer();
		VkCommandBufferBeginInfo BeginInfo{};
		BeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
		BeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;

		if(vkBeginCommandBuffer(commandBuffer: CommandBuffer, pBeginInfo: &BeginInfo) != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_RENDER_RECORDING, pErr: "Command buffer cannot be filled anymore.");
			return false;
		}

		VkRenderPassBeginInfo RenderPassInfo{};
		RenderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
		RenderPassInfo.renderPass = m_VKRenderPass;
		RenderPassInfo.framebuffer = m_vFramebufferList[m_CurImageIndex];
		RenderPassInfo.renderArea.offset = {.x: 0, .y: 0};
		RenderPassInfo.renderArea.extent = m_VKSwapImgAndViewportExtent.m_SwapImageViewport;

		VkClearValue ClearColorVal = {.color: {.float32: {m_aClearColor[0], m_aClearColor[1], m_aClearColor[2], m_aClearColor[3]}}};
		RenderPassInfo.clearValueCount = 1;
		RenderPassInfo.pClearValues = &ClearColorVal;

		// with multiple threads the draw commands are recorded into secondary
		// command buffers, otherwise inline into the primary one
		vkCmdBeginRenderPass(commandBuffer: CommandBuffer, pRenderPassBegin: &RenderPassInfo, contents: m_ThreadCount > 1 ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS : VK_SUBPASS_CONTENTS_INLINE);

		// previously bound pipelines are invalid after starting a new render pass
		for(auto &LastPipe : m_vLastPipeline)
			LastPipe = VK_NULL_HANDLE;

		return true;
	}
2429
2430 void UploadStagingBuffers()
2431 {
2432 if(!m_vNonFlushedStagingBufferRange.empty())
2433 {
2434 vkFlushMappedMemoryRanges(device: m_VKDevice, memoryRangeCount: m_vNonFlushedStagingBufferRange.size(), pMemoryRanges: m_vNonFlushedStagingBufferRange.data());
2435
2436 m_vNonFlushedStagingBufferRange.clear();
2437 }
2438 }
2439
2440 template<bool FlushForRendering>
2441 void UploadNonFlushedBuffers()
2442 {
2443 // streamed vertices
2444 for(auto &StreamVertexBuffer : m_vStreamedVertexBuffers)
2445 UploadStreamedBuffer<FlushForRendering>(StreamVertexBuffer);
2446 // now the buffer objects
2447 for(auto &StreamUniformBuffer : m_vStreamedUniformBuffers)
2448 UploadStreamedBuffer<FlushForRendering>(StreamUniformBuffer);
2449
2450 UploadStagingBuffers();
2451 }
2452
2453 [[nodiscard]] bool PureMemoryFrame()
2454 {
2455 ExecuteMemoryCommandBuffer();
2456
2457 // reset streamed data
2458 UploadNonFlushedBuffers<false>();
2459
2460 ClearFrameMemoryUsage();
2461
2462 return true;
2463 }
2464
2465 [[nodiscard]] bool NextFrame()
2466 {
2467 if(!m_RenderingPaused)
2468 {
2469 if(!WaitFrame())
2470 return false;
2471 if(!PrepareFrame())
2472 return false;
2473 }
2474 // else only execute the memory command buffer
2475 else
2476 {
2477 if(!PureMemoryFrame())
2478 return false;
2479 }
2480
2481 return true;
2482 }
2483
2484 /************************
2485 * TEXTURES
2486 ************************/
2487
2488 size_t VulkanFormatToPixelSize(VkFormat Format)
2489 {
2490 if(Format == VK_FORMAT_R8G8B8_UNORM)
2491 return 3;
2492 else if(Format == VK_FORMAT_R8G8B8A8_UNORM)
2493 return 4;
2494 else if(Format == VK_FORMAT_R8_UNORM)
2495 return 1;
2496 return 4;
2497 }
2498
2499 [[nodiscard]] bool UpdateTexture(size_t TextureSlot, VkFormat Format, uint8_t *&pData, int64_t XOff, int64_t YOff, size_t Width, size_t Height)
2500 {
2501 const size_t ImageSize = Width * Height * VulkanFormatToPixelSize(Format);
2502 SMemoryBlock<STAGING_BUFFER_IMAGE_CACHE_ID> StagingBuffer;
2503 if(!GetStagingBufferImage(ResBlock&: StagingBuffer, pBufferData: pData, RequiredSize: ImageSize))
2504 return false;
2505
2506 auto &Tex = m_vTextures[TextureSlot];
2507
2508 if(Tex.m_RescaleCount > 0)
2509 {
2510 for(uint32_t i = 0; i < Tex.m_RescaleCount; ++i)
2511 {
2512 Width >>= 1;
2513 Height >>= 1;
2514
2515 XOff /= 2;
2516 YOff /= 2;
2517 }
2518
2519 uint8_t *pTmpData = ResizeImage(pImageData: pData, Width, Height, NewWidth: Width, NewHeight: Height, BPP: VulkanFormatToPixelSize(Format));
2520 free(ptr: pData);
2521 pData = pTmpData;
2522 }
2523
2524 if(!ImageBarrier(Image: Tex.m_Img, MipMapBase: 0, MipMapCount: Tex.m_MipMapCount, LayerBase: 0, LayerCount: 1, Format, OldLayout: VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, NewLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL))
2525 return false;
2526 if(!CopyBufferToImage(Buffer: StagingBuffer.m_Buffer, BufferOffset: StagingBuffer.m_HeapData.m_OffsetToAlign, Image: Tex.m_Img, X: XOff, Y: YOff, Width, Height, Depth: 1))
2527 return false;
2528
2529 if(Tex.m_MipMapCount > 1)
2530 {
2531 if(!BuildMipmaps(Image: Tex.m_Img, ImageFormat: Format, Width, Height, Depth: 1, MipMapLevelCount: Tex.m_MipMapCount))
2532 return false;
2533 }
2534 else
2535 {
2536 if(!ImageBarrier(Image: Tex.m_Img, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format, OldLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, NewLayout: VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL))
2537 return false;
2538 }
2539
2540 UploadAndFreeStagingImageMemBlock(Block&: StagingBuffer);
2541
2542 return true;
2543 }
2544
2545 [[nodiscard]] bool CreateTextureCMD(
2546 int Slot,
2547 int Width,
2548 int Height,
2549 VkFormat Format,
2550 VkFormat StoreFormat,
2551 int Flags,
2552 uint8_t *&pData)
2553 {
2554 size_t ImageIndex = (size_t)Slot;
2555 const size_t PixelSize = VulkanFormatToPixelSize(Format);
2556
2557 while(ImageIndex >= m_vTextures.size())
2558 {
2559 m_vTextures.resize(new_size: (m_vTextures.size() * 2) + 1);
2560 }
2561
2562 // resample if needed
2563 uint32_t RescaleCount = 0;
2564 if((size_t)Width > m_MaxTextureSize || (size_t)Height > m_MaxTextureSize)
2565 {
2566 do
2567 {
2568 Width >>= 1;
2569 Height >>= 1;
2570 ++RescaleCount;
2571 } while((size_t)Width > m_MaxTextureSize || (size_t)Height > m_MaxTextureSize);
2572
2573 uint8_t *pTmpData = ResizeImage(pImageData: pData, Width, Height, NewWidth: Width, NewHeight: Height, BPP: PixelSize);
2574 free(ptr: pData);
2575 pData = pTmpData;
2576 }
2577
2578 bool Requires2DTexture = (Flags & TextureFlag::NO_2D_TEXTURE) == 0;
2579 bool Requires2DTextureArray = (Flags & TextureFlag::TO_2D_ARRAY_TEXTURE) != 0;
2580 bool RequiresMipMaps = (Flags & TextureFlag::NO_MIPMAPS) == 0;
2581 size_t MipMapLevelCount = 1;
2582 if(RequiresMipMaps)
2583 {
2584 VkExtent3D ImgSize{.width: (uint32_t)Width, .height: (uint32_t)Height, .depth: 1};
2585 MipMapLevelCount = ImageMipLevelCount(ImgExtent: ImgSize);
2586 if(!m_OptimalRGBAImageBlitting)
2587 MipMapLevelCount = 1;
2588 }
2589
2590 CTexture &Texture = m_vTextures[ImageIndex];
2591
2592 Texture.m_Width = Width;
2593 Texture.m_Height = Height;
2594 Texture.m_RescaleCount = RescaleCount;
2595 Texture.m_MipMapCount = MipMapLevelCount;
2596
2597 if(Requires2DTexture)
2598 {
2599 if(!CreateTextureImage(ImageIndex, NewImage&: Texture.m_Img, NewImgMem&: Texture.m_ImgMem, pData, Format, Width, Height, Depth: 1, PixelSize, MipMapLevelCount))
2600 return false;
2601 VkFormat ImgFormat = Format;
2602 VkImageView ImgView = CreateTextureImageView(TexImage: Texture.m_Img, ImgFormat, ViewType: VK_IMAGE_VIEW_TYPE_2D, Depth: 1, MipMapLevelCount);
2603 Texture.m_ImgView = ImgView;
2604 VkSampler ImgSampler = GetTextureSampler(SamplerType: SUPPORTED_SAMPLER_TYPE_REPEAT);
2605 Texture.m_aSamplers[0] = ImgSampler;
2606 ImgSampler = GetTextureSampler(SamplerType: SUPPORTED_SAMPLER_TYPE_CLAMP_TO_EDGE);
2607 Texture.m_aSamplers[1] = ImgSampler;
2608
2609 if(!CreateNewTexturedStandardDescriptorSets(TextureSlot: ImageIndex, DescrIndex: 0))
2610 return false;
2611 if(!CreateNewTexturedStandardDescriptorSets(TextureSlot: ImageIndex, DescrIndex: 1))
2612 return false;
2613 }
2614
2615 if(Requires2DTextureArray)
2616 {
2617 int Image3DWidth = Width;
2618 int Image3DHeight = Height;
2619
2620 int ConvertWidth = Width;
2621 int ConvertHeight = Height;
2622
2623 if(ConvertWidth == 0 || (ConvertWidth % 16) != 0 || ConvertHeight == 0 || (ConvertHeight % 16) != 0)
2624 {
2625 dbg_msg(sys: "vulkan", fmt: "3D/2D array texture was resized");
2626 int NewWidth = maximum<int>(a: HighestBit(OfVar: ConvertWidth), b: 16);
2627 int NewHeight = maximum<int>(a: HighestBit(OfVar: ConvertHeight), b: 16);
2628 uint8_t *pNewTexData = ResizeImage(pImageData: pData, Width: ConvertWidth, Height: ConvertHeight, NewWidth, NewHeight, BPP: PixelSize);
2629
2630 ConvertWidth = NewWidth;
2631 ConvertHeight = NewHeight;
2632
2633 free(ptr: pData);
2634 pData = pNewTexData;
2635 }
2636
2637 bool Needs3DTexDel = false;
2638 uint8_t *pTexData3D = static_cast<uint8_t *>(malloc(size: (size_t)PixelSize * ConvertWidth * ConvertHeight));
2639 if(!Texture2DTo3D(pImageBuffer: pData, ImageWidth: ConvertWidth, ImageHeight: ConvertHeight, PixelSize, SplitCountWidth: 16, SplitCountHeight: 16, pTarget3DImageData: pTexData3D, Target3DImageWidth&: Image3DWidth, Target3DImageHeight&: Image3DHeight))
2640 {
2641 free(ptr: pTexData3D);
2642 pTexData3D = nullptr;
2643 }
2644 Needs3DTexDel = true;
2645
2646 if(pTexData3D != nullptr)
2647 {
2648 const size_t ImageDepth2DArray = (size_t)16 * 16;
2649 VkExtent3D ImgSize{.width: (uint32_t)Image3DWidth, .height: (uint32_t)Image3DHeight, .depth: 1};
2650 if(RequiresMipMaps)
2651 {
2652 MipMapLevelCount = ImageMipLevelCount(ImgExtent: ImgSize);
2653 if(!m_OptimalRGBAImageBlitting)
2654 MipMapLevelCount = 1;
2655 }
2656
2657 if(!CreateTextureImage(ImageIndex, NewImage&: Texture.m_Img3D, NewImgMem&: Texture.m_Img3DMem, pData: pTexData3D, Format, Width: Image3DWidth, Height: Image3DHeight, Depth: ImageDepth2DArray, PixelSize, MipMapLevelCount))
2658 return false;
2659 VkFormat ImgFormat = Format;
2660 VkImageView ImgView = CreateTextureImageView(TexImage: Texture.m_Img3D, ImgFormat, ViewType: VK_IMAGE_VIEW_TYPE_2D_ARRAY, Depth: ImageDepth2DArray, MipMapLevelCount);
2661 Texture.m_Img3DView = ImgView;
2662 VkSampler ImgSampler = GetTextureSampler(SamplerType: SUPPORTED_SAMPLER_TYPE_2D_TEXTURE_ARRAY);
2663 Texture.m_Sampler3D = ImgSampler;
2664
2665 if(!CreateNew3DTexturedStandardDescriptorSets(TextureSlot: ImageIndex))
2666 return false;
2667
2668 if(Needs3DTexDel)
2669 free(ptr: pTexData3D);
2670 }
2671 }
2672 return true;
2673 }
2674
	// Generates the full mip chain of an image on the GPU by repeatedly
	// blitting level i-1 (halved) into level i. Expects every mip level to be
	// in TRANSFER_DST_OPTIMAL layout on entry; leaves all levels in
	// SHADER_READ_ONLY_OPTIMAL. Depth is the number of array layers.
	// Returns false only if no memory command buffer could be acquired.
	[[nodiscard]] bool BuildMipmaps(VkImage Image, VkFormat ImageFormat, size_t Width, size_t Height, size_t Depth, size_t MipMapLevelCount)
	{
		VkCommandBuffer *pMemCommandBuffer;
		if(!GetMemoryCommandBuffer(pMemCommandBuffer))
			return false;
		auto &MemCommandBuffer = *pMemCommandBuffer;

		// barrier template reused for every level; only the per-level fields
		// (baseMipLevel, layouts, access masks) change in the loop
		VkImageMemoryBarrier Barrier{};
		Barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		Barrier.image = Image;
		Barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		Barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		Barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		Barrier.subresourceRange.levelCount = 1;
		Barrier.subresourceRange.baseArrayLayer = 0;
		Barrier.subresourceRange.layerCount = Depth;

		int32_t TmpMipWidth = (int32_t)Width;
		int32_t TmpMipHeight = (int32_t)Height;

		for(size_t i = 1; i < MipMapLevelCount; ++i)
		{
			// wait for the writes into level i-1, then make it a blit source
			Barrier.subresourceRange.baseMipLevel = i - 1;
			Barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
			Barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
			Barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
			Barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;

			vkCmdPipelineBarrier(commandBuffer: MemCommandBuffer, srcStageMask: VK_PIPELINE_STAGE_TRANSFER_BIT, dstStageMask: VK_PIPELINE_STAGE_TRANSFER_BIT, dependencyFlags: 0, memoryBarrierCount: 0, pMemoryBarriers: nullptr, bufferMemoryBarrierCount: 0, pBufferMemoryBarriers: nullptr, imageMemoryBarrierCount: 1, pImageMemoryBarriers: &Barrier);

			// blit level i-1 into level i at half resolution (clamped to 1)
			VkImageBlit Blit{};
			Blit.srcOffsets[0] = {.x: 0, .y: 0, .z: 0};
			Blit.srcOffsets[1] = {.x: TmpMipWidth, .y: TmpMipHeight, .z: 1};
			Blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
			Blit.srcSubresource.mipLevel = i - 1;
			Blit.srcSubresource.baseArrayLayer = 0;
			Blit.srcSubresource.layerCount = Depth;
			Blit.dstOffsets[0] = {.x: 0, .y: 0, .z: 0};
			Blit.dstOffsets[1] = {.x: TmpMipWidth > 1 ? TmpMipWidth / 2 : 1, .y: TmpMipHeight > 1 ? TmpMipHeight / 2 : 1, .z: 1};
			Blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
			Blit.dstSubresource.mipLevel = i;
			Blit.dstSubresource.baseArrayLayer = 0;
			Blit.dstSubresource.layerCount = Depth;

			vkCmdBlitImage(commandBuffer: MemCommandBuffer,
				srcImage: Image, srcImageLayout: VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
				dstImage: Image, dstImageLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
				regionCount: 1, pRegions: &Blit,
				filter: m_AllowsLinearBlitting ? VK_FILTER_LINEAR : VK_FILTER_NEAREST);

			// level i-1 is finished: transition it to shader-read
			Barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
			Barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
			Barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
			Barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;

			vkCmdPipelineBarrier(commandBuffer: MemCommandBuffer,
				srcStageMask: VK_PIPELINE_STAGE_TRANSFER_BIT, dstStageMask: VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, dependencyFlags: 0,
				memoryBarrierCount: 0, pMemoryBarriers: nullptr,
				bufferMemoryBarrierCount: 0, pBufferMemoryBarriers: nullptr,
				imageMemoryBarrierCount: 1, pImageMemoryBarriers: &Barrier);

			if(TmpMipWidth > 1)
				TmpMipWidth /= 2;
			if(TmpMipHeight > 1)
				TmpMipHeight /= 2;
		}

		// the last level was only written (never used as a blit source), so
		// it still needs its own transition to shader-read
		Barrier.subresourceRange.baseMipLevel = MipMapLevelCount - 1;
		Barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
		Barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
		Barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
		Barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;

		vkCmdPipelineBarrier(commandBuffer: MemCommandBuffer,
			srcStageMask: VK_PIPELINE_STAGE_TRANSFER_BIT, dstStageMask: VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, dependencyFlags: 0,
			memoryBarrierCount: 0, pMemoryBarriers: nullptr,
			bufferMemoryBarrierCount: 0, pBufferMemoryBarriers: nullptr,
			imageMemoryBarrierCount: 1, pImageMemoryBarriers: &Barrier);

		return true;
	}
2756
	// Creates a device-local image (Depth = array layer count), uploads pData
	// into it via a staging buffer and brings it into
	// SHADER_READ_ONLY_OPTIMAL layout (generating mipmaps if requested).
	// Returns false if any allocation, transition or copy fails.
	[[nodiscard]] bool CreateTextureImage(size_t ImageIndex, VkImage &NewImage, SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> &NewImgMem, const uint8_t *pData, VkFormat Format, size_t Width, size_t Height, size_t Depth, size_t PixelSize, size_t MipMapLevelCount)
	{
		VkDeviceSize ImageSize = Width * Height * Depth * PixelSize;

		// copy the pixel data into a host-visible staging buffer first
		SMemoryBlock<STAGING_BUFFER_IMAGE_CACHE_ID> StagingBuffer;
		if(!GetStagingBufferImage(ResBlock&: StagingBuffer, pBufferData: pData, RequiredSize: ImageSize))
			return false;

		VkFormat ImgFormat = Format;

		if(!CreateImage(Width, Height, Depth, MipMapLevelCount, Format: ImgFormat, Tiling: VK_IMAGE_TILING_OPTIMAL, Image&: NewImage, ImageMemory&: NewImgMem))
			return false;

		// prepare all mip levels for the transfer, then copy level 0
		if(!ImageBarrier(Image: NewImage, MipMapBase: 0, MipMapCount: MipMapLevelCount, LayerBase: 0, LayerCount: Depth, Format: ImgFormat, OldLayout: VK_IMAGE_LAYOUT_UNDEFINED, NewLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL))
			return false;
		if(!CopyBufferToImage(Buffer: StagingBuffer.m_Buffer, BufferOffset: StagingBuffer.m_HeapData.m_OffsetToAlign, Image: NewImage, X: 0, Y: 0, Width: static_cast<uint32_t>(Width), Height: static_cast<uint32_t>(Height), Depth))
			return false;

		UploadAndFreeStagingImageMemBlock(Block&: StagingBuffer);

		if(MipMapLevelCount > 1)
		{
			// BuildMipmaps also transitions every level to shader-read
			if(!BuildMipmaps(Image: NewImage, ImageFormat: ImgFormat, Width, Height, Depth, MipMapLevelCount))
				return false;
		}
		else
		{
			if(!ImageBarrier(Image: NewImage, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: Depth, Format: ImgFormat, OldLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, NewLayout: VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL))
				return false;
		}

		return true;
	}
2790
2791 VkImageView CreateTextureImageView(VkImage TexImage, VkFormat ImgFormat, VkImageViewType ViewType, size_t Depth, size_t MipMapLevelCount)
2792 {
2793 return CreateImageView(Image: TexImage, Format: ImgFormat, ViewType, Depth, MipMapLevelCount);
2794 }
2795
2796 [[nodiscard]] bool CreateTextureSamplersImpl(VkSampler &CreatedSampler, VkSamplerAddressMode AddrModeU, VkSamplerAddressMode AddrModeV, VkSamplerAddressMode AddrModeW)
2797 {
2798 VkSamplerCreateInfo SamplerInfo{};
2799 SamplerInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
2800 SamplerInfo.magFilter = VK_FILTER_LINEAR;
2801 SamplerInfo.minFilter = VK_FILTER_LINEAR;
2802 SamplerInfo.addressModeU = AddrModeU;
2803 SamplerInfo.addressModeV = AddrModeV;
2804 SamplerInfo.addressModeW = AddrModeW;
2805 SamplerInfo.anisotropyEnable = VK_FALSE;
2806 SamplerInfo.maxAnisotropy = m_MaxSamplerAnisotropy;
2807 SamplerInfo.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
2808 SamplerInfo.unnormalizedCoordinates = VK_FALSE;
2809 SamplerInfo.compareEnable = VK_FALSE;
2810 SamplerInfo.compareOp = VK_COMPARE_OP_ALWAYS;
2811 SamplerInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
2812 SamplerInfo.mipLodBias = (m_GlobalTextureLodBIAS / 1000.0f);
2813 SamplerInfo.minLod = -1000;
2814 SamplerInfo.maxLod = 1000;
2815
2816 if(vkCreateSampler(device: m_VKDevice, pCreateInfo: &SamplerInfo, pAllocator: nullptr, pSampler: &CreatedSampler) != VK_SUCCESS)
2817 {
2818 dbg_msg(sys: "vulkan", fmt: "failed to create texture sampler!");
2819 return false;
2820 }
2821 return true;
2822 }
2823
2824 [[nodiscard]] bool CreateTextureSamplers()
2825 {
2826 bool Ret = true;
2827 Ret &= CreateTextureSamplersImpl(CreatedSampler&: m_aSamplers[SUPPORTED_SAMPLER_TYPE_REPEAT], AddrModeU: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT, AddrModeV: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT, AddrModeW: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT);
2828 Ret &= CreateTextureSamplersImpl(CreatedSampler&: m_aSamplers[SUPPORTED_SAMPLER_TYPE_CLAMP_TO_EDGE], AddrModeU: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, AddrModeV: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, AddrModeW: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE);
2829 Ret &= CreateTextureSamplersImpl(CreatedSampler&: m_aSamplers[SUPPORTED_SAMPLER_TYPE_2D_TEXTURE_ARRAY], AddrModeU: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, AddrModeV: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, AddrModeW: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT);
2830 return Ret;
2831 }
2832
2833 void DestroyTextureSamplers()
2834 {
2835 vkDestroySampler(device: m_VKDevice, sampler: m_aSamplers[SUPPORTED_SAMPLER_TYPE_REPEAT], pAllocator: nullptr);
2836 vkDestroySampler(device: m_VKDevice, sampler: m_aSamplers[SUPPORTED_SAMPLER_TYPE_CLAMP_TO_EDGE], pAllocator: nullptr);
2837 vkDestroySampler(device: m_VKDevice, sampler: m_aSamplers[SUPPORTED_SAMPLER_TYPE_2D_TEXTURE_ARRAY], pAllocator: nullptr);
2838 }
2839
	// Returns the pre-created sampler for the given sampler type
	// (created in CreateTextureSamplers()).
	VkSampler GetTextureSampler(ESupportedSamplerTypes SamplerType)
	{
		return m_aSamplers[SamplerType];
	}
2844
2845 VkImageView CreateImageView(VkImage Image, VkFormat Format, VkImageViewType ViewType, size_t Depth, size_t MipMapLevelCount)
2846 {
2847 VkImageViewCreateInfo ViewCreateInfo{};
2848 ViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
2849 ViewCreateInfo.image = Image;
2850 ViewCreateInfo.viewType = ViewType;
2851 ViewCreateInfo.format = Format;
2852 ViewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
2853 ViewCreateInfo.subresourceRange.baseMipLevel = 0;
2854 ViewCreateInfo.subresourceRange.levelCount = MipMapLevelCount;
2855 ViewCreateInfo.subresourceRange.baseArrayLayer = 0;
2856 ViewCreateInfo.subresourceRange.layerCount = Depth;
2857
2858 VkImageView ImageView;
2859 if(vkCreateImageView(device: m_VKDevice, pCreateInfo: &ViewCreateInfo, pAllocator: nullptr, pView: &ImageView) != VK_SUCCESS)
2860 {
2861 return VK_NULL_HANDLE;
2862 }
2863
2864 return ImageView;
2865 }
2866
2867 [[nodiscard]] bool CreateImage(uint32_t Width, uint32_t Height, uint32_t Depth, size_t MipMapLevelCount, VkFormat Format, VkImageTiling Tiling, VkImage &Image, SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> &ImageMemory, VkImageUsageFlags ImageUsage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT)
2868 {
2869 VkImageCreateInfo ImageInfo{};
2870 ImageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
2871 ImageInfo.imageType = VK_IMAGE_TYPE_2D;
2872 ImageInfo.extent.width = Width;
2873 ImageInfo.extent.height = Height;
2874 ImageInfo.extent.depth = 1;
2875 ImageInfo.mipLevels = MipMapLevelCount;
2876 ImageInfo.arrayLayers = Depth;
2877 ImageInfo.format = Format;
2878 ImageInfo.tiling = Tiling;
2879 ImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2880 ImageInfo.usage = ImageUsage;
2881 ImageInfo.samples = (ImageUsage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) == 0 ? VK_SAMPLE_COUNT_1_BIT : GetSampleCount();
2882 ImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
2883
2884 if(vkCreateImage(device: m_VKDevice, pCreateInfo: &ImageInfo, pAllocator: nullptr, pImage: &Image) != VK_SUCCESS)
2885 {
2886 dbg_msg(sys: "vulkan", fmt: "failed to create image!");
2887 }
2888
2889 VkMemoryRequirements MemRequirements;
2890 vkGetImageMemoryRequirements(device: m_VKDevice, image: Image, pMemoryRequirements: &MemRequirements);
2891
2892 if(!GetImageMemory(RetBlock&: ImageMemory, RequiredSize: MemRequirements.size, RequiredAlignment: MemRequirements.alignment, RequiredMemoryTypeBits: MemRequirements.memoryTypeBits))
2893 return false;
2894
2895 vkBindImageMemory(device: m_VKDevice, image: Image, memory: ImageMemory.m_BufferMem.m_Mem, memoryOffset: ImageMemory.m_HeapData.m_OffsetToAlign);
2896
2897 return true;
2898 }
2899
	// Records an image layout transition into the memory command buffer for
	// the given mip level / array layer range. Source and destination
	// pipeline stages and access masks are derived from the supported
	// (OldLayout, NewLayout) pairs below.
	// NOTE(review): an unsupported pair is only logged - the barrier is still
	// recorded with the default TOP_OF_PIPE -> TRANSFER stages, which may not
	// synchronize correctly; consider whether this should fail instead.
	// Returns false only if no memory command buffer could be acquired.
	[[nodiscard]] bool ImageBarrier(const VkImage &Image, size_t MipMapBase, size_t MipMapCount, size_t LayerBase, size_t LayerCount, VkFormat Format, VkImageLayout OldLayout, VkImageLayout NewLayout)
	{
		VkCommandBuffer *pMemCommandBuffer;
		if(!GetMemoryCommandBuffer(pMemCommandBuffer))
			return false;
		auto &MemCommandBuffer = *pMemCommandBuffer;

		VkImageMemoryBarrier Barrier{};
		Barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		Barrier.oldLayout = OldLayout;
		Barrier.newLayout = NewLayout;
		// no queue family ownership transfer
		Barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		Barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
		Barrier.image = Image;
		Barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		Barrier.subresourceRange.baseMipLevel = MipMapBase;
		Barrier.subresourceRange.levelCount = MipMapCount;
		Barrier.subresourceRange.baseArrayLayer = LayerBase;
		Barrier.subresourceRange.layerCount = LayerCount;

		// defaults; overwritten by the matching transition below
		VkPipelineStageFlags SourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
		VkPipelineStageFlags DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;

		// fresh image -> transfer destination (initial upload)
		if(OldLayout == VK_IMAGE_LAYOUT_UNDEFINED && NewLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
		{
			Barrier.srcAccessMask = 0;
			Barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;

			SourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
			DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
		}
		// upload finished -> make readable by fragment shaders
		else if(OldLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && NewLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
		{
			Barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
			Barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;

			SourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
			DestinationStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
		}
		// shader-read -> transfer destination (texture update)
		else if(OldLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL && NewLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
		{
			Barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
			Barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;

			SourceStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
			DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
		}
		// transfer source -> presentable (e.g. after screenshot copy)
		else if(OldLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL && NewLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
		{
			Barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
			Barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;

			SourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
			DestinationStage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
		}
		// presentable -> transfer source (e.g. for a screenshot copy)
		else if(OldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR && NewLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
		{
			Barrier.srcAccessMask = VK_ACCESS_MEMORY_READ_BIT;
			Barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;

			SourceStage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
			DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
		}
		// fresh image -> general layout
		else if(OldLayout == VK_IMAGE_LAYOUT_UNDEFINED && NewLayout == VK_IMAGE_LAYOUT_GENERAL)
		{
			Barrier.srcAccessMask = 0;
			Barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;

			SourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
			DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
		}
		// general -> transfer destination
		else if(OldLayout == VK_IMAGE_LAYOUT_GENERAL && NewLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
		{
			Barrier.srcAccessMask = VK_ACCESS_MEMORY_READ_BIT;
			Barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;

			SourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
			DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
		}
		// transfer destination -> general
		else if(OldLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && NewLayout == VK_IMAGE_LAYOUT_GENERAL)
		{
			Barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
			Barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;

			SourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
			DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
		}
		else
		{
			dbg_msg(sys: "vulkan", fmt: "unsupported layout transition!");
		}

		vkCmdPipelineBarrier(
			commandBuffer: MemCommandBuffer,
			srcStageMask: SourceStage, dstStageMask: DestinationStage,
			dependencyFlags: 0,
			memoryBarrierCount: 0, pMemoryBarriers: nullptr,
			bufferMemoryBarrierCount: 0, pBufferMemoryBarriers: nullptr,
			imageMemoryBarrierCount: 1, pImageMemoryBarriers: &Barrier);

		return true;
	}
3002
	// Records a copy from a (staging) buffer into a region of an image on the
	// memory command buffer. The image must already be in
	// VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL. The copy only executes when the
	// memory command buffer is submitted.
	// @param BufferOffset Byte offset of the pixel data inside Buffer.
	// @param X, Y Destination offset inside the image.
	// @param Depth Number of array layers to copy (e.g. for 2D array textures).
	// @return false if no memory command buffer could be obtained.
	[[nodiscard]] bool CopyBufferToImage(VkBuffer Buffer, VkDeviceSize BufferOffset, VkImage Image, int32_t X, int32_t Y, uint32_t Width, uint32_t Height, size_t Depth)
	{
		VkCommandBuffer *pCommandBuffer;
		if(!GetMemoryCommandBuffer(pMemCommandBuffer&: pCommandBuffer))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		VkBufferImageCopy Region{};
		Region.bufferOffset = BufferOffset;
		// bufferRowLength/bufferImageHeight of 0 means the buffer data is
		// tightly packed according to the image extent.
		Region.bufferRowLength = 0;
		Region.bufferImageHeight = 0;
		Region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		Region.imageSubresource.mipLevel = 0;
		Region.imageSubresource.baseArrayLayer = 0;
		Region.imageSubresource.layerCount = Depth;
		Region.imageOffset = {.x: X, .y: Y, .z: 0};
		Region.imageExtent = {
			.width: Width,
			.height: Height,
			.depth: 1};

		vkCmdCopyBufferToImage(commandBuffer: CommandBuffer, srcBuffer: Buffer, dstImage: Image, dstImageLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, regionCount: 1, pRegions: &Region);

		return true;
	}
3028
3029 /************************
3030 * BUFFERS
3031 ************************/
3032
	// Creates (or recreates) the buffer object at BufferIndex and uploads
	// BufferDataSize bytes into it.
	// Two paths exist:
	//  - persistent buffers: data goes through a staging buffer into a cached
	//    device-local vertex buffer, guarded by memory barriers on both sides
	//    of the copy;
	//  - one-frame (streamed) buffers: data is written directly into the
	//    per-frame stream vertex buffer of the main thread.
	// @param pUploadData May be nullptr, in which case zero-initialized data
	//        of the requested size is uploaded instead.
	// @return false on any allocation/upload failure.
	[[nodiscard]] bool CreateBufferObject(size_t BufferIndex, const void *pUploadData, VkDeviceSize BufferDataSize, bool IsOneFrameBuffer)
	{
		std::vector<uint8_t> UploadDataTmp;
		if(pUploadData == nullptr)
		{
			// no data given: upload zeroes so the buffer contents are defined
			UploadDataTmp.resize(new_size: BufferDataSize);
			pUploadData = UploadDataTmp.data();
		}

		// grow the buffer object array geometrically until the index fits
		while(BufferIndex >= m_vBufferObjects.size())
		{
			m_vBufferObjects.resize(new_size: (m_vBufferObjects.size() * 2) + 1);
		}
		auto &BufferObject = m_vBufferObjects[BufferIndex];

		VkBuffer VertexBuffer;
		size_t BufferOffset = 0;
		if(!IsOneFrameBuffer)
		{
			SMemoryBlock<STAGING_BUFFER_CACHE_ID> StagingBuffer;
			if(!GetStagingBuffer(ResBlock&: StagingBuffer, pBufferData: pUploadData, RequiredSize: BufferDataSize))
				return false;

			SMemoryBlock<VERTEX_BUFFER_CACHE_ID> Mem;
			if(!GetVertexBuffer(ResBlock&: Mem, RequiredSize: BufferDataSize))
				return false;

			BufferObject.m_BufferObject.m_Mem = Mem;
			VertexBuffer = Mem.m_Buffer;
			BufferOffset = Mem.m_HeapData.m_OffsetToAlign;

			// barrier before and after the copy so vertex attribute reads and
			// the transfer write are correctly ordered
			if(!MemoryBarrier(Buffer: VertexBuffer, Offset: Mem.m_HeapData.m_OffsetToAlign, Size: BufferDataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: true))
				return false;
			if(!CopyBuffer(SrcBuffer: StagingBuffer.m_Buffer, DstBuffer: VertexBuffer, SrcOffset: StagingBuffer.m_HeapData.m_OffsetToAlign, DstOffset: Mem.m_HeapData.m_OffsetToAlign, CopySize: BufferDataSize))
				return false;
			if(!MemoryBarrier(Buffer: VertexBuffer, Offset: Mem.m_HeapData.m_OffsetToAlign, Size: BufferDataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: false))
				return false;
			UploadAndFreeStagingMemBlock(Block&: StagingBuffer);
		}
		else
		{
			SDeviceMemoryBlock VertexBufferMemory;
			if(!CreateStreamVertexBuffer(RenderThreadIndex: MAIN_THREAD_INDEX, NewBuffer&: VertexBuffer, NewBufferMem&: VertexBufferMemory, BufferOffset, pData: pUploadData, DataSize: BufferDataSize))
				return false;
		}
		BufferObject.m_IsStreamedBuffer = IsOneFrameBuffer;
		BufferObject.m_CurBuffer = VertexBuffer;
		BufferObject.m_CurBufferOffset = BufferOffset;

		return true;
	}
3084
3085 void DeleteBufferObject(size_t BufferIndex)
3086 {
3087 auto &BufferObject = m_vBufferObjects[BufferIndex];
3088 if(!BufferObject.m_IsStreamedBuffer)
3089 {
3090 FreeVertexMemBlock(Block&: BufferObject.m_BufferObject.m_Mem);
3091 }
3092 BufferObject = {};
3093 }
3094
	// Records a buffer-to-buffer copy on the memory command buffer.
	// The copy only executes when that command buffer is submitted; callers
	// are responsible for any required memory barriers around it.
	// @return false if no memory command buffer could be obtained.
	[[nodiscard]] bool CopyBuffer(VkBuffer SrcBuffer, VkBuffer DstBuffer, VkDeviceSize SrcOffset, VkDeviceSize DstOffset, VkDeviceSize CopySize)
	{
		VkCommandBuffer *pCommandBuffer;
		if(!GetMemoryCommandBuffer(pMemCommandBuffer&: pCommandBuffer))
			return false;
		auto &CommandBuffer = *pCommandBuffer;
		VkBufferCopy CopyRegion{};
		CopyRegion.srcOffset = SrcOffset;
		CopyRegion.dstOffset = DstOffset;
		CopyRegion.size = CopySize;
		vkCmdCopyBuffer(commandBuffer: CommandBuffer, srcBuffer: SrcBuffer, dstBuffer: DstBuffer, regionCount: 1, pRegions: &CopyRegion);

		return true;
	}
3109
3110 /************************
3111 * RENDER STATES
3112 ************************/
3113
3114 void GetStateMatrix(const CCommandBuffer::SState &State, std::array<float, (size_t)4 * 2> &Matrix)
3115 {
3116 Matrix = {
3117 // column 1
3118 2.f / (State.m_ScreenBR.x - State.m_ScreenTL.x),
3119 0,
3120 // column 2
3121 0,
3122 2.f / (State.m_ScreenBR.y - State.m_ScreenTL.y),
3123 // column 3
3124 0,
3125 0,
3126 // column 4
3127 -((State.m_ScreenTL.x + State.m_ScreenBR.x) / (State.m_ScreenBR.x - State.m_ScreenTL.x)),
3128 -((State.m_ScreenTL.y + State.m_ScreenBR.y) / (State.m_ScreenBR.y - State.m_ScreenTL.y)),
3129 };
3130 }
3131
3132 [[nodiscard]] bool GetIsTextured(const CCommandBuffer::SState &State)
3133 {
3134 return State.m_Texture != -1;
3135 }
3136
3137 size_t GetAddressModeIndex(const CCommandBuffer::SState &State)
3138 {
3139 switch(State.m_WrapMode)
3140 {
3141 case EWrapMode::REPEAT:
3142 return VULKAN_BACKEND_ADDRESS_MODE_REPEAT;
3143 case EWrapMode::CLAMP:
3144 return VULKAN_BACKEND_ADDRESS_MODE_CLAMP_EDGES;
3145 default:
3146 dbg_assert_failed("Invalid wrap mode: %d", (int)State.m_WrapMode);
3147 };
3148 }
3149
3150 size_t GetBlendModeIndex(const CCommandBuffer::SState &State)
3151 {
3152 switch(State.m_BlendMode)
3153 {
3154 case EBlendMode::NONE:
3155 return VULKAN_BACKEND_BLEND_MODE_NONE;
3156 case EBlendMode::ALPHA:
3157 return VULKAN_BACKEND_BLEND_MODE_ALPHA;
3158 case EBlendMode::ADDITIVE:
3159 return VULKAN_BACKEND_BLEND_MODE_ADDITATIVE;
3160 default:
3161 dbg_assert_failed("Invalid blend mode: %d", (int)State.m_BlendMode);
3162 };
3163 }
3164
3165 size_t GetDynamicModeIndexFromState(const CCommandBuffer::SState &State) const
3166 {
3167 return (State.m_ClipEnable || m_HasDynamicViewport || m_VKSwapImgAndViewportExtent.m_HasForcedViewport) ? VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT : VULKAN_BACKEND_CLIP_MODE_NONE;
3168 }
3169
3170 size_t GetDynamicModeIndexFromExecBuffer(const SRenderCommandExecuteBuffer &ExecBuffer)
3171 {
3172 return (ExecBuffer.m_HasDynamicState) ? VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT : VULKAN_BACKEND_CLIP_MODE_NONE;
3173 }
3174
3175 VkPipeline &GetPipeline(SPipelineContainer &Container, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3176 {
3177 return Container.m_aaaPipelines[BlendModeIndex][DynamicIndex][(size_t)IsTextured];
3178 }
3179
3180 VkPipelineLayout &GetPipeLayout(SPipelineContainer &Container, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3181 {
3182 return Container.m_aaaPipelineLayouts[BlendModeIndex][DynamicIndex][(size_t)IsTextured];
3183 }
3184
3185 VkPipelineLayout &GetStandardPipeLayout(bool IsLineGeometry, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3186 {
3187 if(IsLineGeometry)
3188 return GetPipeLayout(Container&: m_StandardLinePipeline, IsTextured, BlendModeIndex, DynamicIndex);
3189 else
3190 return GetPipeLayout(Container&: m_StandardPipeline, IsTextured, BlendModeIndex, DynamicIndex);
3191 }
3192
3193 VkPipeline &GetStandardPipe(bool IsLineGeometry, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3194 {
3195 if(IsLineGeometry)
3196 return GetPipeline(Container&: m_StandardLinePipeline, IsTextured, BlendModeIndex, DynamicIndex);
3197 else
3198 return GetPipeline(Container&: m_StandardPipeline, IsTextured, BlendModeIndex, DynamicIndex);
3199 }
3200
3201 VkPipelineLayout &GetTileLayerPipeLayout(bool IsBorder, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3202 {
3203 if(!IsBorder)
3204 return GetPipeLayout(Container&: m_TilePipeline, IsTextured, BlendModeIndex, DynamicIndex);
3205 else
3206 return GetPipeLayout(Container&: m_TileBorderPipeline, IsTextured, BlendModeIndex, DynamicIndex);
3207 }
3208
3209 VkPipeline &GetTileLayerPipe(bool IsBorder, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3210 {
3211 if(!IsBorder)
3212 return GetPipeline(Container&: m_TilePipeline, IsTextured, BlendModeIndex, DynamicIndex);
3213 else
3214 return GetPipeline(Container&: m_TileBorderPipeline, IsTextured, BlendModeIndex, DynamicIndex);
3215 }
3216
3217 void GetStateIndices(const SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SState &State, bool &IsTextured, size_t &BlendModeIndex, size_t &DynamicIndex, size_t &AddressModeIndex)
3218 {
3219 IsTextured = GetIsTextured(State);
3220 AddressModeIndex = GetAddressModeIndex(State);
3221 BlendModeIndex = GetBlendModeIndex(State);
3222 DynamicIndex = GetDynamicModeIndexFromExecBuffer(ExecBuffer);
3223 }
3224
	// Precomputes the dynamic viewport and scissor for a draw command and
	// stores them in the execute buffer, so recording threads only need to
	// emit vkCmdSetViewport/vkCmdSetScissor later.
	// Priority for the viewport: dynamic viewport > forced viewport > full
	// swapchain image. Scissor rectangles are converted from the OpenGL
	// bottom-left origin to Vulkan's top-left origin.
	void ExecBufferFillDynamicStates(const CCommandBuffer::SState &State, SRenderCommandExecuteBuffer &ExecBuffer)
	{
		// Workaround for a bug in molten-vk: https://github.com/KhronosGroup/MoltenVK/issues/2304
#ifdef CONF_PLATFORM_MACOS
		auto HasDynamicState = true;
#else
		size_t DynamicStateIndex = GetDynamicModeIndexFromState(State);
		auto HasDynamicState = DynamicStateIndex == VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT;
#endif

		if(HasDynamicState)
		{
			VkViewport Viewport;
			if(m_HasDynamicViewport)
			{
				Viewport.x = (float)m_DynamicViewportOffset.x;
				Viewport.y = (float)m_DynamicViewportOffset.y;
				Viewport.width = (float)m_DynamicViewportSize.width;
				Viewport.height = (float)m_DynamicViewportSize.height;
				Viewport.minDepth = 0.0f;
				Viewport.maxDepth = 1.0f;
			}
			// else check if there is a forced viewport
			else if(m_VKSwapImgAndViewportExtent.m_HasForcedViewport)
			{
				Viewport.x = 0.0f;
				Viewport.y = 0.0f;
				Viewport.width = (float)m_VKSwapImgAndViewportExtent.m_ForcedViewport.width;
				Viewport.height = (float)m_VKSwapImgAndViewportExtent.m_ForcedViewport.height;
				Viewport.minDepth = 0.0f;
				Viewport.maxDepth = 1.0f;
			}
			else
			{
				// fall back to the full swapchain image viewport
				Viewport.x = 0.0f;
				Viewport.y = 0.0f;
				Viewport.width = (float)m_VKSwapImgAndViewportExtent.m_SwapImageViewport.width;
				Viewport.height = (float)m_VKSwapImgAndViewportExtent.m_SwapImageViewport.height;
				Viewport.minDepth = 0.0f;
				Viewport.maxDepth = 1.0f;
			}

			VkRect2D Scissor;
			// convert from OGL to vulkan clip

			// the scissor always assumes the presented viewport, because the front-end keeps the calculation
			// for the forced viewport in sync
			auto ScissorViewport = m_VKSwapImgAndViewportExtent.GetPresentedImageViewport();
			if(State.m_ClipEnable)
			{
				// flip the y origin: OGL clip rects are measured from the bottom
				int32_t ScissorY = (int32_t)ScissorViewport.height - ((int32_t)State.m_ClipY + (int32_t)State.m_ClipH);
				uint32_t ScissorH = (int32_t)State.m_ClipH;
				Scissor.offset = {.x: (int32_t)State.m_ClipX, .y: ScissorY};
				Scissor.extent = {.width: (uint32_t)State.m_ClipW, .height: ScissorH};
			}
			else
			{
				// no clipping requested: scissor covers the whole presented viewport
				Scissor.offset = {.x: 0, .y: 0};
				Scissor.extent = {.width: ScissorViewport.width, .height: ScissorViewport.height};
			}

			// if there is a dynamic viewport make sure the scissor data is scaled down to that
			if(m_HasDynamicViewport)
			{
				Scissor.offset.x = (int32_t)(((float)Scissor.offset.x / (float)ScissorViewport.width) * (float)m_DynamicViewportSize.width) + m_DynamicViewportOffset.x;
				Scissor.offset.y = (int32_t)(((float)Scissor.offset.y / (float)ScissorViewport.height) * (float)m_DynamicViewportSize.height) + m_DynamicViewportOffset.y;
				Scissor.extent.width = (uint32_t)(((float)Scissor.extent.width / (float)ScissorViewport.width) * (float)m_DynamicViewportSize.width);
				Scissor.extent.height = (uint32_t)(((float)Scissor.extent.height / (float)ScissorViewport.height) * (float)m_DynamicViewportSize.height);
			}

			// Vulkan requires non-negative viewport/scissor origins
			Viewport.x = std::clamp(val: Viewport.x, lo: 0.0f, hi: std::numeric_limits<decltype(Viewport.x)>::max());
			Viewport.y = std::clamp(val: Viewport.y, lo: 0.0f, hi: std::numeric_limits<decltype(Viewport.y)>::max());

			Scissor.offset.x = std::clamp(val: Scissor.offset.x, lo: 0, hi: std::numeric_limits<decltype(Scissor.offset.x)>::max());
			Scissor.offset.y = std::clamp(val: Scissor.offset.y, lo: 0, hi: std::numeric_limits<decltype(Scissor.offset.y)>::max());

			ExecBuffer.m_HasDynamicState = true;
			ExecBuffer.m_Viewport = Viewport;
			ExecBuffer.m_Scissor = Scissor;
		}
		else
		{
			ExecBuffer.m_HasDynamicState = false;
		}
	}
3310
	// Binds the given graphics pipeline on the thread's command buffer,
	// skipping the bind if that pipeline is already current on this render
	// thread (per-thread last-pipeline cache). Afterwards emits the dynamic
	// viewport/scissor commands if this draw uses dynamic state.
	void BindPipeline(size_t RenderThreadIndex, VkCommandBuffer &CommandBuffer, SRenderCommandExecuteBuffer &ExecBuffer, VkPipeline &BindingPipe, const CCommandBuffer::SState &State)
	{
		if(m_vLastPipeline[RenderThreadIndex] != BindingPipe)
		{
			vkCmdBindPipeline(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline: BindingPipe);
			m_vLastPipeline[RenderThreadIndex] = BindingPipe;
		}

		size_t DynamicStateIndex = GetDynamicModeIndexFromExecBuffer(ExecBuffer);
		if(DynamicStateIndex == VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT)
		{
			// viewport/scissor were precomputed in ExecBufferFillDynamicStates
			vkCmdSetViewport(commandBuffer: CommandBuffer, firstViewport: 0, viewportCount: 1, pViewports: &ExecBuffer.m_Viewport);
			vkCmdSetScissor(commandBuffer: CommandBuffer, firstScissor: 0, scissorCount: 1, pScissors: &ExecBuffer.m_Scissor);
		}
	}
3326
3327 /**************************
3328 * RENDERING IMPLEMENTATION
3329 ***************************/
3330
	// Fills the execute buffer for a tile-layer draw: resolves the vertex
	// buffer behind the buffer container, the 3D-texture descriptor set (if
	// textured), the shared render index buffer, the expected number of draw
	// calls, and the precomputed dynamic states.
	void RenderTileLayer_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, size_t DrawCalls, const CCommandBuffer::SState &State, size_t BufferContainerIndex)
	{
		size_t BufferObjectIndex = (size_t)m_vBufferContainers[BufferContainerIndex].m_BufferObjectIndex;
		const auto &BufferObject = m_vBufferObjects[BufferObjectIndex];

		ExecBuffer.m_Buffer = BufferObject.m_CurBuffer;
		ExecBuffer.m_BufferOff = BufferObject.m_CurBufferOffset;

		bool IsTextured = GetIsTextured(State);
		if(IsTextured)
		{
			// tile layers sample from the 2D-array (3D) texture descriptor
			ExecBuffer.m_aDescriptors[0] = m_vTextures[State.m_Texture].m_VKStandard3DTexturedDescrSet;
		}

		ExecBuffer.m_IndexBuffer = m_RenderIndexBuffer;

		ExecBuffer.m_EstimatedRenderCallCount = DrawCalls;

		ExecBufferFillDynamicStates(State, ExecBuffer);
	}
3351
	// Records the draw commands for a tile layer (or its border variant).
	// Selects the pipeline from the draw state, binds vertex/index buffers and
	// the texture descriptor, pushes the projection matrix (plus scale/offset
	// for borders) and the vertex color, then issues one indexed draw per
	// index-offset entry.
	// @param pIndicesOffsets Byte offsets into the index buffer, one per draw.
	// @param pDrawCount Index counts, one per draw.
	// @return false if no graphics command buffer could be obtained.
	[[nodiscard]] bool RenderTileLayer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SState &State, bool IsBorder, const GL_SColorf &Color, const vec2 &Scale, const vec2 &Off, size_t IndicesDrawNum, char *const *pIndicesOffsets, const unsigned int *pDrawCount)
	{
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(State, Matrix&: m);

		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		auto &PipeLayout = GetTileLayerPipeLayout(IsBorder, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = GetTileLayerPipe(IsBorder, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State);

		std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
		vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());

		if(IsTextured)
		{
			vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
		}

		SUniformTileGPosBorder VertexPushConstants;
		size_t VertexPushConstantSize = sizeof(SUniformTileGPos);
		SUniformTileGVertColor FragPushConstants;
		size_t FragPushConstantSize = sizeof(SUniformTileGVertColor);

		mem_copy(dest: VertexPushConstants.m_aPos, source: m.data(), size: m.size() * sizeof(float));
		FragPushConstants = Color;

		if(IsBorder)
		{
			// border draws additionally push the tile scale and offset,
			// so the vertex push-constant range grows to the border struct
			VertexPushConstants.m_Scale = Scale;
			VertexPushConstants.m_Offset = Off;
			VertexPushConstantSize = sizeof(SUniformTileGPosBorder);
		}

		// the fragment range starts after the (maximal) vertex range plus alignment padding
		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: VertexPushConstantSize, pValues: &VertexPushConstants);
		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, offset: sizeof(SUniformTileGPosBorder) + sizeof(SUniformTileGVertColorAlign), size: FragPushConstantSize, pValues: &FragPushConstants);

		size_t DrawCount = IndicesDrawNum;
		vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: 0, indexType: VK_INDEX_TYPE_UINT32);
		for(size_t i = 0; i < DrawCount; ++i)
		{
			// offsets arrive as byte offsets (legacy GL convention); convert to index units
			VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)pIndicesOffsets[i] / sizeof(uint32_t));

			vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(pDrawCount[i]), instanceCount: 1, firstIndex: IndexOffset, vertexOffset: 0, firstInstance: 0);
		}

		return true;
	}
3410
	// Records a "standard" draw (lines, triangles or quads) from an immediate
	// vertex array. The vertices are streamed into a per-frame vertex buffer,
	// quads are drawn indexed (6 indices per quad) via the shared render index
	// buffer, lines/triangles are drawn non-indexed.
	// @tparam TName Vertex struct type of pVertices.
	// @tparam Is3DTextured Selects the 2D-array-texture pipeline family.
	// @return false if a command buffer or stream buffer could not be obtained.
	template<typename TName, bool Is3DTextured>
	[[nodiscard]] bool RenderStandard(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SState &State, EPrimitiveType PrimType, const TName *pVertices, int PrimitiveCount)
	{
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(State, Matrix&: m);

		bool IsLineGeometry = PrimType == EPrimitiveType::LINES;

		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		auto &PipeLayout = Is3DTextured ? GetPipeLayout(Container&: m_Standard3DPipeline, IsTextured, BlendModeIndex, DynamicIndex) : GetStandardPipeLayout(IsLineGeometry, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = Is3DTextured ? GetPipeline(Container&: m_Standard3DPipeline, IsTextured, BlendModeIndex, DynamicIndex) : GetStandardPipe(IsLineGeometry, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State);

		// lines by default; quads are indexed, triangles use 3 vertices per primitive
		size_t VertPerPrim = 2;
		bool IsIndexed = false;
		if(PrimType == EPrimitiveType::QUADS)
		{
			VertPerPrim = 4;
			IsIndexed = true;
		}
		else if(PrimType == EPrimitiveType::TRIANGLES)
		{
			VertPerPrim = 3;
		}

		// stream the immediate vertex data into the per-frame vertex buffer
		VkBuffer VKBuffer;
		SDeviceMemoryBlock VKBufferMem;
		size_t BufferOff = 0;
		if(!CreateStreamVertexBuffer(RenderThreadIndex: ExecBuffer.m_ThreadIndex, NewBuffer&: VKBuffer, NewBufferMem&: VKBufferMem, BufferOffset&: BufferOff, pData: pVertices, DataSize: VertPerPrim * sizeof(TName) * PrimitiveCount))
			return false;

		std::array<VkBuffer, 1> aVertexBuffers = {VKBuffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)BufferOff};
		vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());

		if(IsIndexed)
			vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: 0, indexType: VK_INDEX_TYPE_UINT32);

		if(IsTextured)
		{
			vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
		}

		// only the projection matrix is pushed for standard draws
		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(SUniformGPos), pValues: m.data());

		if(IsIndexed)
			vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(PrimitiveCount * 6), instanceCount: 1, firstIndex: 0, vertexOffset: 0, firstInstance: 0);
		else
			vkCmdDraw(commandBuffer: CommandBuffer, vertexCount: static_cast<uint32_t>(PrimitiveCount * VertPerPrim), instanceCount: 1, firstVertex: 0, firstInstance: 0);

		return true;
	}
3473
3474public:
3475 CCommandProcessorFragment_Vulkan()
3476 {
3477 m_vTextures.reserve(n: CCommandBuffer::MAX_TEXTURES);
3478 }
3479
3480 /************************
3481 * VULKAN SETUP CODE
3482 ************************/
3483
	// Queries the instance extensions SDL needs for the given window using
	// SDL's two-phase protocol (first call gets the count, second fills the
	// names) and returns them as owned strings in vVKExtensions.
	// Sets an init error and returns false if either SDL call fails.
	[[nodiscard]] bool GetVulkanExtensions(SDL_Window *pWindow, std::vector<std::string> &vVKExtensions)
	{
		unsigned int ExtCount = 0;
		if(!SDL_Vulkan_GetInstanceExtensions(window: pWindow, pCount: &ExtCount, pNames: nullptr))
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get instance extensions from SDL.");
			return false;
		}

		std::vector<const char *> vExtensionList(ExtCount);
		if(!SDL_Vulkan_GetInstanceExtensions(window: pWindow, pCount: &ExtCount, pNames: vExtensionList.data()))
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get instance extensions from SDL.");
			return false;
		}

		// copy into owned strings; the char pointers belong to SDL
		vVKExtensions.reserve(n: ExtCount);
		for(uint32_t i = 0; i < ExtCount; i++)
		{
			vVKExtensions.emplace_back(args&: vExtensionList[i]);
		}

		return true;
	}
3508
3509 std::set<std::string> OurVKLayers()
3510 {
3511 std::set<std::string> OurLayers;
3512
3513 if(g_Config.m_DbgGfx == DEBUG_GFX_MODE_MINIMUM || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL)
3514 {
3515 OurLayers.emplace(args: "VK_LAYER_KHRONOS_validation");
3516 // deprecated, but VK_LAYER_KHRONOS_validation was released after vulkan 1.1
3517 OurLayers.emplace(args: "VK_LAYER_LUNARG_standard_validation");
3518 }
3519
3520 return OurLayers;
3521 }
3522
3523 std::set<std::string> OurDeviceExtensions()
3524 {
3525 std::set<std::string> OurExt;
3526 OurExt.emplace(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
3527 return OurExt;
3528 }
3529
3530 std::vector<VkImageUsageFlags> OurImageUsages()
3531 {
3532 std::vector<VkImageUsageFlags> vImgUsages;
3533
3534 vImgUsages.emplace_back(args: VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT);
3535 vImgUsages.emplace_back(args: VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
3536
3537 return vImgUsages;
3538 }
3539
	// Enumerates the instance layers available on this system (two-phase
	// Vulkan enumeration: count, then properties) and fills vVKLayers with
	// the intersection of those and the layers we want (OurVKLayers).
	// Sets an init error and returns false if enumeration fails.
	[[nodiscard]] bool GetVulkanLayers(std::vector<std::string> &vVKLayers)
	{
		uint32_t LayerCount = 0;
		VkResult Res = vkEnumerateInstanceLayerProperties(pPropertyCount: &LayerCount, NULL);
		if(Res != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get vulkan layers.");
			return false;
		}

		std::vector<VkLayerProperties> vVKInstanceLayers(LayerCount);
		Res = vkEnumerateInstanceLayerProperties(pPropertyCount: &LayerCount, pProperties: vVKInstanceLayers.data());
		if(Res != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get vulkan layers.");
			return false;
		}

		// keep only the layers that are both available and requested
		std::set<std::string> ReqLayerNames = OurVKLayers();
		vVKLayers.clear();
		for(const auto &LayerName : vVKInstanceLayers)
		{
			if(ReqLayerNames.contains(x: std::string(LayerName.layerName)))
			{
				vVKLayers.emplace_back(args: LayerName.layerName);
			}
		}

		return true;
	}
3570
3571 bool IsGpuDenied(uint32_t Vendor, uint32_t DriverVersion, uint32_t ApiMajor, uint32_t ApiMinor, uint32_t ApiPatch)
3572 {
3573#ifdef CONF_FAMILY_WINDOWS
3574 // AMD
3575 if(0x1002 == Vendor)
3576 {
3577 auto Major = (DriverVersion >> 22);
3578 auto Minor = (DriverVersion >> 12) & 0x3ff;
3579 auto Patch = DriverVersion & 0xfff;
3580
3581 return Major == 2 && Minor == 0 && Patch > 137 && Patch < 220 && ((ApiMajor <= 1 && ApiMinor < 3) || (ApiMajor <= 1 && ApiMinor == 3 && ApiPatch < 206));
3582 }
3583#endif
3584 return false;
3585 }
3586
	// Creates the VkInstance with the given layers and extensions.
	// When TryDebugExtensions is set (and graphics debugging is configured),
	// the debug-utils extension and synchronization/best-practices validation
	// features are additionally requested; if the driver then reports a
	// missing layer or extension, the function retries once without any
	// debug additions.
	// @return false only on a critical instance-creation error.
	[[nodiscard]] bool CreateVulkanInstance(const std::vector<std::string> &vVKLayers, const std::vector<std::string> &vVKExtensions, bool TryDebugExtensions)
	{
		// Vulkan wants raw C strings; the std::string storage outlives the call.
		std::vector<const char *> vLayersCStr;
		vLayersCStr.reserve(n: vVKLayers.size());
		for(const auto &Layer : vVKLayers)
			vLayersCStr.emplace_back(args: Layer.c_str());

		std::vector<const char *> vExtCStr;
		vExtCStr.reserve(n: vVKExtensions.size() + 1);
		for(const auto &Ext : vVKExtensions)
			vExtCStr.emplace_back(args: Ext.c_str());

#ifdef VK_EXT_debug_utils
		if(TryDebugExtensions && (g_Config.m_DbgGfx == DEBUG_GFX_MODE_MINIMUM || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL))
		{
			// debug message support
			vExtCStr.emplace_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
		}
#endif

		VkApplicationInfo VKAppInfo = {};
		VKAppInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
		VKAppInfo.pNext = NULL;
		VKAppInfo.pApplicationName = "DDNet";
		VKAppInfo.applicationVersion = 1;
		VKAppInfo.pEngineName = "DDNet-Vulkan";
		VKAppInfo.engineVersion = 1;
		VKAppInfo.apiVersion = VK_API_VERSION_1_1;

		// optional pNext chain entry for extra validation features
		void *pExt = nullptr;
#if defined(VK_EXT_validation_features) && VK_EXT_VALIDATION_FEATURES_SPEC_VERSION >= 5
		VkValidationFeaturesEXT Features = {};
		std::array<VkValidationFeatureEnableEXT, 2> aEnables = {VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT, VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT};
		if(TryDebugExtensions && (g_Config.m_DbgGfx == DEBUG_GFX_MODE_AFFECTS_PERFORMANCE || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL))
		{
			Features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
			Features.enabledValidationFeatureCount = aEnables.size();
			Features.pEnabledValidationFeatures = aEnables.data();

			pExt = &Features;
		}
#endif

		VkInstanceCreateInfo VKInstanceInfo = {};
		VKInstanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
		VKInstanceInfo.pNext = pExt;
		VKInstanceInfo.flags = 0;
		VKInstanceInfo.pApplicationInfo = &VKAppInfo;
		VKInstanceInfo.enabledExtensionCount = static_cast<uint32_t>(vExtCStr.size());
		VKInstanceInfo.ppEnabledExtensionNames = vExtCStr.data();
		VKInstanceInfo.enabledLayerCount = static_cast<uint32_t>(vLayersCStr.size());
		VKInstanceInfo.ppEnabledLayerNames = vLayersCStr.data();

		bool TryAgain = false;

		VkResult Res = vkCreateInstance(pCreateInfo: &VKInstanceInfo, NULL, pInstance: &m_VKInstance);
		const char *pCritErrorMsg = CheckVulkanCriticalError(CallResult: Res);
		if(pCritErrorMsg != nullptr)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating instance failed.", pErrStrExtra: pCritErrorMsg);
			return false;
		}
		else if(Res == VK_ERROR_LAYER_NOT_PRESENT || Res == VK_ERROR_EXTENSION_NOT_PRESENT)
			TryAgain = true;

		// retry once without debug layers/extensions if they were the problem
		if(TryAgain && TryDebugExtensions)
			return CreateVulkanInstance(vVKLayers, vVKExtensions, TryDebugExtensions: false);

		return true;
	}
3657
3658 STWGraphicGpu::ETWGraphicsGpuType VKGPUTypeToGraphicsGpuType(VkPhysicalDeviceType VKGPUType)
3659 {
3660 if(VKGPUType == VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU)
3661 return STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_DISCRETE;
3662 else if(VKGPUType == VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU)
3663 return STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_INTEGRATED;
3664 else if(VKGPUType == VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU)
3665 return STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_VIRTUAL;
3666 else if(VKGPUType == VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_CPU)
3667 return STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_CPU;
3668
3669 return STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_CPU;
3670 }
3671
	// from: https://github.com/SaschaWillems/vulkan.gpuinfo.org/blob/5c3986798afc39d736b825bf8a5fbf92b8d9ed49/includes/functions.php#L364
	// Formats a driver version number into a human-readable string in aBuff.
	// The bit layout of driverVersion is vendor specific: NVIDIA packs four
	// fields, Intel on Windows packs two; everyone else is assumed to follow
	// the standard Vulkan major.minor.patch encoding.
	// @return aBuff, for convenient inline use.
	const char *GetDriverVersion(char (&aBuff)[256], uint32_t DriverVersion, uint32_t VendorId)
	{
		// NVIDIA
		if(VendorId == 4318)
		{
			str_format(buffer: aBuff, buffer_size: std::size(aBuff), format: "%d.%d.%d.%d",
				(DriverVersion >> 22) & 0x3ff,
				(DriverVersion >> 14) & 0x0ff,
				(DriverVersion >> 6) & 0x0ff,
				(DriverVersion) & 0x003f);
		}
#ifdef CONF_FAMILY_WINDOWS
		// windows only
		else if(VendorId == 0x8086)
		{
			str_format(aBuff, std::size(aBuff),
				"%d.%d",
				(DriverVersion >> 14),
				(DriverVersion) & 0x3fff);
		}
#endif
		else
		{
			// Use Vulkan version conventions if vendor mapping is not available
			str_format(buffer: aBuff, buffer_size: std::size(aBuff),
				format: "%d.%d.%d",
				(DriverVersion >> 22),
				(DriverVersion >> 12) & 0x3ff,
				DriverVersion & 0xfff);
		}

		return aBuff;
	}
3706
3707 [[nodiscard]] bool SelectGpu(char *pRendererName, char *pVendorName, char *pVersionName)
3708 {
3709 uint32_t DevicesCount = 0;
3710 auto Res = vkEnumeratePhysicalDevices(instance: m_VKInstance, pPhysicalDeviceCount: &DevicesCount, pPhysicalDevices: nullptr);
3711 if(Res != VK_SUCCESS)
3712 {
3713 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: CheckVulkanCriticalError(CallResult: Res));
3714 return false;
3715 }
3716 if(DevicesCount == 0)
3717 {
3718 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "No vulkan compatible devices found.");
3719 return false;
3720 }
3721
3722 std::vector<VkPhysicalDevice> vDeviceList(DevicesCount);
3723 Res = vkEnumeratePhysicalDevices(instance: m_VKInstance, pPhysicalDeviceCount: &DevicesCount, pPhysicalDevices: vDeviceList.data());
3724 if(Res != VK_SUCCESS && Res != VK_INCOMPLETE)
3725 {
3726 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: CheckVulkanCriticalError(CallResult: Res));
3727 return false;
3728 }
3729 if(DevicesCount == 0)
3730 {
3731 SetWarning(WarningType: EGfxWarningType::GFX_WARNING_TYPE_INIT_FAILED_MISSING_INTEGRATED_GPU_DRIVER, pWarning: "No vulkan compatible devices found.");
3732 return false;
3733 }
3734 // make sure to use the correct amount of devices available
3735 // the amount of physical devices can be smaller than the amount of devices reported
3736 // see vkEnumeratePhysicalDevices for details
3737 vDeviceList.resize(new_size: DevicesCount);
3738
3739 size_t Index = 0;
3740 std::vector<VkPhysicalDeviceProperties> vDevicePropList(vDeviceList.size());
3741 m_pGpuList->m_vGpus.reserve(n: vDeviceList.size());
3742
3743 size_t FoundDeviceIndex = 0;
3744
3745 STWGraphicGpu::ETWGraphicsGpuType AutoGpuType = STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_INVALID;
3746
3747 bool IsAutoGpu = str_comp(a: g_Config.m_GfxGpuName, b: "auto") == 0;
3748
3749 bool UserSelectedGpuChosen = false;
3750 for(auto &CurDevice : vDeviceList)
3751 {
3752 vkGetPhysicalDeviceProperties(physicalDevice: CurDevice, pProperties: &(vDevicePropList[Index]));
3753
3754 auto &DeviceProp = vDevicePropList[Index];
3755
3756 STWGraphicGpu::ETWGraphicsGpuType GPUType = VKGPUTypeToGraphicsGpuType(VKGPUType: DeviceProp.deviceType);
3757
3758 int DevApiMajor = (int)VK_API_VERSION_MAJOR(DeviceProp.apiVersion);
3759 int DevApiMinor = (int)VK_API_VERSION_MINOR(DeviceProp.apiVersion);
3760 int DevApiPatch = (int)VK_API_VERSION_PATCH(DeviceProp.apiVersion);
3761
3762 auto IsDenied = CCommandProcessorFragment_Vulkan::IsGpuDenied(Vendor: DeviceProp.vendorID, DriverVersion: DeviceProp.driverVersion, ApiMajor: DevApiMajor, ApiMinor: DevApiMinor, ApiPatch: DevApiPatch);
3763 if((DevApiMajor > gs_BackendVulkanMajor || (DevApiMajor == gs_BackendVulkanMajor && DevApiMinor >= gs_BackendVulkanMinor)) && !IsDenied)
3764 {
3765 STWGraphicGpu::STWGraphicGpuItem NewGpu;
3766 str_copy(dst&: NewGpu.m_aName, src: DeviceProp.deviceName);
3767 NewGpu.m_GpuType = GPUType;
3768 m_pGpuList->m_vGpus.push_back(x: NewGpu);
3769
3770 // We always decide what the 'auto' GPU would be, even if user is forcing a GPU by name in config
3771 // Reminder: A worse GPU enumeration has a higher value than a better GPU enumeration, thus the '>'
3772 if(AutoGpuType > STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_INTEGRATED)
3773 {
3774 str_copy(dst&: m_pGpuList->m_AutoGpu.m_aName, src: DeviceProp.deviceName);
3775 m_pGpuList->m_AutoGpu.m_GpuType = GPUType;
3776
3777 AutoGpuType = GPUType;
3778
3779 if(IsAutoGpu)
3780 FoundDeviceIndex = Index;
3781 }
3782 // We only select the first GPU that matches, because it comes first in the enumeration array, it's preferred by the system
3783 // Reminder: We can't break the cycle here if the name matches because we need to choose the best GPU for 'auto' mode
3784 if(!IsAutoGpu && !UserSelectedGpuChosen && str_comp(a: DeviceProp.deviceName, b: g_Config.m_GfxGpuName) == 0)
3785 {
3786 FoundDeviceIndex = Index;
3787 UserSelectedGpuChosen = true;
3788 }
3789 }
3790 Index++;
3791 }
3792
3793 if(m_pGpuList->m_vGpus.empty())
3794 {
3795 SetWarning(WarningType: EGfxWarningType::GFX_WARNING_TYPE_INIT_FAILED_NO_DEVICE_WITH_REQUIRED_VERSION, pWarning: "No devices with required vulkan version found.");
3796 return false;
3797 }
3798
3799 {
3800 auto &DeviceProp = vDevicePropList[FoundDeviceIndex];
3801
3802 int DevApiMajor = (int)VK_API_VERSION_MAJOR(DeviceProp.apiVersion);
3803 int DevApiMinor = (int)VK_API_VERSION_MINOR(DeviceProp.apiVersion);
3804 int DevApiPatch = (int)VK_API_VERSION_PATCH(DeviceProp.apiVersion);
3805
3806 str_copy(dst: pRendererName, src: DeviceProp.deviceName, dst_size: gs_GpuInfoStringSize);
3807 const char *pVendorNameStr = NULL;
3808 switch(DeviceProp.vendorID)
3809 {
3810 case 0x1002:
3811 pVendorNameStr = "AMD";
3812 break;
3813 case 0x1010:
3814 pVendorNameStr = "ImgTec";
3815 break;
3816 case 0x106B:
3817 pVendorNameStr = "Apple";
3818 break;
3819 case 0x10DE:
3820 pVendorNameStr = "NVIDIA";
3821 break;
3822 case 0x13B5:
3823 pVendorNameStr = "ARM";
3824 break;
3825 case 0x5143:
3826 pVendorNameStr = "Qualcomm";
3827 break;
3828 case 0x8086:
3829 pVendorNameStr = "INTEL";
3830 break;
3831 case 0x10005:
3832 pVendorNameStr = "Mesa";
3833 break;
3834 default:
3835 dbg_msg(sys: "vulkan", fmt: "unknown gpu vendor %u", DeviceProp.vendorID);
3836 pVendorNameStr = "unknown";
3837 break;
3838 }
3839
3840 char aBuff[256];
3841 str_copy(dst: pVendorName, src: pVendorNameStr, dst_size: gs_GpuInfoStringSize);
3842 str_format(buffer: pVersionName, buffer_size: gs_GpuInfoStringSize, format: "Vulkan %d.%d.%d (driver: %s)", DevApiMajor, DevApiMinor, DevApiPatch, GetDriverVersion(aBuff, DriverVersion: DeviceProp.driverVersion, VendorId: DeviceProp.vendorID));
3843
3844 // get important device limits
3845 m_NonCoherentMemAlignment = DeviceProp.limits.nonCoherentAtomSize;
3846 m_OptimalImageCopyMemAlignment = DeviceProp.limits.optimalBufferCopyOffsetAlignment;
3847 m_MaxTextureSize = DeviceProp.limits.maxImageDimension2D;
3848 m_MaxSamplerAnisotropy = DeviceProp.limits.maxSamplerAnisotropy;
3849
3850 m_MinUniformAlign = DeviceProp.limits.minUniformBufferOffsetAlignment;
3851 m_MaxMultiSample = DeviceProp.limits.framebufferColorSampleCounts;
3852
3853 if(IsVerbose())
3854 {
3855 dbg_msg(sys: "vulkan", fmt: "device prop: non-coherent align: %" PRIzu ", optimal image copy align: %" PRIzu ", max texture size: %u, max sampler anisotropy: %u", (size_t)m_NonCoherentMemAlignment, (size_t)m_OptimalImageCopyMemAlignment, m_MaxTextureSize, m_MaxSamplerAnisotropy);
3856 dbg_msg(sys: "vulkan", fmt: "device prop: min uniform align: %u, multi sample: %u", m_MinUniformAlign, (uint32_t)m_MaxMultiSample);
3857 }
3858 }
3859
3860 VkPhysicalDevice CurDevice = vDeviceList[FoundDeviceIndex];
3861
3862 uint32_t FamQueueCount = 0;
3863 vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice: CurDevice, pQueueFamilyPropertyCount: &FamQueueCount, pQueueFamilyProperties: nullptr);
3864 if(FamQueueCount == 0)
3865 {
3866 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "No vulkan queue family properties found.");
3867 return false;
3868 }
3869
3870 std::vector<VkQueueFamilyProperties> vQueuePropList(FamQueueCount);
3871 vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice: CurDevice, pQueueFamilyPropertyCount: &FamQueueCount, pQueueFamilyProperties: vQueuePropList.data());
3872
3873 uint32_t QueueNodeIndex = std::numeric_limits<uint32_t>::max();
3874 for(uint32_t i = 0; i < FamQueueCount; i++)
3875 {
3876 if(vQueuePropList[i].queueCount > 0 && (vQueuePropList[i].queueFlags & VK_QUEUE_GRAPHICS_BIT))
3877 {
3878 QueueNodeIndex = i;
3879 }
3880 /*if(vQueuePropList[i].queueCount > 0 && (vQueuePropList[i].queueFlags & VK_QUEUE_COMPUTE_BIT))
3881 {
3882 QueueNodeIndex = i;
3883 }*/
3884 }
3885
3886 if(QueueNodeIndex == std::numeric_limits<uint32_t>::max())
3887 {
3888 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "No vulkan queue found that matches the requirements: graphics queue.");
3889 return false;
3890 }
3891
3892 m_VKGPU = CurDevice;
3893 m_VKGraphicsQueueIndex = QueueNodeIndex;
3894 return true;
3895 }
3896
3897 [[nodiscard]] bool CreateLogicalDevice(const std::vector<std::string> &vVKLayers)
3898 {
3899 std::vector<const char *> vLayerCNames;
3900 vLayerCNames.reserve(n: vVKLayers.size());
3901 for(const auto &Layer : vVKLayers)
3902 vLayerCNames.emplace_back(args: Layer.c_str());
3903
3904 uint32_t DevPropCount = 0;
3905 if(vkEnumerateDeviceExtensionProperties(physicalDevice: m_VKGPU, NULL, pPropertyCount: &DevPropCount, NULL) != VK_SUCCESS)
3906 {
3907 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Querying logical device extension properties failed.");
3908 return false;
3909 }
3910
3911 std::vector<VkExtensionProperties> vDevPropList(DevPropCount);
3912 if(vkEnumerateDeviceExtensionProperties(physicalDevice: m_VKGPU, NULL, pPropertyCount: &DevPropCount, pProperties: vDevPropList.data()) != VK_SUCCESS)
3913 {
3914 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Querying logical device extension properties failed.");
3915 return false;
3916 }
3917
3918 std::vector<const char *> vDevPropCNames;
3919 std::set<std::string> OurDevExt = OurDeviceExtensions();
3920
3921 for(const auto &CurExtProp : vDevPropList)
3922 {
3923 if(OurDevExt.contains(x: std::string(CurExtProp.extensionName)))
3924 {
3925 vDevPropCNames.emplace_back(args: CurExtProp.extensionName);
3926 }
3927 }
3928
3929 VkDeviceQueueCreateInfo VKQueueCreateInfo;
3930 VKQueueCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
3931 VKQueueCreateInfo.queueFamilyIndex = m_VKGraphicsQueueIndex;
3932 VKQueueCreateInfo.queueCount = 1;
3933 float QueuePrio = 1.0f;
3934 VKQueueCreateInfo.pQueuePriorities = &QueuePrio;
3935 VKQueueCreateInfo.pNext = NULL;
3936 VKQueueCreateInfo.flags = 0;
3937
3938 VkDeviceCreateInfo VKCreateInfo;
3939 VKCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
3940 VKCreateInfo.queueCreateInfoCount = 1;
3941 VKCreateInfo.pQueueCreateInfos = &VKQueueCreateInfo;
3942 VKCreateInfo.ppEnabledLayerNames = vLayerCNames.data();
3943 VKCreateInfo.enabledLayerCount = static_cast<uint32_t>(vLayerCNames.size());
3944 VKCreateInfo.ppEnabledExtensionNames = vDevPropCNames.data();
3945 VKCreateInfo.enabledExtensionCount = static_cast<uint32_t>(vDevPropCNames.size());
3946 VKCreateInfo.pNext = NULL;
3947 VKCreateInfo.pEnabledFeatures = NULL;
3948 VKCreateInfo.flags = 0;
3949
3950 if(vkCreateDevice(physicalDevice: m_VKGPU, pCreateInfo: &VKCreateInfo, pAllocator: nullptr, pDevice: &m_VKDevice) != VK_SUCCESS)
3951 {
3952 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Logical device could not be created.");
3953 return false;
3954 }
3955
3956 return true;
3957 }
3958
3959 [[nodiscard]] bool CreateSurface(SDL_Window *pWindow)
3960 {
3961 if(!SDL_Vulkan_CreateSurface(window: pWindow, instance: m_VKInstance, surface: &m_VKPresentSurface))
3962 {
3963 dbg_msg(sys: "vulkan", fmt: "error from sdl: %s", SDL_GetError());
3964 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating a vulkan surface for the SDL window failed.");
3965 return false;
3966 }
3967
3968 VkBool32 IsSupported = false;
3969 vkGetPhysicalDeviceSurfaceSupportKHR(physicalDevice: m_VKGPU, queueFamilyIndex: m_VKGraphicsQueueIndex, surface: m_VKPresentSurface, pSupported: &IsSupported);
3970 if(!IsSupported)
3971 {
3972 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface does not support presenting the framebuffer to a screen. (maybe the wrong GPU was selected?)");
3973 return false;
3974 }
3975
3976 return true;
3977 }
3978
	// Destroys the presentation surface created by CreateSurface().
	void DestroySurface()
	{
		vkDestroySurfaceKHR(instance: m_VKInstance, surface: m_VKPresentSurface, pAllocator: nullptr);
	}
3983
3984 [[nodiscard]] bool GetPresentationMode(VkPresentModeKHR &VKIOMode)
3985 {
3986 uint32_t PresentModeCount = 0;
3987 if(vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice: m_VKGPU, surface: m_VKPresentSurface, pPresentModeCount: &PresentModeCount, NULL) != VK_SUCCESS)
3988 {
3989 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface presentation modes could not be fetched.");
3990 return false;
3991 }
3992
3993 std::vector<VkPresentModeKHR> vPresentModeList(PresentModeCount);
3994 if(vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice: m_VKGPU, surface: m_VKPresentSurface, pPresentModeCount: &PresentModeCount, pPresentModes: vPresentModeList.data()) != VK_SUCCESS)
3995 {
3996 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface presentation modes could not be fetched.");
3997 return false;
3998 }
3999
4000 VKIOMode = g_Config.m_GfxVsync ? VK_PRESENT_MODE_FIFO_KHR : VK_PRESENT_MODE_IMMEDIATE_KHR;
4001 for(const auto &Mode : vPresentModeList)
4002 {
4003 if(Mode == VKIOMode)
4004 return true;
4005 }
4006
4007 dbg_msg(sys: "vulkan", fmt: "warning: requested presentation mode was not available. falling back to mailbox / fifo relaxed.");
4008 VKIOMode = g_Config.m_GfxVsync ? VK_PRESENT_MODE_FIFO_RELAXED_KHR : VK_PRESENT_MODE_MAILBOX_KHR;
4009 for(const auto &Mode : vPresentModeList)
4010 {
4011 if(Mode == VKIOMode)
4012 return true;
4013 }
4014
4015 dbg_msg(sys: "vulkan", fmt: "warning: requested presentation mode was not available. using first available.");
4016 if(PresentModeCount > 0)
4017 VKIOMode = vPresentModeList[0];
4018
4019 return true;
4020 }
4021
	// Queries the surface capabilities (image counts, extents, supported
	// usage flags and transforms) for the selected GPU / surface pair.
	// Returns false and sets an init error when the query fails.
	[[nodiscard]] bool GetSurfaceProperties(VkSurfaceCapabilitiesKHR &VKSurfCapabilities)
	{
		if(vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice: m_VKGPU, surface: m_VKPresentSurface, pSurfaceCapabilities: &VKSurfCapabilities) != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface capabilities could not be fetched.");
			return false;
		}
		return true;
	}
4031
4032 uint32_t GetNumberOfSwapImages(const VkSurfaceCapabilitiesKHR &VKCapabilities)
4033 {
4034 uint32_t ImgNumber = VKCapabilities.minImageCount + 1;
4035 if(IsVerbose())
4036 {
4037 dbg_msg(sys: "vulkan", fmt: "minimal swap image count %u", VKCapabilities.minImageCount);
4038 }
4039 return (VKCapabilities.maxImageCount > 0 && ImgNumber > VKCapabilities.maxImageCount) ? VKCapabilities.maxImageCount : ImgNumber;
4040 }
4041
4042 SSwapImgViewportExtent GetSwapImageSize(const VkSurfaceCapabilitiesKHR &VKCapabilities)
4043 {
4044 VkExtent2D RetSize = {.width: m_CanvasWidth, .height: m_CanvasHeight};
4045
4046 if(VKCapabilities.currentExtent.width == std::numeric_limits<uint32_t>::max())
4047 {
4048 RetSize.width = std::clamp<uint32_t>(val: RetSize.width, lo: VKCapabilities.minImageExtent.width, hi: VKCapabilities.maxImageExtent.width);
4049 RetSize.height = std::clamp<uint32_t>(val: RetSize.height, lo: VKCapabilities.minImageExtent.height, hi: VKCapabilities.maxImageExtent.height);
4050 }
4051 else
4052 {
4053 RetSize = VKCapabilities.currentExtent;
4054 }
4055
4056 VkExtent2D AutoViewportExtent = RetSize;
4057 bool UsesForcedViewport = false;
4058 // keep this in sync with graphics_threaded AdjustViewport's check
4059 if(AutoViewportExtent.height > 4 * AutoViewportExtent.width / 5)
4060 {
4061 AutoViewportExtent.height = 4 * AutoViewportExtent.width / 5;
4062 UsesForcedViewport = true;
4063 }
4064
4065 SSwapImgViewportExtent Ext;
4066 Ext.m_SwapImageViewport = RetSize;
4067 Ext.m_ForcedViewport = AutoViewportExtent;
4068 Ext.m_HasForcedViewport = UsesForcedViewport;
4069
4070 return Ext;
4071 }
4072
4073 [[nodiscard]] bool GetImageUsage(const VkSurfaceCapabilitiesKHR &VKCapabilities, VkImageUsageFlags &VKOutUsage)
4074 {
4075 std::vector<VkImageUsageFlags> vOurImgUsages = OurImageUsages();
4076 if(vOurImgUsages.empty())
4077 {
4078 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Framebuffer image attachment types not supported.");
4079 return false;
4080 }
4081
4082 VKOutUsage = vOurImgUsages[0];
4083
4084 for(const auto &ImgUsage : vOurImgUsages)
4085 {
4086 VkImageUsageFlags ImgUsageFlags = ImgUsage & VKCapabilities.supportedUsageFlags;
4087 if(ImgUsageFlags != ImgUsage)
4088 {
4089 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Framebuffer image attachment types not supported.");
4090 return false;
4091 }
4092
4093 VKOutUsage = (VKOutUsage | ImgUsage);
4094 }
4095
4096 return true;
4097 }
4098
4099 VkSurfaceTransformFlagBitsKHR GetTransform(const VkSurfaceCapabilitiesKHR &VKCapabilities)
4100 {
4101 if(VKCapabilities.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR)
4102 return VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
4103 return VKCapabilities.currentTransform;
4104 }
4105
4106 [[nodiscard]] bool GetFormat()
4107 {
4108 uint32_t SurfFormats = 0;
4109 VkResult Res = vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice: m_VKGPU, surface: m_VKPresentSurface, pSurfaceFormatCount: &SurfFormats, pSurfaceFormats: nullptr);
4110 if(Res != VK_SUCCESS && Res != VK_INCOMPLETE)
4111 {
4112 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface format fetching failed.");
4113 return false;
4114 }
4115
4116 std::vector<VkSurfaceFormatKHR> vSurfFormatList(SurfFormats);
4117 Res = vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice: m_VKGPU, surface: m_VKPresentSurface, pSurfaceFormatCount: &SurfFormats, pSurfaceFormats: vSurfFormatList.data());
4118 if(Res != VK_SUCCESS && Res != VK_INCOMPLETE)
4119 {
4120 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface format fetching failed.");
4121 return false;
4122 }
4123
4124 if(Res == VK_INCOMPLETE)
4125 {
4126 dbg_msg(sys: "vulkan", fmt: "warning: not all surface formats are requestable with your current settings.");
4127 }
4128
4129 if(vSurfFormatList.size() == 1 && vSurfFormatList[0].format == VK_FORMAT_UNDEFINED)
4130 {
4131 m_VKSurfFormat.format = VK_FORMAT_B8G8R8A8_UNORM;
4132 m_VKSurfFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
4133 dbg_msg(sys: "vulkan", fmt: "warning: surface format was undefined. This can potentially cause bugs.");
4134 return true;
4135 }
4136
4137 for(const auto &FindFormat : vSurfFormatList)
4138 {
4139 if(FindFormat.format == VK_FORMAT_B8G8R8A8_UNORM && FindFormat.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR)
4140 {
4141 m_VKSurfFormat = FindFormat;
4142 return true;
4143 }
4144 else if(FindFormat.format == VK_FORMAT_R8G8B8A8_UNORM && FindFormat.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR)
4145 {
4146 m_VKSurfFormat = FindFormat;
4147 return true;
4148 }
4149 }
4150
4151 dbg_msg(sys: "vulkan", fmt: "warning: surface format was not RGBA(or variants of it). This can potentially cause weird looking images(too bright etc.).");
4152 m_VKSurfFormat = vSurfFormatList[0];
4153 return true;
4154 }
4155
	// Creates (or recreates) the swap chain. The previous swap chain handle
	// is written to OldSwapChain and passed as oldSwapchain so in-flight
	// presentation can be retired by the caller; the caller owns destroying
	// it. Returns false on any failure, with an init error set for critical
	// Vulkan errors.
	[[nodiscard]] bool CreateSwapChain(VkSwapchainKHR &OldSwapChain)
	{
		VkSurfaceCapabilitiesKHR VKSurfCap;
		if(!GetSurfaceProperties(VKSurfCapabilities&: VKSurfCap))
			return false;

		VkPresentModeKHR PresentMode = VK_PRESENT_MODE_IMMEDIATE_KHR;
		if(!GetPresentationMode(VKIOMode&: PresentMode))
			return false;

		uint32_t SwapImgCount = GetNumberOfSwapImages(VKCapabilities: VKSurfCap);

		m_VKSwapImgAndViewportExtent = GetSwapImageSize(VKCapabilities: VKSurfCap);

		VkImageUsageFlags UsageFlags;
		if(!GetImageUsage(VKCapabilities: VKSurfCap, VKOutUsage&: UsageFlags))
			return false;

		VkSurfaceTransformFlagBitsKHR TransformFlagBits = GetTransform(VKCapabilities: VKSurfCap);

		if(!GetFormat())
			return false;

		// hand the old handle to the caller before it is replaced below
		OldSwapChain = m_VKSwapChain;

		VkSwapchainCreateInfoKHR SwapInfo;
		SwapInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
		SwapInfo.pNext = nullptr;
		SwapInfo.flags = 0;
		SwapInfo.surface = m_VKPresentSurface;
		SwapInfo.minImageCount = SwapImgCount;
		SwapInfo.imageFormat = m_VKSurfFormat.format;
		SwapInfo.imageColorSpace = m_VKSurfFormat.colorSpace;
		SwapInfo.imageExtent = m_VKSwapImgAndViewportExtent.m_SwapImageViewport;
		SwapInfo.imageArrayLayers = 1;
		SwapInfo.imageUsage = UsageFlags;
		// exclusive sharing: only the graphics queue family touches the images
		SwapInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
		SwapInfo.queueFamilyIndexCount = 0;
		SwapInfo.pQueueFamilyIndices = nullptr;
		SwapInfo.preTransform = TransformFlagBits;
		SwapInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
		SwapInfo.presentMode = PresentMode;
		SwapInfo.clipped = true;
		SwapInfo.oldSwapchain = OldSwapChain;

		m_VKSwapChain = VK_NULL_HANDLE;
		VkResult SwapchainCreateRes = vkCreateSwapchainKHR(device: m_VKDevice, pCreateInfo: &SwapInfo, pAllocator: nullptr, pSwapchain: &m_VKSwapChain);
		// CheckVulkanCriticalError maps critical result codes to a message;
		// non-critical failures fall through to the checks below
		const char *pCritErrorMsg = CheckVulkanCriticalError(CallResult: SwapchainCreateRes);
		if(pCritErrorMsg != nullptr)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the swap chain failed.", pErrStrExtra: pCritErrorMsg);
			return false;
		}
		// NOTE(review): this branch assumes CheckVulkanCriticalError does not
		// treat VK_ERROR_NATIVE_WINDOW_IN_USE_KHR as critical — confirm
		else if(SwapchainCreateRes == VK_ERROR_NATIVE_WINDOW_IN_USE_KHR)
			return false;

		return true;
	}
4214
	// Destroys the swap chain, but only when ForceDestroy is set; otherwise
	// this is a no-op (the handle is kept alive, e.g. for reuse as
	// oldSwapchain during recreation).
	void DestroySwapChain(bool ForceDestroy)
	{
		if(ForceDestroy)
		{
			vkDestroySwapchainKHR(device: m_VKDevice, swapchain: m_VKSwapChain, pAllocator: nullptr);
			m_VKSwapChain = VK_NULL_HANDLE;
		}
	}
4223
4224 [[nodiscard]] bool GetSwapChainImageHandles()
4225 {
4226 uint32_t ImgCount = 0;
4227 if(vkGetSwapchainImagesKHR(device: m_VKDevice, swapchain: m_VKSwapChain, pSwapchainImageCount: &ImgCount, pSwapchainImages: nullptr) != VK_SUCCESS)
4228 {
4229 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get swap chain images.");
4230 return false;
4231 }
4232
4233 m_SwapChainImageCount = ImgCount;
4234
4235 m_vSwapChainImages.resize(new_size: ImgCount);
4236 if(vkGetSwapchainImagesKHR(device: m_VKDevice, swapchain: m_VKSwapChain, pSwapchainImageCount: &ImgCount, pSwapchainImages: m_vSwapChainImages.data()) != VK_SUCCESS)
4237 {
4238 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get swap chain images.");
4239 return false;
4240 }
4241
4242 return true;
4243 }
4244
	// Drops the cached swap chain image handles; the images themselves are
	// owned by the swap chain and are not destroyed here.
	void ClearSwapChainImageHandles()
	{
		m_vSwapChainImages.clear();
	}
4249
	// Retrieves the graphics and present queue handles. Both come from the
	// same family (queue index 0), so graphics and present share one queue.
	void GetDeviceQueue()
	{
		vkGetDeviceQueue(device: m_VKDevice, queueFamilyIndex: m_VKGraphicsQueueIndex, queueIndex: 0, pQueue: &m_VKGraphicsQueue);
		vkGetDeviceQueue(device: m_VKDevice, queueFamilyIndex: m_VKGraphicsQueueIndex, queueIndex: 0, pQueue: &m_VKPresentQueue);
	}
4255
4256#ifdef VK_EXT_debug_utils
4257 static VKAPI_ATTR VkBool32 VKAPI_CALL VKDebugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT MessageSeverity, VkDebugUtilsMessageTypeFlagsEXT MessageType, const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, void *pUserData)
4258 {
4259 if((MessageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) != 0)
4260 {
4261 dbg_msg(sys: "vulkan_debug", fmt: "validation error: %s", pCallbackData->pMessage);
4262 }
4263 else
4264 {
4265 dbg_msg(sys: "vulkan_debug", fmt: "%s", pCallbackData->pMessage);
4266 }
4267
4268 return VK_FALSE;
4269 }
4270
4271 VkResult CreateDebugUtilsMessengerEXT(const VkDebugUtilsMessengerCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDebugUtilsMessengerEXT *pDebugMessenger)
4272 {
4273 auto pfnVulkanCreateDebugUtilsFunction = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance: m_VKInstance, pName: "vkCreateDebugUtilsMessengerEXT");
4274 if(pfnVulkanCreateDebugUtilsFunction != nullptr)
4275 {
4276 return pfnVulkanCreateDebugUtilsFunction(m_VKInstance, pCreateInfo, pAllocator, pDebugMessenger);
4277 }
4278 else
4279 {
4280 return VK_ERROR_EXTENSION_NOT_PRESENT;
4281 }
4282 }
4283
4284 void DestroyDebugUtilsMessengerEXT(VkDebugUtilsMessengerEXT &DebugMessenger)
4285 {
4286 auto pfnVulkanDestroyDebugUtilsFunction = (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance: m_VKInstance, pName: "vkDestroyDebugUtilsMessengerEXT");
4287 if(pfnVulkanDestroyDebugUtilsFunction != nullptr)
4288 {
4289 pfnVulkanDestroyDebugUtilsFunction(m_VKInstance, DebugMessenger, nullptr);
4290 }
4291 }
4292#endif
4293
	// Registers VKDebugCallback as a debug-utils messenger for verbose,
	// warning and error severities (validation + performance messages).
	// Failure is non-fatal: the messenger handle is simply left null.
	void SetupDebugCallback()
	{
#ifdef VK_EXT_debug_utils
		VkDebugUtilsMessengerCreateInfoEXT CreateInfo = {};
		CreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
		CreateInfo.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
		CreateInfo.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT; // | VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT <- too annoying
		CreateInfo.pfnUserCallback = VKDebugCallback;

		if(CreateDebugUtilsMessengerEXT(pCreateInfo: &CreateInfo, pAllocator: nullptr, pDebugMessenger: &m_DebugMessenger) != VK_SUCCESS)
		{
			m_DebugMessenger = VK_NULL_HANDLE;
			dbg_msg(sys: "vulkan", fmt: "didn't find vulkan debug layer.");
		}
		else
		{
			dbg_msg(sys: "vulkan", fmt: "enabled vulkan debug context.");
		}
#endif
	}
4314
	// Tears down the debug messenger registered by SetupDebugCallback(),
	// if one was successfully created.
	void UnregisterDebugCallback()
	{
#ifdef VK_EXT_debug_utils
		if(m_DebugMessenger != VK_NULL_HANDLE)
			DestroyDebugUtilsMessengerEXT(DebugMessenger&: m_DebugMessenger);
#endif
	}
4322
4323 [[nodiscard]] bool CreateImageViews()
4324 {
4325 m_vSwapChainImageViewList.resize(new_size: m_SwapChainImageCount);
4326
4327 for(size_t i = 0; i < m_SwapChainImageCount; i++)
4328 {
4329 VkImageViewCreateInfo CreateInfo{};
4330 CreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
4331 CreateInfo.image = m_vSwapChainImages[i];
4332 CreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
4333 CreateInfo.format = m_VKSurfFormat.format;
4334 CreateInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
4335 CreateInfo.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
4336 CreateInfo.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
4337 CreateInfo.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
4338 CreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
4339 CreateInfo.subresourceRange.baseMipLevel = 0;
4340 CreateInfo.subresourceRange.levelCount = 1;
4341 CreateInfo.subresourceRange.baseArrayLayer = 0;
4342 CreateInfo.subresourceRange.layerCount = 1;
4343
4344 if(vkCreateImageView(device: m_VKDevice, pCreateInfo: &CreateInfo, pAllocator: nullptr, pView: &m_vSwapChainImageViewList[i]) != VK_SUCCESS)
4345 {
4346 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not create image views for the swap chain framebuffers.");
4347 return false;
4348 }
4349 }
4350
4351 return true;
4352 }
4353
4354 void DestroyImageViews()
4355 {
4356 for(auto &ImageView : m_vSwapChainImageViewList)
4357 {
4358 vkDestroyImageView(device: m_VKDevice, imageView: ImageView, pAllocator: nullptr);
4359 }
4360
4361 m_vSwapChainImageViewList.clear();
4362 }
4363
	// Creates the multi-sampled color targets (image, memory and view) for
	// every swap chain image when MSAA is enabled. Without MSAA the vector
	// is still resized so indexing elsewhere (e.g. CreateFramebuffers)
	// stays valid with null handles.
	[[nodiscard]] bool CreateMultiSamplerImageAttachments()
	{
		m_vSwapChainMultiSamplingImages.resize(new_size: m_SwapChainImageCount);
		if(HasMultiSampling())
		{
			for(size_t i = 0; i < m_SwapChainImageCount; ++i)
			{
				// transient attachment: the MSAA target only lives within the render pass
				if(!CreateImage(Width: m_VKSwapImgAndViewportExtent.m_SwapImageViewport.width, Height: m_VKSwapImgAndViewportExtent.m_SwapImageViewport.height, Depth: 1, MipMapLevelCount: 1, Format: m_VKSurfFormat.format, Tiling: VK_IMAGE_TILING_OPTIMAL, Image&: m_vSwapChainMultiSamplingImages[i].m_Image, ImageMemory&: m_vSwapChainMultiSamplingImages[i].m_ImgMem, ImageUsage: VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
					return false;
				m_vSwapChainMultiSamplingImages[i].m_ImgView = CreateImageView(Image: m_vSwapChainMultiSamplingImages[i].m_Image, Format: m_VKSurfFormat.format, ViewType: VK_IMAGE_VIEW_TYPE_2D, Depth: 1, MipMapLevelCount: 1);
			}
		}

		return true;
	}
4379
	// Destroys the MSAA color targets created by
	// CreateMultiSamplerImageAttachments(), then clears the vector.
	void DestroyMultiSamplerImageAttachments()
	{
		if(HasMultiSampling())
		{
			// resize before iterating so indices 0..count-1 are valid even
			// if the vector was shrunk elsewhere (new slots hold null
			// handles, which Vulkan destroy calls accept)
			m_vSwapChainMultiSamplingImages.resize(new_size: m_SwapChainImageCount);
			for(size_t i = 0; i < m_SwapChainImageCount; ++i)
			{
				vkDestroyImage(device: m_VKDevice, image: m_vSwapChainMultiSamplingImages[i].m_Image, pAllocator: nullptr);
				vkDestroyImageView(device: m_VKDevice, imageView: m_vSwapChainMultiSamplingImages[i].m_ImgView, pAllocator: nullptr);
				FreeImageMemBlock(Block&: m_vSwapChainMultiSamplingImages[i].m_ImgMem);
			}
		}
		m_vSwapChainMultiSamplingImages.clear();
	}
4394
	// Creates the main render pass. With MSAA there are two attachments:
	// index 0 is the multi-sampled color target, index 1 the single-sample
	// swap chain image used as the resolve target. Without MSAA only the
	// swap chain attachment (then at index 0) is used. ClearAttachments
	// selects LOAD_OP_CLEAR for whichever attachment is rendered into first.
	[[nodiscard]] bool CreateRenderPass(bool ClearAttachments)
	{
		bool HasMultiSamplingTargets = HasMultiSampling();
		VkAttachmentDescription MultiSamplingColorAttachment{};
		MultiSamplingColorAttachment.format = m_VKSurfFormat.format;
		MultiSamplingColorAttachment.samples = GetSampleCount();
		MultiSamplingColorAttachment.loadOp = ClearAttachments ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		// the MSAA target is resolved, never stored
		MultiSamplingColorAttachment.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
		MultiSamplingColorAttachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		MultiSamplingColorAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
		MultiSamplingColorAttachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
		MultiSamplingColorAttachment.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

		VkAttachmentDescription ColorAttachment{};
		ColorAttachment.format = m_VKSurfFormat.format;
		ColorAttachment.samples = VK_SAMPLE_COUNT_1_BIT;
		// with MSAA the clear happens on the multi-sampled attachment instead
		ColorAttachment.loadOp = ClearAttachments && !HasMultiSamplingTargets ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		ColorAttachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
		ColorAttachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		ColorAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
		ColorAttachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
		// ready for presentation after the pass
		ColorAttachment.finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;

		VkAttachmentReference MultiSamplingColorAttachmentRef{};
		MultiSamplingColorAttachmentRef.attachment = 0;
		MultiSamplingColorAttachmentRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

		// attachment index shifts depending on whether the MSAA target exists
		VkAttachmentReference ColorAttachmentRef{};
		ColorAttachmentRef.attachment = HasMultiSamplingTargets ? 1 : 0;
		ColorAttachmentRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

		VkSubpassDescription Subpass{};
		Subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
		Subpass.colorAttachmentCount = 1;
		Subpass.pColorAttachments = HasMultiSamplingTargets ? &MultiSamplingColorAttachmentRef : &ColorAttachmentRef;
		Subpass.pResolveAttachments = HasMultiSamplingTargets ? &ColorAttachmentRef : nullptr;

		std::array<VkAttachmentDescription, 2> aAttachments;
		aAttachments[0] = MultiSamplingColorAttachment;
		aAttachments[1] = ColorAttachment;

		// wait for previous color output before writing this frame's color
		VkSubpassDependency Dependency{};
		Dependency.srcSubpass = VK_SUBPASS_EXTERNAL;
		Dependency.dstSubpass = 0;
		Dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
		Dependency.srcAccessMask = 0;
		Dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
		Dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;

		VkRenderPassCreateInfo CreateRenderPassInfo{};
		CreateRenderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
		CreateRenderPassInfo.attachmentCount = HasMultiSamplingTargets ? 2 : 1;
		// without MSAA, skip aAttachments[0] so ColorAttachment is index 0
		CreateRenderPassInfo.pAttachments = HasMultiSamplingTargets ? aAttachments.data() : aAttachments.data() + 1;
		CreateRenderPassInfo.subpassCount = 1;
		CreateRenderPassInfo.pSubpasses = &Subpass;
		CreateRenderPassInfo.dependencyCount = 1;
		CreateRenderPassInfo.pDependencies = &Dependency;

		if(vkCreateRenderPass(device: m_VKDevice, pCreateInfo: &CreateRenderPassInfo, pAllocator: nullptr, pRenderPass: &m_VKRenderPass) != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the render pass failed.");
			return false;
		}

		return true;
	}
4461
	// Destroys the render pass created by CreateRenderPass().
	void DestroyRenderPass()
	{
		vkDestroyRenderPass(device: m_VKDevice, renderPass: m_VKRenderPass, pAllocator: nullptr);
	}
4466
4467 [[nodiscard]] bool CreateFramebuffers()
4468 {
4469 m_vFramebufferList.resize(new_size: m_SwapChainImageCount);
4470
4471 for(size_t i = 0; i < m_SwapChainImageCount; i++)
4472 {
4473 std::array<VkImageView, 2> aAttachments = {
4474 m_vSwapChainMultiSamplingImages[i].m_ImgView,
4475 m_vSwapChainImageViewList[i]};
4476
4477 bool HasMultiSamplingTargets = HasMultiSampling();
4478
4479 VkFramebufferCreateInfo FramebufferInfo{};
4480 FramebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
4481 FramebufferInfo.renderPass = m_VKRenderPass;
4482 FramebufferInfo.attachmentCount = HasMultiSamplingTargets ? aAttachments.size() : aAttachments.size() - 1;
4483 FramebufferInfo.pAttachments = HasMultiSamplingTargets ? aAttachments.data() : aAttachments.data() + 1;
4484 FramebufferInfo.width = m_VKSwapImgAndViewportExtent.m_SwapImageViewport.width;
4485 FramebufferInfo.height = m_VKSwapImgAndViewportExtent.m_SwapImageViewport.height;
4486 FramebufferInfo.layers = 1;
4487
4488 if(vkCreateFramebuffer(device: m_VKDevice, pCreateInfo: &FramebufferInfo, pAllocator: nullptr, pFramebuffer: &m_vFramebufferList[i]) != VK_SUCCESS)
4489 {
4490 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the framebuffers failed.");
4491 return false;
4492 }
4493 }
4494
4495 return true;
4496 }
4497
	// Destroys all swap chain framebuffers and forgets their handles.
	void DestroyFramebuffers()
	{
		for(auto &FrameBuffer : m_vFramebufferList)
		{
			vkDestroyFramebuffer(device: m_VKDevice, framebuffer: FrameBuffer, pAllocator: nullptr);
		}

		m_vFramebufferList.clear();
	}
4507
4508 [[nodiscard]] bool CreateShaderModule(const std::vector<uint8_t> &vCode, VkShaderModule &ShaderModule)
4509 {
4510 VkShaderModuleCreateInfo CreateInfo{};
4511 CreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
4512 CreateInfo.codeSize = vCode.size();
4513 CreateInfo.pCode = (const uint32_t *)(vCode.data());
4514
4515 if(vkCreateShaderModule(device: m_VKDevice, pCreateInfo: &CreateInfo, pAllocator: nullptr, pShaderModule: &ShaderModule) != VK_SUCCESS)
4516 {
4517 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Shader module was not created.");
4518 return false;
4519 }
4520
4521 return true;
4522 }
4523
4524 [[nodiscard]] bool CreateDescriptorSetLayouts()
4525 {
4526 VkDescriptorSetLayoutBinding SamplerLayoutBinding{};
4527 SamplerLayoutBinding.binding = 0;
4528 SamplerLayoutBinding.descriptorCount = 1;
4529 SamplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
4530 SamplerLayoutBinding.pImmutableSamplers = nullptr;
4531 SamplerLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
4532
4533 std::array<VkDescriptorSetLayoutBinding, 1> aBindings = {SamplerLayoutBinding};
4534 VkDescriptorSetLayoutCreateInfo LayoutInfo{};
4535 LayoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
4536 LayoutInfo.bindingCount = aBindings.size();
4537 LayoutInfo.pBindings = aBindings.data();
4538
4539 if(vkCreateDescriptorSetLayout(device: m_VKDevice, pCreateInfo: &LayoutInfo, pAllocator: nullptr, pSetLayout: &m_StandardTexturedDescriptorSetLayout) != VK_SUCCESS)
4540 {
4541 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating descriptor layout failed.");
4542 return false;
4543 }
4544
4545 if(vkCreateDescriptorSetLayout(device: m_VKDevice, pCreateInfo: &LayoutInfo, pAllocator: nullptr, pSetLayout: &m_Standard3DTexturedDescriptorSetLayout) != VK_SUCCESS)
4546 {
4547 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating descriptor layout failed.");
4548 return false;
4549 }
4550 return true;
4551 }
4552
4553 void DestroyDescriptorSetLayouts()
4554 {
4555 vkDestroyDescriptorSetLayout(device: m_VKDevice, descriptorSetLayout: m_StandardTexturedDescriptorSetLayout, pAllocator: nullptr);
4556 vkDestroyDescriptorSetLayout(device: m_VKDevice, descriptorSetLayout: m_Standard3DTexturedDescriptorSetLayout, pAllocator: nullptr);
4557 }
4558
4559 [[nodiscard]] bool LoadShader(const char *pFilename, std::vector<uint8_t> *&pvShaderData)
4560 {
4561 auto ShaderFileIterator = m_ShaderFiles.find(x: pFilename);
4562 if(ShaderFileIterator == m_ShaderFiles.end())
4563 {
4564 void *pShaderBuff;
4565 unsigned FileSize;
4566 if(!m_pStorage->ReadFile(pFilename, Type: IStorage::TYPE_ALL, ppResult: &pShaderBuff, pResultLen: &FileSize))
4567 return false;
4568
4569 std::vector<uint8_t> vShaderBuff;
4570 vShaderBuff.resize(new_size: FileSize);
4571 mem_copy(dest: vShaderBuff.data(), source: pShaderBuff, size: FileSize);
4572 free(ptr: pShaderBuff);
4573
4574 ShaderFileIterator = m_ShaderFiles.insert(x: {pFilename, {.m_vBinary: std::move(vShaderBuff)}}).first;
4575 }
4576
4577 pvShaderData = &ShaderFileIterator->second.m_vBinary;
4578
4579 return true;
4580 }
4581
4582 [[nodiscard]] bool CreateShaders(const char *pVertName, const char *pFragName, VkPipelineShaderStageCreateInfo (&aShaderStages)[2], SShaderModule &ShaderModule)
4583 {
4584 bool ShaderLoaded = true;
4585
4586 std::vector<uint8_t> *pvVertBuff;
4587 std::vector<uint8_t> *pvFragBuff;
4588 ShaderLoaded &= LoadShader(pFilename: pVertName, pvShaderData&: pvVertBuff);
4589 ShaderLoaded &= LoadShader(pFilename: pFragName, pvShaderData&: pvFragBuff);
4590
4591 ShaderModule.m_VKDevice = m_VKDevice;
4592
4593 if(!ShaderLoaded)
4594 {
4595 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "A shader file could not load correctly.");
4596 return false;
4597 }
4598
4599 if(!CreateShaderModule(vCode: *pvVertBuff, ShaderModule&: ShaderModule.m_VertShaderModule))
4600 return false;
4601
4602 if(!CreateShaderModule(vCode: *pvFragBuff, ShaderModule&: ShaderModule.m_FragShaderModule))
4603 return false;
4604
4605 VkPipelineShaderStageCreateInfo &VertShaderStageInfo = aShaderStages[0];
4606 VertShaderStageInfo = {};
4607 VertShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
4608 VertShaderStageInfo.stage = VK_SHADER_STAGE_VERTEX_BIT;
4609 VertShaderStageInfo.module = ShaderModule.m_VertShaderModule;
4610 VertShaderStageInfo.pName = "main";
4611
4612 VkPipelineShaderStageCreateInfo &FragShaderStageInfo = aShaderStages[1];
4613 FragShaderStageInfo = {};
4614 FragShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
4615 FragShaderStageInfo.stage = VK_SHADER_STAGE_FRAGMENT_BIT;
4616 FragShaderStageInfo.module = ShaderModule.m_FragShaderModule;
4617 FragShaderStageInfo.pName = "main";
4618 return true;
4619 }
4620
4621 bool GetStandardPipelineInfo(VkPipelineInputAssemblyStateCreateInfo &InputAssembly,
4622 VkViewport &Viewport,
4623 VkRect2D &Scissor,
4624 VkPipelineViewportStateCreateInfo &ViewportState,
4625 VkPipelineRasterizationStateCreateInfo &Rasterizer,
4626 VkPipelineMultisampleStateCreateInfo &Multisampling,
4627 VkPipelineColorBlendAttachmentState &ColorBlendAttachment,
4628 VkPipelineColorBlendStateCreateInfo &ColorBlending) const
4629 {
4630 InputAssembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
4631 InputAssembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
4632 InputAssembly.primitiveRestartEnable = VK_FALSE;
4633
4634 Viewport.x = 0.0f;
4635 Viewport.y = 0.0f;
4636 Viewport.width = (float)m_VKSwapImgAndViewportExtent.m_SwapImageViewport.width;
4637 Viewport.height = (float)m_VKSwapImgAndViewportExtent.m_SwapImageViewport.height;
4638 Viewport.minDepth = 0.0f;
4639 Viewport.maxDepth = 1.0f;
4640
4641 Scissor.offset = {.x: 0, .y: 0};
4642 Scissor.extent = m_VKSwapImgAndViewportExtent.m_SwapImageViewport;
4643
4644 ViewportState.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
4645 ViewportState.viewportCount = 1;
4646 ViewportState.pViewports = &Viewport;
4647 ViewportState.scissorCount = 1;
4648 ViewportState.pScissors = &Scissor;
4649
4650 Rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
4651 Rasterizer.depthClampEnable = VK_FALSE;
4652 Rasterizer.rasterizerDiscardEnable = VK_FALSE;
4653 Rasterizer.polygonMode = VK_POLYGON_MODE_FILL;
4654 Rasterizer.lineWidth = 1.0f;
4655 Rasterizer.cullMode = VK_CULL_MODE_NONE;
4656 Rasterizer.frontFace = VK_FRONT_FACE_CLOCKWISE;
4657 Rasterizer.depthBiasEnable = VK_FALSE;
4658
4659 Multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
4660 Multisampling.sampleShadingEnable = VK_FALSE;
4661 Multisampling.rasterizationSamples = GetSampleCount();
4662
4663 ColorBlendAttachment.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
4664 ColorBlendAttachment.blendEnable = VK_TRUE;
4665
4666 ColorBlendAttachment.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
4667 ColorBlendAttachment.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
4668 ColorBlendAttachment.colorBlendOp = VK_BLEND_OP_ADD;
4669 ColorBlendAttachment.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
4670 ColorBlendAttachment.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
4671 ColorBlendAttachment.alphaBlendOp = VK_BLEND_OP_ADD;
4672
4673 ColorBlending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
4674 ColorBlending.logicOpEnable = VK_FALSE;
4675 ColorBlending.logicOp = VK_LOGIC_OP_COPY;
4676 ColorBlending.attachmentCount = 1;
4677 ColorBlending.pAttachments = &ColorBlendAttachment;
4678 ColorBlending.blendConstants[0] = 0.0f;
4679 ColorBlending.blendConstants[1] = 0.0f;
4680 ColorBlending.blendConstants[2] = 0.0f;
4681 ColorBlending.blendConstants[3] = 0.0f;
4682
4683 return true;
4684 }
4685
4686 template<bool ForceRequireDescriptors, size_t ArraySize, size_t DescrArraySize, size_t PushArraySize>
4687 [[nodiscard]] bool CreateGraphicsPipeline(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, uint32_t Stride, std::array<VkVertexInputAttributeDescription, ArraySize> &aInputAttr,
4688 std::array<VkDescriptorSetLayout, DescrArraySize> &aSetLayouts, std::array<VkPushConstantRange, PushArraySize> &aPushConstants, EVulkanBackendTextureModes TexMode,
4689 EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode, bool IsLinePrim = false)
4690 {
4691 VkPipelineShaderStageCreateInfo aShaderStages[2];
4692 SShaderModule Module;
4693 if(!CreateShaders(pVertName, pFragName, aShaderStages, ShaderModule&: Module))
4694 return false;
4695
4696 bool HasSampler = TexMode == VULKAN_BACKEND_TEXTURE_MODE_TEXTURED;
4697
4698 VkPipelineVertexInputStateCreateInfo VertexInputInfo{};
4699 VertexInputInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
4700 VkVertexInputBindingDescription BindingDescription{};
4701 BindingDescription.binding = 0;
4702 BindingDescription.stride = Stride;
4703 BindingDescription.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
4704
4705 VertexInputInfo.vertexBindingDescriptionCount = 1;
4706 VertexInputInfo.vertexAttributeDescriptionCount = aInputAttr.size();
4707 VertexInputInfo.pVertexBindingDescriptions = &BindingDescription;
4708 VertexInputInfo.pVertexAttributeDescriptions = aInputAttr.data();
4709
4710 VkPipelineInputAssemblyStateCreateInfo InputAssembly{};
4711 VkViewport Viewport{};
4712 VkRect2D Scissor{};
4713 VkPipelineViewportStateCreateInfo ViewportState{};
4714 VkPipelineRasterizationStateCreateInfo Rasterizer{};
4715 VkPipelineMultisampleStateCreateInfo Multisampling{};
4716 VkPipelineColorBlendAttachmentState ColorBlendAttachment{};
4717 VkPipelineColorBlendStateCreateInfo ColorBlending{};
4718
4719 GetStandardPipelineInfo(InputAssembly, Viewport, Scissor, ViewportState, Rasterizer, Multisampling, ColorBlendAttachment, ColorBlending);
4720 InputAssembly.topology = IsLinePrim ? VK_PRIMITIVE_TOPOLOGY_LINE_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
4721
4722 VkPipelineLayoutCreateInfo PipelineLayoutInfo{};
4723 PipelineLayoutInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
4724 PipelineLayoutInfo.setLayoutCount = (HasSampler || ForceRequireDescriptors) ? aSetLayouts.size() : 0;
4725 PipelineLayoutInfo.pSetLayouts = (HasSampler || ForceRequireDescriptors) && !aSetLayouts.empty() ? aSetLayouts.data() : nullptr;
4726
4727 PipelineLayoutInfo.pushConstantRangeCount = aPushConstants.size();
4728 PipelineLayoutInfo.pPushConstantRanges = !aPushConstants.empty() ? aPushConstants.data() : nullptr;
4729
4730 VkPipelineLayout &PipeLayout = GetPipeLayout(Container&: PipeContainer, IsTextured: HasSampler, BlendModeIndex: size_t(BlendMode), DynamicIndex: size_t(DynamicMode));
4731 VkPipeline &Pipeline = GetPipeline(Container&: PipeContainer, IsTextured: HasSampler, BlendModeIndex: size_t(BlendMode), DynamicIndex: size_t(DynamicMode));
4732
4733 if(vkCreatePipelineLayout(device: m_VKDevice, pCreateInfo: &PipelineLayoutInfo, pAllocator: nullptr, pPipelineLayout: &PipeLayout) != VK_SUCCESS)
4734 {
4735 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating pipeline layout failed.");
4736 return false;
4737 }
4738
4739 VkGraphicsPipelineCreateInfo PipelineInfo{};
4740 PipelineInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
4741 PipelineInfo.stageCount = 2;
4742 PipelineInfo.pStages = aShaderStages;
4743 PipelineInfo.pVertexInputState = &VertexInputInfo;
4744 PipelineInfo.pInputAssemblyState = &InputAssembly;
4745 PipelineInfo.pViewportState = &ViewportState;
4746 PipelineInfo.pRasterizationState = &Rasterizer;
4747 PipelineInfo.pMultisampleState = &Multisampling;
4748 PipelineInfo.pColorBlendState = &ColorBlending;
4749 PipelineInfo.layout = PipeLayout;
4750 PipelineInfo.renderPass = m_VKRenderPass;
4751 PipelineInfo.subpass = 0;
4752 PipelineInfo.basePipelineHandle = VK_NULL_HANDLE;
4753
4754 std::array<VkDynamicState, 2> aDynamicStates = {
4755 VK_DYNAMIC_STATE_VIEWPORT,
4756 VK_DYNAMIC_STATE_SCISSOR,
4757 };
4758
4759 VkPipelineDynamicStateCreateInfo DynamicStateCreate{};
4760 DynamicStateCreate.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
4761 DynamicStateCreate.dynamicStateCount = aDynamicStates.size();
4762 DynamicStateCreate.pDynamicStates = aDynamicStates.data();
4763
4764 if(DynamicMode == VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT)
4765 {
4766 PipelineInfo.pDynamicState = &DynamicStateCreate;
4767 }
4768
4769 if(vkCreateGraphicsPipelines(device: m_VKDevice, VK_NULL_HANDLE, createInfoCount: 1, pCreateInfos: &PipelineInfo, pAllocator: nullptr, pPipelines: &Pipeline) != VK_SUCCESS)
4770 {
4771 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the graphic pipeline failed.");
4772 return false;
4773 }
4774
4775 return true;
4776 }
4777
4778 [[nodiscard]] bool CreateStandardGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode, bool IsLinePrim)
4779 {
4780 std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
4781
4782 aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
4783 aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: sizeof(float) * 2};
4784 aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * (2 + 2)};
4785
4786 std::array<VkDescriptorSetLayout, 1> aSetLayouts = {m_StandardTexturedDescriptorSetLayout};
4787
4788 std::array<VkPushConstantRange, 1> aPushConstants{};
4789 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: sizeof(SUniformGPos)};
4790
4791 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * (2 + 2) + sizeof(uint8_t) * 4, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode, IsLinePrim);
4792 }
4793
4794 [[nodiscard]] bool CreateStandardGraphicsPipeline(const char *pVertName, const char *pFragName, bool HasSampler, bool IsLinePipe)
4795 {
4796 bool Ret = true;
4797
4798 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
4799
4800 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
4801 {
4802 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
4803 {
4804 Ret &= CreateStandardGraphicsPipelineImpl(pVertName, pFragName, PipeContainer&: IsLinePipe ? m_StandardLinePipeline : m_StandardPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j), IsLinePrim: IsLinePipe);
4805 }
4806 }
4807
4808 return Ret;
4809 }
4810
4811 [[nodiscard]] bool CreateStandard3DGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
4812 {
4813 std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
4814
4815 aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
4816 aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * 2};
4817 aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R32G32B32_SFLOAT, .offset: sizeof(float) * 2 + sizeof(uint8_t) * 4};
4818
4819 std::array<VkDescriptorSetLayout, 1> aSetLayouts = {m_Standard3DTexturedDescriptorSetLayout};
4820
4821 std::array<VkPushConstantRange, 1> aPushConstants{};
4822 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: sizeof(SUniformGPos)};
4823
4824 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * 2 + sizeof(uint8_t) * 4 + sizeof(float) * 3, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
4825 }
4826
4827 [[nodiscard]] bool CreateStandard3DGraphicsPipeline(const char *pVertName, const char *pFragName, bool HasSampler)
4828 {
4829 bool Ret = true;
4830
4831 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
4832
4833 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
4834 {
4835 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
4836 {
4837 Ret &= CreateStandard3DGraphicsPipelineImpl(pVertName, pFragName, PipeContainer&: m_Standard3DPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j));
4838 }
4839 }
4840
4841 return Ret;
4842 }
4843
4844 [[nodiscard]] bool CreateTextDescriptorSetLayout()
4845 {
4846 VkDescriptorSetLayoutBinding SamplerLayoutBinding{};
4847 SamplerLayoutBinding.binding = 0;
4848 SamplerLayoutBinding.descriptorCount = 1;
4849 SamplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
4850 SamplerLayoutBinding.pImmutableSamplers = nullptr;
4851 SamplerLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
4852
4853 auto SamplerLayoutBinding2 = SamplerLayoutBinding;
4854 SamplerLayoutBinding2.binding = 1;
4855
4856 std::array<VkDescriptorSetLayoutBinding, 2> aBindings = {SamplerLayoutBinding, SamplerLayoutBinding2};
4857 VkDescriptorSetLayoutCreateInfo LayoutInfo{};
4858 LayoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
4859 LayoutInfo.bindingCount = aBindings.size();
4860 LayoutInfo.pBindings = aBindings.data();
4861
4862 if(vkCreateDescriptorSetLayout(device: m_VKDevice, pCreateInfo: &LayoutInfo, pAllocator: nullptr, pSetLayout: &m_TextDescriptorSetLayout) != VK_SUCCESS)
4863 {
4864 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating descriptor layout failed.");
4865 return false;
4866 }
4867
4868 return true;
4869 }
4870
4871 void DestroyTextDescriptorSetLayout()
4872 {
4873 vkDestroyDescriptorSetLayout(device: m_VKDevice, descriptorSetLayout: m_TextDescriptorSetLayout, pAllocator: nullptr);
4874 }
4875
4876 [[nodiscard]] bool CreateTextGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
4877 {
4878 std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
4879 aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
4880 aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: sizeof(float) * 2};
4881 aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * (2 + 2)};
4882
4883 std::array<VkDescriptorSetLayout, 1> aSetLayouts = {m_TextDescriptorSetLayout};
4884
4885 std::array<VkPushConstantRange, 2> aPushConstants{};
4886 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: sizeof(SUniformGTextPos)};
4887 aPushConstants[1] = {.stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, .offset: sizeof(SUniformGTextPos) + sizeof(SUniformTextGFragmentOffset), .size: sizeof(SUniformTextGFragmentConstants)};
4888
4889 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * (2 + 2) + sizeof(uint8_t) * 4, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
4890 }
4891
4892 [[nodiscard]] bool CreateTextGraphicsPipeline(const char *pVertName, const char *pFragName)
4893 {
4894 bool Ret = true;
4895
4896 EVulkanBackendTextureModes TexMode = VULKAN_BACKEND_TEXTURE_MODE_TEXTURED;
4897
4898 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
4899 {
4900 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
4901 {
4902 Ret &= CreateTextGraphicsPipelineImpl(pVertName, pFragName, PipeContainer&: m_TextPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j));
4903 }
4904 }
4905
4906 return Ret;
4907 }
4908
4909 template<bool HasSampler>
4910 [[nodiscard]] bool CreateTileGraphicsPipelineImpl(const char *pVertName, const char *pFragName, bool IsBorder, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
4911 {
4912 std::array<VkVertexInputAttributeDescription, HasSampler ? 2 : 1> aAttributeDescriptions = {};
4913 aAttributeDescriptions[0] = {0, 0, VK_FORMAT_R32G32_SFLOAT, 0};
4914 if(HasSampler)
4915 aAttributeDescriptions[1] = {1, 0, VK_FORMAT_R8G8B8A8_UINT, sizeof(float) * 2};
4916
4917 std::array<VkDescriptorSetLayout, 1> aSetLayouts;
4918 aSetLayouts[0] = m_Standard3DTexturedDescriptorSetLayout;
4919
4920 uint32_t VertPushConstantSize = sizeof(SUniformTileGPos);
4921 if(IsBorder)
4922 VertPushConstantSize = sizeof(SUniformTileGPosBorder);
4923
4924 uint32_t FragPushConstantSize = sizeof(SUniformTileGVertColor);
4925
4926 std::array<VkPushConstantRange, 2> aPushConstants{};
4927 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: VertPushConstantSize};
4928 aPushConstants[1] = {.stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, .offset: sizeof(SUniformTileGPosBorder) + sizeof(SUniformTileGVertColorAlign), .size: FragPushConstantSize};
4929
4930 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, HasSampler ? (sizeof(float) * 2 + sizeof(uint8_t) * 4) : (sizeof(float) * 2), aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
4931 }
4932
4933 template<bool HasSampler>
4934 [[nodiscard]] bool CreateTileGraphicsPipeline(const char *pVertName, const char *pFragName, bool IsBorder)
4935 {
4936 bool Ret = true;
4937
4938 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
4939
4940 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
4941 {
4942 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
4943 {
4944 Ret &= CreateTileGraphicsPipelineImpl<HasSampler>(pVertName, pFragName, IsBorder, !IsBorder ? m_TilePipeline : m_TileBorderPipeline, TexMode, EVulkanBackendBlendModes(i), EVulkanBackendClipModes(j));
4945 }
4946 }
4947
4948 return Ret;
4949 }
4950
4951 [[nodiscard]] bool CreatePrimExGraphicsPipelineImpl(const char *pVertName, const char *pFragName, bool Rotationless, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
4952 {
4953 std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
4954 aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
4955 aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: sizeof(float) * 2};
4956 aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * (2 + 2)};
4957
4958 std::array<VkDescriptorSetLayout, 1> aSetLayouts;
4959 aSetLayouts[0] = m_StandardTexturedDescriptorSetLayout;
4960 uint32_t VertPushConstantSize = sizeof(SUniformPrimExGPos);
4961 if(Rotationless)
4962 VertPushConstantSize = sizeof(SUniformPrimExGPosRotationless);
4963
4964 uint32_t FragPushConstantSize = sizeof(SUniformPrimExGVertColor);
4965
4966 std::array<VkPushConstantRange, 2> aPushConstants{};
4967 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: VertPushConstantSize};
4968 aPushConstants[1] = {.stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, .offset: sizeof(SUniformPrimExGPos) + sizeof(SUniformPrimExGVertColorAlign), .size: FragPushConstantSize};
4969
4970 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * (2 + 2) + sizeof(uint8_t) * 4, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
4971 }
4972
4973 [[nodiscard]] bool CreatePrimExGraphicsPipeline(const char *pVertName, const char *pFragName, bool HasSampler, bool Rotationless)
4974 {
4975 bool Ret = true;
4976
4977 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
4978
4979 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
4980 {
4981 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
4982 {
4983 Ret &= CreatePrimExGraphicsPipelineImpl(pVertName, pFragName, Rotationless, PipeContainer&: Rotationless ? m_PrimExRotationlessPipeline : m_PrimExPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j));
4984 }
4985 }
4986
4987 return Ret;
4988 }
4989
4990 [[nodiscard]] bool CreateUniformDescriptorSetLayout(VkDescriptorSetLayout &SetLayout, VkShaderStageFlags StageFlags)
4991 {
4992 VkDescriptorSetLayoutBinding SamplerLayoutBinding{};
4993 SamplerLayoutBinding.binding = 1;
4994 SamplerLayoutBinding.descriptorCount = 1;
4995 SamplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
4996 SamplerLayoutBinding.pImmutableSamplers = nullptr;
4997 SamplerLayoutBinding.stageFlags = StageFlags;
4998
4999 std::array<VkDescriptorSetLayoutBinding, 1> aBindings = {SamplerLayoutBinding};
5000 VkDescriptorSetLayoutCreateInfo LayoutInfo{};
5001 LayoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
5002 LayoutInfo.bindingCount = aBindings.size();
5003 LayoutInfo.pBindings = aBindings.data();
5004
5005 if(vkCreateDescriptorSetLayout(device: m_VKDevice, pCreateInfo: &LayoutInfo, pAllocator: nullptr, pSetLayout: &SetLayout) != VK_SUCCESS)
5006 {
5007 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating descriptor layout failed.");
5008 return false;
5009 }
5010 return true;
5011 }
5012
5013 [[nodiscard]] bool CreateSpriteMultiUniformDescriptorSetLayout()
5014 {
5015 return CreateUniformDescriptorSetLayout(SetLayout&: m_SpriteMultiUniformDescriptorSetLayout, StageFlags: VK_SHADER_STAGE_VERTEX_BIT);
5016 }
5017
5018 [[nodiscard]] bool CreateQuadUniformDescriptorSetLayout()
5019 {
5020 return CreateUniformDescriptorSetLayout(SetLayout&: m_QuadUniformDescriptorSetLayout, StageFlags: VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT);
5021 }
5022
5023 void DestroyUniformDescriptorSetLayouts()
5024 {
5025 vkDestroyDescriptorSetLayout(device: m_VKDevice, descriptorSetLayout: m_QuadUniformDescriptorSetLayout, pAllocator: nullptr);
5026 vkDestroyDescriptorSetLayout(device: m_VKDevice, descriptorSetLayout: m_SpriteMultiUniformDescriptorSetLayout, pAllocator: nullptr);
5027 }
5028
5029 [[nodiscard]] bool CreateUniformDescriptorSets(size_t RenderThreadIndex, VkDescriptorSetLayout &SetLayout, SDeviceDescriptorSet *pSets, size_t SetCount, VkBuffer BindBuffer, size_t SingleBufferInstanceSize, VkDeviceSize MemoryOffset)
5030 {
5031 VkDescriptorPool RetDescr;
5032 if(!GetDescriptorPoolForAlloc(RetDescr, DescriptorPools&: m_vUniformBufferDescrPools[RenderThreadIndex], pSets, AllocNum: SetCount))
5033 return false;
5034 VkDescriptorSetAllocateInfo DesAllocInfo{};
5035 DesAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
5036 DesAllocInfo.descriptorSetCount = 1;
5037 DesAllocInfo.pSetLayouts = &SetLayout;
5038 for(size_t i = 0; i < SetCount; ++i)
5039 {
5040 DesAllocInfo.descriptorPool = pSets[i].m_pPools->m_vPools[pSets[i].m_PoolIndex].m_Pool;
5041 if(vkAllocateDescriptorSets(device: m_VKDevice, pAllocateInfo: &DesAllocInfo, pDescriptorSets: &pSets[i].m_Descriptor) != VK_SUCCESS)
5042 {
5043 return false;
5044 }
5045
5046 VkDescriptorBufferInfo BufferInfo{};
5047 BufferInfo.buffer = BindBuffer;
5048 BufferInfo.offset = MemoryOffset + SingleBufferInstanceSize * i;
5049 BufferInfo.range = SingleBufferInstanceSize;
5050
5051 std::array<VkWriteDescriptorSet, 1> aDescriptorWrites{};
5052
5053 aDescriptorWrites[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
5054 aDescriptorWrites[0].dstSet = pSets[i].m_Descriptor;
5055 aDescriptorWrites[0].dstBinding = 1;
5056 aDescriptorWrites[0].dstArrayElement = 0;
5057 aDescriptorWrites[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
5058 aDescriptorWrites[0].descriptorCount = 1;
5059 aDescriptorWrites[0].pBufferInfo = &BufferInfo;
5060
5061 vkUpdateDescriptorSets(device: m_VKDevice, descriptorWriteCount: static_cast<uint32_t>(aDescriptorWrites.size()), pDescriptorWrites: aDescriptorWrites.data(), descriptorCopyCount: 0, pDescriptorCopies: nullptr);
5062 }
5063
5064 return true;
5065 }
5066
5067 void DestroyUniformDescriptorSets(SDeviceDescriptorSet *pSets, size_t SetCount)
5068 {
5069 for(size_t i = 0; i < SetCount; ++i)
5070 {
5071 vkFreeDescriptorSets(device: m_VKDevice, descriptorPool: pSets[i].m_pPools->m_vPools[pSets[i].m_PoolIndex].m_Pool, descriptorSetCount: 1, pDescriptorSets: &pSets[i].m_Descriptor);
5072 pSets[i].m_Descriptor = VK_NULL_HANDLE;
5073 }
5074 }
5075
5076 [[nodiscard]] bool CreateSpriteMultiGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
5077 {
5078 std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
5079 aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
5080 aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: sizeof(float) * 2};
5081 aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * (2 + 2)};
5082
5083 std::array<VkDescriptorSetLayout, 2> aSetLayouts;
5084 aSetLayouts[0] = m_StandardTexturedDescriptorSetLayout;
5085 aSetLayouts[1] = m_SpriteMultiUniformDescriptorSetLayout;
5086
5087 uint32_t VertPushConstantSize = sizeof(SUniformSpriteMultiGPos);
5088 uint32_t FragPushConstantSize = sizeof(SUniformSpriteMultiGVertColor);
5089
5090 std::array<VkPushConstantRange, 2> aPushConstants{};
5091 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: VertPushConstantSize};
5092 aPushConstants[1] = {.stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, .offset: sizeof(SUniformSpriteMultiGPos) + sizeof(SUniformSpriteMultiGVertColorAlign), .size: FragPushConstantSize};
5093
5094 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * (2 + 2) + sizeof(uint8_t) * 4, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
5095 }
5096
5097 [[nodiscard]] bool CreateSpriteMultiGraphicsPipeline(const char *pVertName, const char *pFragName)
5098 {
5099 bool Ret = true;
5100
5101 EVulkanBackendTextureModes TexMode = VULKAN_BACKEND_TEXTURE_MODE_TEXTURED;
5102
5103 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
5104 {
5105 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
5106 {
5107 Ret &= CreateSpriteMultiGraphicsPipelineImpl(pVertName, pFragName, PipeContainer&: m_SpriteMultiPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j));
5108 }
5109 }
5110
5111 return Ret;
5112 }
5113
5114 [[nodiscard]] bool CreateSpriteMultiPushGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
5115 {
5116 std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
5117 aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
5118 aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: sizeof(float) * 2};
5119 aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * (2 + 2)};
5120
5121 std::array<VkDescriptorSetLayout, 1> aSetLayouts;
5122 aSetLayouts[0] = m_StandardTexturedDescriptorSetLayout;
5123
5124 uint32_t VertPushConstantSize = sizeof(SUniformSpriteMultiPushGPos);
5125 uint32_t FragPushConstantSize = sizeof(SUniformSpriteMultiPushGVertColor);
5126
5127 std::array<VkPushConstantRange, 2> aPushConstants{};
5128 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: VertPushConstantSize};
5129 aPushConstants[1] = {.stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, .offset: sizeof(SUniformSpriteMultiPushGPos), .size: FragPushConstantSize};
5130
5131 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * (2 + 2) + sizeof(uint8_t) * 4, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
5132 }
5133
5134 [[nodiscard]] bool CreateSpriteMultiPushGraphicsPipeline(const char *pVertName, const char *pFragName)
5135 {
5136 bool Ret = true;
5137
5138 EVulkanBackendTextureModes TexMode = VULKAN_BACKEND_TEXTURE_MODE_TEXTURED;
5139
5140 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
5141 {
5142 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
5143 {
5144 Ret &= CreateSpriteMultiPushGraphicsPipelineImpl(pVertName, pFragName, PipeContainer&: m_SpriteMultiPushPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j));
5145 }
5146 }
5147
5148 return Ret;
5149 }
5150
5151 template<bool IsTextured>
5152 [[nodiscard]] bool CreateQuadGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
5153 {
5154 std::array<VkVertexInputAttributeDescription, IsTextured ? 3 : 2> aAttributeDescriptions = {};
5155 aAttributeDescriptions[0] = {0, 0, VK_FORMAT_R32G32B32A32_SFLOAT, 0};
5156 aAttributeDescriptions[1] = {1, 0, VK_FORMAT_R8G8B8A8_UNORM, sizeof(float) * 4};
5157 if(IsTextured)
5158 aAttributeDescriptions[2] = {2, 0, VK_FORMAT_R32G32_SFLOAT, sizeof(float) * 4 + sizeof(uint8_t) * 4};
5159
5160 std::array<VkDescriptorSetLayout, IsTextured ? 2 : 1> aSetLayouts;
5161 if(IsTextured)
5162 {
5163 aSetLayouts[0] = m_StandardTexturedDescriptorSetLayout;
5164 aSetLayouts[1] = m_QuadUniformDescriptorSetLayout;
5165 }
5166 else
5167 {
5168 aSetLayouts[0] = m_QuadUniformDescriptorSetLayout;
5169 }
5170
5171 uint32_t PushConstantSize = sizeof(SUniformQuadGPos);
5172
5173 std::array<VkPushConstantRange, 1> aPushConstants{};
5174 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: PushConstantSize};
5175
5176 return CreateGraphicsPipeline<true>(pVertName, pFragName, PipeContainer, sizeof(float) * 4 + sizeof(uint8_t) * 4 + (IsTextured ? (sizeof(float) * 2) : 0), aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
5177 }
5178
5179 template<bool HasSampler>
5180 [[nodiscard]] bool CreateQuadGraphicsPipeline(const char *pVertName, const char *pFragName)
5181 {
5182 bool Ret = true;
5183
5184 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
5185
5186 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
5187 {
5188 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
5189 {
5190 Ret &= CreateQuadGraphicsPipelineImpl<HasSampler>(pVertName, pFragName, m_QuadPipeline, TexMode, EVulkanBackendBlendModes(i), EVulkanBackendClipModes(j));
5191 }
5192 }
5193
5194 return Ret;
5195 }
5196
5197 template<bool IsTextured>
5198 [[nodiscard]] bool CreateQuadGroupedGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
5199 {
5200 std::array<VkVertexInputAttributeDescription, IsTextured ? 3 : 2> aAttributeDescriptions = {};
5201 aAttributeDescriptions[0] = {0, 0, VK_FORMAT_R32G32B32A32_SFLOAT, 0};
5202 aAttributeDescriptions[1] = {1, 0, VK_FORMAT_R8G8B8A8_UNORM, sizeof(float) * 4};
5203 if(IsTextured)
5204 aAttributeDescriptions[2] = {2, 0, VK_FORMAT_R32G32_SFLOAT, sizeof(float) * 4 + sizeof(uint8_t) * 4};
5205
5206 std::array<VkDescriptorSetLayout, 1> aSetLayouts;
5207 aSetLayouts[0] = m_StandardTexturedDescriptorSetLayout;
5208
5209 uint32_t PushConstantSize = sizeof(SUniformQuadGroupedGPos);
5210
5211 std::array<VkPushConstantRange, 1> aPushConstants{};
5212 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT, .offset: 0, .size: PushConstantSize};
5213
5214 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, sizeof(float) * 4 + sizeof(uint8_t) * 4 + (IsTextured ? (sizeof(float) * 2) : 0), aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
5215 }
5216
5217 template<bool HasSampler>
5218 [[nodiscard]] bool CreateQuadGroupedGraphicsPipeline(const char *pVertName, const char *pFragName)
5219 {
5220 bool Ret = true;
5221
5222 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
5223
5224 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
5225 {
5226 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
5227 {
5228 Ret &= CreateQuadGroupedGraphicsPipelineImpl<HasSampler>(pVertName, pFragName, m_QuadGroupedPipeline, TexMode, EVulkanBackendBlendModes(i), EVulkanBackendClipModes(j));
5229 }
5230 }
5231
5232 return Ret;
5233 }
5234
5235 [[nodiscard]] bool CreateCommandPool()
5236 {
5237 VkCommandPoolCreateInfo CreatePoolInfo{};
5238 CreatePoolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
5239 CreatePoolInfo.queueFamilyIndex = m_VKGraphicsQueueIndex;
5240 CreatePoolInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
5241
5242 m_vCommandPools.resize(new_size: m_ThreadCount);
5243 for(size_t i = 0; i < m_ThreadCount; ++i)
5244 {
5245 if(vkCreateCommandPool(device: m_VKDevice, pCreateInfo: &CreatePoolInfo, pAllocator: nullptr, pCommandPool: &m_vCommandPools[i]) != VK_SUCCESS)
5246 {
5247 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the command pool failed.");
5248 return false;
5249 }
5250 }
5251 return true;
5252 }
5253
5254 void DestroyCommandPool()
5255 {
5256 for(size_t i = 0; i < m_ThreadCount; ++i)
5257 {
5258 vkDestroyCommandPool(device: m_VKDevice, commandPool: m_vCommandPools[i], pAllocator: nullptr);
5259 }
5260 }
5261
	// Allocates all command buffers used for rendering:
	// - one primary draw command buffer per swap chain image (from pool 0)
	// - one primary memory/transfer command buffer per swap chain image (pool 0)
	// - with multi-threaded rendering, one secondary draw command buffer per
	//   swap chain image per thread, from that thread's own pool
	// The "Used" vectors track which buffers were recorded this frame.
	[[nodiscard]] bool CreateCommandBuffers()
	{
		m_vMainDrawCommandBuffers.resize(new_size: m_SwapChainImageCount);
		if(m_ThreadCount > 1)
		{
			m_vvThreadDrawCommandBuffers.resize(new_size: m_ThreadCount);
			m_vvUsedThreadDrawCommandBuffer.resize(new_size: m_ThreadCount);
			m_vHelperThreadDrawCommandBuffers.resize(new_size: m_ThreadCount);
			for(auto &ThreadDrawCommandBuffers : m_vvThreadDrawCommandBuffers)
			{
				ThreadDrawCommandBuffers.resize(new_size: m_SwapChainImageCount);
			}
			for(auto &UsedThreadDrawCommandBuffer : m_vvUsedThreadDrawCommandBuffer)
			{
				UsedThreadDrawCommandBuffer.resize(new_size: m_SwapChainImageCount, x: false);
			}
		}
		m_vMemoryCommandBuffers.resize(new_size: m_SwapChainImageCount);
		m_vUsedMemoryCommandBuffer.resize(new_size: m_SwapChainImageCount, x: false);

		VkCommandBufferAllocateInfo AllocInfo{};
		AllocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
		AllocInfo.commandPool = m_vCommandPools[0];
		AllocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
		AllocInfo.commandBufferCount = (uint32_t)m_vMainDrawCommandBuffers.size();

		if(vkAllocateCommandBuffers(device: m_VKDevice, pAllocateInfo: &AllocInfo, pCommandBuffers: m_vMainDrawCommandBuffers.data()) != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Allocating command buffers failed.");
			return false;
		}

		// AllocInfo is reused; only the count differs for the memory buffers.
		AllocInfo.commandBufferCount = (uint32_t)m_vMemoryCommandBuffers.size();

		if(vkAllocateCommandBuffers(device: m_VKDevice, pAllocateInfo: &AllocInfo, pCommandBuffers: m_vMemoryCommandBuffers.data()) != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Allocating memory command buffers failed.");
			return false;
		}

		if(m_ThreadCount > 1)
		{
			size_t Count = 0;
			for(auto &ThreadDrawCommandBuffers : m_vvThreadDrawCommandBuffers)
			{
				// Thread i allocates from its own pool so it can record
				// without synchronizing with other threads.
				AllocInfo.commandPool = m_vCommandPools[Count];
				++Count;
				AllocInfo.commandBufferCount = (uint32_t)ThreadDrawCommandBuffers.size();
				// Secondary buffers: executed from the per-image primary buffer.
				AllocInfo.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
				if(vkAllocateCommandBuffers(device: m_VKDevice, pAllocateInfo: &AllocInfo, pCommandBuffers: ThreadDrawCommandBuffers.data()) != VK_SUCCESS)
				{
					SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Allocating thread command buffers failed.");
					return false;
				}
			}
		}

		return true;
	}
5321
5322 void DestroyCommandBuffer()
5323 {
5324 if(m_ThreadCount > 1)
5325 {
5326 size_t Count = 0;
5327 for(auto &ThreadDrawCommandBuffers : m_vvThreadDrawCommandBuffers)
5328 {
5329 vkFreeCommandBuffers(device: m_VKDevice, commandPool: m_vCommandPools[Count], commandBufferCount: static_cast<uint32_t>(ThreadDrawCommandBuffers.size()), pCommandBuffers: ThreadDrawCommandBuffers.data());
5330 ++Count;
5331 }
5332 }
5333
5334 vkFreeCommandBuffers(device: m_VKDevice, commandPool: m_vCommandPools[0], commandBufferCount: static_cast<uint32_t>(m_vMemoryCommandBuffers.size()), pCommandBuffers: m_vMemoryCommandBuffers.data());
5335 vkFreeCommandBuffers(device: m_VKDevice, commandPool: m_vCommandPools[0], commandBufferCount: static_cast<uint32_t>(m_vMainDrawCommandBuffers.size()), pCommandBuffers: m_vMainDrawCommandBuffers.data());
5336
5337 m_vvThreadDrawCommandBuffers.clear();
5338 m_vvUsedThreadDrawCommandBuffer.clear();
5339 m_vHelperThreadDrawCommandBuffers.clear();
5340
5341 m_vMainDrawCommandBuffers.clear();
5342 m_vMemoryCommandBuffers.clear();
5343 m_vUsedMemoryCommandBuffer.clear();
5344 }
5345
5346 [[nodiscard]] bool CreateSyncObjects()
5347 {
5348 auto SyncObjectCount = m_SwapChainImageCount;
5349 m_vQueueSubmitSemaphores.resize(new_size: SyncObjectCount);
5350 m_vBusyAcquireImageSemaphores.resize(new_size: SyncObjectCount);
5351
5352 m_vQueueSubmitFences.resize(new_size: SyncObjectCount);
5353
5354 VkSemaphoreCreateInfo CreateSemaphoreInfo{};
5355 CreateSemaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
5356
5357 VkFenceCreateInfo FenceInfo{};
5358 FenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
5359 FenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
5360
5361 if(vkCreateSemaphore(device: m_VKDevice, pCreateInfo: &CreateSemaphoreInfo, pAllocator: nullptr, pSemaphore: &m_AcquireImageSemaphore) != VK_SUCCESS)
5362 {
5363 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating acquire next image semaphore failed.");
5364 return false;
5365 }
5366 for(size_t i = 0; i < SyncObjectCount; i++)
5367 {
5368 if(vkCreateSemaphore(device: m_VKDevice, pCreateInfo: &CreateSemaphoreInfo, pAllocator: nullptr, pSemaphore: &m_vQueueSubmitSemaphores[i]) != VK_SUCCESS ||
5369 vkCreateSemaphore(device: m_VKDevice, pCreateInfo: &CreateSemaphoreInfo, pAllocator: nullptr, pSemaphore: &m_vBusyAcquireImageSemaphores[i]) != VK_SUCCESS ||
5370 vkCreateFence(device: m_VKDevice, pCreateInfo: &FenceInfo, pAllocator: nullptr, pFence: &m_vQueueSubmitFences[i]) != VK_SUCCESS)
5371 {
5372 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating swap chain sync objects(fences, semaphores) failed.");
5373 return false;
5374 }
5375 }
5376
5377 return true;
5378 }
5379
5380 void DestroySyncObjects()
5381 {
5382 for(size_t i = 0; i < m_vBusyAcquireImageSemaphores.size(); i++)
5383 {
5384 vkDestroySemaphore(device: m_VKDevice, semaphore: m_vBusyAcquireImageSemaphores[i], pAllocator: nullptr);
5385 vkDestroySemaphore(device: m_VKDevice, semaphore: m_vQueueSubmitSemaphores[i], pAllocator: nullptr);
5386 vkDestroyFence(device: m_VKDevice, fence: m_vQueueSubmitFences[i], pAllocator: nullptr);
5387 }
5388 vkDestroySemaphore(device: m_VKDevice, semaphore: m_AcquireImageSemaphore, pAllocator: nullptr);
5389
5390 m_vBusyAcquireImageSemaphores.clear();
5391 m_vQueueSubmitSemaphores.clear();
5392
5393 m_vQueueSubmitFences.clear();
5394 }
5395
	// Releases a streamed vertex buffer pair that belongs to the frame with
	// index ImageIndex (delegates to CleanBufferPair).
	void DestroyBufferOfFrame(size_t ImageIndex, SFrameBuffers &Buffer)
	{
		CleanBufferPair(ImageIndex, Buffer&: Buffer.m_Buffer, BufferMem&: Buffer.m_BufferMem);
	}
5400
5401 void DestroyUniBufferOfFrame(size_t ImageIndex, SFrameUniformBuffers &Buffer)
5402 {
5403 CleanBufferPair(ImageIndex, Buffer&: Buffer.m_Buffer, BufferMem&: Buffer.m_BufferMem);
5404 for(auto &DescrSet : Buffer.m_aUniformSets)
5405 {
5406 if(DescrSet.m_Descriptor != VK_NULL_HANDLE)
5407 {
5408 DestroyUniformDescriptorSets(pSets: &DescrSet, SetCount: 1);
5409 }
5410 }
5411 }
5412
5413 /*************
5414 * SWAP CHAIN
5415 **************/
5416
	// Destroys everything whose lifetime is tied to the swap chain: all
	// pipelines, framebuffers, the render pass, multisample attachments and the
	// image views. The swap chain itself is destroyed via
	// DestroySwapChain(ForceSwapChainDestruct); when not forced it may be kept
	// around as the "old swap chain" during recreation (see RecreateSwapChain).
	void CleanupVulkanSwapChain(bool ForceSwapChainDestruct)
	{
		m_StandardPipeline.Destroy(Device&: m_VKDevice);
		m_StandardLinePipeline.Destroy(Device&: m_VKDevice);
		m_Standard3DPipeline.Destroy(Device&: m_VKDevice);
		m_TextPipeline.Destroy(Device&: m_VKDevice);
		m_TilePipeline.Destroy(Device&: m_VKDevice);
		m_TileBorderPipeline.Destroy(Device&: m_VKDevice);
		m_PrimExPipeline.Destroy(Device&: m_VKDevice);
		m_PrimExRotationlessPipeline.Destroy(Device&: m_VKDevice);
		m_SpriteMultiPipeline.Destroy(Device&: m_VKDevice);
		m_SpriteMultiPushPipeline.Destroy(Device&: m_VKDevice);
		m_QuadPipeline.Destroy(Device&: m_VKDevice);
		m_QuadGroupedPipeline.Destroy(Device&: m_VKDevice);

		// Destruction order: framebuffers reference the render pass and image
		// views, so they go first; the swap chain images go last.
		DestroyFramebuffers();

		DestroyRenderPass();

		DestroyMultiSamplerImageAttachments();

		DestroyImageViews();
		ClearSwapChainImageHandles();

		DestroySwapChain(ForceDestroy: ForceSwapChainDestruct);

		m_SwapchainCreated = false;
	}
5445
	// Frees per-swap-chain-image resources for SwapchainCount images.
	// With IsLastCleanup == true the device-lifetime objects (textures, vertex
	// memory blocks, caches, samplers, descriptor pools and set layouts,
	// command pools) are destroyed as well; with false only the per-image data
	// is dropped so it can be recreated for a new swap chain image count
	// (see RecreateSwapChain).
	template<bool IsLastCleanup>
	void CleanupVulkan(size_t SwapchainCount)
	{
		if(IsLastCleanup)
		{
			if(m_SwapchainCreated)
				CleanupVulkanSwapChain(ForceSwapChainDestruct: true);

			// clean all images, buffers, buffer containers
			for(auto &Texture : m_vTextures)
			{
				if(Texture.m_VKTextDescrSet.m_Descriptor != VK_NULL_HANDLE && IsVerbose())
				{
					dbg_msg(sys: "vulkan", fmt: "text textures not cleared over cmd.");
				}
				DestroyTexture(Texture);
			}

			for(auto &BufferObject : m_vBufferObjects)
			{
				// streamed buffers don't own a vertex memory block
				if(!BufferObject.m_IsStreamedBuffer)
					FreeVertexMemBlock(Block&: BufferObject.m_BufferObject.m_Mem);
			}

			m_vBufferContainers.clear();
		}

		m_vImageLastFrameCheck.clear();

		m_vLastPipeline.clear();

		// per-thread streamed vertex/uniform buffers
		for(size_t i = 0; i < m_ThreadCount; ++i)
		{
			m_vStreamedVertexBuffers[i].Destroy(DestroyBuffer: [&](size_t ImageIndex, SFrameBuffers &Buffer) { DestroyBufferOfFrame(ImageIndex, Buffer); });
			m_vStreamedUniformBuffers[i].Destroy(DestroyBuffer: [&](size_t ImageIndex, SFrameUniformBuffers &Buffer) { DestroyUniBufferOfFrame(ImageIndex, Buffer); });
		}
		m_vStreamedVertexBuffers.clear();
		m_vStreamedUniformBuffers.clear();

		// flush the delayed-cleanup queues of every frame
		for(size_t i = 0; i < SwapchainCount; ++i)
		{
			ClearFrameData(FrameImageIndex: i);
		}

		m_vvFrameDelayedBufferCleanup.clear();
		m_vvFrameDelayedTextureCleanup.clear();
		m_vvFrameDelayedTextTexturesCleanup.clear();

		m_StagingBufferCache.DestroyFrameData(ImageCount: SwapchainCount);
		m_StagingBufferCacheImage.DestroyFrameData(ImageCount: SwapchainCount);
		m_VertexBufferCache.DestroyFrameData(ImageCount: SwapchainCount);
		for(auto &ImageBufferCache : m_ImageBufferCaches)
			ImageBufferCache.second.DestroyFrameData(ImageCount: SwapchainCount);

		if(IsLastCleanup)
		{
			m_StagingBufferCache.Destroy(Device&: m_VKDevice);
			m_StagingBufferCacheImage.Destroy(Device&: m_VKDevice);
			m_VertexBufferCache.Destroy(Device&: m_VKDevice);
			for(auto &ImageBufferCache : m_ImageBufferCaches)
				ImageBufferCache.second.Destroy(Device&: m_VKDevice);

			m_ImageBufferCaches.clear();

			DestroyTextureSamplers();
			DestroyDescriptorPools();

			DeletePresentedImageDataImage();
		}

		DestroySyncObjects();
		DestroyCommandBuffer();

		if(IsLastCleanup)
		{
			DestroyCommandPool();
		}

		if(IsLastCleanup)
		{
			// layouts last: pools/sets referencing them are gone by now
			DestroyUniformDescriptorSetLayouts();
			DestroyTextDescriptorSetLayout();
			DestroyDescriptorSetLayouts();
		}
	}
5531
	// Final teardown of the instance-level objects: surface, logical device,
	// debug callback (if it was registered) and the VkInstance itself.
	// Safe to call when the instance was never created.
	void CleanupVulkanSDL()
	{
		if(m_VKInstance != VK_NULL_HANDLE)
		{
			DestroySurface();
			vkDestroyDevice(device: m_VKDevice, pAllocator: nullptr);

			// callback was only registered in these debug modes (see InitVulkanSDL)
			if(g_Config.m_DbgGfx == DEBUG_GFX_MODE_MINIMUM || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL)
			{
				UnregisterDebugCallback();
			}
			vkDestroyInstance(instance: m_VKInstance, pAllocator: nullptr);
			m_VKInstance = VK_NULL_HANDLE;
		}
	}
5547
	// Tears down and rebuilds the swap chain (e.g. after a resize or when the
	// multisampling setting changed). If the new swap chain ends up with a
	// different image count, all per-image data is recreated as well.
	// Returns 0 on success, non-zero on failure.
	int RecreateSwapChain()
	{
		int Ret = 0;
		// everything tied to the old swap chain may still be in flight
		vkDeviceWaitIdle(device: m_VKDevice);

		if(IsVerbose())
		{
			dbg_msg(sys: "vulkan", fmt: "recreating swap chain.");
		}

		VkSwapchainKHR OldSwapChain = VK_NULL_HANDLE;
		uint32_t OldSwapChainImageCount = m_SwapChainImageCount;

		// non-forced destruct: the old swap chain handle can be passed to
		// InitVulkanSwapChain for seamless recreation
		if(m_SwapchainCreated)
			CleanupVulkanSwapChain(ForceSwapChainDestruct: false);

		// set new multi sampling if it was requested
		if(m_NextMultiSamplingCount != std::numeric_limits<uint32_t>::max())
		{
			m_MultiSamplingCount = m_NextMultiSamplingCount;
			m_NextMultiSamplingCount = std::numeric_limits<uint32_t>::max();
		}

		if(!m_SwapchainCreated)
			Ret = InitVulkanSwapChain(OldSwapChain);

		// a changed image count invalidates all per-image resources
		if(OldSwapChainImageCount != m_SwapChainImageCount)
		{
			CleanupVulkan<false>(SwapchainCount: OldSwapChainImageCount);
			InitVulkan<false>();
		}

		if(OldSwapChain != VK_NULL_HANDLE)
		{
			vkDestroySwapchainKHR(device: m_VKDevice, swapchain: OldSwapChain, pAllocator: nullptr);
		}

		if(Ret != 0 && IsVerbose())
		{
			dbg_msg(sys: "vulkan", fmt: "recreating swap chain failed.");
		}

		return Ret;
	}
5592
	// Instance-level Vulkan initialization for the given SDL window:
	// query extensions/layers, create the instance (with debug callback when
	// enabled), pick a GPU, create the logical device and queue, and create the
	// window surface. Fills pRendererString/pVendorString/pVersionString with
	// GPU info for display. Returns 0 on success, -1 on any failure.
	int InitVulkanSDL(SDL_Window *pWindow, uint32_t CanvasWidth, uint32_t CanvasHeight, char *pRendererString, char *pVendorString, char *pVersionString)
	{
		std::vector<std::string> vVKExtensions;
		std::vector<std::string> vVKLayers;

		m_CanvasWidth = CanvasWidth;
		m_CanvasHeight = CanvasHeight;

		if(!GetVulkanExtensions(pWindow, vVKExtensions))
			return -1;

		if(!GetVulkanLayers(vVKLayers))
			return -1;

		if(!CreateVulkanInstance(vVKLayers, vVKExtensions, TryDebugExtensions: true))
			return -1;

		if(g_Config.m_DbgGfx == DEBUG_GFX_MODE_MINIMUM || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL)
		{
			SetupDebugCallback();

			for(auto &VKLayer : vVKLayers)
			{
				dbg_msg(sys: "vulkan", fmt: "Validation layer: %s", VKLayer.c_str());
			}
		}

		if(!SelectGpu(pRendererName: pRendererString, pVendorName: pVendorString, pVersionName: pVersionString))
			return -1;

		if(!CreateLogicalDevice(vVKLayers))
			return -1;

		GetDeviceQueue();

		if(!CreateSurface(pWindow))
			return -1;

		return 0;
	}
5633
5634 /************************
5635 * MEMORY MANAGEMENT
5636 ************************/
5637
5638 uint32_t FindMemoryType(VkPhysicalDevice PhyDevice, uint32_t TypeFilter, VkMemoryPropertyFlags Properties)
5639 {
5640 VkPhysicalDeviceMemoryProperties MemProperties;
5641 vkGetPhysicalDeviceMemoryProperties(physicalDevice: PhyDevice, pMemoryProperties: &MemProperties);
5642
5643 for(uint32_t i = 0; i < MemProperties.memoryTypeCount; i++)
5644 {
5645 if((TypeFilter & (1 << i)) && (MemProperties.memoryTypes[i].propertyFlags & Properties) == Properties)
5646 {
5647 return i;
5648 }
5649 }
5650
5651 return 0;
5652 }
5653
5654 [[nodiscard]] bool CreateBuffer(VkDeviceSize BufferSize, EMemoryBlockUsage MemUsage, VkBufferUsageFlags BufferUsage, VkMemoryPropertyFlags MemoryProperties, VkBuffer &VKBuffer, SDeviceMemoryBlock &VKBufferMemory)
5655 {
5656 VkBufferCreateInfo BufferInfo{};
5657 BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
5658 BufferInfo.size = BufferSize;
5659 BufferInfo.usage = BufferUsage;
5660 BufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
5661
5662 if(vkCreateBuffer(device: m_VKDevice, pCreateInfo: &BufferInfo, pAllocator: nullptr, pBuffer: &VKBuffer) != VK_SUCCESS)
5663 {
5664 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Buffer creation failed.");
5665 return false;
5666 }
5667
5668 VkMemoryRequirements MemRequirements;
5669 vkGetBufferMemoryRequirements(device: m_VKDevice, buffer: VKBuffer, pMemoryRequirements: &MemRequirements);
5670
5671 VkMemoryAllocateInfo MemAllocInfo{};
5672 MemAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
5673 MemAllocInfo.allocationSize = MemRequirements.size;
5674 MemAllocInfo.memoryTypeIndex = FindMemoryType(PhyDevice: m_VKGPU, TypeFilter: MemRequirements.memoryTypeBits, Properties: MemoryProperties);
5675
5676 VKBufferMemory.m_Size = MemRequirements.size;
5677
5678 if(MemUsage == MEMORY_BLOCK_USAGE_BUFFER)
5679 m_pBufferMemoryUsage->store(i: m_pBufferMemoryUsage->load(m: std::memory_order_relaxed) + MemRequirements.size, m: std::memory_order_relaxed);
5680 else if(MemUsage == MEMORY_BLOCK_USAGE_STAGING)
5681 m_pStagingMemoryUsage->store(i: m_pStagingMemoryUsage->load(m: std::memory_order_relaxed) + MemRequirements.size, m: std::memory_order_relaxed);
5682 else if(MemUsage == MEMORY_BLOCK_USAGE_STREAM)
5683 m_pStreamMemoryUsage->store(i: m_pStreamMemoryUsage->load(m: std::memory_order_relaxed) + MemRequirements.size, m: std::memory_order_relaxed);
5684
5685 if(IsVerbose())
5686 {
5687 VerboseAllocatedMemory(Size: MemRequirements.size, FrameImageIndex: m_CurImageIndex, MemUsage);
5688 }
5689
5690 if(!AllocateVulkanMemory(pAllocateInfo: &MemAllocInfo, pMemory: &VKBufferMemory.m_Mem))
5691 {
5692 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Allocation for buffer object failed.");
5693 return false;
5694 }
5695
5696 VKBufferMemory.m_UsageType = MemUsage;
5697
5698 if(vkBindBufferMemory(device: m_VKDevice, buffer: VKBuffer, memory: VKBufferMemory.m_Mem, memoryOffset: 0) != VK_SUCCESS)
5699 {
5700 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Binding memory to buffer failed.");
5701 return false;
5702 }
5703
5704 return true;
5705 }
5706
5707 [[nodiscard]] bool AllocateDescriptorPool(SDeviceDescriptorPools &DescriptorPools, size_t AllocPoolSize)
5708 {
5709 SDeviceDescriptorPool NewPool;
5710 NewPool.m_Size = AllocPoolSize;
5711
5712 VkDescriptorPoolSize PoolSize{};
5713 if(DescriptorPools.m_IsUniformPool)
5714 PoolSize.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
5715 else
5716 PoolSize.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
5717 PoolSize.descriptorCount = AllocPoolSize;
5718
5719 VkDescriptorPoolCreateInfo PoolInfo{};
5720 PoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
5721 PoolInfo.poolSizeCount = 1;
5722 PoolInfo.pPoolSizes = &PoolSize;
5723 PoolInfo.maxSets = AllocPoolSize;
5724 PoolInfo.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
5725
5726 if(vkCreateDescriptorPool(device: m_VKDevice, pCreateInfo: &PoolInfo, pAllocator: nullptr, pDescriptorPool: &NewPool.m_Pool) != VK_SUCCESS)
5727 {
5728 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the descriptor pool failed.");
5729 return false;
5730 }
5731
5732 DescriptorPools.m_vPools.push_back(x: NewPool);
5733
5734 return true;
5735 }
5736
5737 [[nodiscard]] bool CreateDescriptorPools()
5738 {
5739 m_StandardTextureDescrPool.m_IsUniformPool = false;
5740 m_StandardTextureDescrPool.m_DefaultAllocSize = 1024;
5741 m_TextTextureDescrPool.m_IsUniformPool = false;
5742 m_TextTextureDescrPool.m_DefaultAllocSize = 8;
5743
5744 m_vUniformBufferDescrPools.resize(new_size: m_ThreadCount);
5745 for(auto &UniformBufferDescrPool : m_vUniformBufferDescrPools)
5746 {
5747 UniformBufferDescrPool.m_IsUniformPool = true;
5748 UniformBufferDescrPool.m_DefaultAllocSize = 512;
5749 }
5750
5751 bool Ret = AllocateDescriptorPool(DescriptorPools&: m_StandardTextureDescrPool, AllocPoolSize: CCommandBuffer::MAX_TEXTURES);
5752 Ret |= AllocateDescriptorPool(DescriptorPools&: m_TextTextureDescrPool, AllocPoolSize: 8);
5753
5754 for(auto &UniformBufferDescrPool : m_vUniformBufferDescrPools)
5755 {
5756 Ret |= AllocateDescriptorPool(DescriptorPools&: UniformBufferDescrPool, AllocPoolSize: 64);
5757 }
5758
5759 return Ret;
5760 }
5761
5762 void DestroyDescriptorPools()
5763 {
5764 for(auto &DescrPool : m_StandardTextureDescrPool.m_vPools)
5765 vkDestroyDescriptorPool(device: m_VKDevice, descriptorPool: DescrPool.m_Pool, pAllocator: nullptr);
5766 for(auto &DescrPool : m_TextTextureDescrPool.m_vPools)
5767 vkDestroyDescriptorPool(device: m_VKDevice, descriptorPool: DescrPool.m_Pool, pAllocator: nullptr);
5768
5769 for(auto &UniformBufferDescrPool : m_vUniformBufferDescrPools)
5770 {
5771 for(auto &DescrPool : UniformBufferDescrPool.m_vPools)
5772 vkDestroyDescriptorPool(device: m_VKDevice, descriptorPool: DescrPool.m_Pool, pAllocator: nullptr);
5773 }
5774 m_vUniformBufferDescrPools.clear();
5775 }
5776
	// Reserves AllocNum descriptor set slots from the given pool collection,
	// creating new pools on demand, and returns (in RetDescr) the
	// VkDescriptorPool of the FIRST pool used. Each entry of pSets records its
	// owning collection and pool index so the slot can be returned later via
	// FreeDescriptorSetFromPool().
	// NOTE(review): when a request spans multiple pools, RetDescr only reflects
	// the first pool; all visible callers allocate 1 set at a time, so this
	// does not matter today — confirm before allocating in bulk.
	[[nodiscard]] bool GetDescriptorPoolForAlloc(VkDescriptorPool &RetDescr, SDeviceDescriptorPools &DescriptorPools, SDeviceDescriptorSet *pSets, size_t AllocNum)
	{
		size_t CurAllocNum = AllocNum;
		size_t CurAllocOffset = 0;
		RetDescr = VK_NULL_HANDLE;

		while(CurAllocNum > 0)
		{
			size_t AllocatedInThisRun = 0;

			bool Found = false;
			size_t DescriptorPoolIndex = std::numeric_limits<size_t>::max();
			for(size_t i = 0; i < DescriptorPools.m_vPools.size(); ++i)
			{
				auto &Pool = DescriptorPools.m_vPools[i];
				// pool has room for the whole remaining request
				if(Pool.m_CurSize + CurAllocNum < Pool.m_Size)
				{
					AllocatedInThisRun = CurAllocNum;
					Pool.m_CurSize += CurAllocNum;
					Found = true;
					if(RetDescr == VK_NULL_HANDLE)
						RetDescr = Pool.m_Pool;
					DescriptorPoolIndex = i;
					break;
				}
				else
				{
					// take whatever partial capacity this pool still has
					size_t RemainingPoolCount = Pool.m_Size - Pool.m_CurSize;
					if(RemainingPoolCount > 0)
					{
						AllocatedInThisRun = RemainingPoolCount;
						Pool.m_CurSize += RemainingPoolCount;
						Found = true;
						if(RetDescr == VK_NULL_HANDLE)
							RetDescr = Pool.m_Pool;
						DescriptorPoolIndex = i;
						break;
					}
				}
			}

			// every existing pool is full: grow the collection by one pool
			if(!Found)
			{
				DescriptorPoolIndex = DescriptorPools.m_vPools.size();

				if(!AllocateDescriptorPool(DescriptorPools, AllocPoolSize: DescriptorPools.m_DefaultAllocSize))
					return false;

				AllocatedInThisRun = minimum(a: (size_t)DescriptorPools.m_DefaultAllocSize, b: CurAllocNum);

				auto &Pool = DescriptorPools.m_vPools.back();
				Pool.m_CurSize += AllocatedInThisRun;
				if(RetDescr == VK_NULL_HANDLE)
					RetDescr = Pool.m_Pool;
			}

			// tag the sets served in this run with their owning pool
			for(size_t i = CurAllocOffset; i < CurAllocOffset + AllocatedInThisRun; ++i)
			{
				pSets[i].m_pPools = &DescriptorPools;
				pSets[i].m_PoolIndex = DescriptorPoolIndex;
			}
			CurAllocOffset += AllocatedInThisRun;
			CurAllocNum -= AllocatedInThisRun;
		}

		return true;
	}
5844
	// Returns a descriptor set to the pool it was reserved from (see
	// GetDescriptorPoolForAlloc) and releases the slot. A set whose pool index
	// is the max-size_t sentinel was never pool-tracked and is left untouched.
	// NOTE(review): DescrSet.m_Descriptor is not reset here; callers that reuse
	// the struct clear it themselves (e.g. DestroyTexturedStandardDescriptorSets).
	void FreeDescriptorSetFromPool(SDeviceDescriptorSet &DescrSet)
	{
		if(DescrSet.m_PoolIndex != std::numeric_limits<size_t>::max())
		{
			vkFreeDescriptorSets(device: m_VKDevice, descriptorPool: DescrSet.m_pPools->m_vPools[DescrSet.m_PoolIndex].m_Pool, descriptorSetCount: 1, pDescriptorSets: &DescrSet.m_Descriptor);
			DescrSet.m_pPools->m_vPools[DescrSet.m_PoolIndex].m_CurSize -= 1;
		}
	}
5853
5854 [[nodiscard]] bool CreateNewTexturedStandardDescriptorSets(size_t TextureSlot, size_t DescrIndex)
5855 {
5856 auto &Texture = m_vTextures[TextureSlot];
5857
5858 auto &DescrSet = Texture.m_aVKStandardTexturedDescrSets[DescrIndex];
5859
5860 VkDescriptorSetAllocateInfo DesAllocInfo{};
5861 DesAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
5862 if(!GetDescriptorPoolForAlloc(RetDescr&: DesAllocInfo.descriptorPool, DescriptorPools&: m_StandardTextureDescrPool, pSets: &DescrSet, AllocNum: 1))
5863 return false;
5864 DesAllocInfo.descriptorSetCount = 1;
5865 DesAllocInfo.pSetLayouts = &m_StandardTexturedDescriptorSetLayout;
5866
5867 if(vkAllocateDescriptorSets(device: m_VKDevice, pAllocateInfo: &DesAllocInfo, pDescriptorSets: &DescrSet.m_Descriptor) != VK_SUCCESS)
5868 {
5869 return false;
5870 }
5871
5872 VkDescriptorImageInfo ImageInfo{};
5873 ImageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
5874 ImageInfo.imageView = Texture.m_ImgView;
5875 ImageInfo.sampler = Texture.m_aSamplers[DescrIndex];
5876
5877 std::array<VkWriteDescriptorSet, 1> aDescriptorWrites{};
5878
5879 aDescriptorWrites[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
5880 aDescriptorWrites[0].dstSet = DescrSet.m_Descriptor;
5881 aDescriptorWrites[0].dstBinding = 0;
5882 aDescriptorWrites[0].dstArrayElement = 0;
5883 aDescriptorWrites[0].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
5884 aDescriptorWrites[0].descriptorCount = 1;
5885 aDescriptorWrites[0].pImageInfo = &ImageInfo;
5886
5887 vkUpdateDescriptorSets(device: m_VKDevice, descriptorWriteCount: static_cast<uint32_t>(aDescriptorWrites.size()), pDescriptorWrites: aDescriptorWrites.data(), descriptorCopyCount: 0, pDescriptorCopies: nullptr);
5888
5889 return true;
5890 }
5891
	// Frees one standard textured descriptor set of the texture and resets the
	// struct so the handle and pool index cannot be reused stale.
	void DestroyTexturedStandardDescriptorSets(CTexture &Texture, size_t DescrIndex)
	{
		auto &DescrSet = Texture.m_aVKStandardTexturedDescrSets[DescrIndex];
		FreeDescriptorSetFromPool(DescrSet);
		DescrSet = {};
	}
5898
5899 [[nodiscard]] bool CreateNew3DTexturedStandardDescriptorSets(size_t TextureSlot)
5900 {
5901 auto &Texture = m_vTextures[TextureSlot];
5902
5903 auto &DescrSet = Texture.m_VKStandard3DTexturedDescrSet;
5904
5905 VkDescriptorSetAllocateInfo DesAllocInfo{};
5906 DesAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
5907 if(!GetDescriptorPoolForAlloc(RetDescr&: DesAllocInfo.descriptorPool, DescriptorPools&: m_StandardTextureDescrPool, pSets: &DescrSet, AllocNum: 1))
5908 return false;
5909 DesAllocInfo.descriptorSetCount = 1;
5910 DesAllocInfo.pSetLayouts = &m_Standard3DTexturedDescriptorSetLayout;
5911
5912 if(vkAllocateDescriptorSets(device: m_VKDevice, pAllocateInfo: &DesAllocInfo, pDescriptorSets: &DescrSet.m_Descriptor) != VK_SUCCESS)
5913 {
5914 return false;
5915 }
5916
5917 VkDescriptorImageInfo ImageInfo{};
5918 ImageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
5919 ImageInfo.imageView = Texture.m_Img3DView;
5920 ImageInfo.sampler = Texture.m_Sampler3D;
5921
5922 std::array<VkWriteDescriptorSet, 1> aDescriptorWrites{};
5923
5924 aDescriptorWrites[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
5925 aDescriptorWrites[0].dstSet = DescrSet.m_Descriptor;
5926 aDescriptorWrites[0].dstBinding = 0;
5927 aDescriptorWrites[0].dstArrayElement = 0;
5928 aDescriptorWrites[0].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
5929 aDescriptorWrites[0].descriptorCount = 1;
5930 aDescriptorWrites[0].pImageInfo = &ImageInfo;
5931
5932 vkUpdateDescriptorSets(device: m_VKDevice, descriptorWriteCount: static_cast<uint32_t>(aDescriptorWrites.size()), pDescriptorWrites: aDescriptorWrites.data(), descriptorCopyCount: 0, pDescriptorCopies: nullptr);
5933
5934 return true;
5935 }
5936
5937 void DestroyTextured3DStandardDescriptorSets(CTexture &Texture)
5938 {
5939 auto &DescrSet = Texture.m_VKStandard3DTexturedDescrSet;
5940 FreeDescriptorSetFromPool(DescrSet);
5941 }
5942
	// Allocates ONE descriptor set for text rendering that binds both the text
	// texture (binding 0) and its outline texture (binding 1). The set is
	// stored on the text texture only; the outline texture has no own set.
	[[nodiscard]] bool CreateNewTextDescriptorSets(size_t Texture, size_t TextureOutline)
	{
		auto &TextureText = m_vTextures[Texture];
		auto &TextureTextOutline = m_vTextures[TextureOutline];
		auto &DescrSetText = TextureText.m_VKTextDescrSet;

		VkDescriptorSetAllocateInfo DesAllocInfo{};
		DesAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
		if(!GetDescriptorPoolForAlloc(RetDescr&: DesAllocInfo.descriptorPool, DescriptorPools&: m_TextTextureDescrPool, pSets: &DescrSetText, AllocNum: 1))
			return false;
		DesAllocInfo.descriptorSetCount = 1;
		DesAllocInfo.pSetLayouts = &m_TextDescriptorSetLayout;

		if(vkAllocateDescriptorSets(device: m_VKDevice, pAllocateInfo: &DesAllocInfo, pDescriptorSets: &DescrSetText.m_Descriptor) != VK_SUCCESS)
		{
			return false;
		}

		// image info [0]: text texture, [1]: outline texture
		std::array<VkDescriptorImageInfo, 2> aImageInfo{};
		aImageInfo[0].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
		aImageInfo[0].imageView = TextureText.m_ImgView;
		aImageInfo[0].sampler = TextureText.m_aSamplers[0];
		aImageInfo[1].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
		aImageInfo[1].imageView = TextureTextOutline.m_ImgView;
		aImageInfo[1].sampler = TextureTextOutline.m_aSamplers[0];

		std::array<VkWriteDescriptorSet, 2> aDescriptorWrites{};

		aDescriptorWrites[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		aDescriptorWrites[0].dstSet = DescrSetText.m_Descriptor;
		aDescriptorWrites[0].dstBinding = 0;
		aDescriptorWrites[0].dstArrayElement = 0;
		aDescriptorWrites[0].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
		aDescriptorWrites[0].descriptorCount = 1;
		aDescriptorWrites[0].pImageInfo = aImageInfo.data();
		// second write is a copy of the first, retargeted at binding 1 with the
		// outline image info
		aDescriptorWrites[1] = aDescriptorWrites[0];
		aDescriptorWrites[1].dstBinding = 1;
		aDescriptorWrites[1].pImageInfo = &aImageInfo[1];

		vkUpdateDescriptorSets(device: m_VKDevice, descriptorWriteCount: static_cast<uint32_t>(aDescriptorWrites.size()), pDescriptorWrites: aDescriptorWrites.data(), descriptorCopyCount: 0, pDescriptorCopies: nullptr);

		return true;
	}
5986
5987 void DestroyTextDescriptorSets(CTexture &Texture, CTexture &TextureOutline)
5988 {
5989 auto &DescrSet = Texture.m_VKTextDescrSet;
5990 FreeDescriptorSetFromPool(DescrSet);
5991 }
5992
5993 [[nodiscard]] bool HasMultiSampling() const
5994 {
5995 return GetSampleCount() != VK_SAMPLE_COUNT_1_BIT;
5996 }
5997
5998 VkSampleCountFlagBits GetMaxSampleCount() const
5999 {
6000 if(m_MaxMultiSample & VK_SAMPLE_COUNT_64_BIT)
6001 return VK_SAMPLE_COUNT_64_BIT;
6002 else if(m_MaxMultiSample & VK_SAMPLE_COUNT_32_BIT)
6003 return VK_SAMPLE_COUNT_32_BIT;
6004 else if(m_MaxMultiSample & VK_SAMPLE_COUNT_16_BIT)
6005 return VK_SAMPLE_COUNT_16_BIT;
6006 else if(m_MaxMultiSample & VK_SAMPLE_COUNT_8_BIT)
6007 return VK_SAMPLE_COUNT_8_BIT;
6008 else if(m_MaxMultiSample & VK_SAMPLE_COUNT_4_BIT)
6009 return VK_SAMPLE_COUNT_4_BIT;
6010 else if(m_MaxMultiSample & VK_SAMPLE_COUNT_2_BIT)
6011 return VK_SAMPLE_COUNT_2_BIT;
6012
6013 return VK_SAMPLE_COUNT_1_BIT;
6014 }
6015
6016 VkSampleCountFlagBits GetSampleCount() const
6017 {
6018 auto MaxSampleCount = GetMaxSampleCount();
6019 if(m_MultiSamplingCount >= 64 && MaxSampleCount >= VK_SAMPLE_COUNT_64_BIT)
6020 return VK_SAMPLE_COUNT_64_BIT;
6021 else if(m_MultiSamplingCount >= 32 && MaxSampleCount >= VK_SAMPLE_COUNT_32_BIT)
6022 return VK_SAMPLE_COUNT_32_BIT;
6023 else if(m_MultiSamplingCount >= 16 && MaxSampleCount >= VK_SAMPLE_COUNT_16_BIT)
6024 return VK_SAMPLE_COUNT_16_BIT;
6025 else if(m_MultiSamplingCount >= 8 && MaxSampleCount >= VK_SAMPLE_COUNT_8_BIT)
6026 return VK_SAMPLE_COUNT_8_BIT;
6027 else if(m_MultiSamplingCount >= 4 && MaxSampleCount >= VK_SAMPLE_COUNT_4_BIT)
6028 return VK_SAMPLE_COUNT_4_BIT;
6029 else if(m_MultiSamplingCount >= 2 && MaxSampleCount >= VK_SAMPLE_COUNT_2_BIT)
6030 return VK_SAMPLE_COUNT_2_BIT;
6031
6032 return VK_SAMPLE_COUNT_1_BIT;
6033 }
6034
6035 int InitVulkanSwapChain(VkSwapchainKHR &OldSwapChain)
6036 {
6037 OldSwapChain = VK_NULL_HANDLE;
6038 if(!CreateSwapChain(OldSwapChain))
6039 return -1;
6040
6041 if(!GetSwapChainImageHandles())
6042 return -1;
6043
6044 if(!CreateImageViews())
6045 return -1;
6046
6047 if(!CreateMultiSamplerImageAttachments())
6048 {
6049 return -1;
6050 }
6051
6052 m_LastPresentedSwapChainImageIndex = std::numeric_limits<decltype(m_LastPresentedSwapChainImageIndex)>::max();
6053
6054 if(!CreateRenderPass(ClearAttachments: true))
6055 return -1;
6056
6057 if(!CreateFramebuffers())
6058 return -1;
6059
6060 if(!CreateStandardGraphicsPipeline(pVertName: "shader/vulkan/prim.vert.spv", pFragName: "shader/vulkan/prim.frag.spv", HasSampler: false, IsLinePipe: false))
6061 return -1;
6062
6063 if(!CreateStandardGraphicsPipeline(pVertName: "shader/vulkan/prim_textured.vert.spv", pFragName: "shader/vulkan/prim_textured.frag.spv", HasSampler: true, IsLinePipe: false))
6064 return -1;
6065
6066 if(!CreateStandardGraphicsPipeline(pVertName: "shader/vulkan/prim.vert.spv", pFragName: "shader/vulkan/prim.frag.spv", HasSampler: false, IsLinePipe: true))
6067 return -1;
6068
6069 if(!CreateStandard3DGraphicsPipeline(pVertName: "shader/vulkan/prim3d.vert.spv", pFragName: "shader/vulkan/prim3d.frag.spv", HasSampler: false))
6070 return -1;
6071
6072 if(!CreateStandard3DGraphicsPipeline(pVertName: "shader/vulkan/prim3d_textured.vert.spv", pFragName: "shader/vulkan/prim3d_textured.frag.spv", HasSampler: true))
6073 return -1;
6074
6075 if(!CreateTextGraphicsPipeline(pVertName: "shader/vulkan/text.vert.spv", pFragName: "shader/vulkan/text.frag.spv"))
6076 return -1;
6077
6078 if(!CreateTileGraphicsPipeline<false>(pVertName: "shader/vulkan/tile.vert.spv", pFragName: "shader/vulkan/tile.frag.spv", IsBorder: false))
6079 return -1;
6080
6081 if(!CreateTileGraphicsPipeline<true>(pVertName: "shader/vulkan/tile_textured.vert.spv", pFragName: "shader/vulkan/tile_textured.frag.spv", IsBorder: false))
6082 return -1;
6083
6084 if(!CreateTileGraphicsPipeline<false>(pVertName: "shader/vulkan/tile_border.vert.spv", pFragName: "shader/vulkan/tile_border.frag.spv", IsBorder: true))
6085 return -1;
6086
6087 if(!CreateTileGraphicsPipeline<true>(pVertName: "shader/vulkan/tile_border_textured.vert.spv", pFragName: "shader/vulkan/tile_border_textured.frag.spv", IsBorder: true))
6088 return -1;
6089
6090 if(!CreatePrimExGraphicsPipeline(pVertName: "shader/vulkan/primex_rotationless.vert.spv", pFragName: "shader/vulkan/primex_rotationless.frag.spv", HasSampler: false, Rotationless: true))
6091 return -1;
6092
6093 if(!CreatePrimExGraphicsPipeline(pVertName: "shader/vulkan/primex_tex_rotationless.vert.spv", pFragName: "shader/vulkan/primex_tex_rotationless.frag.spv", HasSampler: true, Rotationless: true))
6094 return -1;
6095
6096 if(!CreatePrimExGraphicsPipeline(pVertName: "shader/vulkan/primex.vert.spv", pFragName: "shader/vulkan/primex.frag.spv", HasSampler: false, Rotationless: false))
6097 return -1;
6098
6099 if(!CreatePrimExGraphicsPipeline(pVertName: "shader/vulkan/primex_tex.vert.spv", pFragName: "shader/vulkan/primex_tex.frag.spv", HasSampler: true, Rotationless: false))
6100 return -1;
6101
6102 if(!CreateSpriteMultiGraphicsPipeline(pVertName: "shader/vulkan/spritemulti.vert.spv", pFragName: "shader/vulkan/spritemulti.frag.spv"))
6103 return -1;
6104
6105 if(!CreateSpriteMultiPushGraphicsPipeline(pVertName: "shader/vulkan/spritemulti_push.vert.spv", pFragName: "shader/vulkan/spritemulti_push.frag.spv"))
6106 return -1;
6107
6108 if(!CreateQuadGraphicsPipeline<false>(pVertName: "shader/vulkan/quad.vert.spv", pFragName: "shader/vulkan/quad.frag.spv"))
6109 return -1;
6110
6111 if(!CreateQuadGraphicsPipeline<true>(pVertName: "shader/vulkan/quad_textured.vert.spv", pFragName: "shader/vulkan/quad_textured.frag.spv"))
6112 return -1;
6113
6114 if(!CreateQuadGroupedGraphicsPipeline<false>(pVertName: "shader/vulkan/quad_grouped.vert.spv", pFragName: "shader/vulkan/quad_grouped.frag.spv"))
6115 return -1;
6116
6117 if(!CreateQuadGroupedGraphicsPipeline<true>(pVertName: "shader/vulkan/quad_grouped_textured.vert.spv", pFragName: "shader/vulkan/quad_grouped_textured.frag.spv"))
6118 return -1;
6119
6120 m_SwapchainCreated = true;
6121 return 0;
6122 }
6123
6124 template<bool IsFirstInitialization>
6125 int InitVulkan()
6126 {
6127 if(IsFirstInitialization)
6128 {
6129 if(!CreateDescriptorSetLayouts())
6130 return -1;
6131
6132 if(!CreateTextDescriptorSetLayout())
6133 return -1;
6134
6135 if(!CreateSpriteMultiUniformDescriptorSetLayout())
6136 return -1;
6137
6138 if(!CreateQuadUniformDescriptorSetLayout())
6139 return -1;
6140
6141 VkSwapchainKHR OldSwapChain = VK_NULL_HANDLE;
6142 if(InitVulkanSwapChain(OldSwapChain) != 0)
6143 return -1;
6144 }
6145
6146 if(IsFirstInitialization)
6147 {
6148 if(!CreateCommandPool())
6149 return -1;
6150 }
6151
6152 if(!CreateCommandBuffers())
6153 return -1;
6154
6155 if(!CreateSyncObjects())
6156 return -1;
6157
6158 if(IsFirstInitialization)
6159 {
6160 if(!CreateDescriptorPools())
6161 return -1;
6162
6163 if(!CreateTextureSamplers())
6164 return -1;
6165 }
6166
6167 m_vStreamedVertexBuffers.resize(new_size: m_ThreadCount);
6168 m_vStreamedUniformBuffers.resize(new_size: m_ThreadCount);
6169 for(size_t i = 0; i < m_ThreadCount; ++i)
6170 {
6171 m_vStreamedVertexBuffers[i].Init(FrameImageCount: m_SwapChainImageCount);
6172 m_vStreamedUniformBuffers[i].Init(FrameImageCount: m_SwapChainImageCount);
6173 }
6174
6175 m_vLastPipeline.resize(new_size: m_ThreadCount, VK_NULL_HANDLE);
6176
6177 m_vvFrameDelayedBufferCleanup.resize(new_size: m_SwapChainImageCount);
6178 m_vvFrameDelayedTextureCleanup.resize(new_size: m_SwapChainImageCount);
6179 m_vvFrameDelayedTextTexturesCleanup.resize(new_size: m_SwapChainImageCount);
6180 m_StagingBufferCache.Init(SwapChainImageCount: m_SwapChainImageCount);
6181 m_StagingBufferCacheImage.Init(SwapChainImageCount: m_SwapChainImageCount);
6182 m_VertexBufferCache.Init(SwapChainImageCount: m_SwapChainImageCount);
6183 for(auto &ImageBufferCache : m_ImageBufferCaches)
6184 ImageBufferCache.second.Init(SwapChainImageCount: m_SwapChainImageCount);
6185
6186 m_vImageLastFrameCheck.resize(new_size: m_SwapChainImageCount, x: 0);
6187
6188 if(IsFirstInitialization)
6189 {
6190 // check if image format supports linear blitting
6191 VkFormatProperties FormatProperties;
6192 vkGetPhysicalDeviceFormatProperties(physicalDevice: m_VKGPU, format: VK_FORMAT_R8G8B8A8_UNORM, pFormatProperties: &FormatProperties);
6193 if((FormatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) != 0)
6194 {
6195 m_AllowsLinearBlitting = true;
6196 }
6197 if((FormatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_SRC_BIT) != 0 && (FormatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_DST_BIT) != 0)
6198 {
6199 m_OptimalRGBAImageBlitting = true;
6200 }
6201 // check if image format supports blitting to linear tiled images
6202 if((FormatProperties.linearTilingFeatures & VK_FORMAT_FEATURE_BLIT_DST_BIT) != 0)
6203 {
6204 m_LinearRGBAImageBlitting = true;
6205 }
6206
6207 vkGetPhysicalDeviceFormatProperties(physicalDevice: m_VKGPU, format: m_VKSurfFormat.format, pFormatProperties: &FormatProperties);
6208 if((FormatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_SRC_BIT) != 0)
6209 {
6210 m_OptimalSwapChainImageBlitting = true;
6211 }
6212 }
6213
6214 return 0;
6215 }
6216
6217 [[nodiscard]] bool GetMemoryCommandBuffer(VkCommandBuffer *&pMemCommandBuffer)
6218 {
6219 auto &MemCommandBuffer = m_vMemoryCommandBuffers[m_CurImageIndex];
6220 if(!m_vUsedMemoryCommandBuffer[m_CurImageIndex])
6221 {
6222 m_vUsedMemoryCommandBuffer[m_CurImageIndex] = true;
6223
6224 vkResetCommandBuffer(commandBuffer: MemCommandBuffer, flags: VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
6225
6226 VkCommandBufferBeginInfo BeginInfo{};
6227 BeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
6228 BeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
6229 if(vkBeginCommandBuffer(commandBuffer: MemCommandBuffer, pBeginInfo: &BeginInfo) != VK_SUCCESS)
6230 {
6231 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_RENDER_RECORDING, pErr: "Command buffer cannot be filled anymore.");
6232 return false;
6233 }
6234 }
6235 pMemCommandBuffer = &MemCommandBuffer;
6236 return true;
6237 }
6238
6239 [[nodiscard]] bool GetGraphicCommandBuffer(VkCommandBuffer *&pDrawCommandBuffer, size_t RenderThreadIndex)
6240 {
6241 if(m_ThreadCount < 2)
6242 {
6243 pDrawCommandBuffer = &m_vMainDrawCommandBuffers[m_CurImageIndex];
6244 return true;
6245 }
6246 else
6247 {
6248 auto &DrawCommandBuffer = m_vvThreadDrawCommandBuffers[RenderThreadIndex][m_CurImageIndex];
6249 if(!m_vvUsedThreadDrawCommandBuffer[RenderThreadIndex][m_CurImageIndex])
6250 {
6251 m_vvUsedThreadDrawCommandBuffer[RenderThreadIndex][m_CurImageIndex] = true;
6252
6253 vkResetCommandBuffer(commandBuffer: DrawCommandBuffer, flags: VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
6254
6255 VkCommandBufferBeginInfo BeginInfo{};
6256 BeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
6257 BeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
6258
6259 VkCommandBufferInheritanceInfo InheritanceInfo{};
6260 InheritanceInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
6261 InheritanceInfo.framebuffer = m_vFramebufferList[m_CurImageIndex];
6262 InheritanceInfo.occlusionQueryEnable = VK_FALSE;
6263 InheritanceInfo.renderPass = m_VKRenderPass;
6264 InheritanceInfo.subpass = 0;
6265
6266 BeginInfo.pInheritanceInfo = &InheritanceInfo;
6267
6268 if(vkBeginCommandBuffer(commandBuffer: DrawCommandBuffer, pBeginInfo: &BeginInfo) != VK_SUCCESS)
6269 {
6270 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_RENDER_RECORDING, pErr: "Thread draw command buffer cannot be filled anymore.");
6271 return false;
6272 }
6273 }
6274 pDrawCommandBuffer = &DrawCommandBuffer;
6275 return true;
6276 }
6277 }
6278
	// Accessor for the primary draw command buffer of the current frame image
	// (in contrast to GetGraphicCommandBuffer, which may hand out a per-thread
	// buffer and lazily begins recording).
	VkCommandBuffer &GetMainGraphicCommandBuffer()
	{
		return m_vMainDrawCommandBuffers[m_CurImageIndex];
	}
6283
6284 /************************
6285 * STREAM BUFFERS SETUP
6286 ************************/
6287
6288 typedef std::function<bool(SFrameBuffers &, VkBuffer, VkDeviceSize)> TNewMemFunc;
6289
	// Acquires a mapped region of per-frame stream memory for DataSize bytes and
	// copies pData into it. First tries to reuse an existing buffer of the
	// current frame image with enough free space; otherwise allocates
	// BufferCreateCount new buffers of sizeof(TInstanceTypeName) *
	// InstanceTypeCount bytes each, maps them, and invokes NewMemFunc once per
	// new buffer for additional setup. Outputs the chosen buffer, its memory
	// block and the offset of the copied data. Returns true on every successful
	// acquisition, false on allocation/mapping/setup failure.
	// NOTE(review): a previous comment claimed true is returned only when memory
	// was "just allocated"; the code returns true for the reuse path as well.
	template<typename TStreamMemName, typename TInstanceTypeName, size_t InstanceTypeCount, size_t BufferCreateCount, bool UsesCurrentCountOffset>
	[[nodiscard]] bool CreateStreamBuffer(TStreamMemName *&pBufferMem, TNewMemFunc &&NewMemFunc, SStreamMemory<TStreamMemName> &StreamUniformBuffer, VkBufferUsageFlagBits Usage, VkBuffer &NewBuffer, SDeviceMemoryBlock &NewBufferMem, size_t &BufferOffset, const void *pData, size_t DataSize)
	{
		VkBuffer Buffer = VK_NULL_HANDLE;
		SDeviceMemoryBlock BufferMem;
		size_t Offset = 0;

		uint8_t *pMem = nullptr;

		// search for an existing buffer of this frame image with enough room;
		// UsesCurrentCountOffset skips buffers already known to be in use
		size_t BufferCountOffset = 0;
		if(UsesCurrentCountOffset)
			BufferCountOffset = StreamUniformBuffer.GetUsedCount(m_CurImageIndex);
		for(; BufferCountOffset < StreamUniformBuffer.GetBuffers(m_CurImageIndex).size(); ++BufferCountOffset)
		{
			auto &BufferOfFrame = StreamUniformBuffer.GetBuffers(m_CurImageIndex)[BufferCountOffset];
			if(BufferOfFrame.m_Size >= DataSize + BufferOfFrame.m_UsedSize)
			{
				// a previously untouched buffer counts as used from now on
				if(BufferOfFrame.m_UsedSize == 0)
					StreamUniformBuffer.IncreaseUsedCount(m_CurImageIndex);
				Buffer = BufferOfFrame.m_Buffer;
				BufferMem = BufferOfFrame.m_BufferMem;
				Offset = BufferOfFrame.m_UsedSize;
				BufferOfFrame.m_UsedSize += DataSize;
				pMem = BufferOfFrame.m_pMappedBufferData;
				pBufferMem = &BufferOfFrame;
				break;
			}
		}

		// no suitable buffer found: allocate a fresh batch
		if(BufferMem.m_Mem == VK_NULL_HANDLE)
		{
			// create memory
			VkBuffer StreamBuffer;
			SDeviceMemoryBlock StreamBufferMemory;
			const VkDeviceSize NewBufferSingleSize = sizeof(TInstanceTypeName) * InstanceTypeCount;
			const VkDeviceSize NewBufferSize = NewBufferSingleSize * BufferCreateCount;
			if(!CreateBuffer(BufferSize: NewBufferSize, MemUsage: MEMORY_BLOCK_USAGE_STREAM, BufferUsage: Usage, MemoryProperties: VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT, VKBuffer&: StreamBuffer, VKBufferMemory&: StreamBufferMemory))
				return false;

			// persistently map the whole allocation once
			void *pMappedData = nullptr;
			if(vkMapMemory(device: m_VKDevice, memory: StreamBufferMemory.m_Mem, offset: 0, VK_WHOLE_SIZE, flags: 0, ppData: &pMappedData) != VK_SUCCESS)
				return false;

			// register BufferCreateCount sub-buffers that share this allocation
			size_t NewBufferIndex = StreamUniformBuffer.GetBuffers(m_CurImageIndex).size();
			for(size_t i = 0; i < BufferCreateCount; ++i)
			{
				StreamUniformBuffer.GetBuffers(m_CurImageIndex).push_back(TStreamMemName(StreamBuffer, StreamBufferMemory, NewBufferSingleSize * i, NewBufferSingleSize, 0, ((uint8_t *)pMappedData) + (NewBufferSingleSize * i)));
				StreamUniformBuffer.GetRanges(m_CurImageIndex).push_back({});
				if(!NewMemFunc(StreamUniformBuffer.GetBuffers(m_CurImageIndex).back(), StreamBuffer, NewBufferSingleSize * i))
					return false;
			}
			auto &NewStreamBuffer = StreamUniformBuffer.GetBuffers(m_CurImageIndex)[NewBufferIndex];

			Buffer = StreamBuffer;
			BufferMem = StreamBufferMemory;

			pBufferMem = &NewStreamBuffer;
			pMem = NewStreamBuffer.m_pMappedBufferData;
			Offset = NewStreamBuffer.m_OffsetInBuffer;
			NewStreamBuffer.m_UsedSize += DataSize;

			StreamUniformBuffer.IncreaseUsedCount(m_CurImageIndex);
		}

		// Offset here is the offset in the buffer
		if(BufferMem.m_Size - Offset < DataSize)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Stream buffers are limited to CCommandBuffer::MAX_VERTICES. Exceeding it is a bug in the high level code.");
			return false;
		}

		{
			mem_copy(dest: pMem + Offset, source: pData, size: DataSize);
		}

		NewBuffer = Buffer;
		NewBufferMem = BufferMem;
		BufferOffset = Offset;

		return true;
	}
6372
6373 [[nodiscard]] bool CreateStreamVertexBuffer(size_t RenderThreadIndex, VkBuffer &NewBuffer, SDeviceMemoryBlock &NewBufferMem, size_t &BufferOffset, const void *pData, size_t DataSize)
6374 {
6375 SFrameBuffers *pStreamBuffer;
6376 return CreateStreamBuffer<SFrameBuffers, GL_SVertexTex3DStream, CCommandBuffer::MAX_VERTICES * 2, 1, false>(
6377 pBufferMem&: pStreamBuffer, NewMemFunc: [](SFrameBuffers &, VkBuffer, VkDeviceSize) { return true; }, StreamUniformBuffer&: m_vStreamedVertexBuffers[RenderThreadIndex], Usage: VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, NewBuffer, NewBufferMem, BufferOffset, pData, DataSize);
6378 }
6379
	// Acquires uniform stream memory for DataSize bytes, creating two uniform
	// descriptor sets per newly allocated buffer: index 0 with the sprite-multi
	// layout, index 1 with the quad layout (the one callers use when
	// RequiresSharedStagesDescriptor is set). Outputs the descriptor set that
	// binds the copied data.
	// NOTE(review): the setup lambda receives SFrameBuffers& but the stream
	// memory actually holds SFrameUniformBuffers elements, hence the pointer
	// casts to reach m_aUniformSets.
	template<typename TName, size_t InstanceMaxParticleCount, size_t MaxInstances>
	[[nodiscard]] bool GetUniformBufferObjectImpl(size_t RenderThreadIndex, bool RequiresSharedStagesDescriptor, SStreamMemory<SFrameUniformBuffers> &StreamUniformBuffer, SDeviceDescriptorSet &DescrSet, const void *pData, size_t DataSize)
	{
		VkBuffer NewBuffer;
		SDeviceMemoryBlock NewBufferMem;
		size_t BufferOffset;
		SFrameUniformBuffers *pMem;
		if(!CreateStreamBuffer<SFrameUniformBuffers, TName, InstanceMaxParticleCount, MaxInstances, true>(
			   pMem,
			   [this, RenderThreadIndex](SFrameBuffers &Mem, VkBuffer Buffer, VkDeviceSize MemOffset) {
				   // descriptor set 0: sprite-multi layout
				   if(!CreateUniformDescriptorSets(RenderThreadIndex, SetLayout&: m_SpriteMultiUniformDescriptorSetLayout, pSets: ((SFrameUniformBuffers *)(&Mem))->m_aUniformSets.data(), SetCount: 1, BindBuffer: Buffer, SingleBufferInstanceSize: InstanceMaxParticleCount * sizeof(TName), MemoryOffset: MemOffset))
					   return false;
				   // descriptor set 1: quad layout (shared shader stages)
				   if(!CreateUniformDescriptorSets(RenderThreadIndex, SetLayout&: m_QuadUniformDescriptorSetLayout, pSets: &((SFrameUniformBuffers *)(&Mem))->m_aUniformSets[1], SetCount: 1, BindBuffer: Buffer, SingleBufferInstanceSize: InstanceMaxParticleCount * sizeof(TName), MemoryOffset: MemOffset))
					   return false;
				   return true;
			   },
			   StreamUniformBuffer, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, NewBuffer, NewBufferMem, BufferOffset, pData, DataSize))
			return false;

		DescrSet = pMem->m_aUniformSets[RequiresSharedStagesDescriptor ? 1 : 0];
		return true;
	}
6402
6403 [[nodiscard]] bool GetUniformBufferObject(size_t RenderThreadIndex, bool RequiresSharedStagesDescriptor, SDeviceDescriptorSet &DescrSet, size_t ParticleCount, const void *pData, size_t DataSize)
6404 {
6405 return GetUniformBufferObjectImpl<IGraphics::SRenderSpriteInfo, 512, 128>(RenderThreadIndex, RequiresSharedStagesDescriptor, StreamUniformBuffer&: m_vStreamedUniformBuffers[RenderThreadIndex], DescrSet, pData, DataSize);
6406 }
6407
6408 [[nodiscard]] bool CreateIndexBuffer(void *pData, size_t DataSize, VkBuffer &Buffer, SDeviceMemoryBlock &Memory)
6409 {
6410 VkDeviceSize BufferDataSize = DataSize;
6411
6412 SMemoryBlock<STAGING_BUFFER_CACHE_ID> StagingBuffer;
6413 if(!GetStagingBuffer(ResBlock&: StagingBuffer, pBufferData: pData, RequiredSize: DataSize))
6414 return false;
6415
6416 SDeviceMemoryBlock VertexBufferMemory;
6417 VkBuffer VertexBuffer;
6418 if(!CreateBuffer(BufferSize: BufferDataSize, MemUsage: MEMORY_BLOCK_USAGE_BUFFER, BufferUsage: VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT, MemoryProperties: VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, VKBuffer&: VertexBuffer, VKBufferMemory&: VertexBufferMemory))
6419 return false;
6420
6421 if(!MemoryBarrier(Buffer: VertexBuffer, Offset: 0, Size: BufferDataSize, BufferAccessType: VK_ACCESS_INDEX_READ_BIT, BeforeCommand: true))
6422 return false;
6423 if(!CopyBuffer(SrcBuffer: StagingBuffer.m_Buffer, DstBuffer: VertexBuffer, SrcOffset: StagingBuffer.m_HeapData.m_OffsetToAlign, DstOffset: 0, CopySize: BufferDataSize))
6424 return false;
6425 if(!MemoryBarrier(Buffer: VertexBuffer, Offset: 0, Size: BufferDataSize, BufferAccessType: VK_ACCESS_INDEX_READ_BIT, BeforeCommand: false))
6426 return false;
6427
6428 UploadAndFreeStagingMemBlock(Block&: StagingBuffer);
6429
6430 Buffer = VertexBuffer;
6431 Memory = VertexBufferMemory;
6432 return true;
6433 }
6434
	// Destroys an index buffer and its memory block via the shared buffer
	// cleanup path. NOTE(review): image index 0 is passed as the cleanup slot —
	// presumably intentional for buffers destroyed at shutdown; confirm against
	// CleanBufferPair's semantics.
	void DestroyIndexBuffer(VkBuffer &Buffer, SDeviceMemoryBlock &Memory)
	{
		CleanBufferPair(ImageIndex: 0, Buffer, BufferMem&: Memory);
	}
6439
6440 /************************
6441 * COMMAND IMPLEMENTATION
6442 ************************/
6443 template<typename TName>
6444 [[nodiscard]] static bool IsInCommandRange(TName CMD, TName Min, TName Max)
6445 {
6446 return CMD >= Min && CMD < Max;
6447 }
6448
	// Dispatches a single command. Commands in [CMD_FIRST, CMD_COUNT) are
	// looked up in m_aCommandCallbacks; render commands are either executed
	// inline (thread index 0) or queued for a render thread. Backend-internal
	// commands (init/shutdown) are handled by the switch at the bottom.
	[[nodiscard]] ERunCommandReturnTypes RunCommand(const CCommandBuffer::SCommand *pBaseCommand) override
	{
		if(m_HasError)
		{
			// ignore all further commands
			return ERunCommandReturnTypes::RUN_COMMAND_COMMAND_ERROR;
		}

		if(IsInCommandRange<decltype(pBaseCommand->m_Cmd)>(CMD: pBaseCommand->m_Cmd, Min: CCommandBuffer::CMD_FIRST, Max: CCommandBuffer::CMD_COUNT))
		{
			auto &CallbackObj = m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::ECommandBufferCMD(pBaseCommand->m_Cmd))];
			SRenderCommandExecuteBuffer Buffer;
			Buffer.m_Command = (CCommandBuffer::ECommandBufferCMD)pBaseCommand->m_Cmd;
			Buffer.m_pRawCommand = pBaseCommand;
			Buffer.m_ThreadIndex = 0;

			// last command of this pipe: reset the thread index to its
			// max() sentinel so the next pipe starts single threaded again
			if(m_CurCommandInPipe + 1 == m_CommandsInPipe)
			{
				m_LastCommandsInPipeThreadIndex = std::numeric_limits<decltype(m_LastCommandsInPipeThreadIndex)>::max();
			}

			bool CanStartThread = false;
			if(CallbackObj.m_IsRenderCommand)
			{
				// sentinel value means no worker thread was assigned yet in this pipe
				bool ForceSingleThread = m_LastCommandsInPipeThreadIndex == std::numeric_limits<decltype(m_LastCommandsInPipeThreadIndex)>::max();

				// spread the pipe's commands evenly over the worker threads;
				// crossing into a new worker's share starts that thread lazily
				size_t PotentiallyNextThread = (((m_CurCommandInPipe * (m_ThreadCount - 1)) / m_CommandsInPipe) + 1);
				if(PotentiallyNextThread - 1 > m_LastCommandsInPipeThreadIndex)
				{
					CanStartThread = true;
					m_LastCommandsInPipeThreadIndex = PotentiallyNextThread - 1;
				}
				// thread index 0 = execute inline on this thread
				Buffer.m_ThreadIndex = m_ThreadCount > 1 && !ForceSingleThread ? (m_LastCommandsInPipeThreadIndex + 1) : 0;
				CallbackObj.m_FillExecuteBuffer(Buffer, pBaseCommand);
				m_CurRenderCallCountInPipe += Buffer.m_EstimatedRenderCallCount;
			}
			bool Ret = true;
			if(!CallbackObj.m_IsRenderCommand || (Buffer.m_ThreadIndex == 0 && !m_RenderingPaused))
			{
				// execute directly on this thread
				Ret = CallbackObj.m_CMDIsHandled;
				if(!CallbackObj.m_CommandCB(pBaseCommand, Buffer))
				{
					// an error occurred, stop this command and ignore all further commands
					return ERunCommandReturnTypes::RUN_COMMAND_COMMAND_ERROR;
				}
			}
			else if(!m_RenderingPaused)
			{
				// queue for the assigned render thread instead
				if(CanStartThread)
				{
					StartRenderThread(ThreadIndex: m_LastCommandsInPipeThreadIndex - 1);
				}
				m_vvThreadCommandLists[Buffer.m_ThreadIndex - 1].push_back(x: Buffer);
			}

			++m_CurCommandInPipe;
			return Ret ? ERunCommandReturnTypes::RUN_COMMAND_COMMAND_HANDLED : ERunCommandReturnTypes::RUN_COMMAND_COMMAND_UNHANDLED;
		}

		// non-render-range command: still maintain the per-pipe counters
		if(m_CurCommandInPipe + 1 == m_CommandsInPipe)
		{
			m_LastCommandsInPipeThreadIndex = std::numeric_limits<decltype(m_LastCommandsInPipeThreadIndex)>::max();
		}
		++m_CurCommandInPipe;

		switch(pBaseCommand->m_Cmd)
		{
		case CCommandProcessorFragment_GLBase::CMD_INIT:
			if(!Cmd_Init(pCommand: static_cast<const SCommand_Init *>(pBaseCommand)))
			{
				SetWarningPreMsg("Could not initialize Vulkan: ");
				return RUN_COMMAND_COMMAND_WARNING;
			}
			break;
		case CCommandProcessorFragment_GLBase::CMD_SHUTDOWN:
			if(!Cmd_Shutdown(pCommand: static_cast<const SCommand_Shutdown *>(pBaseCommand)))
			{
				SetWarningPreMsg("Could not shutdown Vulkan: ");
				return RUN_COMMAND_COMMAND_WARNING;
			}
			break;

		case CCommandProcessorFragment_GLBase::CMD_PRE_INIT:
			if(!Cmd_PreInit(pCommand: static_cast<const CCommandProcessorFragment_GLBase::SCommand_PreInit *>(pBaseCommand)))
			{
				SetWarningPreMsg("Could not initialize Vulkan: ");
				return RUN_COMMAND_COMMAND_WARNING;
			}
			break;
		case CCommandProcessorFragment_GLBase::CMD_POST_SHUTDOWN:
			if(!Cmd_PostShutdown(pCommand: static_cast<const CCommandProcessorFragment_GLBase::SCommand_PostShutdown *>(pBaseCommand)))
			{
				SetWarningPreMsg("Could not shutdown Vulkan: ");
				return RUN_COMMAND_COMMAND_WARNING;
			}
			break;
		default:
			return ERunCommandReturnTypes::RUN_COMMAND_COMMAND_UNHANDLED;
		}

		return ERunCommandReturnTypes::RUN_COMMAND_COMMAND_HANDLED;
	}
6551
	// Backend initialization: reports the backend's capabilities, stores the
	// handles shared with the caller, runs the full Vulkan initialization and
	// creates the two shared quad index buffers. Writes a negative value to
	// *m_pInitError on failure.
	[[nodiscard]] bool Cmd_Init(const SCommand_Init *pCommand)
	{
		// advertise what this backend supports
		pCommand->m_pCapabilities->m_TileBuffering = true;
		pCommand->m_pCapabilities->m_QuadBuffering = true;
		pCommand->m_pCapabilities->m_TextBuffering = true;
		pCommand->m_pCapabilities->m_QuadContainerBuffering = true;
		pCommand->m_pCapabilities->m_ShaderSupport = true;

		pCommand->m_pCapabilities->m_MipMapping = true;
		// 2D array textures are used instead of real 3D textures
		pCommand->m_pCapabilities->m_3DTextures = false;
		pCommand->m_pCapabilities->m_2DArrayTextures = true;
		pCommand->m_pCapabilities->m_NPOTTextures = true;

		pCommand->m_pCapabilities->m_ContextMajor = 1;
		pCommand->m_pCapabilities->m_ContextMinor = 1;
		pCommand->m_pCapabilities->m_ContextPatch = 0;

		pCommand->m_pCapabilities->m_TrianglesAsQuads = true;

		m_GlobalTextureLodBIAS = g_Config.m_GfxGLTextureLODBIAS;
		// shared memory usage counters, written by the backend, read elsewhere
		m_pTextureMemoryUsage = pCommand->m_pTextureMemoryUsage;
		m_pBufferMemoryUsage = pCommand->m_pBufferMemoryUsage;
		m_pStreamMemoryUsage = pCommand->m_pStreamMemoryUsage;
		m_pStagingMemoryUsage = pCommand->m_pStagingMemoryUsage;

		m_MultiSamplingCount = (g_Config.m_GfxFsaaSamples & 0xFFFFFFFE); // ignore the uneven bit, only even multi sampling works

		// callback used to read back the currently presented image (screenshots)
		*pCommand->m_pReadPresentedImageDataFunc = [this](uint32_t &Width, uint32_t &Height, CImageInfo::EImageFormat &Format, std::vector<uint8_t> &vDstData) {
			return GetPresentedImageData(Width, Height, Format, vDstData);
		};

		m_pWindow = pCommand->m_pWindow;

		// NOTE(review): this first assignment is immediately overridden by the
		// branch below when the instance is missing; kept for clarity of intent
		*pCommand->m_pInitError = m_VKInstance != VK_NULL_HANDLE ? 0 : -1;

		if(m_VKInstance == VK_NULL_HANDLE)
		{
			*pCommand->m_pInitError = -2;
			return false;
		}

		m_pStorage = pCommand->m_pStorage;
		if(InitVulkan<true>() != 0)
		{
			*pCommand->m_pInitError = -2;
			return false;
		}

		// index pattern for rendering quads as two triangles:
		// (0 1 2) (0 2 3) per group of four vertices
		std::array<uint32_t, (size_t)CCommandBuffer::MAX_VERTICES / 4 * 6> aIndices;
		int Primq = 0;
		for(int i = 0; i < CCommandBuffer::MAX_VERTICES / 4 * 6; i += 6)
		{
			aIndices[i] = Primq;
			aIndices[i + 1] = Primq + 1;
			aIndices[i + 2] = Primq + 2;
			aIndices[i + 3] = Primq;
			aIndices[i + 4] = Primq + 2;
			aIndices[i + 5] = Primq + 3;
			Primq += 4;
		}

		if(!PrepareFrame())
			return false;
		// PrepareFrame may have set the error state asynchronously
		if(m_HasError)
		{
			*pCommand->m_pInitError = -2;
			return false;
		}

		if(!CreateIndexBuffer(pData: aIndices.data(), DataSize: sizeof(uint32_t) * aIndices.size(), Buffer&: m_IndexBuffer, Memory&: m_IndexBufferMemory))
		{
			*pCommand->m_pInitError = -2;
			return false;
		}
		if(!CreateIndexBuffer(pData: aIndices.data(), DataSize: sizeof(uint32_t) * aIndices.size(), Buffer&: m_RenderIndexBuffer, Memory&: m_RenderIndexBufferMemory))
		{
			*pCommand->m_pInitError = -2;
			return false;
		}
		m_CurRenderIndexPrimitiveCount = CCommandBuffer::MAX_VERTICES / 4;

		m_CanAssert = true;

		return true;
	}
6637
6638 [[nodiscard]] bool Cmd_Shutdown(const SCommand_Shutdown *pCommand)
6639 {
6640 vkDeviceWaitIdle(device: m_VKDevice);
6641
6642 DestroyIndexBuffer(Buffer&: m_IndexBuffer, Memory&: m_IndexBufferMemory);
6643 DestroyIndexBuffer(Buffer&: m_RenderIndexBuffer, Memory&: m_RenderIndexBufferMemory);
6644
6645 CleanupVulkan<true>(SwapchainCount: m_SwapChainImageCount);
6646
6647 return true;
6648 }
6649
6650 [[nodiscard]] bool Cmd_Texture_Destroy(const CCommandBuffer::SCommand_Texture_Destroy *pCommand)
6651 {
6652 size_t ImageIndex = (size_t)pCommand->m_Slot;
6653 auto &Texture = m_vTextures[ImageIndex];
6654
6655 m_vvFrameDelayedTextureCleanup[m_CurImageIndex].push_back(x: Texture);
6656
6657 Texture = CTexture{};
6658
6659 return true;
6660 }
6661
	// Creates an RGBA8 texture in the given slot from the command's pixel data.
	// Takes ownership of m_pData and frees it after a successful upload.
	// NOTE(review): on the failure path pData is not freed here — presumably
	// CreateTextureCMD handles or reassigns it internally; confirm ownership
	// before changing this.
	[[nodiscard]] bool Cmd_Texture_Create(const CCommandBuffer::SCommand_Texture_Create *pCommand)
	{
		int Slot = pCommand->m_Slot;
		int Width = pCommand->m_Width;
		int Height = pCommand->m_Height;
		int Flags = pCommand->m_Flags;
		uint8_t *pData = pCommand->m_pData;

		if(!CreateTextureCMD(Slot, Width, Height, Format: VK_FORMAT_R8G8B8A8_UNORM, StoreFormat: VK_FORMAT_R8G8B8A8_UNORM, Flags, pData))
			return false;

		free(ptr: pData);

		return true;
	}
6677
	// Creates the pair of single-channel (R8) text textures — glyph and outline
	// — without mip maps, then builds the combined text descriptor set binding
	// both. Takes ownership of the two data pointers and frees them on success.
	// NOTE(review): CreateTextureCMD receives the data pointers by mutable
	// reference and may reassign them; on the failure paths they are not freed
	// here — confirm ownership inside CreateTextureCMD.
	[[nodiscard]] bool Cmd_TextTextures_Create(const CCommandBuffer::SCommand_TextTextures_Create *pCommand)
	{
		int Slot = pCommand->m_Slot;
		int SlotOutline = pCommand->m_SlotOutline;
		int Width = pCommand->m_Width;
		int Height = pCommand->m_Height;

		uint8_t *pTmpData = pCommand->m_pTextData;
		uint8_t *pTmpData2 = pCommand->m_pTextOutlineData;

		if(!CreateTextureCMD(Slot, Width, Height, Format: VK_FORMAT_R8_UNORM, StoreFormat: VK_FORMAT_R8_UNORM, Flags: TextureFlag::NO_MIPMAPS, pData&: pTmpData))
			return false;
		if(!CreateTextureCMD(Slot: SlotOutline, Width, Height, Format: VK_FORMAT_R8_UNORM, StoreFormat: VK_FORMAT_R8_UNORM, Flags: TextureFlag::NO_MIPMAPS, pData&: pTmpData2))
			return false;

		if(!CreateNewTextDescriptorSets(Texture: Slot, TextureOutline: SlotOutline))
			return false;

		free(ptr: pTmpData);
		free(ptr: pTmpData2);

		return true;
	}
6701
6702 [[nodiscard]] bool Cmd_TextTextures_Destroy(const CCommandBuffer::SCommand_TextTextures_Destroy *pCommand)
6703 {
6704 size_t ImageIndex = (size_t)pCommand->m_Slot;
6705 size_t ImageIndexOutline = (size_t)pCommand->m_SlotOutline;
6706 auto &Texture = m_vTextures[ImageIndex];
6707 auto &TextureOutline = m_vTextures[ImageIndexOutline];
6708
6709 m_vvFrameDelayedTextTexturesCleanup[m_CurImageIndex].emplace_back(args&: Texture, args&: TextureOutline);
6710
6711 Texture = {};
6712 TextureOutline = {};
6713
6714 return true;
6715 }
6716
	// Uploads a sub-rectangle of new glyph data (single-channel R8) into an
	// existing text texture, then frees the temporary pixel buffer.
	// NOTE(review): pData is not freed when UpdateTexture fails - presumably
	// fine because a false return shuts the backend down; confirm.
	[[nodiscard]] bool Cmd_TextTexture_Update(const CCommandBuffer::SCommand_TextTexture_Update *pCommand)
	{
		size_t IndexTex = pCommand->m_Slot;
		uint8_t *pData = pCommand->m_pData;

		if(!UpdateTexture(TextureSlot: IndexTex, Format: VK_FORMAT_R8_UNORM, pData, XOff: pCommand->m_X, YOff: pCommand->m_Y, Width: pCommand->m_Width, Height: pCommand->m_Height))
			return false;

		free(ptr: pData);

		return true;
	}
6729
6730 void Cmd_Clear_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_Clear *pCommand)
6731 {
6732 if(!pCommand->m_ForceClear)
6733 {
6734 bool ColorChanged = m_aClearColor[0] != pCommand->m_Color.r || m_aClearColor[1] != pCommand->m_Color.g ||
6735 m_aClearColor[2] != pCommand->m_Color.b || m_aClearColor[3] != pCommand->m_Color.a;
6736 m_aClearColor[0] = pCommand->m_Color.r;
6737 m_aClearColor[1] = pCommand->m_Color.g;
6738 m_aClearColor[2] = pCommand->m_Color.b;
6739 m_aClearColor[3] = pCommand->m_Color.a;
6740 if(ColorChanged)
6741 ExecBuffer.m_ClearColorInRenderThread = true;
6742 }
6743 else
6744 {
6745 ExecBuffer.m_ClearColorInRenderThread = true;
6746 }
6747 ExecBuffer.m_EstimatedRenderCallCount = 0;
6748 }
6749
	// Records a vkCmdClearAttachments over the full swap-image viewport when
	// the fill-execute step flagged that a clear must happen in the render
	// thread. The command's own color is used (not the cached one).
	[[nodiscard]] bool Cmd_Clear(const SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_Clear *pCommand)
	{
		if(ExecBuffer.m_ClearColorInRenderThread)
		{
			std::array<VkClearAttachment, 1> aAttachments = {VkClearAttachment{.aspectMask: VK_IMAGE_ASPECT_COLOR_BIT, .colorAttachment: 0, .clearValue: VkClearValue{.color: VkClearColorValue{.float32: {pCommand->m_Color.r, pCommand->m_Color.g, pCommand->m_Color.b, pCommand->m_Color.a}}}}};
			std::array<VkClearRect, 1> aClearRects = {VkClearRect{.rect: {.offset: {.x: 0, .y: 0}, .extent: m_VKSwapImgAndViewportExtent.m_SwapImageViewport}, .baseArrayLayer: 0, .layerCount: 1}};

			VkCommandBuffer *pCommandBuffer;
			if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
				return false;
			auto &CommandBuffer = *pCommandBuffer;
			vkCmdClearAttachments(commandBuffer: CommandBuffer, attachmentCount: aAttachments.size(), pAttachments: aAttachments.data(), rectCount: aClearRects.size(), pRects: aClearRects.data());
		}

		return true;
	}
6766
6767 void Cmd_Render_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_Render *pCommand)
6768 {
6769 bool IsTextured = GetIsTextured(State: pCommand->m_State);
6770 if(IsTextured)
6771 {
6772 size_t AddressModeIndex = GetAddressModeIndex(State: pCommand->m_State);
6773 ExecBuffer.m_aDescriptors[0] = m_vTextures[pCommand->m_State.m_Texture].m_aVKStandardTexturedDescrSets[AddressModeIndex];
6774 }
6775
6776 ExecBuffer.m_IndexBuffer = m_IndexBuffer;
6777
6778 ExecBuffer.m_EstimatedRenderCallCount = 1;
6779
6780 ExecBufferFillDynamicStates(State: pCommand->m_State, ExecBuffer);
6781 }
6782
	// Draws a standard stream-vertex primitive batch; the `false` template
	// argument selects the plain (non tex-3D) vertex path.
	[[nodiscard]] bool Cmd_Render(const CCommandBuffer::SCommand_Render *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
	{
		return RenderStandard<CCommandBuffer::SVertex, false>(ExecBuffer, State: pCommand->m_State, PrimType: pCommand->m_PrimType, pVertices: pCommand->m_pVertices, PrimitiveCount: pCommand->m_PrimCount);
	}
6787
	// Reads back a single pixel at m_Position from the presented image.
	// Presents first if this command batch has not swapped yet (*m_pSwapped
	// tracks that so at most one swap happens). On readback failure white is
	// reported. Width/Height/Format are only consumed on the success path.
	[[nodiscard]] bool Cmd_ReadPixel(const CCommandBuffer::SCommand_TrySwapAndReadPixel *pCommand)
	{
		if(!*pCommand->m_pSwapped && !NextFrame())
			return false;
		*pCommand->m_pSwapped = true;

		uint32_t Width;
		uint32_t Height;
		CImageInfo::EImageFormat Format;
		if(GetPresentedImageDataImpl(Width, Height, Format, vDstData&: m_vReadPixelHelper, ResetAlpha: false, PixelOffset: pCommand->m_Position))
		{
			// alpha is forced to fully opaque
			*pCommand->m_pColor = ColorRGBA(m_vReadPixelHelper[0] / 255.0f, m_vReadPixelHelper[1] / 255.0f, m_vReadPixelHelper[2] / 255.0f, 1.0f);
		}
		else
		{
			*pCommand->m_pColor = ColorRGBA(1.0f, 1.0f, 1.0f, 1.0f);
		}

		return true;
	}
6808
6809 [[nodiscard]] bool Cmd_Screenshot(const CCommandBuffer::SCommand_TrySwapAndScreenshot *pCommand)
6810 {
6811 if(!*pCommand->m_pSwapped && !NextFrame())
6812 return false;
6813 *pCommand->m_pSwapped = true;
6814
6815 uint32_t Width;
6816 uint32_t Height;
6817 CImageInfo::EImageFormat Format;
6818 if(GetPresentedImageDataImpl(Width, Height, Format, vDstData&: m_vScreenshotHelper, ResetAlpha: true, PixelOffset: {}))
6819 {
6820 const size_t ImgSize = (size_t)Width * (size_t)Height * CImageInfo::PixelSize(Format);
6821 pCommand->m_pImage->m_pData = static_cast<uint8_t *>(malloc(size: ImgSize));
6822 mem_copy(dest: pCommand->m_pImage->m_pData, source: m_vScreenshotHelper.data(), size: ImgSize);
6823 }
6824 else
6825 {
6826 pCommand->m_pImage->m_pData = nullptr;
6827 }
6828 pCommand->m_pImage->m_Width = (int)Width;
6829 pCommand->m_pImage->m_Height = (int)Height;
6830 pCommand->m_pImage->m_Format = Format;
6831
6832 return true;
6833 }
6834
6835 void Cmd_RenderTex3D_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderTex3D *pCommand)
6836 {
6837 bool IsTextured = GetIsTextured(State: pCommand->m_State);
6838 if(IsTextured)
6839 {
6840 ExecBuffer.m_aDescriptors[0] = m_vTextures[pCommand->m_State.m_Texture].m_VKStandard3DTexturedDescrSet;
6841 }
6842
6843 ExecBuffer.m_IndexBuffer = m_IndexBuffer;
6844
6845 ExecBuffer.m_EstimatedRenderCallCount = 1;
6846
6847 ExecBufferFillDynamicStates(State: pCommand->m_State, ExecBuffer);
6848 }
6849
	// Draws a tex-3D stream-vertex batch; the `true` template argument
	// selects the tex-3D vertex path (see Cmd_Render for the plain one).
	[[nodiscard]] bool Cmd_RenderTex3D(const CCommandBuffer::SCommand_RenderTex3D *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
	{
		return RenderStandard<CCommandBuffer::SVertexTex3DStream, true>(ExecBuffer, State: pCommand->m_State, PrimType: pCommand->m_PrimType, pVertices: pCommand->m_pVertices, PrimitiveCount: pCommand->m_PrimCount);
	}
6854
	// Viewport updates record no draw calls; nothing else to prepare.
	void Cmd_Update_Viewport_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_Update_Viewport *pCommand)
	{
		ExecBuffer.m_EstimatedRenderCallCount = 0;
	}
6859
6860 [[nodiscard]] bool Cmd_Update_Viewport(const CCommandBuffer::SCommand_Update_Viewport *pCommand)
6861 {
6862 if(pCommand->m_ByResize)
6863 {
6864 if(IsVerbose())
6865 {
6866 dbg_msg(sys: "vulkan", fmt: "got resize event.");
6867 }
6868 m_CanvasWidth = (uint32_t)pCommand->m_Width;
6869 m_CanvasHeight = (uint32_t)pCommand->m_Height;
6870#ifndef CONF_PLATFORM_MACOS
6871 m_RecreateSwapChain = true;
6872#endif
6873 }
6874 else
6875 {
6876 auto Viewport = m_VKSwapImgAndViewportExtent.GetPresentedImageViewport();
6877 if(pCommand->m_X != 0 || pCommand->m_Y != 0 || (uint32_t)pCommand->m_Width != Viewport.width || (uint32_t)pCommand->m_Height != Viewport.height)
6878 {
6879 m_HasDynamicViewport = true;
6880
6881 // convert viewport from OGL to vulkan
6882 int32_t ViewportY = (int32_t)Viewport.height - ((int32_t)pCommand->m_Y + (int32_t)pCommand->m_Height);
6883 uint32_t ViewportH = (int32_t)pCommand->m_Height;
6884 m_DynamicViewportOffset = {.x: (int32_t)pCommand->m_X, .y: ViewportY};
6885 m_DynamicViewportSize = {.width: (uint32_t)pCommand->m_Width, .height: ViewportH};
6886 }
6887 else
6888 {
6889 m_HasDynamicViewport = false;
6890 }
6891 }
6892
6893 return true;
6894 }
6895
6896 [[nodiscard]] bool Cmd_VSync(const CCommandBuffer::SCommand_VSync *pCommand)
6897 {
6898 if(IsVerbose())
6899 {
6900 dbg_msg(sys: "vulkan", fmt: "queueing swap chain recreation because vsync was changed");
6901 }
6902 m_RecreateSwapChain = true;
6903 *pCommand->m_pRetOk = true;
6904
6905 return true;
6906 }
6907
6908 [[nodiscard]] bool Cmd_MultiSampling(const CCommandBuffer::SCommand_MultiSampling *pCommand)
6909 {
6910 if(IsVerbose())
6911 {
6912 dbg_msg(sys: "vulkan", fmt: "queueing swap chain recreation because multi sampling was changed");
6913 }
6914 m_RecreateSwapChain = true;
6915
6916 uint32_t MSCount = (std::min(a: pCommand->m_RequestedMultiSamplingCount, b: (uint32_t)GetMaxSampleCount()) & 0xFFFFFFFE); // ignore the uneven bits
6917 m_NextMultiSamplingCount = MSCount;
6918
6919 *pCommand->m_pRetMultiSamplingCount = MSCount;
6920 *pCommand->m_pRetOk = true;
6921
6922 return true;
6923 }
6924
	// Presents the current frame and starts the next one.
	[[nodiscard]] bool Cmd_Swap(const CCommandBuffer::SCommand_Swap *pCommand)
	{
		return NextFrame();
	}
6929
	// Creates a buffer object from the uploaded data; the one-time-use flag
	// is forwarded so CreateBufferObject can choose its allocation strategy.
	// The upload pointer is freed only when the command owns it.
	[[nodiscard]] bool Cmd_CreateBufferObject(const CCommandBuffer::SCommand_CreateBufferObject *pCommand)
	{
		bool IsOneFrameBuffer = (pCommand->m_Flags & IGraphics::EBufferObjectCreateFlags::BUFFER_OBJECT_CREATE_FLAGS_ONE_TIME_USE_BIT) != 0;
		if(!CreateBufferObject(BufferIndex: (size_t)pCommand->m_BufferIndex, pUploadData: pCommand->m_pUploadData, BufferDataSize: (VkDeviceSize)pCommand->m_DataSize, IsOneFrameBuffer))
			return false;
		if(pCommand->m_DeletePointer)
			free(ptr: pCommand->m_pUploadData);

		return true;
	}
6940
	// Updates a sub-range of an existing buffer object. The data is first
	// copied into a CPU-visible staging buffer, then copied on the GPU into
	// the vertex buffer, bracketed by vertex-attribute-read memory barriers
	// so in-flight reads and the new data don't race.
	// Note: m_pOffset carries a byte offset smuggled through a pointer field.
	[[nodiscard]] bool Cmd_UpdateBufferObject(const CCommandBuffer::SCommand_UpdateBufferObject *pCommand)
	{
		size_t BufferIndex = (size_t)pCommand->m_BufferIndex;
		bool DeletePointer = pCommand->m_DeletePointer;
		VkDeviceSize Offset = (VkDeviceSize)((intptr_t)pCommand->m_pOffset);
		void *pUploadData = pCommand->m_pUploadData;
		VkDeviceSize DataSize = (VkDeviceSize)pCommand->m_DataSize;

		SMemoryBlock<STAGING_BUFFER_CACHE_ID> StagingBuffer;
		if(!GetStagingBuffer(ResBlock&: StagingBuffer, pBufferData: pUploadData, RequiredSize: DataSize))
			return false;

		// all offsets are relative to the block's aligned start inside the
		// backing allocation
		const auto &MemBlock = m_vBufferObjects[BufferIndex].m_BufferObject.m_Mem;
		VkBuffer VertexBuffer = MemBlock.m_Buffer;
		if(!MemoryBarrier(Buffer: VertexBuffer, Offset: Offset + MemBlock.m_HeapData.m_OffsetToAlign, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: true))
			return false;
		if(!CopyBuffer(SrcBuffer: StagingBuffer.m_Buffer, DstBuffer: VertexBuffer, SrcOffset: StagingBuffer.m_HeapData.m_OffsetToAlign, DstOffset: Offset + MemBlock.m_HeapData.m_OffsetToAlign, CopySize: DataSize))
			return false;
		if(!MemoryBarrier(Buffer: VertexBuffer, Offset: Offset + MemBlock.m_HeapData.m_OffsetToAlign, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: false))
			return false;

		UploadAndFreeStagingMemBlock(Block&: StagingBuffer);

		if(DeletePointer)
			free(ptr: pUploadData);

		return true;
	}
6969
	// Destroys the buffer object in the slot and creates a fresh one from the
	// uploaded data (same flags handling as Cmd_CreateBufferObject).
	// NOTE(review): unlike Cmd_CreateBufferObject this path never frees
	// m_pUploadData - confirm the caller keeps ownership here.
	[[nodiscard]] bool Cmd_RecreateBufferObject(const CCommandBuffer::SCommand_RecreateBufferObject *pCommand)
	{
		DeleteBufferObject(BufferIndex: (size_t)pCommand->m_BufferIndex);
		bool IsOneFrameBuffer = (pCommand->m_Flags & IGraphics::EBufferObjectCreateFlags::BUFFER_OBJECT_CREATE_FLAGS_ONE_TIME_USE_BIT) != 0;
		return CreateBufferObject(BufferIndex: (size_t)pCommand->m_BufferIndex, pUploadData: pCommand->m_pUploadData, BufferDataSize: (VkDeviceSize)pCommand->m_DataSize, IsOneFrameBuffer);
	}
6976
	// GPU-side copy between two buffer objects. Both ranges are protected by
	// vertex-attribute-read memory barriers before and after the copy so the
	// transfer doesn't race with vertex fetches from either buffer.
	[[nodiscard]] bool Cmd_CopyBufferObject(const CCommandBuffer::SCommand_CopyBufferObject *pCommand)
	{
		size_t ReadBufferIndex = (size_t)pCommand->m_ReadBufferIndex;
		size_t WriteBufferIndex = (size_t)pCommand->m_WriteBufferIndex;
		auto &ReadMemBlock = m_vBufferObjects[ReadBufferIndex].m_BufferObject.m_Mem;
		auto &WriteMemBlock = m_vBufferObjects[WriteBufferIndex].m_BufferObject.m_Mem;
		VkBuffer ReadBuffer = ReadMemBlock.m_Buffer;
		VkBuffer WriteBuffer = WriteMemBlock.m_Buffer;

		// offsets are relative to each block's aligned start inside its
		// backing allocation
		VkDeviceSize DataSize = (VkDeviceSize)pCommand->m_CopySize;
		VkDeviceSize ReadOffset = (VkDeviceSize)pCommand->m_ReadOffset + ReadMemBlock.m_HeapData.m_OffsetToAlign;
		VkDeviceSize WriteOffset = (VkDeviceSize)pCommand->m_WriteOffset + WriteMemBlock.m_HeapData.m_OffsetToAlign;

		if(!MemoryBarrier(Buffer: ReadBuffer, Offset: ReadOffset, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: true))
			return false;
		if(!MemoryBarrier(Buffer: WriteBuffer, Offset: WriteOffset, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: true))
			return false;
		if(!CopyBuffer(SrcBuffer: ReadBuffer, DstBuffer: WriteBuffer, SrcOffset: ReadOffset, DstOffset: WriteOffset, CopySize: DataSize))
			return false;
		if(!MemoryBarrier(Buffer: WriteBuffer, Offset: WriteOffset, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: false))
			return false;
		if(!MemoryBarrier(Buffer: ReadBuffer, Offset: ReadOffset, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: false))
			return false;

		return true;
	}
7003
7004 [[nodiscard]] bool Cmd_DeleteBufferObject(const CCommandBuffer::SCommand_DeleteBufferObject *pCommand)
7005 {
7006 size_t BufferIndex = (size_t)pCommand->m_BufferIndex;
7007 DeleteBufferObject(BufferIndex);
7008
7009 return true;
7010 }
7011
7012 [[nodiscard]] bool Cmd_CreateBufferContainer(const CCommandBuffer::SCommand_CreateBufferContainer *pCommand)
7013 {
7014 size_t ContainerIndex = (size_t)pCommand->m_BufferContainerIndex;
7015 while(ContainerIndex >= m_vBufferContainers.size())
7016 m_vBufferContainers.resize(new_size: (m_vBufferContainers.size() * 2) + 1);
7017
7018 m_vBufferContainers[ContainerIndex].m_BufferObjectIndex = pCommand->m_VertBufferBindingIndex;
7019
7020 return true;
7021 }
7022
7023 [[nodiscard]] bool Cmd_UpdateBufferContainer(const CCommandBuffer::SCommand_UpdateBufferContainer *pCommand)
7024 {
7025 size_t ContainerIndex = (size_t)pCommand->m_BufferContainerIndex;
7026 m_vBufferContainers[ContainerIndex].m_BufferObjectIndex = pCommand->m_VertBufferBindingIndex;
7027
7028 return true;
7029 }
7030
7031 [[nodiscard]] bool Cmd_DeleteBufferContainer(const CCommandBuffer::SCommand_DeleteBufferContainer *pCommand)
7032 {
7033 size_t ContainerIndex = (size_t)pCommand->m_BufferContainerIndex;
7034 bool DeleteAllBO = pCommand->m_DestroyAllBO;
7035 if(DeleteAllBO)
7036 {
7037 size_t BufferIndex = (size_t)m_vBufferContainers[ContainerIndex].m_BufferObjectIndex;
7038 DeleteBufferObject(BufferIndex);
7039 }
7040
7041 return true;
7042 }
7043
7044 [[nodiscard]] bool Cmd_IndicesRequiredNumNotify(const CCommandBuffer::SCommand_IndicesRequiredNumNotify *pCommand)
7045 {
7046 size_t IndicesCount = pCommand->m_RequiredIndicesNum;
7047 if(m_CurRenderIndexPrimitiveCount < IndicesCount / 6)
7048 {
7049 m_vvFrameDelayedBufferCleanup[m_CurImageIndex].push_back(x: {.m_Buffer: m_RenderIndexBuffer, .m_Mem: m_RenderIndexBufferMemory});
7050 std::vector<uint32_t> vIndices(IndicesCount);
7051 uint32_t Primq = 0;
7052 for(size_t i = 0; i < IndicesCount; i += 6)
7053 {
7054 vIndices[i] = Primq;
7055 vIndices[i + 1] = Primq + 1;
7056 vIndices[i + 2] = Primq + 2;
7057 vIndices[i + 3] = Primq;
7058 vIndices[i + 4] = Primq + 2;
7059 vIndices[i + 5] = Primq + 3;
7060 Primq += 4;
7061 }
7062 if(!CreateIndexBuffer(pData: vIndices.data(), DataSize: vIndices.size() * sizeof(uint32_t), Buffer&: m_RenderIndexBuffer, Memory&: m_RenderIndexBufferMemory))
7063 return false;
7064 m_CurRenderIndexPrimitiveCount = IndicesCount / 6;
7065 }
7066
7067 return true;
7068 }
7069
	// Tile layers issue m_IndicesDrawNum separate draws; delegate to the
	// shared tile-layer preparation helper.
	void Cmd_RenderTileLayer_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderTileLayer *pCommand)
	{
		RenderTileLayer_FillExecuteBuffer(ExecBuffer, DrawCalls: pCommand->m_IndicesDrawNum, State: pCommand->m_State, BufferContainerIndex: pCommand->m_BufferContainerIndex);
	}
7074
	// Renders a (non-border) tile layer; Scale/Off are zero-initialized
	// because they are only meaningful for the border path (IsBorder=false).
	[[nodiscard]] bool Cmd_RenderTileLayer(const CCommandBuffer::SCommand_RenderTileLayer *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
	{
		vec2 Scale{};
		vec2 Off{};
		return RenderTileLayer(ExecBuffer, State: pCommand->m_State, IsBorder: false, Color: pCommand->m_Color, Scale, Off, IndicesDrawNum: (size_t)pCommand->m_IndicesDrawNum, pIndicesOffsets: pCommand->m_pIndicesOffsets, pDrawCount: pCommand->m_pDrawCount);
	}
7081
	// Border tiles are always drawn with a single draw call.
	void Cmd_RenderBorderTile_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderBorderTile *pCommand)
	{
		RenderTileLayer_FillExecuteBuffer(ExecBuffer, DrawCalls: 1, State: pCommand->m_State, BufferContainerIndex: pCommand->m_BufferContainerIndex);
	}
7086
	// Renders the map border as one instanced-style tile draw: a single
	// index-offset/draw-count pair, with m_DrawNum quads at 6 indices each.
	// DrawNum must be an addressable lvalue because RenderTileLayer takes a
	// pointer to the draw counts.
	[[nodiscard]] bool Cmd_RenderBorderTile(const CCommandBuffer::SCommand_RenderBorderTile *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
	{
		vec2 Scale = pCommand->m_Scale;
		vec2 Off = pCommand->m_Offset;
		unsigned int DrawNum = pCommand->m_DrawNum * 6;
		return RenderTileLayer(ExecBuffer, State: pCommand->m_State, IsBorder: true, Color: pCommand->m_Color, Scale, Off, IndicesDrawNum: 1, pIndicesOffsets: &pCommand->m_pIndicesOffset, pDrawCount: &DrawNum);
	}
7094
7095 void Cmd_RenderQuadLayer_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderQuadLayer *pCommand)
7096 {
7097 size_t BufferContainerIndex = (size_t)pCommand->m_BufferContainerIndex;
7098 size_t BufferObjectIndex = (size_t)m_vBufferContainers[BufferContainerIndex].m_BufferObjectIndex;
7099 const auto &BufferObject = m_vBufferObjects[BufferObjectIndex];
7100
7101 ExecBuffer.m_Buffer = BufferObject.m_CurBuffer;
7102 ExecBuffer.m_BufferOff = BufferObject.m_CurBufferOffset;
7103
7104 bool IsTextured = GetIsTextured(State: pCommand->m_State);
7105 if(IsTextured)
7106 {
7107 size_t AddressModeIndex = GetAddressModeIndex(State: pCommand->m_State);
7108 ExecBuffer.m_aDescriptors[0] = m_vTextures[pCommand->m_State.m_Texture].m_aVKStandardTexturedDescrSets[AddressModeIndex];
7109 }
7110
7111 ExecBuffer.m_IndexBuffer = m_RenderIndexBuffer;
7112
7113 ExecBuffer.m_EstimatedRenderCallCount = ((pCommand->m_QuadNum - 1) / gs_GraphicsMaxQuadsRenderCount) + 1;
7114
7115 ExecBufferFillDynamicStates(State: pCommand->m_State, ExecBuffer);
7116 }
7117
	// Renders a quad layer.
	//
	// Two paths exist:
	//  - "grouped": all quads share one SQuadRenderInfo, so it is pushed as a
	//    push constant and the whole layer is drawn in a single call.
	//    A single-quad draw always qualifies.
	//  - generic: per-quad data is uploaded into uniform buffers in chunks of
	//    gs_GraphicsMaxQuadsRenderCount quads, one draw call per chunk.
	[[nodiscard]] bool Cmd_RenderQuadLayer(const CCommandBuffer::SCommand_RenderQuadLayer *pCommand, SRenderCommandExecuteBuffer &ExecBuffer, bool Grouped)
	{
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(State: pCommand->m_State, Matrix&: m);

		bool CanBeGrouped = Grouped || pCommand->m_QuadNum == 1;

		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, State: pCommand->m_State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		auto &PipeLayout = GetPipeLayout(Container&: CanBeGrouped ? m_QuadGroupedPipeline : m_QuadPipeline, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = GetPipeline(Container&: CanBeGrouped ? m_QuadGroupedPipeline : m_QuadPipeline, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State: pCommand->m_State);

		std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
		vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());

		vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: 0, indexType: VK_INDEX_TYPE_UINT32);

		if(IsTextured)
		{
			vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
		}

		uint32_t DrawCount = (uint32_t)pCommand->m_QuadNum;

		if(CanBeGrouped)
		{
			// the first quad's render info stands in for the whole group
			SUniformQuadGroupedGPos PushConstantVertex;
			mem_copy(dest: &PushConstantVertex.m_BOPush, source: &pCommand->m_pQuadInfo[0], size: sizeof(PushConstantVertex.m_BOPush));

			mem_copy(dest: PushConstantVertex.m_aPos, source: m.data(), size: sizeof(PushConstantVertex.m_aPos));
			vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT, offset: 0, size: sizeof(SUniformQuadGroupedGPos), pValues: &PushConstantVertex);

			// 6 indices per quad
			VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)(pCommand->m_QuadOffset) * 6);
			vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(DrawCount * 6), instanceCount: 1, firstIndex: IndexOffset, vertexOffset: 0, firstInstance: 0);
		}
		else
		{
			SUniformQuadGPos PushConstantVertex;
			mem_copy(dest: PushConstantVertex.m_aPos, source: m.data(), size: sizeof(PushConstantVertex.m_aPos));
			PushConstantVertex.m_QuadOffset = pCommand->m_QuadOffset;

			vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(PushConstantVertex), pValues: &PushConstantVertex);

			// chunked draw: per-quad info goes into a uniform buffer,
			// at most gs_GraphicsMaxQuadsRenderCount quads per call
			size_t RenderOffset = 0;
			while(DrawCount > 0)
			{
				uint32_t RealDrawCount = (DrawCount > gs_GraphicsMaxQuadsRenderCount ? gs_GraphicsMaxQuadsRenderCount : DrawCount);
				VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)(pCommand->m_QuadOffset + RenderOffset) * 6);

				// create uniform buffer
				SDeviceDescriptorSet UniDescrSet;
				if(!GetUniformBufferObject(RenderThreadIndex: ExecBuffer.m_ThreadIndex, RequiresSharedStagesDescriptor: true, DescrSet&: UniDescrSet, ParticleCount: RealDrawCount, pData: (const float *)(pCommand->m_pQuadInfo + RenderOffset), DataSize: RealDrawCount * sizeof(SQuadRenderInfo)))
					return false;

				vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: IsTextured ? 1 : 0, descriptorSetCount: 1, pDescriptorSets: &UniDescrSet.m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
				if(RenderOffset > 0)
				{
					// patch only the trailing m_QuadOffset member of the
					// vertex push constants for subsequent chunks
					int32_t QuadOffset = pCommand->m_QuadOffset + RenderOffset;
					vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: sizeof(SUniformQuadGPos) - sizeof(int32_t), size: sizeof(int32_t), pValues: &QuadOffset);
				}

				vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(RealDrawCount * 6), instanceCount: 1, firstIndex: IndexOffset, vertexOffset: 0, firstInstance: 0);
				RenderOffset += RealDrawCount;
				DrawCount -= RealDrawCount;
			}
		}

		return true;
	}
7198
7199 void Cmd_RenderText_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderText *pCommand)
7200 {
7201 size_t BufferContainerIndex = (size_t)pCommand->m_BufferContainerIndex;
7202 size_t BufferObjectIndex = (size_t)m_vBufferContainers[BufferContainerIndex].m_BufferObjectIndex;
7203 const auto &BufferObject = m_vBufferObjects[BufferObjectIndex];
7204
7205 ExecBuffer.m_Buffer = BufferObject.m_CurBuffer;
7206 ExecBuffer.m_BufferOff = BufferObject.m_CurBufferOffset;
7207
7208 ExecBuffer.m_aDescriptors[0] = m_vTextures[pCommand->m_TextTextureIndex].m_VKTextDescrSet;
7209
7210 ExecBuffer.m_IndexBuffer = m_RenderIndexBuffer;
7211
7212 ExecBuffer.m_EstimatedRenderCallCount = 1;
7213
7214 ExecBufferFillDynamicStates(State: pCommand->m_State, ExecBuffer);
7215 }
7216
	// Renders buffered text. The vertex stage receives the state matrix and
	// texture size as push constants; the fragment stage receives text and
	// outline colors at an offset that skips the vertex range plus the
	// padding struct SUniformTextGFragmentOffset (alignment filler -
	// presumably to satisfy push-constant offset rules; confirm against the
	// pipeline layout definition).
	[[nodiscard]] bool Cmd_RenderText(const CCommandBuffer::SCommand_RenderText *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
	{
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(State: pCommand->m_State, Matrix&: m);

		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, State: pCommand->m_State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		IsTextured = true; // text is always textured
		auto &PipeLayout = GetPipeLayout(Container&: m_TextPipeline, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = GetPipeline(Container&: m_TextPipeline, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State: pCommand->m_State);

		std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
		vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());

		vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: 0, indexType: VK_INDEX_TYPE_UINT32);

		vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);

		SUniformGTextPos PosTexSizeConstant;
		mem_copy(dest: PosTexSizeConstant.m_aPos, source: m.data(), size: m.size() * sizeof(float));
		PosTexSizeConstant.m_TextureSize = pCommand->m_TextureSize;

		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(SUniformGTextPos), pValues: &PosTexSizeConstant);

		SUniformTextFragment FragmentConstants;

		FragmentConstants.m_Constants.m_TextColor = pCommand->m_TextColor;
		FragmentConstants.m_Constants.m_TextOutlineColor = pCommand->m_TextOutlineColor;
		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, offset: sizeof(SUniformGTextPos) + sizeof(SUniformTextGFragmentOffset), size: sizeof(SUniformTextFragment), pValues: &FragmentConstants);

		vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(pCommand->m_DrawNum), instanceCount: 1, firstIndex: 0, vertexOffset: 0, firstInstance: 0);

		return true;
	}
7262
7263 void BufferContainer_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SState &State, size_t BufferContainerIndex, size_t DrawCalls)
7264 {
7265 size_t BufferObjectIndex = (size_t)m_vBufferContainers[BufferContainerIndex].m_BufferObjectIndex;
7266 const auto &BufferObject = m_vBufferObjects[BufferObjectIndex];
7267
7268 ExecBuffer.m_Buffer = BufferObject.m_CurBuffer;
7269 ExecBuffer.m_BufferOff = BufferObject.m_CurBufferOffset;
7270
7271 bool IsTextured = GetIsTextured(State);
7272 if(IsTextured)
7273 {
7274 size_t AddressModeIndex = GetAddressModeIndex(State);
7275 ExecBuffer.m_aDescriptors[0] = m_vTextures[State.m_Texture].m_aVKStandardTexturedDescrSets[AddressModeIndex];
7276 }
7277
7278 ExecBuffer.m_IndexBuffer = m_RenderIndexBuffer;
7279
7280 ExecBuffer.m_EstimatedRenderCallCount = DrawCalls;
7281
7282 ExecBufferFillDynamicStates(State, ExecBuffer);
7283 }
7284
	// Quad containers render with a single draw call.
	void Cmd_RenderQuadContainer_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderQuadContainer *pCommand)
	{
		BufferContainer_FillExecuteBuffer(ExecBuffer, State: pCommand->m_State, BufferContainerIndex: (size_t)pCommand->m_BufferContainerIndex, DrawCalls: 1);
	}
7289
	// Renders a quad container with the standard (non-line) pipeline. The
	// container's byte offset into the index buffer arrives smuggled through
	// the pointer field m_pOffset; only the state matrix is pushed to the
	// vertex stage.
	[[nodiscard]] bool Cmd_RenderQuadContainer(const CCommandBuffer::SCommand_RenderQuadContainer *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
	{
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(State: pCommand->m_State, Matrix&: m);

		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, State: pCommand->m_State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		auto &PipeLayout = GetStandardPipeLayout(IsLineGeometry: false, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = GetStandardPipe(IsLineGeometry: false, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State: pCommand->m_State);

		std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
		vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());

		VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)pCommand->m_pOffset);

		vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: IndexOffset, indexType: VK_INDEX_TYPE_UINT32);

		if(IsTextured)
		{
			vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
		}

		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(SUniformGPos), pValues: m.data());

		vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(pCommand->m_DrawNum), instanceCount: 1, firstIndex: 0, vertexOffset: 0, firstInstance: 0);

		return true;
	}
7329
	// Extended quad-container draws also issue a single draw call.
	void Cmd_RenderQuadContainerEx_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderQuadContainerEx *pCommand)
	{
		BufferContainer_FillExecuteBuffer(ExecBuffer, State: pCommand->m_State, BufferContainerIndex: (size_t)pCommand->m_BufferContainerIndex, DrawCalls: 1);
	}
7334
	// Renders a quad container with extra per-draw parameters (vertex color,
	// optional rotation around a center point). A zero rotation selects the
	// cheaper rotationless pipeline, and only the shorter
	// SUniformPrimExGPosRotationless prefix of the vertex push constants is
	// pushed in that case. The fragment color is pushed behind the full
	// vertex range plus the SUniformPrimExGVertColorAlign padding struct.
	[[nodiscard]] bool Cmd_RenderQuadContainerEx(const CCommandBuffer::SCommand_RenderQuadContainerEx *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
	{
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(State: pCommand->m_State, Matrix&: m);

		bool IsRotationless = !(pCommand->m_Rotation != 0);
		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, State: pCommand->m_State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		auto &PipeLayout = GetPipeLayout(Container&: IsRotationless ? m_PrimExRotationlessPipeline : m_PrimExPipeline, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = GetPipeline(Container&: IsRotationless ? m_PrimExRotationlessPipeline : m_PrimExPipeline, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State: pCommand->m_State);

		std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
		vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());

		// index byte offset arrives smuggled through the pointer field
		VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)pCommand->m_pOffset);

		vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: IndexOffset, indexType: VK_INDEX_TYPE_UINT32);

		if(IsTextured)
		{
			vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
		}

		SUniformPrimExGVertColor PushConstantColor;
		SUniformPrimExGPos PushConstantVertex;
		size_t VertexPushConstantSize = sizeof(PushConstantVertex);

		PushConstantColor = pCommand->m_VertexColor;
		mem_copy(dest: PushConstantVertex.m_aPos, source: m.data(), size: sizeof(PushConstantVertex.m_aPos));

		if(!IsRotationless)
		{
			PushConstantVertex.m_Rotation = pCommand->m_Rotation;
			PushConstantVertex.m_Center = {pCommand->m_Center.x, pCommand->m_Center.y};
		}
		else
		{
			// rotation/center are omitted from the push range entirely
			VertexPushConstantSize = sizeof(SUniformPrimExGPosRotationless);
		}

		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: VertexPushConstantSize, pValues: &PushConstantVertex);
		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, offset: sizeof(SUniformPrimExGPos) + sizeof(SUniformPrimExGVertColorAlign), size: sizeof(PushConstantColor), pValues: &PushConstantColor);

		vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(pCommand->m_DrawNum), instanceCount: 1, firstIndex: 0, vertexOffset: 0, firstInstance: 0);

		return true;
	}
7393
7394 void Cmd_RenderQuadContainerAsSpriteMultiple_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderQuadContainerAsSpriteMultiple *pCommand)
7395 {
7396 BufferContainer_FillExecuteBuffer(ExecBuffer, State: pCommand->m_State, BufferContainerIndex: (size_t)pCommand->m_BufferContainerIndex, DrawCalls: ((pCommand->m_DrawCount - 1) / gs_GraphicsMaxParticlesRenderCount) + 1);
7397 }
7398
7399 [[nodiscard]] bool Cmd_RenderQuadContainerAsSpriteMultiple(const CCommandBuffer::SCommand_RenderQuadContainerAsSpriteMultiple *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
7400 {
7401 std::array<float, (size_t)4 * 2> m;
7402 GetStateMatrix(State: pCommand->m_State, Matrix&: m);
7403
7404 bool CanBePushed = pCommand->m_DrawCount <= 1;
7405
7406 bool IsTextured;
7407 size_t BlendModeIndex;
7408 size_t DynamicIndex;
7409 size_t AddressModeIndex;
7410 GetStateIndices(ExecBuffer, State: pCommand->m_State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
7411 auto &PipeLayout = GetPipeLayout(Container&: CanBePushed ? m_SpriteMultiPushPipeline : m_SpriteMultiPipeline, IsTextured, BlendModeIndex, DynamicIndex);
7412 auto &PipeLine = GetPipeline(Container&: CanBePushed ? m_SpriteMultiPushPipeline : m_SpriteMultiPipeline, IsTextured, BlendModeIndex, DynamicIndex);
7413
7414 VkCommandBuffer *pCommandBuffer;
7415 if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
7416 return false;
7417 auto &CommandBuffer = *pCommandBuffer;
7418
7419 BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State: pCommand->m_State);
7420
7421 std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
7422 std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
7423 vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());
7424
7425 VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)pCommand->m_pOffset);
7426 vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: IndexOffset, indexType: VK_INDEX_TYPE_UINT32);
7427
7428 vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
7429
7430 if(CanBePushed)
7431 {
7432 SUniformSpriteMultiPushGVertColor PushConstantColor;
7433 SUniformSpriteMultiPushGPos PushConstantVertex;
7434
7435 PushConstantColor = pCommand->m_VertexColor;
7436
7437 mem_copy(dest: PushConstantVertex.m_aPos, source: m.data(), size: sizeof(PushConstantVertex.m_aPos));
7438 PushConstantVertex.m_Center = pCommand->m_Center;
7439
7440 for(size_t i = 0; i < pCommand->m_DrawCount; ++i)
7441 PushConstantVertex.m_aPSR[i] = vec4(pCommand->m_pRenderInfo[i].m_Pos.x, pCommand->m_pRenderInfo[i].m_Pos.y, pCommand->m_pRenderInfo[i].m_Scale, pCommand->m_pRenderInfo[i].m_Rotation);
7442
7443 vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(SUniformSpriteMultiPushGPosBase) + sizeof(vec4) * pCommand->m_DrawCount, pValues: &PushConstantVertex);
7444 vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, offset: sizeof(SUniformSpriteMultiPushGPos), size: sizeof(PushConstantColor), pValues: &PushConstantColor);
7445 }
7446 else
7447 {
7448 SUniformSpriteMultiGVertColor PushConstantColor;
7449 SUniformSpriteMultiGPos PushConstantVertex;
7450
7451 PushConstantColor = pCommand->m_VertexColor;
7452
7453 mem_copy(dest: PushConstantVertex.m_aPos, source: m.data(), size: sizeof(PushConstantVertex.m_aPos));
7454 PushConstantVertex.m_Center = pCommand->m_Center;
7455
7456 vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(PushConstantVertex), pValues: &PushConstantVertex);
7457 vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, offset: sizeof(SUniformSpriteMultiGPos) + sizeof(SUniformSpriteMultiGVertColorAlign), size: sizeof(PushConstantColor), pValues: &PushConstantColor);
7458 }
7459
7460 const int RSPCount = 512;
7461 int DrawCount = pCommand->m_DrawCount;
7462 size_t RenderOffset = 0;
7463
7464 while(DrawCount > 0)
7465 {
7466 int UniformCount = (DrawCount > RSPCount ? RSPCount : DrawCount);
7467
7468 if(!CanBePushed)
7469 {
7470 // create uniform buffer
7471 SDeviceDescriptorSet UniDescrSet;
7472 if(!GetUniformBufferObject(RenderThreadIndex: ExecBuffer.m_ThreadIndex, RequiresSharedStagesDescriptor: false, DescrSet&: UniDescrSet, ParticleCount: UniformCount, pData: (const float *)(pCommand->m_pRenderInfo + RenderOffset), DataSize: UniformCount * sizeof(IGraphics::SRenderSpriteInfo)))
7473 return false;
7474
7475 vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 1, descriptorSetCount: 1, pDescriptorSets: &UniDescrSet.m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
7476 }
7477
7478 vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(pCommand->m_DrawNum), instanceCount: UniformCount, firstIndex: 0, vertexOffset: 0, firstInstance: 0);
7479
7480 RenderOffset += RSPCount;
7481 DrawCount -= RSPCount;
7482 }
7483
7484 return true;
7485 }
7486
7487 [[nodiscard]] bool Cmd_WindowCreateNtf(const CCommandBuffer::SCommand_WindowCreateNtf *pCommand)
7488 {
7489 log_debug("vulkan", "creating new surface.");
7490 m_pWindow = SDL_GetWindowFromID(id: pCommand->m_WindowId);
7491 if(m_RenderingPaused)
7492 {
7493#ifdef CONF_PLATFORM_ANDROID
7494 if(!CreateSurface(m_pWindow))
7495 return false;
7496 m_RecreateSwapChain = true;
7497#endif
7498 m_RenderingPaused = false;
7499 if(!PureMemoryFrame())
7500 return false;
7501 if(!PrepareFrame())
7502 return false;
7503 }
7504
7505 return true;
7506 }
7507
7508 [[nodiscard]] bool Cmd_WindowDestroyNtf(const CCommandBuffer::SCommand_WindowDestroyNtf *pCommand)
7509 {
7510 log_debug("vulkan", "surface got destroyed.");
7511 if(!m_RenderingPaused)
7512 {
7513 if(!WaitFrame())
7514 return false;
7515 m_RenderingPaused = true;
7516 vkDeviceWaitIdle(device: m_VKDevice);
7517#ifdef CONF_PLATFORM_ANDROID
7518 CleanupVulkanSwapChain(true);
7519#endif
7520 }
7521
7522 return true;
7523 }
7524
	// Pre-initialization: sets up Vulkan through SDL, registers the command
	// callbacks and spawns the helper render threads (if configured).
	// Always returns true; an SDL/Vulkan init failure is recorded by leaving
	// m_VKInstance as VK_NULL_HANDLE instead.
	[[nodiscard]] bool Cmd_PreInit(const CCommandProcessorFragment_GLBase::SCommand_PreInit *pCommand)
	{
		m_pGpuList = pCommand->m_pGpuList;
		if(InitVulkanSDL(pWindow: pCommand->m_pWindow, CanvasWidth: pCommand->m_Width, CanvasHeight: pCommand->m_Height, pRendererString: pCommand->m_pRendererString, pVendorString: pCommand->m_pVendorString, pVersionString: pCommand->m_pVersionString) != 0)
		{
			// init failed; a null instance marks the backend as unusable
			m_VKInstance = VK_NULL_HANDLE;
		}

		RegisterCommands();

		m_ThreadCount = g_Config.m_GfxRenderThreadCount;
		if(m_ThreadCount <= 1)
			m_ThreadCount = 1;
		else
		{
			// multi-threaded mode needs at least 3 (main + 2 helpers); cap at
			// the hardware concurrency, but never clamp below 3
			m_ThreadCount = std::clamp<decltype(m_ThreadCount)>(val: m_ThreadCount, lo: 3, hi: std::max<decltype(m_ThreadCount)>(a: 3, b: std::thread::hardware_concurrency()));
		}

		// start threads
		dbg_assert(m_ThreadCount != 2, "Either use 1 main thread or at least 2 extra rendering threads.");
		if(m_ThreadCount > 1)
		{
			// one command list and "had commands" flag per helper thread
			m_vvThreadCommandLists.resize(new_size: m_ThreadCount - 1);
			m_vThreadHelperHadCommands.resize(new_size: m_ThreadCount - 1, x: false);
			for(auto &ThreadCommandList : m_vvThreadCommandLists)
			{
				ThreadCommandList.reserve(n: 256);
			}

			m_vpRenderThreads.reserve(n: m_ThreadCount - 1);
			for(size_t i = 0; i < m_ThreadCount - 1; ++i)
			{
				auto *pRenderThread = new SRenderThread();
				// lock the thread's mutex before spawning it so the started
				// notification from RunThread cannot be missed
				std::unique_lock<std::mutex> Lock(pRenderThread->m_Mutex);
				m_vpRenderThreads.emplace_back(args&: pRenderThread);
				pRenderThread->m_Thread = std::thread([this, i]() { RunThread(ThreadIndex: i); });
				// wait until thread started
				pRenderThread->m_Cond.wait(lock&: Lock, p: [pRenderThread]() -> bool { return pRenderThread->m_Started; });
			}
		}

		return true;
	}
7568
7569 [[nodiscard]] bool Cmd_PostShutdown(const CCommandProcessorFragment_GLBase::SCommand_PostShutdown *pCommand)
7570 {
7571 for(size_t i = 0; i < m_ThreadCount - 1; ++i)
7572 {
7573 auto *pThread = m_vpRenderThreads[i].get();
7574 {
7575 std::unique_lock<std::mutex> Lock(pThread->m_Mutex);
7576 pThread->m_Finished = true;
7577 pThread->m_Cond.notify_one();
7578 }
7579 pThread->m_Thread.join();
7580 }
7581 m_vpRenderThreads.clear();
7582 m_vvThreadCommandLists.clear();
7583 m_vThreadHelperHadCommands.clear();
7584
7585 m_ThreadCount = 1;
7586
7587 CleanupVulkanSDL();
7588
7589 return true;
7590 }
7591
7592 void StartCommands(size_t CommandCount, size_t EstimatedRenderCallCount) override
7593 {
7594 m_CommandsInPipe = CommandCount;
7595 m_RenderCallsInPipe = EstimatedRenderCallCount;
7596 m_CurCommandInPipe = 0;
7597 m_CurRenderCallCountInPipe = 0;
7598 }
7599
7600 void EndCommands() override
7601 {
7602 FinishRenderThreads();
7603 m_CommandsInPipe = 0;
7604 m_RenderCallsInPipe = 0;
7605 }
7606
7607 /****************
7608 * RENDER THREADS
7609 *****************/
7610
	// Main loop of one helper render thread. Signals the spawning thread that
	// it started (Cmd_PreInit waits for m_Started), then repeatedly waits for
	// a command list handed over by the main thread, executes it and ends the
	// thread's draw command buffer. ThreadIndex is 0-based over the helper
	// threads only; the per-image command buffer arrays are indexed with
	// ThreadIndex + 1.
	void RunThread(size_t ThreadIndex)
	{
		auto *pThread = m_vpRenderThreads[ThreadIndex].get();
		std::unique_lock<std::mutex> Lock(pThread->m_Mutex);
		pThread->m_Started = true;
		pThread->m_Cond.notify_one();

		while(!pThread->m_Finished)
		{
			// sleep until there is work to render or shutdown is requested
			pThread->m_Cond.wait(lock&: Lock, p: [pThread]() -> bool { return pThread->m_IsRendering || pThread->m_Finished; });
			pThread->m_Cond.notify_one();

			// set this to true, if you want to benchmark the render thread times
			static constexpr bool s_BenchmarkRenderThreads = false;
			std::chrono::nanoseconds ThreadRenderTime = 0ns;
			if(IsVerbose() && s_BenchmarkRenderThreads)
			{
				ThreadRenderTime = time_get_nanoseconds();
			}

			if(!pThread->m_Finished)
			{
				bool HasErrorFromCmd = false;
				// execute all commands queued for this thread via the
				// registered per-command callbacks
				for(auto &NextCmd : m_vvThreadCommandLists[ThreadIndex])
				{
					if(!m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: NextCmd.m_Command)].m_CommandCB(NextCmd.m_pRawCommand, NextCmd))
					{
						// an error occurred, the thread will not continue execution
						HasErrorFromCmd = true;
						break;
					}
				}
				m_vvThreadCommandLists[ThreadIndex].clear();

				// close this thread's draw command buffer only if it was
				// actually used for the current swap chain image
				if(!HasErrorFromCmd && m_vvUsedThreadDrawCommandBuffer[ThreadIndex + 1][m_CurImageIndex])
				{
					auto &GraphicThreadCommandBuffer = m_vvThreadDrawCommandBuffers[ThreadIndex + 1][m_CurImageIndex];
					vkEndCommandBuffer(commandBuffer: GraphicThreadCommandBuffer);
				}
			}

			if(IsVerbose() && s_BenchmarkRenderThreads)
			{
				dbg_msg(sys: "vulkan", fmt: "render thread %" PRIzu " took %d ns to finish", ThreadIndex, (int)(time_get_nanoseconds() - ThreadRenderTime).count());
			}

			// signal the main thread (FinishRenderThreads) that this thread is done
			pThread->m_IsRendering = false;
		}
	}
7660};
7661
// Factory function for the Vulkan command processor fragment; the caller
// takes ownership of the returned object.
CCommandProcessorFragment_GLBase *CreateVulkanCommandProcessorFragment()
{
	return new CCommandProcessorFragment_Vulkan();
}
7666
7667#endif
7668