1#if defined(CONF_BACKEND_VULKAN)
2
3#include <base/log.h>
4#include <base/math.h>
5#include <base/system.h>
6
7#include <engine/client/backend/backend_base.h>
8#include <engine/client/backend/vulkan/backend_vulkan.h>
9#include <engine/client/backend_sdl.h>
10#include <engine/client/graphics_threaded.h>
11#include <engine/gfx/image_manipulation.h>
12#include <engine/graphics.h>
13#include <engine/shared/config.h>
14#include <engine/shared/localization.h>
15#include <engine/storage.h>
16
17#include <SDL_video.h>
18#include <SDL_vulkan.h>
19#include <vulkan/vk_platform.h>
20#include <vulkan/vulkan_core.h>
21
22#include <algorithm>
23#include <array>
24#include <condition_variable>
25#include <cstddef>
26#include <cstdlib>
27#include <functional>
28#include <limits>
29#include <map>
30#include <memory>
31#include <mutex>
32#include <optional>
33#include <set>
34#include <string>
35#include <thread>
36#include <unordered_map>
37#include <utility>
38#include <vector>
39
40#ifndef VK_API_VERSION_MAJOR
41#define VK_API_VERSION_MAJOR VK_VERSION_MAJOR
42#define VK_API_VERSION_MINOR VK_VERSION_MINOR
43#define VK_API_VERSION_PATCH VK_VERSION_PATCH
44#endif
45
46using namespace std::chrono_literals;
47
48class CCommandProcessorFragment_Vulkan : public CCommandProcessorFragment_GLBase
49{
	// Usage categories for device memory allocations; used to pick the
	// correct memory-usage counter and for verbose log output.
	enum EMemoryBlockUsage
	{
		MEMORY_BLOCK_USAGE_TEXTURE = 0,
		MEMORY_BLOCK_USAGE_BUFFER,
		MEMORY_BLOCK_USAGE_STREAM,
		MEMORY_BLOCK_USAGE_STAGING,

		// whenever dummy is used, make sure to deallocate all memory
		MEMORY_BLOCK_USAGE_DUMMY,
	};
60
61 [[nodiscard]] bool IsVerbose()
62 {
63 return g_Config.m_DbgGfx == DEBUG_GFX_MODE_VERBOSE || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL;
64 }
65
66 static const char *MemoryUsageName(EMemoryBlockUsage MemUsage)
67 {
68 switch(MemUsage)
69 {
70 case MEMORY_BLOCK_USAGE_TEXTURE:
71 return "texture";
72 case MEMORY_BLOCK_USAGE_BUFFER:
73 return "buffer";
74 case MEMORY_BLOCK_USAGE_STREAM:
75 return "stream";
76 case MEMORY_BLOCK_USAGE_STAGING:
77 return "staging buffer";
78 default:
79 dbg_assert_failed("Invalid MemUsage: %d", (int)MemUsage);
80 }
81 }
82
83 void VerboseAllocatedMemory(VkDeviceSize Size, size_t FrameImageIndex, EMemoryBlockUsage MemUsage) const
84 {
85 log_debug("gfx/vulkan", "Allocated chunk of memory with size %" PRIzu " for frame %" PRIzu " (%s).",
86 (size_t)Size, (size_t)m_CurImageIndex, MemoryUsageName(MemUsage));
87 }
88
89 void VerboseDeallocatedMemory(VkDeviceSize Size, size_t FrameImageIndex, EMemoryBlockUsage MemUsage) const
90 {
91 log_debug("gfx/vulkan", "Deallocated chunk of memory with size %" PRIzu " for frame %" PRIzu " (%s).",
92 (size_t)Size, (size_t)m_CurImageIndex, MemoryUsageName(MemUsage));
93 }
94
	/************************
	 * STRUCT DEFINITIONS
	 ************************/

	// Ids selecting which SMemoryBlockCache / SMemoryBlock family a block belongs to.
	static constexpr size_t STAGING_BUFFER_CACHE_ID = 0;
	static constexpr size_t STAGING_BUFFER_IMAGE_CACHE_ID = 1;
	static constexpr size_t VERTEX_BUFFER_CACHE_ID = 2;
	static constexpr size_t IMAGE_BUFFER_CACHE_ID = 3;
103
	// One raw device memory allocation with its size and the usage
	// category it is accounted under.
	struct SDeviceMemoryBlock
	{
		VkDeviceMemory m_Mem = VK_NULL_HANDLE;
		VkDeviceSize m_Size = 0;
		// NOTE(review): deliberately without default — assumed to be assigned
		// on every allocation path; confirm before relying on it.
		EMemoryBlockUsage m_UsageType;
	};
110
	struct SDeviceDescriptorPools;

	// A descriptor set together with the pool list (and the index of the
	// pool inside it) it was allocated from, so it can be returned to the
	// right pool when freed.
	struct SDeviceDescriptorSet
	{
		VkDescriptorSet m_Descriptor = VK_NULL_HANDLE;
		SDeviceDescriptorPools *m_pPools = nullptr;
		size_t m_PoolIndex = std::numeric_limits<size_t>::max();
	};
119
120 struct SDeviceDescriptorPool
121 {
122 VkDescriptorPool m_Pool;
123 VkDeviceSize m_Size = 0;
124 VkDeviceSize m_CurSize = 0;
125 };
126
	// Growable list of descriptor pools sharing one default allocation
	// size; m_IsUniformPool distinguishes uniform-buffer pools from
	// texture pools.
	struct SDeviceDescriptorPools
	{
		std::vector<SDeviceDescriptorPool> m_vPools;
		VkDeviceSize m_DefaultAllocSize = 0;
		bool m_IsUniformPool = false;
	};
133
134 // some mix of queue and binary tree
135 struct SMemoryHeap
136 {
137 struct SMemoryHeapElement;
138 struct SMemoryHeapQueueElement
139 {
140 size_t m_AllocationSize;
141 // only useful information for the heap
142 size_t m_OffsetInHeap;
143 // useful for the user of this element
144 size_t m_OffsetToAlign;
145 SMemoryHeapElement *m_pElementInHeap;
146 [[nodiscard]] bool operator>(const SMemoryHeapQueueElement &Other) const { return m_AllocationSize > Other.m_AllocationSize; }
147 // respects alignment requirements
148 constexpr bool CanFitAllocation(size_t AllocSize, size_t AllocAlignment) const
149 {
150 size_t ExtraSizeAlign = m_OffsetInHeap % AllocAlignment;
151 if(ExtraSizeAlign != 0)
152 ExtraSizeAlign = AllocAlignment - ExtraSizeAlign;
153 size_t RealAllocSize = AllocSize + ExtraSizeAlign;
154 return m_AllocationSize >= RealAllocSize;
155 }
156 };
157
158 typedef std::multiset<SMemoryHeapQueueElement, std::greater<>> TMemoryHeapQueue;
159
160 struct SMemoryHeapElement
161 {
162 size_t m_AllocationSize;
163 size_t m_Offset;
164 SMemoryHeapElement *m_pParent;
165 std::unique_ptr<SMemoryHeapElement> m_pLeft;
166 std::unique_ptr<SMemoryHeapElement> m_pRight;
167
168 bool m_InUse;
169 TMemoryHeapQueue::iterator m_InQueue;
170 };
171
172 SMemoryHeapElement m_Root;
173 TMemoryHeapQueue m_Elements;
174
175 void Init(size_t Size, size_t Offset)
176 {
177 m_Root.m_AllocationSize = Size;
178 m_Root.m_Offset = Offset;
179 m_Root.m_pParent = nullptr;
180 m_Root.m_InUse = false;
181
182 SMemoryHeapQueueElement QueueEl;
183 QueueEl.m_AllocationSize = Size;
184 QueueEl.m_OffsetInHeap = Offset;
185 QueueEl.m_OffsetToAlign = Offset;
186 QueueEl.m_pElementInHeap = &m_Root;
187 m_Root.m_InQueue = m_Elements.insert(x: QueueEl);
188 }
189
190 [[nodiscard]] bool Allocate(size_t AllocSize, size_t AllocAlignment, SMemoryHeapQueueElement &AllocatedMemory)
191 {
192 if(m_Elements.empty())
193 {
194 return false;
195 }
196 else
197 {
198 // check if there is enough space in this instance
199 if(!m_Elements.begin()->CanFitAllocation(AllocSize, AllocAlignment))
200 {
201 return false;
202 }
203 else
204 {
205 // see SMemoryHeapQueueElement::operator>
206 SMemoryHeapQueueElement FindAllocSize;
207 FindAllocSize.m_AllocationSize = AllocSize;
208 // find upper bound for a allocation size
209 auto Upper = m_Elements.upper_bound(x: FindAllocSize);
210 // then find the first entry that respects alignment, this is a linear search!
211 auto FoundEl = m_Elements.rend();
212 for(auto AllocIterator = std::make_reverse_iterator(i: Upper); AllocIterator != m_Elements.rend(); ++AllocIterator)
213 {
214 if(AllocIterator->CanFitAllocation(AllocSize, AllocAlignment))
215 {
216 FoundEl = AllocIterator;
217 break;
218 }
219 }
220
221 auto TopEl = *FoundEl;
222 m_Elements.erase(position: TopEl.m_pElementInHeap->m_InQueue);
223
224 TopEl.m_pElementInHeap->m_InUse = true;
225
226 // calculate the real alloc size + alignment offset
227 size_t ExtraSizeAlign = TopEl.m_OffsetInHeap % AllocAlignment;
228 if(ExtraSizeAlign != 0)
229 ExtraSizeAlign = AllocAlignment - ExtraSizeAlign;
230 size_t RealAllocSize = AllocSize + ExtraSizeAlign;
231
232 // the heap element gets children
233 TopEl.m_pElementInHeap->m_pLeft = std::make_unique<SMemoryHeapElement>();
234 TopEl.m_pElementInHeap->m_pLeft->m_AllocationSize = RealAllocSize;
235 TopEl.m_pElementInHeap->m_pLeft->m_Offset = TopEl.m_OffsetInHeap;
236 TopEl.m_pElementInHeap->m_pLeft->m_pParent = TopEl.m_pElementInHeap;
237 TopEl.m_pElementInHeap->m_pLeft->m_InUse = true;
238
239 if(RealAllocSize < TopEl.m_AllocationSize)
240 {
241 SMemoryHeapQueueElement RemainingEl;
242 RemainingEl.m_OffsetInHeap = TopEl.m_OffsetInHeap + RealAllocSize;
243 RemainingEl.m_AllocationSize = TopEl.m_AllocationSize - RealAllocSize;
244
245 TopEl.m_pElementInHeap->m_pRight = std::make_unique<SMemoryHeapElement>();
246 TopEl.m_pElementInHeap->m_pRight->m_AllocationSize = RemainingEl.m_AllocationSize;
247 TopEl.m_pElementInHeap->m_pRight->m_Offset = RemainingEl.m_OffsetInHeap;
248 TopEl.m_pElementInHeap->m_pRight->m_pParent = TopEl.m_pElementInHeap;
249 TopEl.m_pElementInHeap->m_pRight->m_InUse = false;
250
251 RemainingEl.m_pElementInHeap = TopEl.m_pElementInHeap->m_pRight.get();
252 RemainingEl.m_pElementInHeap->m_InQueue = m_Elements.insert(x: RemainingEl);
253 }
254
255 AllocatedMemory.m_pElementInHeap = TopEl.m_pElementInHeap->m_pLeft.get();
256 AllocatedMemory.m_AllocationSize = RealAllocSize;
257 AllocatedMemory.m_OffsetInHeap = TopEl.m_OffsetInHeap;
258 AllocatedMemory.m_OffsetToAlign = TopEl.m_OffsetInHeap + ExtraSizeAlign;
259 return true;
260 }
261 }
262 }
263
264 void Free(const SMemoryHeapQueueElement &AllocatedMemory)
265 {
266 bool ContinueFree = true;
267 SMemoryHeapQueueElement ThisEl = AllocatedMemory;
268 while(ContinueFree)
269 {
270 // first check if the other block is in use, if not merge them again
271 SMemoryHeapElement *pThisHeapObj = ThisEl.m_pElementInHeap;
272 SMemoryHeapElement *pThisParent = pThisHeapObj->m_pParent;
273 pThisHeapObj->m_InUse = false;
274 SMemoryHeapElement *pOtherHeapObj = nullptr;
275 if(pThisParent != nullptr && pThisHeapObj == pThisParent->m_pLeft.get())
276 pOtherHeapObj = pThisHeapObj->m_pParent->m_pRight.get();
277 else if(pThisParent != nullptr)
278 pOtherHeapObj = pThisHeapObj->m_pParent->m_pLeft.get();
279
280 if((pThisParent != nullptr && pOtherHeapObj == nullptr) || (pOtherHeapObj != nullptr && !pOtherHeapObj->m_InUse))
281 {
282 // merge them
283 if(pOtherHeapObj != nullptr)
284 {
285 m_Elements.erase(position: pOtherHeapObj->m_InQueue);
286 pOtherHeapObj->m_InUse = false;
287 }
288
289 SMemoryHeapQueueElement ParentEl;
290 ParentEl.m_OffsetInHeap = pThisParent->m_Offset;
291 ParentEl.m_AllocationSize = pThisParent->m_AllocationSize;
292 ParentEl.m_pElementInHeap = pThisParent;
293
294 pThisParent->m_pLeft = nullptr;
295 pThisParent->m_pRight = nullptr;
296
297 ThisEl = ParentEl;
298 }
299 else
300 {
301 // else just put this back into queue
302 ThisEl.m_pElementInHeap->m_InQueue = m_Elements.insert(x: ThisEl);
303 ContinueFree = false;
304 }
305 }
306 }
307
308 [[nodiscard]] bool IsUnused() const
309 {
310 return !m_Root.m_InUse;
311 }
312 };
313
	// A sub-allocation handed out by the memory caches; Id selects which
	// cache family the block belongs to.
	template<size_t Id>
	struct SMemoryBlock
	{
		// location of this block inside its SMemoryHeap
		SMemoryHeap::SMemoryHeapQueueElement m_HeapData;

		VkDeviceSize m_UsedSize;

		// optional
		VkBuffer m_Buffer;

		SDeviceMemoryBlock m_BufferMem;
		void *m_pMappedBuffer;

		// NOTE(review): members carry no defaults — presumed to be fully
		// assigned by the allocator before use; confirm against callers.
		bool m_IsCached;
		SMemoryHeap *m_pHeap;
	};
330
	// Memory block for an image that additionally remembers the memory
	// type bits the image was allocated with.
	template<size_t Id>
	struct SMemoryImageBlock : public SMemoryBlock<Id>
	{
		uint32_t m_ImageMemoryBits;
	};
336
337 template<size_t Id>
338 struct SMemoryBlockCache
339 {
340 struct SMemoryCacheType
341 {
342 struct SMemoryCacheHeap
343 {
344 SMemoryHeap m_Heap;
345 VkBuffer m_Buffer;
346
347 SDeviceMemoryBlock m_BufferMem;
348 void *m_pMappedBuffer;
349 };
350 std::vector<SMemoryCacheHeap *> m_vpMemoryHeaps;
351 };
352 SMemoryCacheType m_MemoryCaches;
353 std::vector<std::vector<SMemoryBlock<Id>>> m_vvFrameDelayedCachedBufferCleanup;
354
355 bool m_CanShrink = false;
356
357 void Init(size_t SwapChainImageCount)
358 {
359 m_vvFrameDelayedCachedBufferCleanup.resize(SwapChainImageCount);
360 }
361
362 void DestroyFrameData(size_t ImageCount)
363 {
364 for(size_t i = 0; i < ImageCount; ++i)
365 Cleanup(ImgIndex: i);
366 m_vvFrameDelayedCachedBufferCleanup.clear();
367 }
368
369 void Destroy(VkDevice &Device)
370 {
371 for(auto HeapIterator = m_MemoryCaches.m_vpMemoryHeaps.begin(); HeapIterator != m_MemoryCaches.m_vpMemoryHeaps.end();)
372 {
373 auto *pHeap = *HeapIterator;
374 if(pHeap->m_pMappedBuffer != nullptr)
375 vkUnmapMemory(Device, pHeap->m_BufferMem.m_Mem);
376 if(pHeap->m_Buffer != VK_NULL_HANDLE)
377 vkDestroyBuffer(Device, pHeap->m_Buffer, nullptr);
378 vkFreeMemory(Device, pHeap->m_BufferMem.m_Mem, nullptr);
379
380 delete pHeap;
381 HeapIterator = m_MemoryCaches.m_vpMemoryHeaps.erase(HeapIterator);
382 }
383
384 m_MemoryCaches.m_vpMemoryHeaps.clear();
385 m_vvFrameDelayedCachedBufferCleanup.clear();
386 }
387
388 void Cleanup(size_t ImgIndex)
389 {
390 for(auto &MemBlock : m_vvFrameDelayedCachedBufferCleanup[ImgIndex])
391 {
392 MemBlock.m_UsedSize = 0;
393 MemBlock.m_pHeap->Free(MemBlock.m_HeapData);
394
395 m_CanShrink = true;
396 }
397 m_vvFrameDelayedCachedBufferCleanup[ImgIndex].clear();
398 }
399
400 void FreeMemBlock(SMemoryBlock<Id> &Block, size_t ImgIndex)
401 {
402 m_vvFrameDelayedCachedBufferCleanup[ImgIndex].push_back(Block);
403 }
404
405 // returns the total free'd memory
406 size_t Shrink(VkDevice &Device)
407 {
408 size_t FreedMemory = 0;
409 if(m_CanShrink)
410 {
411 m_CanShrink = false;
412 if(m_MemoryCaches.m_vpMemoryHeaps.size() > 1)
413 {
414 for(auto HeapIterator = m_MemoryCaches.m_vpMemoryHeaps.begin(); HeapIterator != m_MemoryCaches.m_vpMemoryHeaps.end();)
415 {
416 auto *pHeap = *HeapIterator;
417 if(pHeap->m_Heap.IsUnused())
418 {
419 if(pHeap->m_pMappedBuffer != nullptr)
420 vkUnmapMemory(Device, pHeap->m_BufferMem.m_Mem);
421 if(pHeap->m_Buffer != VK_NULL_HANDLE)
422 vkDestroyBuffer(Device, pHeap->m_Buffer, nullptr);
423 vkFreeMemory(Device, pHeap->m_BufferMem.m_Mem, nullptr);
424 FreedMemory += pHeap->m_BufferMem.m_Size;
425
426 delete pHeap;
427 HeapIterator = m_MemoryCaches.m_vpMemoryHeaps.erase(HeapIterator);
428 if(m_MemoryCaches.m_vpMemoryHeaps.size() == 1)
429 break;
430 }
431 else
432 ++HeapIterator;
433 }
434 }
435 }
436
437 return FreedMemory;
438 }
439 };
440
	// All GPU resources belonging to one texture: the standard 2D image and
	// an optional "3D" variant (separate image/view/sampler), each with the
	// descriptor sets that reference them.
	struct CTexture
	{
		VkImage m_Img = VK_NULL_HANDLE;
		SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> m_ImgMem;
		VkImageView m_ImgView = VK_NULL_HANDLE;
		// one sampler per address mode, see EVulkanBackendAddressModes
		VkSampler m_aSamplers[2] = {VK_NULL_HANDLE, VK_NULL_HANDLE};

		VkImage m_Img3D = VK_NULL_HANDLE;
		SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> m_Img3DMem;
		VkImageView m_Img3DView = VK_NULL_HANDLE;
		VkSampler m_Sampler3D = VK_NULL_HANDLE;

		uint32_t m_Width = 0;
		uint32_t m_Height = 0;
		// NOTE(review): presumably the number of times the source image was
		// downscaled to satisfy device limits — confirm against upload code
		uint32_t m_RescaleCount = 0;

		uint32_t m_MipMapCount = 1;

		// descriptor sets, indexed by address mode for the 2D variant
		std::array<SDeviceDescriptorSet, 2> m_aVKStandardTexturedDescrSets;
		SDeviceDescriptorSet m_VKStandard3DTexturedDescrSet;
		SDeviceDescriptorSet m_VKTextDescrSet;
	};
463
	// GPU memory backing one buffer object.
	struct SBufferObject
	{
		SMemoryBlock<VERTEX_BUFFER_CACHE_ID> m_Mem;
	};

	// A buffer object as referenced by render commands; may point at a
	// streamed buffer instead of its own memory.
	struct SBufferObjectFrame
	{
		SBufferObject m_BufferObject;

		// since stream buffers can be used the cur buffer should always be used for rendering
		bool m_IsStreamedBuffer = false;
		VkBuffer m_CurBuffer = VK_NULL_HANDLE;
		size_t m_CurBufferOffset = 0;
	};

	// Maps a buffer container to the index of its buffer object.
	struct SBufferContainer
	{
		int m_BufferObjectIndex;
	};
483
	// One streamed buffer slice: the VkBuffer with its backing memory,
	// offset inside the buffer, capacity, current fill level and the
	// mapped CPU pointer used to write into it.
	struct SFrameBuffers
	{
		VkBuffer m_Buffer;
		SDeviceMemoryBlock m_BufferMem;
		size_t m_OffsetInBuffer = 0;
		size_t m_Size;
		size_t m_UsedSize;
		uint8_t *m_pMappedBufferData;

		SFrameBuffers(VkBuffer Buffer, SDeviceMemoryBlock BufferMem, size_t OffsetInBuffer, size_t Size, size_t UsedSize, uint8_t *pMappedBufferData) :
			m_Buffer(Buffer), m_BufferMem(BufferMem), m_OffsetInBuffer(OffsetInBuffer), m_Size(Size), m_UsedSize(UsedSize), m_pMappedBufferData(pMappedBufferData)
		{
		}
	};
498
	// Streamed uniform buffer slice together with the two descriptor sets
	// that bind it.
	struct SFrameUniformBuffers : public SFrameBuffers
	{
		std::array<SDeviceDescriptorSet, 2> m_aUniformSets;

		SFrameUniformBuffers(VkBuffer Buffer, SDeviceMemoryBlock BufferMem, size_t OffsetInBuffer, size_t Size, size_t UsedSize, uint8_t *pMappedBufferData) :
			SFrameBuffers(Buffer, BufferMem, OffsetInBuffer, Size, UsedSize, pMappedBufferData) {}
	};
506
507 template<typename TName>
508 struct SStreamMemory
509 {
510 typedef std::vector<std::vector<TName>> TBufferObjectsOfFrame;
511 typedef std::vector<std::vector<VkMappedMemoryRange>> TMemoryMapRangesOfFrame;
512 typedef std::vector<size_t> TStreamUseCount;
513 TBufferObjectsOfFrame m_vvBufferObjectsOfFrame;
514 TMemoryMapRangesOfFrame m_vvBufferObjectsOfFrameRangeData;
515 TStreamUseCount m_vCurrentUsedCount;
516
517 std::vector<TName> &GetBuffers(size_t FrameImageIndex)
518 {
519 return m_vvBufferObjectsOfFrame[FrameImageIndex];
520 }
521
522 std::vector<VkMappedMemoryRange> &GetRanges(size_t FrameImageIndex)
523 {
524 return m_vvBufferObjectsOfFrameRangeData[FrameImageIndex];
525 }
526
527 size_t GetUsedCount(size_t FrameImageIndex)
528 {
529 return m_vCurrentUsedCount[FrameImageIndex];
530 }
531
532 void IncreaseUsedCount(size_t FrameImageIndex)
533 {
534 ++m_vCurrentUsedCount[FrameImageIndex];
535 }
536
537 [[nodiscard]] bool IsUsed(size_t FrameImageIndex)
538 {
539 return GetUsedCount(FrameImageIndex) > 0;
540 }
541
542 void ResetFrame(size_t FrameImageIndex)
543 {
544 m_vCurrentUsedCount[FrameImageIndex] = 0;
545 }
546
547 void Init(size_t FrameImageCount)
548 {
549 m_vvBufferObjectsOfFrame.resize(FrameImageCount);
550 m_vvBufferObjectsOfFrameRangeData.resize(new_size: FrameImageCount);
551 m_vCurrentUsedCount.resize(new_size: FrameImageCount);
552 }
553
554 typedef std::function<void(size_t, TName &)> TDestroyBufferFunc;
555
556 void Destroy(TDestroyBufferFunc &&DestroyBuffer)
557 {
558 size_t ImageIndex = 0;
559 for(auto &vBuffersOfFrame : m_vvBufferObjectsOfFrame)
560 {
561 for(auto &BufferOfFrame : vBuffersOfFrame)
562 {
563 VkDeviceMemory BufferMem = BufferOfFrame.m_BufferMem.m_Mem;
564 DestroyBuffer(ImageIndex, BufferOfFrame);
565
566 // delete similar buffers
567 for(auto &BufferOfFrameDel : vBuffersOfFrame)
568 {
569 if(BufferOfFrameDel.m_BufferMem.m_Mem == BufferMem)
570 {
571 BufferOfFrameDel.m_Buffer = VK_NULL_HANDLE;
572 BufferOfFrameDel.m_BufferMem.m_Mem = VK_NULL_HANDLE;
573 }
574 }
575 }
576 ++ImageIndex;
577 }
578 m_vvBufferObjectsOfFrame.clear();
579 m_vvBufferObjectsOfFrameRangeData.clear();
580 m_vCurrentUsedCount.clear();
581 }
582 };
583
584 struct SShaderModule
585 {
586 VkShaderModule m_VertShaderModule = VK_NULL_HANDLE;
587 VkShaderModule m_FragShaderModule = VK_NULL_HANDLE;
588
589 VkDevice m_VKDevice = VK_NULL_HANDLE;
590
591 ~SShaderModule()
592 {
593 if(m_VKDevice != VK_NULL_HANDLE)
594 {
595 if(m_VertShaderModule != VK_NULL_HANDLE)
596 vkDestroyShaderModule(device: m_VKDevice, shaderModule: m_VertShaderModule, pAllocator: nullptr);
597
598 if(m_FragShaderModule != VK_NULL_HANDLE)
599 vkDestroyShaderModule(device: m_VKDevice, shaderModule: m_FragShaderModule, pAllocator: nullptr);
600 }
601 }
602 };
603
	// Sampler address modes supported for standard textures.
	enum EVulkanBackendAddressModes
	{
		VULKAN_BACKEND_ADDRESS_MODE_REPEAT = 0,
		VULKAN_BACKEND_ADDRESS_MODE_CLAMP_EDGES,

		VULKAN_BACKEND_ADDRESS_MODE_COUNT,
	};

	// Blend modes a pipeline can be built with.
	// NOTE: "ADDITATIVE" is a historical misspelling of "additive"; kept
	// because the identifier may be referenced elsewhere.
	enum EVulkanBackendBlendModes
	{
		VULKAN_BACKEND_BLEND_MODE_ALPHA = 0,
		VULKAN_BACKEND_BLEND_MODE_NONE,
		VULKAN_BACKEND_BLEND_MODE_ADDITATIVE,

		VULKAN_BACKEND_BLEND_MODE_COUNT,
	};

	// Whether a pipeline uses dynamic scissor/viewport state.
	enum EVulkanBackendClipModes
	{
		VULKAN_BACKEND_CLIP_MODE_NONE = 0,
		VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT,

		VULKAN_BACKEND_CLIP_MODE_COUNT,
	};

	// Whether a pipeline samples a texture.
	enum EVulkanBackendTextureModes
	{
		VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED = 0,
		VULKAN_BACKEND_TEXTURE_MODE_TEXTURED,

		VULKAN_BACKEND_TEXTURE_MODE_COUNT,
	};
636
637 struct SPipelineContainer
638 {
639 // 3 blend modes - 2 viewport & scissor modes - 2 texture modes
640 std::array<std::array<std::array<VkPipelineLayout, VULKAN_BACKEND_TEXTURE_MODE_COUNT>, VULKAN_BACKEND_CLIP_MODE_COUNT>, VULKAN_BACKEND_BLEND_MODE_COUNT> m_aaaPipelineLayouts;
641 std::array<std::array<std::array<VkPipeline, VULKAN_BACKEND_TEXTURE_MODE_COUNT>, VULKAN_BACKEND_CLIP_MODE_COUNT>, VULKAN_BACKEND_BLEND_MODE_COUNT> m_aaaPipelines;
642
643 SPipelineContainer()
644 {
645 for(auto &aaPipeLayouts : m_aaaPipelineLayouts)
646 {
647 for(auto &aPipeLayouts : aaPipeLayouts)
648 {
649 for(auto &PipeLayout : aPipeLayouts)
650 {
651 PipeLayout = VK_NULL_HANDLE;
652 }
653 }
654 }
655 for(auto &aaPipe : m_aaaPipelines)
656 {
657 for(auto &aPipe : aaPipe)
658 {
659 for(auto &Pipe : aPipe)
660 {
661 Pipe = VK_NULL_HANDLE;
662 }
663 }
664 }
665 }
666
667 void Destroy(VkDevice &Device)
668 {
669 for(auto &aaPipeLayouts : m_aaaPipelineLayouts)
670 {
671 for(auto &aPipeLayouts : aaPipeLayouts)
672 {
673 for(auto &PipeLayout : aPipeLayouts)
674 {
675 if(PipeLayout != VK_NULL_HANDLE)
676 vkDestroyPipelineLayout(device: Device, pipelineLayout: PipeLayout, pAllocator: nullptr);
677 PipeLayout = VK_NULL_HANDLE;
678 }
679 }
680 }
681 for(auto &aaPipe : m_aaaPipelines)
682 {
683 for(auto &aPipe : aaPipe)
684 {
685 for(auto &Pipe : aPipe)
686 {
687 if(Pipe != VK_NULL_HANDLE)
688 vkDestroyPipeline(device: Device, pipeline: Pipe, pAllocator: nullptr);
689 Pipe = VK_NULL_HANDLE;
690 }
691 }
692 }
693 }
694 };
695
	/*******************************
	 * UNIFORM PUSH CONSTANT LAYOUTS
	 ********************************/

	// 4x2 position matrix shared by most vertex shaders
	struct SUniformGPos
	{
		float m_aPos[4 * 2];
	};

	// position matrix plus the texture size for the text shader
	struct SUniformGTextPos
	{
		float m_aPos[4 * 2];
		float m_TextureSize;
	};

	typedef vec3 SUniformTextGFragmentOffset;

	// fragment-stage constants of the text shader
	struct SUniformTextGFragmentConstants
	{
		ColorRGBA m_TextColor;
		ColorRGBA m_TextOutlineColor;
	};

	struct SUniformTextFragment
	{
		SUniformTextGFragmentConstants m_Constants;
	};

	struct SUniformTileGPos
	{
		float m_aPos[4 * 2];
	};

	// tile border rendering additionally needs offset and scale
	struct SUniformTileGPosBorder : public SUniformTileGPos
	{
		vec2 m_Offset;
		vec2 m_Scale;
	};

	typedef ColorRGBA SUniformTileGVertColor;

	// pads the push-constant block from 48 to 64 bytes
	struct SUniformTileGVertColorAlign
	{
		float m_aPad[(64 - 48) / 4];
	};

	struct SUniformPrimExGPosRotationless
	{
		float m_aPos[4 * 2];
	};

	// rotating variant adds rotation center and angle
	struct SUniformPrimExGPos : public SUniformPrimExGPosRotationless
	{
		vec2 m_Center;
		float m_Rotation;
	};

	typedef ColorRGBA SUniformPrimExGVertColor;

	// pads the push-constant block from 44 to 48 bytes
	struct SUniformPrimExGVertColorAlign
	{
		float m_aPad[(48 - 44) / 4];
	};

	struct SUniformSpriteMultiGPos
	{
		float m_aPos[4 * 2];
		vec2 m_Center;
	};

	typedef ColorRGBA SUniformSpriteMultiGVertColor;

	// pads the push-constant block from 40 to 48 bytes
	struct SUniformSpriteMultiGVertColorAlign
	{
		float m_aPad[(48 - 40) / 4];
	};

	struct SUniformSpriteMultiPushGPosBase
	{
		float m_aPos[4 * 2];
		vec2 m_Center;
		vec2 m_Padding;
	};

	// base layout followed by per-instance position/scale/rotation vectors
	struct SUniformSpriteMultiPushGPos : public SUniformSpriteMultiPushGPosBase
	{
		vec4 m_aPSR[1];
	};

	typedef ColorRGBA SUniformSpriteMultiPushGVertColor;

	struct SUniformQuadGPosBase
	{
		float m_aPos[4 * 2];
		int32_t m_QuadOffset;
	};

	// per-quad data pushed for grouped quad rendering
	struct SUniformQuadPushGBufferObject
	{
		ColorRGBA m_VertColor;
		vec2 m_Offset;
		float m_Rotation;
		float m_Padding;
	};

	struct SUniformQuadGroupedGPos
	{
		float m_aPos[4 * 2];
		SUniformQuadPushGBufferObject m_BOPush;
	};

	struct SUniformQuadGPos
	{
		float m_aPos[4 * 2];
		int32_t m_QuadOffset;
	};
812
	// Global sampler configurations, see m_aSamplers.
	enum ESupportedSamplerTypes
	{
		SUPPORTED_SAMPLER_TYPE_REPEAT = 0,
		SUPPORTED_SAMPLER_TYPE_CLAMP_TO_EDGE,
		SUPPORTED_SAMPLER_TYPE_2D_TEXTURE_ARRAY,

		SUPPORTED_SAMPLER_TYPE_COUNT,
	};

	// Cached shader binary (SPIR-V bytes), see m_ShaderFiles.
	struct SShaderFileCache
	{
		std::vector<uint8_t> m_vBinary;
	};
826
827 struct SSwapImgViewportExtent
828 {
829 VkExtent2D m_SwapImageViewport;
830 bool m_HasForcedViewport = false;
831 VkExtent2D m_ForcedViewport;
832
833 // the viewport of the resulting presented image on the screen
834 // if there is a forced viewport the resulting image is smaller
835 // than the full swap image size
836 VkExtent2D GetPresentedImageViewport() const
837 {
838 uint32_t ViewportWidth = m_SwapImageViewport.width;
839 uint32_t ViewportHeight = m_SwapImageViewport.height;
840 if(m_HasForcedViewport)
841 {
842 ViewportWidth = m_ForcedViewport.width;
843 ViewportHeight = m_ForcedViewport.height;
844 }
845
846 return {.width: ViewportWidth, .height: ViewportHeight};
847 }
848 };
849
	// Multisample color image (with memory and view) for one swap chain image.
	struct SSwapChainMultiSampleImage
	{
		VkImage m_Image = VK_NULL_HANDLE;
		SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> m_ImgMem;
		VkImageView m_ImgView = VK_NULL_HANDLE;
	};
856
	/************************
	 * MEMBER VARIABLES
	 ************************/

	// shader binaries keyed by file name
	std::unordered_map<std::string, SShaderFileCache> m_ShaderFiles;

	// memory caches per usage; image caches are keyed by memory type bits
	SMemoryBlockCache<STAGING_BUFFER_CACHE_ID> m_StagingBufferCache;
	SMemoryBlockCache<STAGING_BUFFER_IMAGE_CACHE_ID> m_StagingBufferCacheImage;
	SMemoryBlockCache<VERTEX_BUFFER_CACHE_ID> m_VertexBufferCache;
	std::map<uint32_t, SMemoryBlockCache<IMAGE_BUFFER_CACHE_ID>> m_ImageBufferCaches;

	// staging ranges that still need a flush before submission
	std::vector<VkMappedMemoryRange> m_vNonFlushedStagingBufferRange;

	std::vector<CTexture> m_vTextures;

	// memory usage counters, shared with other threads (hence atomic pointers)
	std::atomic<uint64_t> *m_pTextureMemoryUsage;
	std::atomic<uint64_t> *m_pBufferMemoryUsage;
	std::atomic<uint64_t> *m_pStreamMemoryUsage;
	std::atomic<uint64_t> *m_pStagingMemoryUsage;

	TTwGraphicsGpuList *m_pGpuList;

	int m_GlobalTextureLodBIAS;
	uint32_t m_MultiSamplingCount = 1;

	// sample count to apply on next swap chain recreation; max() means "unchanged"
	uint32_t m_NextMultiSamplingCount = std::numeric_limits<uint32_t>::max();

	bool m_RecreateSwapChain = false;
	bool m_SwapchainCreated = false;
	bool m_RenderingPaused = false;
	bool m_HasDynamicViewport = false;
	VkOffset2D m_DynamicViewportOffset;
	VkExtent2D m_DynamicViewportSize;

	// blitting capabilities detected at initialization
	bool m_AllowsLinearBlitting = false;
	bool m_OptimalSwapChainImageBlitting = false;
	bool m_OptimalRGBAImageBlitting = false;
	bool m_LinearRGBAImageBlitting = false;

	VkBuffer m_IndexBuffer;
	SDeviceMemoryBlock m_IndexBufferMemory;

	VkBuffer m_RenderIndexBuffer;
	SDeviceMemoryBlock m_RenderIndexBufferMemory;
	size_t m_CurRenderIndexPrimitiveCount;

	// device limits queried at initialization
	VkDeviceSize m_NonCoherentMemAlignment;
	VkDeviceSize m_OptimalImageCopyMemAlignment;
	uint32_t m_MaxTextureSize;
	uint32_t m_MaxSamplerAnisotropy;
	VkSampleCountFlags m_MaxMultiSample;

	uint32_t m_MinUniformAlign;

	std::vector<uint8_t> m_vReadPixelHelper;
	std::vector<uint8_t> m_vScreenshotHelper;

	// helper image and mapped memory for reading back the presented frame
	SDeviceMemoryBlock m_GetPresentedImgDataHelperMem;
	VkImage m_GetPresentedImgDataHelperImage = VK_NULL_HANDLE;
	uint8_t *m_pGetPresentedImgDataHelperMappedMemory = nullptr;
	VkDeviceSize m_GetPresentedImgDataHelperMappedLayoutOffset = 0;
	VkDeviceSize m_GetPresentedImgDataHelperMappedLayoutPitch = 0;
	uint32_t m_GetPresentedImgDataHelperWidth = 0;
	uint32_t m_GetPresentedImgDataHelperHeight = 0;
	VkFence m_GetPresentedImgDataHelperFence = VK_NULL_HANDLE;

	// one sampler per ESupportedSamplerTypes entry
	std::array<VkSampler, SUPPORTED_SAMPLER_TYPE_COUNT> m_aSamplers;

	class IStorage *m_pStorage;

	// buffer whose destruction is deferred until its frame index is reused
	struct SDelayedBufferCleanupItem
	{
		VkBuffer m_Buffer;
		SDeviceMemoryBlock m_Mem;
		void *m_pMappedData = nullptr;
	};

	// per swap-chain-image lists of resources to destroy when that image
	// index comes around again (the GPU may still be using them until then)
	std::vector<std::vector<SDelayedBufferCleanupItem>> m_vvFrameDelayedBufferCleanup;
	std::vector<std::vector<CTexture>> m_vvFrameDelayedTextureCleanup;
	std::vector<std::vector<std::pair<CTexture, CTexture>>> m_vvFrameDelayedTextTexturesCleanup;

	// distribution of commands over the render threads
	size_t m_ThreadCount = 1;
	static constexpr size_t MAIN_THREAD_INDEX = 0;
	size_t m_CurCommandInPipe = 0;
	size_t m_CurRenderCallCountInPipe = 0;
	size_t m_CommandsInPipe = 0;
	size_t m_RenderCallsInPipe = 0;
	size_t m_LastCommandsInPipeThreadIndex = 0;

	// one worker render thread with its synchronization state
	struct SRenderThread
	{
		bool m_IsRendering = false;
		std::thread m_Thread;
		std::mutex m_Mutex;
		std::condition_variable m_Cond;
		bool m_Finished = false;
		bool m_Started = false;
	};
	std::vector<std::unique_ptr<SRenderThread>> m_vpRenderThreads;
956
private:
	// swap chain resources, one entry per swap chain image
	std::vector<VkImageView> m_vSwapChainImageViewList;
	std::vector<SSwapChainMultiSampleImage> m_vSwapChainMultiSamplingImages;
	std::vector<VkFramebuffer> m_vFramebufferList;
	std::vector<VkCommandBuffer> m_vMainDrawCommandBuffers;

	// per-thread draw command buffers and their used flags
	std::vector<std::vector<VkCommandBuffer>> m_vvThreadDrawCommandBuffers;
	std::vector<VkCommandBuffer> m_vHelperThreadDrawCommandBuffers;
	std::vector<std::vector<bool>> m_vvUsedThreadDrawCommandBuffer;

	// command buffers for memory transfer work
	std::vector<VkCommandBuffer> m_vMemoryCommandBuffers;
	std::vector<bool> m_vUsedMemoryCommandBuffer;

	// queue submission / image acquisition synchronization primitives
	std::vector<VkSemaphore> m_vQueueSubmitSemaphores;
	std::vector<VkSemaphore> m_vBusyAcquireImageSemaphores;
	VkSemaphore m_AcquireImageSemaphore;

	std::vector<VkFence> m_vQueueSubmitFences;

	uint64_t m_CurFrame = 0;
	std::vector<uint64_t> m_vImageLastFrameCheck;

	uint32_t m_LastPresentedSwapChainImageIndex;

	std::vector<SBufferObjectFrame> m_vBufferObjects;

	std::vector<SBufferContainer> m_vBufferContainers;

	// core Vulkan handles
	VkInstance m_VKInstance;
	VkPhysicalDevice m_VKGPU;
	uint32_t m_VKGraphicsQueueIndex = std::numeric_limits<uint32_t>::max();
	VkDevice m_VKDevice;
	VkQueue m_VKGraphicsQueue, m_VKPresentQueue;
	VkSurfaceKHR m_VKPresentSurface;
	SSwapImgViewportExtent m_VKSwapImgAndViewportExtent;

#ifdef VK_EXT_debug_utils
	VkDebugUtilsMessengerEXT m_DebugMessenger;
#endif

	// descriptor set layouts for the different shader families
	VkDescriptorSetLayout m_StandardTexturedDescriptorSetLayout;
	VkDescriptorSetLayout m_Standard3DTexturedDescriptorSetLayout;

	VkDescriptorSetLayout m_TextDescriptorSetLayout;

	VkDescriptorSetLayout m_SpriteMultiUniformDescriptorSetLayout;
	VkDescriptorSetLayout m_QuadUniformDescriptorSetLayout;

	// one pipeline container per shader program variant
	SPipelineContainer m_StandardPipeline;
	SPipelineContainer m_StandardLinePipeline;
	SPipelineContainer m_Standard3DPipeline;
	SPipelineContainer m_TextPipeline;
	SPipelineContainer m_TilePipeline;
	SPipelineContainer m_TileBorderPipeline;
	SPipelineContainer m_PrimExPipeline;
	SPipelineContainer m_PrimExRotationlessPipeline;
	SPipelineContainer m_SpriteMultiPipeline;
	SPipelineContainer m_SpriteMultiPushPipeline;
	SPipelineContainer m_QuadPipeline;
	SPipelineContainer m_QuadGroupedPipeline;

	// last bound pipeline per thread (to skip redundant binds — presumably; confirm against bind code)
	std::vector<VkPipeline> m_vLastPipeline;

	std::vector<VkCommandPool> m_vCommandPools;

	VkRenderPass m_VKRenderPass;

	VkSurfaceFormatKHR m_VKSurfFormat;

	SDeviceDescriptorPools m_StandardTextureDescrPool;
	SDeviceDescriptorPools m_TextTextureDescrPool;

	std::vector<SDeviceDescriptorPools> m_vUniformBufferDescrPools;

	VkSwapchainKHR m_VKSwapChain = VK_NULL_HANDLE;
	std::vector<VkImage> m_vSwapChainImages;
	uint32_t m_SwapChainImageCount = 0;

	// streamed per-frame vertex and uniform buffers
	std::vector<SStreamMemory<SFrameBuffers>> m_vStreamedVertexBuffers;
	std::vector<SStreamMemory<SFrameUniformBuffers>> m_vStreamedUniformBuffers;

	uint32_t m_CurImageIndex = 0;

	uint32_t m_CanvasWidth;
	uint32_t m_CanvasHeight;

	SDL_Window *m_pWindow;

	std::array<float, 4> m_aClearColor = {0, 0, 0, 0};

	// a render command prepared for execution on one of the render threads
	struct SRenderCommandExecuteBuffer
	{
		CCommandBuffer::ECommandBufferCMD m_Command;
		const CCommandBuffer::SCommand *m_pRawCommand;
		uint32_t m_ThreadIndex;

		// must be calculated when the buffer gets filled
		size_t m_EstimatedRenderCallCount = 0;

		// useful data
		VkBuffer m_Buffer;
		size_t m_BufferOff;
		std::array<SDeviceDescriptorSet, 2> m_aDescriptors;

		VkBuffer m_IndexBuffer;

		bool m_ClearColorInRenderThread = false;

		bool m_HasDynamicState = false;
		VkViewport m_Viewport;
		VkRect2D m_Scissor;
	};

	typedef std::vector<SRenderCommandExecuteBuffer> TCommandList;
	typedef std::vector<TCommandList> TThreadCommandList;

	TThreadCommandList m_vvThreadCommandLists;
	std::vector<bool> m_vThreadHelperHadCommands;

	typedef std::function<bool(const CCommandBuffer::SCommand *, SRenderCommandExecuteBuffer &)> TCommandBufferCommandCallback;
	typedef std::function<void(SRenderCommandExecuteBuffer &, const CCommandBuffer::SCommand *)> TCommandBufferFillExecuteBufferFunc;

	// dispatch entry for one command type
	struct SCommandCallback
	{
		bool m_IsRenderCommand;
		TCommandBufferFillExecuteBufferFunc m_FillExecuteBuffer;
		TCommandBufferCommandCallback m_CommandCB;
		// command should be considered handled after it executed
		bool m_CMDIsHandled = true;
	};
	std::array<SCommandCallback, static_cast<int>(CCommandBuffer::CMD_COUNT) - static_cast<int>(CCommandBuffer::CMD_FIRST)> m_aCommandCallbacks;
1088
protected:
	/************************
	 * ERROR MANAGEMENT
	 ************************/
	// guards m_ErrorHelper, m_Error and m_Warning, which may be touched from multiple threads
	std::mutex m_ErrWarnMutex;
	// scratch string used to build dynamic error messages (e.g. unknown VkResult codes)
	std::string m_ErrorHelper;

	bool m_HasError = false;
	// when false (e.g. during initialization), errors are downgraded to warnings
	bool m_CanAssert = false;
1099 /**
1100 * After an error occurred, the rendering stop as soon as possible
1101 * Always stop the current code execution after a call to this function (e.g. return false)
1102 */
1103 void SetError(EGfxErrorType ErrType, const char *pErr, const char *pErrStrExtra = nullptr)
1104 {
1105 std::unique_lock<std::mutex> Lock(m_ErrWarnMutex);
1106 SGfxErrorContainer::SError Err = {.m_RequiresTranslation: false, .m_Err: pErr};
1107 if(std::find(first: m_Error.m_vErrors.begin(), last: m_Error.m_vErrors.end(), val: Err) == m_Error.m_vErrors.end())
1108 m_Error.m_vErrors.emplace_back(args&: Err);
1109 if(pErrStrExtra != nullptr)
1110 {
1111 SGfxErrorContainer::SError ErrExtra = {.m_RequiresTranslation: false, .m_Err: pErrStrExtra};
1112 if(std::find(first: m_Error.m_vErrors.begin(), last: m_Error.m_vErrors.end(), val: ErrExtra) == m_Error.m_vErrors.end())
1113 m_Error.m_vErrors.emplace_back(args&: ErrExtra);
1114 }
1115 if(m_CanAssert)
1116 {
1117 if(pErrStrExtra != nullptr)
1118 log_error("gfx/vulkan", "%s: %s", pErr, pErrStrExtra);
1119 else
1120 log_error("gfx/vulkan", "%s", pErr);
1121 m_HasError = true;
1122 m_Error.m_ErrorType = ErrType;
1123 }
1124 else
1125 {
1126 Lock.unlock();
1127 // during initialization vulkan should not throw any errors but warnings instead
1128 // since most code in the swapchain is shared with runtime code, add this extra code path
1129 SetWarning(WarningType: EGfxWarningType::GFX_WARNING_TYPE_INIT_FAILED, pWarning: pErr);
1130 }
1131 }
1132
1133 void SetWarningPreMsg(const char *pWarningPre)
1134 {
1135 std::unique_lock<std::mutex> Lock(m_ErrWarnMutex);
1136 if(std::find(first: m_Warning.m_vWarnings.begin(), last: m_Warning.m_vWarnings.end(), val: pWarningPre) == m_Warning.m_vWarnings.end())
1137 m_Warning.m_vWarnings.emplace(position: m_Warning.m_vWarnings.begin(), args&: pWarningPre);
1138 }
1139
1140 void SetWarning(EGfxWarningType WarningType, const char *pWarning)
1141 {
1142 std::unique_lock<std::mutex> Lock(m_ErrWarnMutex);
1143 log_warn("gfx/vulkan", "%s", pWarning);
1144 if(std::find(first: m_Warning.m_vWarnings.begin(), last: m_Warning.m_vWarnings.end(), val: pWarning) == m_Warning.m_vWarnings.end())
1145 m_Warning.m_vWarnings.emplace_back(args&: pWarning);
1146 m_Warning.m_WarningType = WarningType;
1147 }
1148
1149 const char *CheckVulkanCriticalError(VkResult CallResult)
1150 {
1151 const char *pCriticalError = nullptr;
1152 switch(CallResult)
1153 {
1154 case VK_ERROR_OUT_OF_HOST_MEMORY:
1155 pCriticalError = "Host ran out of memory.";
1156 log_error("gfx/vulkan", "%s", pCriticalError);
1157 break;
1158 case VK_ERROR_OUT_OF_DEVICE_MEMORY:
1159 pCriticalError = "Device ran out of memory.";
1160 log_error("gfx/vulkan", "%s", pCriticalError);
1161 break;
1162 case VK_ERROR_DEVICE_LOST:
1163 pCriticalError = "Device lost.";
1164 log_error("gfx/vulkan", "%s", pCriticalError);
1165 break;
1166 case VK_ERROR_OUT_OF_DATE_KHR:
1167 {
1168 if(IsVerbose())
1169 {
1170 log_debug("gfx/vulkan", "Queueing swap chain recreation because the current is out of date.");
1171 }
1172 m_RecreateSwapChain = true;
1173 break;
1174 }
1175 case VK_ERROR_SURFACE_LOST_KHR:
1176 log_error("gfx/vulkan", "Surface lost.");
1177 break;
1178 case VK_ERROR_INCOMPATIBLE_DRIVER:
1179 pCriticalError = "No compatible driver found. Vulkan 1.1 is required.";
1180 log_error("gfx/vulkan", "%s", pCriticalError);
1181 break;
1182 case VK_ERROR_INITIALIZATION_FAILED:
1183 pCriticalError = "Initialization failed for unknown reason.";
1184 log_error("gfx/vulkan", "%s", pCriticalError);
1185 break;
1186 case VK_ERROR_LAYER_NOT_PRESENT:
1187 SetWarning(WarningType: EGfxWarningType::GFX_WARNING_MISSING_EXTENSION, pWarning: "At least one Vulkan layer was not present. (Try to disable them.)");
1188 break;
1189 case VK_ERROR_EXTENSION_NOT_PRESENT:
1190 SetWarning(WarningType: EGfxWarningType::GFX_WARNING_MISSING_EXTENSION, pWarning: "At least one Vulkan extension was not present. (Try to disable them.)");
1191 break;
1192 case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
1193 log_error("gfx/vulkan", "Native window in use.");
1194 break;
1195 case VK_SUCCESS:
1196 break;
1197 case VK_SUBOPTIMAL_KHR:
1198 if(IsVerbose())
1199 {
1200 log_debug("gfx/vulkan", "Queueing swap chain recreation because the current is suboptimal.");
1201 }
1202 m_RecreateSwapChain = true;
1203 break;
1204 default:
1205 m_ErrorHelper = "Unknown error: ";
1206 m_ErrorHelper.append(str: std::to_string(val: CallResult));
1207 pCriticalError = m_ErrorHelper.c_str();
1208 log_error("gfx/vulkan", "%s", pCriticalError);
1209 break;
1210 }
1211
1212 return pCriticalError;
1213 }
1214
	// Cleanup hook called by the base class after a fatal error: tears down
	// all Vulkan and SDL state so the backend can be abandoned safely.
	void ErroneousCleanup() override
	{
		CleanupVulkanSDL();
	}
1219
1220 /************************
1221 * COMMAND CALLBACKS
1222 ************************/
1223
1224 size_t CommandBufferCMDOff(CCommandBuffer::ECommandBufferCMD CommandBufferCMD)
1225 {
1226 return (size_t)CommandBufferCMD - CCommandBuffer::CMD_FIRST;
1227 }
1228
	// Fills the m_aCommandCallbacks dispatch table: for every command id a pair of
	// (fill-execute-buffer callback, execute callback) plus whether it is a render
	// command and whether executing it fully handles it. Non-render commands use a
	// no-op fill callback. Unregistered slots get no-op callbacks at the end so the
	// table can be indexed unconditionally.
	void RegisterCommands()
	{
		// texture lifecycle commands
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TEXTURE_CREATE)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Texture_Create(pCommand: static_cast<const CCommandBuffer::SCommand_Texture_Create *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TEXTURE_DESTROY)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Texture_Destroy(pCommand: static_cast<const CCommandBuffer::SCommand_Texture_Destroy *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TEXT_TEXTURES_CREATE)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_TextTextures_Create(pCommand: static_cast<const CCommandBuffer::SCommand_TextTextures_Create *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TEXT_TEXTURES_DESTROY)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_TextTextures_Destroy(pCommand: static_cast<const CCommandBuffer::SCommand_TextTextures_Destroy *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TEXT_TEXTURE_UPDATE)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_TextTexture_Update(pCommand: static_cast<const CCommandBuffer::SCommand_TextTexture_Update *>(pBaseCommand)); }};

		// basic render commands (executed on render threads)
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_CLEAR)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_Clear_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_Clear *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Clear(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_Clear *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_Render_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_Render *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Render(pCommand: static_cast<const CCommandBuffer::SCommand_Render *>(pBaseCommand), ExecBuffer); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_TEX3D)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderTex3D_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderTex3D *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderTex3D(pCommand: static_cast<const CCommandBuffer::SCommand_RenderTex3D *>(pBaseCommand), ExecBuffer); }};

		// buffer object lifecycle commands
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_CREATE_BUFFER_OBJECT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_CreateBufferObject(pCommand: static_cast<const CCommandBuffer::SCommand_CreateBufferObject *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RECREATE_BUFFER_OBJECT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RecreateBufferObject(pCommand: static_cast<const CCommandBuffer::SCommand_RecreateBufferObject *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_UPDATE_BUFFER_OBJECT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_UpdateBufferObject(pCommand: static_cast<const CCommandBuffer::SCommand_UpdateBufferObject *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_COPY_BUFFER_OBJECT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_CopyBufferObject(pCommand: static_cast<const CCommandBuffer::SCommand_CopyBufferObject *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_DELETE_BUFFER_OBJECT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_DeleteBufferObject(pCommand: static_cast<const CCommandBuffer::SCommand_DeleteBufferObject *>(pBaseCommand)); }};

		// buffer container lifecycle commands
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_CREATE_BUFFER_CONTAINER)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_CreateBufferContainer(pCommand: static_cast<const CCommandBuffer::SCommand_CreateBufferContainer *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_DELETE_BUFFER_CONTAINER)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_DeleteBufferContainer(pCommand: static_cast<const CCommandBuffer::SCommand_DeleteBufferContainer *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_UPDATE_BUFFER_CONTAINER)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_UpdateBufferContainer(pCommand: static_cast<const CCommandBuffer::SCommand_UpdateBufferContainer *>(pBaseCommand)); }};

		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_INDICES_REQUIRED_NUM_NOTIFY)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_IndicesRequiredNumNotify(pCommand: static_cast<const CCommandBuffer::SCommand_IndicesRequiredNumNotify *>(pBaseCommand)); }};

		// advanced render commands (tile layers, quads, text, quad containers)
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_TILE_LAYER)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderTileLayer_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderTileLayer *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderTileLayer(pCommand: static_cast<const CCommandBuffer::SCommand_RenderTileLayer *>(pBaseCommand), ExecBuffer); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_BORDER_TILE)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderBorderTile_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderBorderTile *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderBorderTile(pCommand: static_cast<const CCommandBuffer::SCommand_RenderBorderTile *>(pBaseCommand), ExecBuffer); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_QUAD_LAYER)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderQuadLayer_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadLayer *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderQuadLayer(pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadLayer *>(pBaseCommand), ExecBuffer, Grouped: false); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_QUAD_LAYER_GROUPED)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderQuadLayer_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadLayer *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderQuadLayer(pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadLayer *>(pBaseCommand), ExecBuffer, Grouped: true); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_TEXT)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderText_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderText *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderText(pCommand: static_cast<const CCommandBuffer::SCommand_RenderText *>(pBaseCommand), ExecBuffer); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_QUAD_CONTAINER)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderQuadContainer_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainer *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderQuadContainer(pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainer *>(pBaseCommand), ExecBuffer); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_QUAD_CONTAINER_EX)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderQuadContainerEx_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainerEx *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderQuadContainerEx(pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainerEx *>(pBaseCommand), ExecBuffer); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_RENDER_QUAD_CONTAINER_SPRITE_MULTIPLE)] = {.m_IsRenderCommand: true, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_RenderQuadContainerAsSpriteMultiple_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainerAsSpriteMultiple *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_RenderQuadContainerAsSpriteMultiple(pCommand: static_cast<const CCommandBuffer::SCommand_RenderQuadContainerAsSpriteMultiple *>(pBaseCommand), ExecBuffer); }};

		// frame presentation
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_SWAP)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Swap(pCommand: static_cast<const CCommandBuffer::SCommand_Swap *>(pBaseCommand)); }};

		// settings and screen capture commands
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_VSYNC)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_VSync(pCommand: static_cast<const CCommandBuffer::SCommand_VSync *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_MULTISAMPLING)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_MultiSampling(pCommand: static_cast<const CCommandBuffer::SCommand_MultiSampling *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TRY_SWAP_AND_READ_PIXEL)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_ReadPixel(pCommand: static_cast<const CCommandBuffer::SCommand_TrySwapAndReadPixel *>(pBaseCommand)); }};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_TRY_SWAP_AND_SCREENSHOT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Screenshot(pCommand: static_cast<const CCommandBuffer::SCommand_TrySwapAndScreenshot *>(pBaseCommand)); }};

		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_UPDATE_VIEWPORT)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [this](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) { Cmd_Update_Viewport_FillExecuteBuffer(ExecBuffer, pCommand: static_cast<const CCommandBuffer::SCommand_Update_Viewport *>(pBaseCommand)); }, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_Update_Viewport(pCommand: static_cast<const CCommandBuffer::SCommand_Update_Viewport *>(pBaseCommand)); }};

		// window notifications are not fully handled here (m_CMDIsHandled = false)
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_WINDOW_CREATE_NTF)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_WindowCreateNtf(pCommand: static_cast<const CCommandBuffer::SCommand_WindowCreateNtf *>(pBaseCommand)); }, .m_CMDIsHandled: false};
		m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::CMD_WINDOW_DESTROY_NTF)] = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [this](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return Cmd_WindowDestroyNtf(pCommand: static_cast<const CCommandBuffer::SCommand_WindowDestroyNtf *>(pBaseCommand)); }, .m_CMDIsHandled: false};

		// fill any remaining slot with no-ops so lookups never hit an empty callback
		for(auto &Callback : m_aCommandCallbacks)
		{
			if(!(bool)Callback.m_CommandCB)
				Callback = {.m_IsRenderCommand: false, .m_FillExecuteBuffer: [](SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand *pBaseCommand) {}, .m_CommandCB: [](const CCommandBuffer::SCommand *pBaseCommand, SRenderCommandExecuteBuffer &ExecBuffer) { return true; }};
		}
	}
1280
1281 /*****************************
1282 * VIDEO AND SCREENSHOT HELPER
1283 ******************************/
1284
1285 [[nodiscard]] bool PreparePresentedImageDataImage(uint8_t *&pResImageData, uint32_t Width, uint32_t Height)
1286 {
1287 bool NeedsNewImg = Width != m_GetPresentedImgDataHelperWidth || Height != m_GetPresentedImgDataHelperHeight;
1288 if(m_GetPresentedImgDataHelperImage == VK_NULL_HANDLE || NeedsNewImg)
1289 {
1290 if(m_GetPresentedImgDataHelperImage != VK_NULL_HANDLE)
1291 {
1292 DeletePresentedImageDataImage();
1293 }
1294 m_GetPresentedImgDataHelperWidth = Width;
1295 m_GetPresentedImgDataHelperHeight = Height;
1296
1297 VkImageCreateInfo ImageInfo{};
1298 ImageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
1299 ImageInfo.imageType = VK_IMAGE_TYPE_2D;
1300 ImageInfo.extent.width = Width;
1301 ImageInfo.extent.height = Height;
1302 ImageInfo.extent.depth = 1;
1303 ImageInfo.mipLevels = 1;
1304 ImageInfo.arrayLayers = 1;
1305 ImageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1306 ImageInfo.tiling = VK_IMAGE_TILING_LINEAR;
1307 ImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
1308 ImageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1309 ImageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1310 ImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
1311
1312 vkCreateImage(device: m_VKDevice, pCreateInfo: &ImageInfo, pAllocator: nullptr, pImage: &m_GetPresentedImgDataHelperImage);
1313 // Create memory to back up the image
1314 VkMemoryRequirements MemRequirements;
1315 vkGetImageMemoryRequirements(device: m_VKDevice, image: m_GetPresentedImgDataHelperImage, pMemoryRequirements: &MemRequirements);
1316
1317 VkMemoryAllocateInfo MemAllocInfo{};
1318 MemAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1319 MemAllocInfo.allocationSize = MemRequirements.size;
1320 MemAllocInfo.memoryTypeIndex = FindMemoryType(PhyDevice: m_VKGPU, TypeFilter: MemRequirements.memoryTypeBits, Properties: VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT);
1321
1322 vkAllocateMemory(device: m_VKDevice, pAllocateInfo: &MemAllocInfo, pAllocator: nullptr, pMemory: &m_GetPresentedImgDataHelperMem.m_Mem);
1323 vkBindImageMemory(device: m_VKDevice, image: m_GetPresentedImgDataHelperImage, memory: m_GetPresentedImgDataHelperMem.m_Mem, memoryOffset: 0);
1324
1325 if(!ImageBarrier(Image: m_GetPresentedImgDataHelperImage, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format: VK_FORMAT_R8G8B8A8_UNORM, OldLayout: VK_IMAGE_LAYOUT_UNDEFINED, NewLayout: VK_IMAGE_LAYOUT_GENERAL))
1326 return false;
1327
1328 VkImageSubresource SubResource{.aspectMask: VK_IMAGE_ASPECT_COLOR_BIT, .mipLevel: 0, .arrayLayer: 0};
1329 VkSubresourceLayout SubResourceLayout;
1330 vkGetImageSubresourceLayout(device: m_VKDevice, image: m_GetPresentedImgDataHelperImage, pSubresource: &SubResource, pLayout: &SubResourceLayout);
1331
1332 if(vkMapMemory(device: m_VKDevice, memory: m_GetPresentedImgDataHelperMem.m_Mem, offset: 0, VK_WHOLE_SIZE, flags: 0, ppData: (void **)&m_pGetPresentedImgDataHelperMappedMemory) != VK_SUCCESS)
1333 return false;
1334 m_GetPresentedImgDataHelperMappedLayoutOffset = SubResourceLayout.offset;
1335 m_GetPresentedImgDataHelperMappedLayoutPitch = SubResourceLayout.rowPitch;
1336 m_pGetPresentedImgDataHelperMappedMemory += m_GetPresentedImgDataHelperMappedLayoutOffset;
1337
1338 VkFenceCreateInfo FenceInfo{};
1339 FenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
1340 FenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
1341 vkCreateFence(device: m_VKDevice, pCreateInfo: &FenceInfo, pAllocator: nullptr, pFence: &m_GetPresentedImgDataHelperFence);
1342 }
1343 pResImageData = m_pGetPresentedImgDataHelperMappedMemory;
1344 return true;
1345 }
1346
1347 void DeletePresentedImageDataImage()
1348 {
1349 if(m_GetPresentedImgDataHelperImage != VK_NULL_HANDLE)
1350 {
1351 vkDestroyFence(device: m_VKDevice, fence: m_GetPresentedImgDataHelperFence, pAllocator: nullptr);
1352
1353 m_GetPresentedImgDataHelperFence = VK_NULL_HANDLE;
1354
1355 vkDestroyImage(device: m_VKDevice, image: m_GetPresentedImgDataHelperImage, pAllocator: nullptr);
1356 vkUnmapMemory(device: m_VKDevice, memory: m_GetPresentedImgDataHelperMem.m_Mem);
1357 vkFreeMemory(device: m_VKDevice, memory: m_GetPresentedImgDataHelperMem.m_Mem, pAllocator: nullptr);
1358
1359 m_GetPresentedImgDataHelperImage = VK_NULL_HANDLE;
1360 m_GetPresentedImgDataHelperMem = {};
1361 m_pGetPresentedImgDataHelperMappedMemory = nullptr;
1362
1363 m_GetPresentedImgDataHelperWidth = 0;
1364 m_GetPresentedImgDataHelperHeight = 0;
1365 }
1366 }
1367
1368 [[nodiscard]] bool GetPresentedImageDataImpl(uint32_t &Width, uint32_t &Height, CImageInfo::EImageFormat &Format, std::vector<uint8_t> &vDstData, bool ResetAlpha, std::optional<ivec2> PixelOffset)
1369 {
1370 bool IsB8G8R8A8 = m_VKSurfFormat.format == VK_FORMAT_B8G8R8A8_UNORM;
1371 bool UsesRGBALikeFormat = m_VKSurfFormat.format == VK_FORMAT_R8G8B8A8_UNORM || IsB8G8R8A8;
1372 if(UsesRGBALikeFormat && m_LastPresentedSwapChainImageIndex != std::numeric_limits<decltype(m_LastPresentedSwapChainImageIndex)>::max())
1373 {
1374 auto Viewport = m_VKSwapImgAndViewportExtent.GetPresentedImageViewport();
1375 VkOffset3D SrcOffset;
1376 if(PixelOffset.has_value())
1377 {
1378 SrcOffset.x = PixelOffset.value().x;
1379 SrcOffset.y = PixelOffset.value().y;
1380 Width = 1;
1381 Height = 1;
1382 }
1383 else
1384 {
1385 SrcOffset.x = 0;
1386 SrcOffset.y = 0;
1387 Width = Viewport.width;
1388 Height = Viewport.height;
1389 }
1390 SrcOffset.z = 0;
1391 Format = CImageInfo::FORMAT_RGBA;
1392
1393 const size_t ImageTotalSize = (size_t)Width * Height * CImageInfo::PixelSize(Format);
1394
1395 uint8_t *pResImageData;
1396 if(!PreparePresentedImageDataImage(pResImageData, Width, Height))
1397 return false;
1398
1399 VkCommandBuffer *pCommandBuffer;
1400 if(!GetMemoryCommandBuffer(pMemCommandBuffer&: pCommandBuffer))
1401 return false;
1402 VkCommandBuffer &CommandBuffer = *pCommandBuffer;
1403
1404 auto &SwapImg = m_vSwapChainImages[m_LastPresentedSwapChainImageIndex];
1405
1406 if(!ImageBarrier(Image: m_GetPresentedImgDataHelperImage, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format: VK_FORMAT_R8G8B8A8_UNORM, OldLayout: VK_IMAGE_LAYOUT_GENERAL, NewLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL))
1407 return false;
1408 if(!ImageBarrier(Image: SwapImg, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format: m_VKSurfFormat.format, OldLayout: VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, NewLayout: VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL))
1409 return false;
1410
1411 // If source and destination support blit we'll blit as this also does automatic format conversion (e.g. from BGR to RGB)
1412 if(m_OptimalSwapChainImageBlitting && m_LinearRGBAImageBlitting)
1413 {
1414 VkOffset3D BlitSize;
1415 BlitSize.x = Width;
1416 BlitSize.y = Height;
1417 BlitSize.z = 1;
1418
1419 VkImageBlit ImageBlitRegion{};
1420 ImageBlitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1421 ImageBlitRegion.srcSubresource.layerCount = 1;
1422 ImageBlitRegion.srcOffsets[0] = SrcOffset;
1423 ImageBlitRegion.srcOffsets[1] = {.x: SrcOffset.x + BlitSize.x, .y: SrcOffset.y + BlitSize.y, .z: SrcOffset.z + BlitSize.z};
1424 ImageBlitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1425 ImageBlitRegion.dstSubresource.layerCount = 1;
1426 ImageBlitRegion.dstOffsets[1] = BlitSize;
1427
1428 // Issue the blit command
1429 vkCmdBlitImage(commandBuffer: CommandBuffer, srcImage: SwapImg, srcImageLayout: VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1430 dstImage: m_GetPresentedImgDataHelperImage, dstImageLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1431 regionCount: 1, pRegions: &ImageBlitRegion, filter: VK_FILTER_NEAREST);
1432
1433 // transformed to RGBA
1434 IsB8G8R8A8 = false;
1435 }
1436 else
1437 {
1438 // Otherwise use image copy (requires us to manually flip components)
1439 VkImageCopy ImageCopyRegion{};
1440 ImageCopyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1441 ImageCopyRegion.srcSubresource.layerCount = 1;
1442 ImageCopyRegion.srcOffset = SrcOffset;
1443 ImageCopyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1444 ImageCopyRegion.dstSubresource.layerCount = 1;
1445 ImageCopyRegion.extent.width = Width;
1446 ImageCopyRegion.extent.height = Height;
1447 ImageCopyRegion.extent.depth = 1;
1448
1449 // Issue the copy command
1450 vkCmdCopyImage(commandBuffer: CommandBuffer, srcImage: SwapImg, srcImageLayout: VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1451 dstImage: m_GetPresentedImgDataHelperImage, dstImageLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1452 regionCount: 1, pRegions: &ImageCopyRegion);
1453 }
1454
1455 if(!ImageBarrier(Image: m_GetPresentedImgDataHelperImage, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format: VK_FORMAT_R8G8B8A8_UNORM, OldLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, NewLayout: VK_IMAGE_LAYOUT_GENERAL))
1456 return false;
1457 if(!ImageBarrier(Image: SwapImg, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format: m_VKSurfFormat.format, OldLayout: VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, NewLayout: VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
1458 return false;
1459
1460 vkEndCommandBuffer(commandBuffer: CommandBuffer);
1461 m_vUsedMemoryCommandBuffer[m_CurImageIndex] = false;
1462
1463 VkSubmitInfo SubmitInfo{};
1464 SubmitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
1465 SubmitInfo.commandBufferCount = 1;
1466 SubmitInfo.pCommandBuffers = &CommandBuffer;
1467
1468 vkResetFences(device: m_VKDevice, fenceCount: 1, pFences: &m_GetPresentedImgDataHelperFence);
1469 vkQueueSubmit(queue: m_VKGraphicsQueue, submitCount: 1, pSubmits: &SubmitInfo, fence: m_GetPresentedImgDataHelperFence);
1470 vkWaitForFences(device: m_VKDevice, fenceCount: 1, pFences: &m_GetPresentedImgDataHelperFence, VK_TRUE, timeout: std::numeric_limits<uint64_t>::max());
1471
1472 VkMappedMemoryRange MemRange{};
1473 MemRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
1474 MemRange.memory = m_GetPresentedImgDataHelperMem.m_Mem;
1475 MemRange.offset = m_GetPresentedImgDataHelperMappedLayoutOffset;
1476 MemRange.size = VK_WHOLE_SIZE;
1477 vkInvalidateMappedMemoryRanges(device: m_VKDevice, memoryRangeCount: 1, pMemoryRanges: &MemRange);
1478
1479 size_t RealFullImageSize = maximum(a: ImageTotalSize, b: (size_t)(Height * m_GetPresentedImgDataHelperMappedLayoutPitch));
1480 size_t ExtraRowSize = Width * 4;
1481 if(vDstData.size() < RealFullImageSize + ExtraRowSize)
1482 vDstData.resize(new_size: RealFullImageSize + ExtraRowSize);
1483
1484 mem_copy(dest: vDstData.data(), source: pResImageData, size: RealFullImageSize);
1485
1486 // pack image data together without any offset that the driver might require
1487 if(Width * 4 < m_GetPresentedImgDataHelperMappedLayoutPitch)
1488 {
1489 for(uint32_t Y = 0; Y < Height; ++Y)
1490 {
1491 size_t OffsetImagePacked = (Y * Width * 4);
1492 size_t OffsetImageUnpacked = (Y * m_GetPresentedImgDataHelperMappedLayoutPitch);
1493 mem_copy(dest: vDstData.data() + RealFullImageSize, source: vDstData.data() + OffsetImageUnpacked, size: Width * 4);
1494 mem_copy(dest: vDstData.data() + OffsetImagePacked, source: vDstData.data() + RealFullImageSize, size: Width * 4);
1495 }
1496 }
1497
1498 if(IsB8G8R8A8 || ResetAlpha)
1499 {
1500 // swizzle
1501 for(uint32_t Y = 0; Y < Height; ++Y)
1502 {
1503 for(uint32_t X = 0; X < Width; ++X)
1504 {
1505 size_t ImgOff = (Y * Width * 4) + (X * 4);
1506 if(IsB8G8R8A8)
1507 {
1508 std::swap(a&: vDstData[ImgOff], b&: vDstData[ImgOff + 2]);
1509 }
1510 vDstData[ImgOff + 3] = 255;
1511 }
1512 }
1513 }
1514
1515 return true;
1516 }
1517 else
1518 {
1519 if(!UsesRGBALikeFormat)
1520 {
1521 log_error("gfx/vulkan", "Swap chain image was not in an RGBA-like format.");
1522 }
1523 else
1524 {
1525 log_error("gfx/vulkan", "Swap chain image was not ready to be copied.");
1526 }
1527 return false;
1528 }
1529 }
1530
1531 [[nodiscard]] bool GetPresentedImageData(uint32_t &Width, uint32_t &Height, CImageInfo::EImageFormat &Format, std::vector<uint8_t> &vDstData) override
1532 {
1533 return GetPresentedImageDataImpl(Width, Height, Format, vDstData, ResetAlpha: false, PixelOffset: {});
1534 }
1535
1536 /************************
1537 * MEMORY MANAGEMENT
1538 ************************/
1539
1540 [[nodiscard]] bool AllocateVulkanMemory(const VkMemoryAllocateInfo *pAllocateInfo, VkDeviceMemory *pMemory)
1541 {
1542 VkResult Res = vkAllocateMemory(device: m_VKDevice, pAllocateInfo, pAllocator: nullptr, pMemory);
1543 if(Res != VK_SUCCESS)
1544 {
1545 log_warn("gfx/vulkan", "Memory allocation failed, trying to recover.");
1546 if(Res == VK_ERROR_OUT_OF_HOST_MEMORY || Res == VK_ERROR_OUT_OF_DEVICE_MEMORY)
1547 {
1548 // aggressively try to get more memory
1549 vkDeviceWaitIdle(device: m_VKDevice);
1550 for(size_t i = 0; i < m_SwapChainImageCount + 1; ++i)
1551 {
1552 if(!NextFrame())
1553 return false;
1554 }
1555 Res = vkAllocateMemory(device: m_VKDevice, pAllocateInfo, pAllocator: nullptr, pMemory);
1556 }
1557 if(Res != VK_SUCCESS)
1558 {
1559 log_error("gfx/vulkan", "Memory allocation and recovery failed.");
1560 return false;
1561 }
1562 }
1563 return true;
1564 }
1565
1566 [[nodiscard]] bool GetBufferImpl(VkDeviceSize RequiredSize, EMemoryBlockUsage MemUsage, VkBuffer &Buffer, SDeviceMemoryBlock &BufferMemory, VkBufferUsageFlags BufferUsage, VkMemoryPropertyFlags BufferProperties)
1567 {
1568 return CreateBuffer(BufferSize: RequiredSize, MemUsage, BufferUsage, MemoryProperties: BufferProperties, VKBuffer&: Buffer, VKBufferMemory&: BufferMemory);
1569 }
1570
	// Hands out a buffer block of RequiredSize bytes.
	// Small requests (< MemoryBlockSize) are sub-allocated from cached heaps of
	// MemoryBlockSize * BlockCount bytes; larger requests get a dedicated buffer
	// that is destroyed instead of recycled when freed.
	// When RequiresMapping is true, the memory is host-mapped and pBufferData is
	// copied into the block; otherwise the block is device-local and unmapped.
	template<size_t Id,
		int64_t MemoryBlockSize, size_t BlockCount,
		bool RequiresMapping>
	[[nodiscard]] bool GetBufferBlockImpl(SMemoryBlock<Id> &RetBlock, SMemoryBlockCache<Id> &MemoryCache, VkBufferUsageFlags BufferUsage, VkMemoryPropertyFlags BufferProperties, const void *pBufferData, VkDeviceSize RequiredSize, VkDeviceSize TargetAlignment)
	{
		bool Res = true;

		auto &&CreateCacheBlock = [&]() -> bool {
			bool FoundAllocation = false;
			SMemoryHeap::SMemoryHeapQueueElement AllocatedMem;
			SDeviceMemoryBlock TmpBufferMemory;
			typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap *pCacheHeap = nullptr;
			// try to sub-allocate from one of the existing cache heaps first
			auto &Heaps = MemoryCache.m_MemoryCaches.m_vpMemoryHeaps;
			for(size_t i = 0; i < Heaps.size(); ++i)
			{
				auto *pHeap = Heaps[i];
				if(pHeap->m_Heap.Allocate(RequiredSize, TargetAlignment, AllocatedMem))
				{
					TmpBufferMemory = pHeap->m_BufferMem;
					FoundAllocation = true;
					pCacheHeap = pHeap;
					break;
				}
			}
			if(!FoundAllocation)
			{
				// no existing heap had room: create a fresh heap backed by one
				// large buffer and allocate from that
				typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap *pNewHeap = new typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap();

				VkBuffer TmpBuffer;
				if(!GetBufferImpl(RequiredSize: MemoryBlockSize * BlockCount, MemUsage: RequiresMapping ? MEMORY_BLOCK_USAGE_STAGING : MEMORY_BLOCK_USAGE_BUFFER, Buffer&: TmpBuffer, BufferMemory&: TmpBufferMemory, BufferUsage, BufferProperties))
				{
					delete pNewHeap;
					return false;
				}

				void *pMapData = nullptr;

				if(RequiresMapping)
				{
					// map the whole heap once; individual blocks receive
					// offsets into this single mapping
					if(vkMapMemory(device: m_VKDevice, memory: TmpBufferMemory.m_Mem, offset: 0, VK_WHOLE_SIZE, flags: 0, ppData: &pMapData) != VK_SUCCESS)
					{
						SetError(ErrType: RequiresMapping ? EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_STAGING : EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Failed to map buffer block memory.");
						delete pNewHeap;
						return false;
					}
				}

				pNewHeap->m_Buffer = TmpBuffer;

				pNewHeap->m_BufferMem = TmpBufferMemory;
				pNewHeap->m_pMappedBuffer = pMapData;

				// register the heap before allocating from it, so it is cleaned
				// up together with the cache even if the allocation below fails
				pCacheHeap = pNewHeap;
				Heaps.emplace_back(pNewHeap);
				Heaps.back()->m_Heap.Init(MemoryBlockSize * BlockCount, 0);
				if(!Heaps.back()->m_Heap.Allocate(RequiredSize, TargetAlignment, AllocatedMem))
				{
					SetError(ErrType: RequiresMapping ? EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_STAGING : EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Heap allocation failed directly after creating fresh heap.");
					return false;
				}
			}

			// fill the result block; the mapped pointer is offset to the aligned
			// start of this block within the heap's mapping
			RetBlock.m_Buffer = pCacheHeap->m_Buffer;
			RetBlock.m_BufferMem = TmpBufferMemory;
			if(RequiresMapping)
				RetBlock.m_pMappedBuffer = ((uint8_t *)pCacheHeap->m_pMappedBuffer) + AllocatedMem.m_OffsetToAlign;
			else
				RetBlock.m_pMappedBuffer = nullptr;
			RetBlock.m_IsCached = true;
			RetBlock.m_pHeap = &pCacheHeap->m_Heap;
			RetBlock.m_HeapData = AllocatedMem;
			RetBlock.m_UsedSize = RequiredSize;

			if(RequiresMapping)
				mem_copy(RetBlock.m_pMappedBuffer, pBufferData, RequiredSize);

			return true;
		};

		if(RequiredSize < (VkDeviceSize)MemoryBlockSize)
		{
			Res = CreateCacheBlock();
		}
		else
		{
			// request too large for the cache: dedicated buffer, not recycled
			VkBuffer TmpBuffer;
			SDeviceMemoryBlock TmpBufferMemory;
			if(!GetBufferImpl(RequiredSize, MemUsage: RequiresMapping ? MEMORY_BLOCK_USAGE_STAGING : MEMORY_BLOCK_USAGE_BUFFER, Buffer&: TmpBuffer, BufferMemory&: TmpBufferMemory, BufferUsage, BufferProperties))
				return false;

			void *pMapData = nullptr;
			if(RequiresMapping)
			{
				if(vkMapMemory(device: m_VKDevice, memory: TmpBufferMemory.m_Mem, offset: 0, VK_WHOLE_SIZE, flags: 0, ppData: &pMapData) != VK_SUCCESS)
					return false;
				mem_copy(dest: pMapData, source: pBufferData, size: static_cast<size_t>(RequiredSize));
			}

			RetBlock.m_Buffer = TmpBuffer;
			RetBlock.m_BufferMem = TmpBufferMemory;
			RetBlock.m_pMappedBuffer = pMapData;
			RetBlock.m_pHeap = nullptr;
			RetBlock.m_IsCached = false;
			RetBlock.m_HeapData.m_OffsetToAlign = 0;
			RetBlock.m_HeapData.m_AllocationSize = RequiredSize;
			RetBlock.m_UsedSize = RequiredSize;
		}

		return Res;
	}
1681
1682 [[nodiscard]] bool GetStagingBuffer(SMemoryBlock<STAGING_BUFFER_CACHE_ID> &ResBlock, const void *pBufferData, VkDeviceSize RequiredSize)
1683 {
1684 return GetBufferBlockImpl<STAGING_BUFFER_CACHE_ID, 8 * 1024 * 1024, 3, true>(RetBlock&: ResBlock, MemoryCache&: m_StagingBufferCache, BufferUsage: VK_BUFFER_USAGE_TRANSFER_SRC_BIT, BufferProperties: VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT, pBufferData, RequiredSize, TargetAlignment: maximum<VkDeviceSize>(a: m_NonCoherentMemAlignment, b: 16));
1685 }
1686
1687 [[nodiscard]] bool GetStagingBufferImage(SMemoryBlock<STAGING_BUFFER_IMAGE_CACHE_ID> &ResBlock, const void *pBufferData, VkDeviceSize RequiredSize)
1688 {
1689 return GetBufferBlockImpl<STAGING_BUFFER_IMAGE_CACHE_ID, 8 * 1024 * 1024, 3, true>(RetBlock&: ResBlock, MemoryCache&: m_StagingBufferCacheImage, BufferUsage: VK_BUFFER_USAGE_TRANSFER_SRC_BIT, BufferProperties: VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT, pBufferData, RequiredSize, TargetAlignment: maximum<VkDeviceSize>(a: m_OptimalImageCopyMemAlignment, b: maximum<VkDeviceSize>(a: m_NonCoherentMemAlignment, b: 16)));
1690 }
1691
1692 template<size_t Id>
1693 void PrepareStagingMemRange(SMemoryBlock<Id> &Block)
1694 {
1695 VkMappedMemoryRange UploadRange{};
1696 UploadRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
1697 UploadRange.memory = Block.m_BufferMem.m_Mem;
1698 UploadRange.offset = Block.m_HeapData.m_OffsetToAlign;
1699
1700 auto AlignmentMod = ((VkDeviceSize)Block.m_HeapData.m_AllocationSize % m_NonCoherentMemAlignment);
1701 auto AlignmentReq = (m_NonCoherentMemAlignment - AlignmentMod);
1702 if(AlignmentMod == 0)
1703 AlignmentReq = 0;
1704 UploadRange.size = Block.m_HeapData.m_AllocationSize + AlignmentReq;
1705
1706 if(UploadRange.offset + UploadRange.size > Block.m_BufferMem.m_Size)
1707 UploadRange.size = VK_WHOLE_SIZE;
1708
1709 m_vNonFlushedStagingBufferRange.push_back(x: UploadRange);
1710 }
1711
1712 void UploadAndFreeStagingMemBlock(SMemoryBlock<STAGING_BUFFER_CACHE_ID> &Block)
1713 {
1714 PrepareStagingMemRange(Block);
1715 if(!Block.m_IsCached)
1716 {
1717 m_vvFrameDelayedBufferCleanup[m_CurImageIndex].push_back(x: {.m_Buffer: Block.m_Buffer, .m_Mem: Block.m_BufferMem, .m_pMappedData: Block.m_pMappedBuffer});
1718 }
1719 else
1720 {
1721 m_StagingBufferCache.FreeMemBlock(Block, ImgIndex: m_CurImageIndex);
1722 }
1723 }
1724
1725 void UploadAndFreeStagingImageMemBlock(SMemoryBlock<STAGING_BUFFER_IMAGE_CACHE_ID> &Block)
1726 {
1727 PrepareStagingMemRange(Block);
1728 if(!Block.m_IsCached)
1729 {
1730 m_vvFrameDelayedBufferCleanup[m_CurImageIndex].push_back(x: {.m_Buffer: Block.m_Buffer, .m_Mem: Block.m_BufferMem, .m_pMappedData: Block.m_pMappedBuffer});
1731 }
1732 else
1733 {
1734 m_StagingBufferCacheImage.FreeMemBlock(Block, ImgIndex: m_CurImageIndex);
1735 }
1736 }
1737
1738 [[nodiscard]] bool GetVertexBuffer(SMemoryBlock<VERTEX_BUFFER_CACHE_ID> &ResBlock, VkDeviceSize RequiredSize)
1739 {
1740 return GetBufferBlockImpl<VERTEX_BUFFER_CACHE_ID, 8 * 1024 * 1024, 3, false>(RetBlock&: ResBlock, MemoryCache&: m_VertexBufferCache, BufferUsage: VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, BufferProperties: VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, pBufferData: nullptr, RequiredSize, TargetAlignment: 16);
1741 }
1742
1743 void FreeVertexMemBlock(SMemoryBlock<VERTEX_BUFFER_CACHE_ID> &Block)
1744 {
1745 if(!Block.m_IsCached)
1746 {
1747 m_vvFrameDelayedBufferCleanup[m_CurImageIndex].push_back(x: {.m_Buffer: Block.m_Buffer, .m_Mem: Block.m_BufferMem, .m_pMappedData: nullptr});
1748 }
1749 else
1750 {
1751 m_VertexBufferCache.FreeMemBlock(Block, ImgIndex: m_CurImageIndex);
1752 }
1753 }
1754
1755 static size_t ImageMipLevelCount(size_t Width, size_t Height, size_t Depth)
1756 {
1757 return std::floor(x: std::log2(x: maximum(a: Width, b: maximum(a: Height, b: Depth)))) + 1;
1758 }
1759
1760 static size_t ImageMipLevelCount(const VkExtent3D &ImgExtent)
1761 {
1762 return ImageMipLevelCount(Width: ImgExtent.width, Height: ImgExtent.height, Depth: ImgExtent.depth);
1763 }
1764
	// good approximation of 1024x1024 image with mipmaps
	// (4 bytes per pixel; the mip chain adds ~1/3, generously rounded up to 2x)
	static constexpr int64_t IMAGE_SIZE_1024X1024_APPROXIMATION = (1024 * 1024 * 4) * 2;
1767
1768 [[nodiscard]] bool GetImageMemoryImpl(VkDeviceSize RequiredSize, uint32_t RequiredMemoryTypeBits, SDeviceMemoryBlock &BufferMemory, VkMemoryPropertyFlags BufferProperties)
1769 {
1770 VkMemoryAllocateInfo MemAllocInfo{};
1771 MemAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1772 MemAllocInfo.allocationSize = RequiredSize;
1773 MemAllocInfo.memoryTypeIndex = FindMemoryType(PhyDevice: m_VKGPU, TypeFilter: RequiredMemoryTypeBits, Properties: BufferProperties);
1774
1775 BufferMemory.m_Size = RequiredSize;
1776 m_pTextureMemoryUsage->store(i: m_pTextureMemoryUsage->load(m: std::memory_order_relaxed) + RequiredSize, m: std::memory_order_relaxed);
1777
1778 if(IsVerbose())
1779 {
1780 VerboseAllocatedMemory(Size: RequiredSize, FrameImageIndex: m_CurImageIndex, MemUsage: MEMORY_BLOCK_USAGE_TEXTURE);
1781 }
1782
1783 if(!AllocateVulkanMemory(pAllocateInfo: &MemAllocInfo, pMemory: &BufferMemory.m_Mem))
1784 {
1785 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_IMAGE, pErr: "Allocation for image memory failed.");
1786 return false;
1787 }
1788
1789 BufferMemory.m_UsageType = MEMORY_BLOCK_USAGE_TEXTURE;
1790
1791 return true;
1792 }
1793
	// Hands out image memory of RequiredSize bytes for the given memory type.
	// Small requests (< MemoryBlockSize) are sub-allocated from cached heaps of
	// MemoryBlockSize * BlockCount bytes; larger requests get a dedicated
	// allocation. Image memory is never backed by a VkBuffer and never mapped.
	template<size_t Id,
		int64_t MemoryBlockSize, size_t BlockCount>
	[[nodiscard]] bool GetImageMemoryBlockImpl(SMemoryImageBlock<Id> &RetBlock, SMemoryBlockCache<Id> &MemoryCache, VkMemoryPropertyFlags BufferProperties, VkDeviceSize RequiredSize, VkDeviceSize RequiredAlignment, uint32_t RequiredMemoryTypeBits)
	{
		auto &&CreateCacheBlock = [&]() -> bool {
			bool FoundAllocation = false;
			SMemoryHeap::SMemoryHeapQueueElement AllocatedMem;
			SDeviceMemoryBlock TmpBufferMemory;
			typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap *pCacheHeap = nullptr;
			// try to sub-allocate from one of the existing cache heaps first
			for(size_t i = 0; i < MemoryCache.m_MemoryCaches.m_vpMemoryHeaps.size(); ++i)
			{
				auto *pHeap = MemoryCache.m_MemoryCaches.m_vpMemoryHeaps[i];
				if(pHeap->m_Heap.Allocate(RequiredSize, RequiredAlignment, AllocatedMem))
				{
					TmpBufferMemory = pHeap->m_BufferMem;
					FoundAllocation = true;
					pCacheHeap = pHeap;
					break;
				}
			}
			if(!FoundAllocation)
			{
				// no existing heap had room: create a fresh heap backed by one
				// large device memory allocation
				typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap *pNewHeap = new typename SMemoryBlockCache<Id>::SMemoryCacheType::SMemoryCacheHeap();

				if(!GetImageMemoryImpl(RequiredSize: MemoryBlockSize * BlockCount, RequiredMemoryTypeBits, BufferMemory&: TmpBufferMemory, BufferProperties))
				{
					delete pNewHeap;
					return false;
				}

				// image heaps have no VkBuffer and no host mapping
				pNewHeap->m_Buffer = VK_NULL_HANDLE;

				pNewHeap->m_BufferMem = TmpBufferMemory;
				pNewHeap->m_pMappedBuffer = nullptr;

				auto &Heaps = MemoryCache.m_MemoryCaches.m_vpMemoryHeaps;
				pCacheHeap = pNewHeap;
				Heaps.emplace_back(pNewHeap);
				Heaps.back()->m_Heap.Init(MemoryBlockSize * BlockCount, 0);
				// must succeed: the heap is empty and larger than RequiredSize
				if(!Heaps.back()->m_Heap.Allocate(RequiredSize, RequiredAlignment, AllocatedMem))
				{
					dbg_assert_failed("Heap allocation failed directly after creating fresh heap for image");
				}
			}

			RetBlock.m_Buffer = VK_NULL_HANDLE;
			RetBlock.m_BufferMem = TmpBufferMemory;
			RetBlock.m_pMappedBuffer = nullptr;
			RetBlock.m_IsCached = true;
			RetBlock.m_pHeap = &pCacheHeap->m_Heap;
			RetBlock.m_HeapData = AllocatedMem;
			RetBlock.m_UsedSize = RequiredSize;

			return true;
		};

		if(RequiredSize < (VkDeviceSize)MemoryBlockSize)
		{
			if(!CreateCacheBlock())
				return false;
		}
		else
		{
			// request too large for the cache: dedicated allocation that is
			// freed (not recycled) when the block is released
			SDeviceMemoryBlock TmpBufferMemory;
			if(!GetImageMemoryImpl(RequiredSize, RequiredMemoryTypeBits, BufferMemory&: TmpBufferMemory, BufferProperties))
				return false;

			RetBlock.m_Buffer = VK_NULL_HANDLE;
			RetBlock.m_BufferMem = TmpBufferMemory;
			RetBlock.m_pMappedBuffer = nullptr;
			RetBlock.m_IsCached = false;
			RetBlock.m_pHeap = nullptr;
			RetBlock.m_HeapData.m_OffsetToAlign = 0;
			RetBlock.m_HeapData.m_AllocationSize = RequiredSize;
			RetBlock.m_UsedSize = RequiredSize;
		}

		// remember the memory type so FreeImageMemBlock finds the right cache
		RetBlock.m_ImageMemoryBits = RequiredMemoryTypeBits;

		return true;
	}
1875
1876 [[nodiscard]] bool GetImageMemory(SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> &RetBlock, VkDeviceSize RequiredSize, VkDeviceSize RequiredAlignment, uint32_t RequiredMemoryTypeBits)
1877 {
1878 auto BufferCacheIterator = m_ImageBufferCaches.find(x: RequiredMemoryTypeBits);
1879 if(BufferCacheIterator == m_ImageBufferCaches.end())
1880 {
1881 BufferCacheIterator = m_ImageBufferCaches.insert(x: {RequiredMemoryTypeBits, {}}).first;
1882
1883 BufferCacheIterator->second.Init(SwapChainImageCount: m_SwapChainImageCount);
1884 }
1885 return GetImageMemoryBlockImpl<IMAGE_BUFFER_CACHE_ID, IMAGE_SIZE_1024X1024_APPROXIMATION, 2>(RetBlock, MemoryCache&: BufferCacheIterator->second, BufferProperties: VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, RequiredSize, RequiredAlignment, RequiredMemoryTypeBits);
1886 }
1887
1888 void FreeImageMemBlock(SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> &Block)
1889 {
1890 if(!Block.m_IsCached)
1891 {
1892 m_vvFrameDelayedBufferCleanup[m_CurImageIndex].push_back(x: {.m_Buffer: Block.m_Buffer, .m_Mem: Block.m_BufferMem, .m_pMappedData: nullptr});
1893 }
1894 else
1895 {
1896 m_ImageBufferCaches[Block.m_ImageMemoryBits].FreeMemBlock(Block, ImgIndex: m_CurImageIndex);
1897 }
1898 }
1899
1900 template<bool FlushForRendering, typename TName>
1901 void UploadStreamedBuffer(SStreamMemory<TName> &StreamedBuffer)
1902 {
1903 size_t RangeUpdateCount = 0;
1904 if(StreamedBuffer.IsUsed(m_CurImageIndex))
1905 {
1906 for(size_t i = 0; i < StreamedBuffer.GetUsedCount(m_CurImageIndex); ++i)
1907 {
1908 auto &BufferOfFrame = StreamedBuffer.GetBuffers(m_CurImageIndex)[i];
1909 auto &MemRange = StreamedBuffer.GetRanges(m_CurImageIndex)[RangeUpdateCount++];
1910 MemRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
1911 MemRange.memory = BufferOfFrame.m_BufferMem.m_Mem;
1912 MemRange.offset = BufferOfFrame.m_OffsetInBuffer;
1913 auto AlignmentMod = ((VkDeviceSize)BufferOfFrame.m_UsedSize % m_NonCoherentMemAlignment);
1914 auto AlignmentReq = (m_NonCoherentMemAlignment - AlignmentMod);
1915 if(AlignmentMod == 0)
1916 AlignmentReq = 0;
1917 MemRange.size = BufferOfFrame.m_UsedSize + AlignmentReq;
1918
1919 if(MemRange.offset + MemRange.size > BufferOfFrame.m_BufferMem.m_Size)
1920 MemRange.size = VK_WHOLE_SIZE;
1921
1922 BufferOfFrame.m_UsedSize = 0;
1923 }
1924 if(RangeUpdateCount > 0 && FlushForRendering)
1925 {
1926 vkFlushMappedMemoryRanges(m_VKDevice, RangeUpdateCount, StreamedBuffer.GetRanges(m_CurImageIndex).data());
1927 }
1928 }
1929 StreamedBuffer.ResetFrame(m_CurImageIndex);
1930 }
1931
1932 void CleanBufferPair(size_t ImageIndex, VkBuffer &Buffer, SDeviceMemoryBlock &BufferMem)
1933 {
1934 bool IsBuffer = Buffer != VK_NULL_HANDLE;
1935 if(IsBuffer)
1936 {
1937 vkDestroyBuffer(device: m_VKDevice, buffer: Buffer, pAllocator: nullptr);
1938
1939 Buffer = VK_NULL_HANDLE;
1940 }
1941 if(BufferMem.m_Mem != VK_NULL_HANDLE)
1942 {
1943 vkFreeMemory(device: m_VKDevice, memory: BufferMem.m_Mem, pAllocator: nullptr);
1944 if(BufferMem.m_UsageType == MEMORY_BLOCK_USAGE_BUFFER)
1945 m_pBufferMemoryUsage->store(i: m_pBufferMemoryUsage->load(m: std::memory_order_relaxed) - BufferMem.m_Size, m: std::memory_order_relaxed);
1946 else if(BufferMem.m_UsageType == MEMORY_BLOCK_USAGE_TEXTURE)
1947 m_pTextureMemoryUsage->store(i: m_pTextureMemoryUsage->load(m: std::memory_order_relaxed) - BufferMem.m_Size, m: std::memory_order_relaxed);
1948 else if(BufferMem.m_UsageType == MEMORY_BLOCK_USAGE_STREAM)
1949 m_pStreamMemoryUsage->store(i: m_pStreamMemoryUsage->load(m: std::memory_order_relaxed) - BufferMem.m_Size, m: std::memory_order_relaxed);
1950 else if(BufferMem.m_UsageType == MEMORY_BLOCK_USAGE_STAGING)
1951 m_pStagingMemoryUsage->store(i: m_pStagingMemoryUsage->load(m: std::memory_order_relaxed) - BufferMem.m_Size, m: std::memory_order_relaxed);
1952
1953 if(IsVerbose())
1954 {
1955 VerboseDeallocatedMemory(Size: BufferMem.m_Size, FrameImageIndex: ImageIndex, MemUsage: BufferMem.m_UsageType);
1956 }
1957
1958 BufferMem.m_Mem = VK_NULL_HANDLE;
1959 }
1960 }
1961
1962 void DestroyTexture(CTexture &Texture)
1963 {
1964 if(Texture.m_Img != VK_NULL_HANDLE)
1965 {
1966 FreeImageMemBlock(Block&: Texture.m_ImgMem);
1967 vkDestroyImage(device: m_VKDevice, image: Texture.m_Img, pAllocator: nullptr);
1968
1969 vkDestroyImageView(device: m_VKDevice, imageView: Texture.m_ImgView, pAllocator: nullptr);
1970 }
1971
1972 if(Texture.m_Img3D != VK_NULL_HANDLE)
1973 {
1974 FreeImageMemBlock(Block&: Texture.m_Img3DMem);
1975 vkDestroyImage(device: m_VKDevice, image: Texture.m_Img3D, pAllocator: nullptr);
1976
1977 vkDestroyImageView(device: m_VKDevice, imageView: Texture.m_Img3DView, pAllocator: nullptr);
1978 }
1979
1980 DestroyTexturedStandardDescriptorSets(Texture, DescrIndex: 0);
1981 DestroyTexturedStandardDescriptorSets(Texture, DescrIndex: 1);
1982
1983 DestroyTextured3DStandardDescriptorSets(Texture);
1984 }
1985
1986 void DestroyTextTexture(CTexture &Texture, CTexture &TextureOutline)
1987 {
1988 if(Texture.m_Img != VK_NULL_HANDLE)
1989 {
1990 FreeImageMemBlock(Block&: Texture.m_ImgMem);
1991 vkDestroyImage(device: m_VKDevice, image: Texture.m_Img, pAllocator: nullptr);
1992
1993 vkDestroyImageView(device: m_VKDevice, imageView: Texture.m_ImgView, pAllocator: nullptr);
1994 }
1995
1996 if(TextureOutline.m_Img != VK_NULL_HANDLE)
1997 {
1998 FreeImageMemBlock(Block&: TextureOutline.m_ImgMem);
1999 vkDestroyImage(device: m_VKDevice, image: TextureOutline.m_Img, pAllocator: nullptr);
2000
2001 vkDestroyImageView(device: m_VKDevice, imageView: TextureOutline.m_ImgView, pAllocator: nullptr);
2002 }
2003
2004 DestroyTextDescriptorSets(Texture, TextureOutline);
2005 }
2006
2007 void ClearFrameData(size_t FrameImageIndex)
2008 {
2009 UploadStagingBuffers();
2010
2011 // clear pending buffers, that require deletion
2012 for(auto &BufferPair : m_vvFrameDelayedBufferCleanup[FrameImageIndex])
2013 {
2014 if(BufferPair.m_pMappedData != nullptr)
2015 {
2016 vkUnmapMemory(device: m_VKDevice, memory: BufferPair.m_Mem.m_Mem);
2017 }
2018 CleanBufferPair(ImageIndex: FrameImageIndex, Buffer&: BufferPair.m_Buffer, BufferMem&: BufferPair.m_Mem);
2019 }
2020 m_vvFrameDelayedBufferCleanup[FrameImageIndex].clear();
2021
2022 // clear pending textures, that require deletion
2023 for(auto &Texture : m_vvFrameDelayedTextureCleanup[FrameImageIndex])
2024 {
2025 DestroyTexture(Texture);
2026 }
2027 m_vvFrameDelayedTextureCleanup[FrameImageIndex].clear();
2028
2029 for(auto &TexturePair : m_vvFrameDelayedTextTexturesCleanup[FrameImageIndex])
2030 {
2031 DestroyTextTexture(Texture&: TexturePair.first, TextureOutline&: TexturePair.second);
2032 }
2033 m_vvFrameDelayedTextTexturesCleanup[FrameImageIndex].clear();
2034
2035 m_StagingBufferCache.Cleanup(ImgIndex: FrameImageIndex);
2036 m_StagingBufferCacheImage.Cleanup(ImgIndex: FrameImageIndex);
2037 m_VertexBufferCache.Cleanup(ImgIndex: FrameImageIndex);
2038 for(auto &ImageBufferCache : m_ImageBufferCaches)
2039 ImageBufferCache.second.Cleanup(ImgIndex: FrameImageIndex);
2040 }
2041
2042 void ShrinkUnusedCaches()
2043 {
2044 size_t FreedMemory = 0;
2045 FreedMemory += m_StagingBufferCache.Shrink(Device&: m_VKDevice);
2046 FreedMemory += m_StagingBufferCacheImage.Shrink(Device&: m_VKDevice);
2047 if(FreedMemory > 0)
2048 {
2049 m_pStagingMemoryUsage->store(i: m_pStagingMemoryUsage->load(m: std::memory_order_relaxed) - FreedMemory, m: std::memory_order_relaxed);
2050 if(IsVerbose())
2051 {
2052 log_debug("gfx/vulkan", "Deallocated chunks of memory with size %" PRIzu " from all frames (staging buffer).", FreedMemory);
2053 }
2054 }
2055 FreedMemory = 0;
2056 FreedMemory += m_VertexBufferCache.Shrink(Device&: m_VKDevice);
2057 if(FreedMemory > 0)
2058 {
2059 m_pBufferMemoryUsage->store(i: m_pBufferMemoryUsage->load(m: std::memory_order_relaxed) - FreedMemory, m: std::memory_order_relaxed);
2060 if(IsVerbose())
2061 {
2062 log_debug("gfx/vulkan", "Deallocated chunks of memory with size %" PRIzu " from all frames (buffer).", FreedMemory);
2063 }
2064 }
2065 FreedMemory = 0;
2066 for(auto &ImageBufferCache : m_ImageBufferCaches)
2067 FreedMemory += ImageBufferCache.second.Shrink(Device&: m_VKDevice);
2068 if(FreedMemory > 0)
2069 {
2070 m_pTextureMemoryUsage->store(i: m_pTextureMemoryUsage->load(m: std::memory_order_relaxed) - FreedMemory, m: std::memory_order_relaxed);
2071 if(IsVerbose())
2072 {
2073 log_debug("gfx/vulkan", "Deallocated chunks of memory with size %" PRIzu " from all frames (texture).", FreedMemory);
2074 }
2075 }
2076 }
2077
2078 [[nodiscard]] bool MemoryBarrier(VkBuffer Buffer, VkDeviceSize Offset, VkDeviceSize Size, VkAccessFlags BufferAccessType, bool BeforeCommand)
2079 {
2080 VkCommandBuffer *pMemCommandBuffer;
2081 if(!GetMemoryCommandBuffer(pMemCommandBuffer))
2082 return false;
2083 auto &MemCommandBuffer = *pMemCommandBuffer;
2084
2085 VkBufferMemoryBarrier Barrier{};
2086 Barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
2087 Barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2088 Barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2089 Barrier.buffer = Buffer;
2090 Barrier.offset = Offset;
2091 Barrier.size = Size;
2092
2093 VkPipelineStageFlags SourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
2094 VkPipelineStageFlags DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
2095
2096 if(BeforeCommand)
2097 {
2098 Barrier.srcAccessMask = BufferAccessType;
2099 Barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
2100
2101 SourceStage = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
2102 DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
2103 }
2104 else
2105 {
2106 Barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
2107 Barrier.dstAccessMask = BufferAccessType;
2108
2109 SourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
2110 DestinationStage = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
2111 }
2112
2113 vkCmdPipelineBarrier(
2114 commandBuffer: MemCommandBuffer,
2115 srcStageMask: SourceStage, dstStageMask: DestinationStage,
2116 dependencyFlags: 0,
2117 memoryBarrierCount: 0, pMemoryBarriers: nullptr,
2118 bufferMemoryBarrierCount: 1, pBufferMemoryBarriers: &Barrier,
2119 imageMemoryBarrierCount: 0, pImageMemoryBarriers: nullptr);
2120
2121 return true;
2122 }
2123
2124 /************************
2125 * SWAPPING MECHANISM
2126 ************************/
2127
2128 void StartRenderThread(size_t ThreadIndex)
2129 {
2130 auto &List = m_vvThreadCommandLists[ThreadIndex];
2131 if(!List.empty())
2132 {
2133 m_vThreadHelperHadCommands[ThreadIndex] = true;
2134 auto *pThread = m_vpRenderThreads[ThreadIndex].get();
2135 std::unique_lock<std::mutex> Lock(pThread->m_Mutex);
2136 pThread->m_IsRendering = true;
2137 pThread->m_Cond.notify_one();
2138 }
2139 }
2140
	// Starts any render thread that still has queued commands, then blocks
	// until every busy render thread has signalled completion of its work.
	// Thread index 0 is the main thread, so only m_ThreadCount - 1 helper
	// threads are handled here.
	void FinishRenderThreads()
	{
		if(m_ThreadCount > 1)
		{
			// execute threads

			// first pass: kick off threads that were not started yet
			// (StartRenderThread only marks threads with non-empty lists)
			for(size_t ThreadIndex = 0; ThreadIndex < m_ThreadCount - 1; ++ThreadIndex)
			{
				if(!m_vThreadHelperHadCommands[ThreadIndex])
				{
					StartRenderThread(ThreadIndex);
				}
			}

			// second pass: wait for every thread that had commands to finish
			for(size_t ThreadIndex = 0; ThreadIndex < m_ThreadCount - 1; ++ThreadIndex)
			{
				if(m_vThreadHelperHadCommands[ThreadIndex])
				{
					auto &pRenderThread = m_vpRenderThreads[ThreadIndex];
					m_vThreadHelperHadCommands[ThreadIndex] = false;
					std::unique_lock<std::mutex> Lock(pRenderThread->m_Mutex);
					// wait until the worker cleared its rendering flag
					pRenderThread->m_Cond.wait(lock&: Lock, p: [&pRenderThread] { return !pRenderThread->m_IsRendering; });
					// the pipeline bound in that thread's command buffer is
					// stale for the next recording
					m_vLastPipeline[ThreadIndex + 1] = VK_NULL_HANDLE;
				}
			}
		}
	}
2168
2169 void ExecuteMemoryCommandBuffer()
2170 {
2171 if(m_vUsedMemoryCommandBuffer[m_CurImageIndex])
2172 {
2173 auto &MemoryCommandBuffer = m_vMemoryCommandBuffers[m_CurImageIndex];
2174 vkEndCommandBuffer(commandBuffer: MemoryCommandBuffer);
2175
2176 VkSubmitInfo SubmitInfo{};
2177 SubmitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
2178
2179 SubmitInfo.commandBufferCount = 1;
2180 SubmitInfo.pCommandBuffers = &MemoryCommandBuffer;
2181 vkQueueSubmit(queue: m_VKGraphicsQueue, submitCount: 1, pSubmits: &SubmitInfo, VK_NULL_HANDLE);
2182 vkQueueWaitIdle(queue: m_VKGraphicsQueue);
2183
2184 m_vUsedMemoryCommandBuffer[m_CurImageIndex] = false;
2185 }
2186 }
2187
2188 void ClearFrameMemoryUsage()
2189 {
2190 ClearFrameData(FrameImageIndex: m_CurImageIndex);
2191 ShrinkUnusedCaches();
2192 }
2193
2194 [[nodiscard]] bool WaitFrame()
2195 {
2196 FinishRenderThreads();
2197 m_LastCommandsInPipeThreadIndex = 0;
2198
2199 UploadNonFlushedBuffers<true>();
2200
2201 auto &CommandBuffer = GetMainGraphicCommandBuffer();
2202
2203 // render threads
2204 if(m_ThreadCount > 1)
2205 {
2206 size_t ThreadedCommandsUsedCount = 0;
2207 size_t RenderThreadCount = m_ThreadCount - 1;
2208 for(size_t i = 0; i < RenderThreadCount; ++i)
2209 {
2210 if(m_vvUsedThreadDrawCommandBuffer[i + 1][m_CurImageIndex])
2211 {
2212 const auto &GraphicThreadCommandBuffer = m_vvThreadDrawCommandBuffers[i + 1][m_CurImageIndex];
2213 m_vHelperThreadDrawCommandBuffers[ThreadedCommandsUsedCount++] = GraphicThreadCommandBuffer;
2214
2215 m_vvUsedThreadDrawCommandBuffer[i + 1][m_CurImageIndex] = false;
2216 }
2217 }
2218 if(ThreadedCommandsUsedCount > 0)
2219 {
2220 vkCmdExecuteCommands(commandBuffer: CommandBuffer, commandBufferCount: ThreadedCommandsUsedCount, pCommandBuffers: m_vHelperThreadDrawCommandBuffers.data());
2221 }
2222
2223 // special case if swap chain was not completed in one runbuffer call
2224
2225 if(m_vvUsedThreadDrawCommandBuffer[0][m_CurImageIndex])
2226 {
2227 auto &GraphicThreadCommandBuffer = m_vvThreadDrawCommandBuffers[0][m_CurImageIndex];
2228 vkEndCommandBuffer(commandBuffer: GraphicThreadCommandBuffer);
2229
2230 vkCmdExecuteCommands(commandBuffer: CommandBuffer, commandBufferCount: 1, pCommandBuffers: &GraphicThreadCommandBuffer);
2231
2232 m_vvUsedThreadDrawCommandBuffer[0][m_CurImageIndex] = false;
2233 }
2234 }
2235
2236 vkCmdEndRenderPass(commandBuffer: CommandBuffer);
2237
2238 if(vkEndCommandBuffer(commandBuffer: CommandBuffer) != VK_SUCCESS)
2239 {
2240 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_RENDER_RECORDING, pErr: "Command buffer cannot be ended anymore.");
2241 return false;
2242 }
2243
2244 VkSubmitInfo SubmitInfo{};
2245 SubmitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
2246
2247 SubmitInfo.commandBufferCount = 1;
2248 SubmitInfo.pCommandBuffers = &CommandBuffer;
2249
2250 std::array<VkCommandBuffer, 2> aCommandBuffers = {};
2251
2252 if(m_vUsedMemoryCommandBuffer[m_CurImageIndex])
2253 {
2254 auto &MemoryCommandBuffer = m_vMemoryCommandBuffers[m_CurImageIndex];
2255 vkEndCommandBuffer(commandBuffer: MemoryCommandBuffer);
2256
2257 aCommandBuffers[0] = MemoryCommandBuffer;
2258 aCommandBuffers[1] = CommandBuffer;
2259 SubmitInfo.commandBufferCount = 2;
2260 SubmitInfo.pCommandBuffers = aCommandBuffers.data();
2261
2262 m_vUsedMemoryCommandBuffer[m_CurImageIndex] = false;
2263 }
2264
2265 std::array<VkSemaphore, 1> aWaitSemaphores = {m_AcquireImageSemaphore};
2266 std::array<VkPipelineStageFlags, 1> aWaitStages = {(VkPipelineStageFlags)VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT};
2267 SubmitInfo.waitSemaphoreCount = aWaitSemaphores.size();
2268 SubmitInfo.pWaitSemaphores = aWaitSemaphores.data();
2269 SubmitInfo.pWaitDstStageMask = aWaitStages.data();
2270
2271 std::array<VkSemaphore, 1> aSignalSemaphores = {m_vQueueSubmitSemaphores[m_CurImageIndex]};
2272 SubmitInfo.signalSemaphoreCount = aSignalSemaphores.size();
2273 SubmitInfo.pSignalSemaphores = aSignalSemaphores.data();
2274
2275 vkResetFences(device: m_VKDevice, fenceCount: 1, pFences: &m_vQueueSubmitFences[m_CurImageIndex]);
2276
2277 VkResult QueueSubmitRes = vkQueueSubmit(queue: m_VKGraphicsQueue, submitCount: 1, pSubmits: &SubmitInfo, fence: m_vQueueSubmitFences[m_CurImageIndex]);
2278 if(QueueSubmitRes != VK_SUCCESS)
2279 {
2280 const char *pCritErrorMsg = CheckVulkanCriticalError(CallResult: QueueSubmitRes);
2281 if(pCritErrorMsg != nullptr)
2282 {
2283 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_RENDER_SUBMIT_FAILED, pErr: "Submitting to graphics queue failed.", pErrStrExtra: pCritErrorMsg);
2284 return false;
2285 }
2286 }
2287
2288 std::swap(a&: m_vBusyAcquireImageSemaphores[m_CurImageIndex], b&: m_AcquireImageSemaphore);
2289
2290 VkPresentInfoKHR PresentInfo{};
2291 PresentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
2292
2293 PresentInfo.waitSemaphoreCount = aSignalSemaphores.size();
2294 PresentInfo.pWaitSemaphores = aSignalSemaphores.data();
2295
2296 std::array<VkSwapchainKHR, 1> aSwapChains = {m_VKSwapChain};
2297 PresentInfo.swapchainCount = aSwapChains.size();
2298 PresentInfo.pSwapchains = aSwapChains.data();
2299
2300 PresentInfo.pImageIndices = &m_CurImageIndex;
2301
2302 m_LastPresentedSwapChainImageIndex = m_CurImageIndex;
2303
2304 VkResult QueuePresentRes = vkQueuePresentKHR(queue: m_VKPresentQueue, pPresentInfo: &PresentInfo);
2305 if(QueuePresentRes != VK_SUCCESS && QueuePresentRes != VK_SUBOPTIMAL_KHR)
2306 {
2307 const char *pCritErrorMsg = CheckVulkanCriticalError(CallResult: QueuePresentRes);
2308 if(pCritErrorMsg != nullptr)
2309 {
2310 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_SWAP_FAILED, pErr: "Presenting graphics queue failed.", pErrStrExtra: pCritErrorMsg);
2311 return false;
2312 }
2313 }
2314
2315 return true;
2316 }
2317
 // Begins recording of the next frame: acquires the next swap-chain image,
 // CPU-waits for that image's previous submission, does stale-image
 // housekeeping, then opens the main graphics command buffer and render pass.
 // Returns false only on unrecoverable recording/acquire errors.
 [[nodiscard]] bool PrepareFrame()
 {
  // Swap-chain recreation can be requested externally (e.g. vsync change);
  // honor it before acquiring so we acquire from the new chain.
  if(m_RecreateSwapChain)
  {
   m_RecreateSwapChain = false;
   if(IsVerbose())
   {
    log_debug("gfx/vulkan", "Recreating swap chain requested by user (prepare frame).");
   }
   RecreateSwapChain();
  }

  // Blocks until an image index is available; m_AcquireImageSemaphore is
  // signaled on the GPU once the image can actually be rendered to.
  auto AcqResult = vkAcquireNextImageKHR(device: m_VKDevice, swapchain: m_VKSwapChain, timeout: std::numeric_limits<uint64_t>::max(), semaphore: m_AcquireImageSemaphore, VK_NULL_HANDLE, pImageIndex: &m_CurImageIndex);
  if(AcqResult != VK_SUCCESS)
  {
   if(AcqResult == VK_ERROR_OUT_OF_DATE_KHR || m_RecreateSwapChain)
   {
    m_RecreateSwapChain = false;
    if(IsVerbose())
    {
     log_debug("gfx/vulkan", "Recreating swap chain requested by acquire next image (prepare frame).");
    }
    RecreateSwapChain();
    // Retry the whole preparation against the freshly created swap chain.
    return PrepareFrame();
   }
   else
   {
    const char *pCritErrorMsg = CheckVulkanCriticalError(CallResult: AcqResult);
    if(pCritErrorMsg != nullptr)
    {
     SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_SWAP_FAILED, pErr: "Acquiring next image failed.", pErrStrExtra: pCritErrorMsg);
     return false;
    }
    else if(AcqResult == VK_ERROR_SURFACE_LOST_KHR)
    {
     // NOTE(review): this branch is only reachable if CheckVulkanCriticalError
     // returns nullptr for VK_ERROR_SURFACE_LOST_KHR — confirm that it does.
     m_RenderingPaused = true;
     return true;
    }
    // Non-critical results (e.g. VK_SUBOPTIMAL_KHR) fall through and the
    // frame is prepared/rendered normally.
   }
  }

  // CPU-wait until the GPU finished the submission that last used this
  // swap-chain image, so its per-image resources can be reused safely.
  vkWaitForFences(device: m_VKDevice, fenceCount: 1, pFences: &m_vQueueSubmitFences[m_CurImageIndex], VK_TRUE, timeout: std::numeric_limits<uint64_t>::max());

  // next frame
  m_CurFrame++;
  m_vImageLastFrameCheck[m_CurImageIndex] = m_CurFrame;

  // check if older frames weren't used in a long time
  for(size_t FrameImageIndex = 0; FrameImageIndex < m_vImageLastFrameCheck.size(); ++FrameImageIndex)
  {
   auto LastFrame = m_vImageLastFrameCheck[FrameImageIndex];
   if(m_CurFrame - LastFrame > (uint64_t)m_SwapChainImageCount)
   {
    // Image sat idle for longer than a full swap-chain cycle: wait for it
    // and release its per-frame data so unused resources don't pile up.
    vkWaitForFences(device: m_VKDevice, fenceCount: 1, pFences: &m_vQueueSubmitFences[FrameImageIndex], VK_TRUE, timeout: std::numeric_limits<uint64_t>::max());
    ClearFrameData(FrameImageIndex);
    m_vImageLastFrameCheck[FrameImageIndex] = m_CurFrame;
   }
  }

  // clear frame's memory data
  ClearFrameMemoryUsage();

  // clear frame
  vkResetCommandBuffer(commandBuffer: GetMainGraphicCommandBuffer(), flags: VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);

  auto &CommandBuffer = GetMainGraphicCommandBuffer();
  VkCommandBufferBeginInfo BeginInfo{};
  BeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
  // Each frame re-records the primary buffer from scratch.
  BeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;

  if(vkBeginCommandBuffer(commandBuffer: CommandBuffer, pBeginInfo: &BeginInfo) != VK_SUCCESS)
  {
   SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_RENDER_RECORDING, pErr: "Command buffer cannot be filled anymore.");
   return false;
  }

  VkRenderPassBeginInfo RenderPassInfo{};
  RenderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
  RenderPassInfo.renderPass = m_VKRenderPass;
  RenderPassInfo.framebuffer = m_vFramebufferList[m_CurImageIndex];
  RenderPassInfo.renderArea.offset = {.x: 0, .y: 0};
  RenderPassInfo.renderArea.extent = m_VKSwapImgAndViewportExtent.m_SwapImageViewport;

  VkClearValue ClearColorVal = {.color: {.float32: {m_aClearColor[0], m_aClearColor[1], m_aClearColor[2], m_aClearColor[3]}}};
  RenderPassInfo.clearValueCount = 1;
  RenderPassInfo.pClearValues = &ClearColorVal;

  // With render threads the draw commands live in secondary command buffers;
  // otherwise everything is recorded inline into the primary one.
  vkCmdBeginRenderPass(commandBuffer: CommandBuffer, pRenderPassBegin: &RenderPassInfo, contents: m_ThreadCount > 1 ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS : VK_SUBPASS_CONTENTS_INLINE);

  // Invalidate the cached pipeline bindings; nothing is bound in a fresh pass.
  for(auto &LastPipe : m_vLastPipeline)
   LastPipe = VK_NULL_HANDLE;

  return true;
 }
2412
2413 void UploadStagingBuffers()
2414 {
2415 if(!m_vNonFlushedStagingBufferRange.empty())
2416 {
2417 vkFlushMappedMemoryRanges(device: m_VKDevice, memoryRangeCount: m_vNonFlushedStagingBufferRange.size(), pMemoryRanges: m_vNonFlushedStagingBufferRange.data());
2418
2419 m_vNonFlushedStagingBufferRange.clear();
2420 }
2421 }
2422
2423 template<bool FlushForRendering>
2424 void UploadNonFlushedBuffers()
2425 {
2426 // streamed vertices
2427 for(auto &StreamVertexBuffer : m_vStreamedVertexBuffers)
2428 UploadStreamedBuffer<FlushForRendering>(StreamVertexBuffer);
2429 // now the buffer objects
2430 for(auto &StreamUniformBuffer : m_vStreamedUniformBuffers)
2431 UploadStreamedBuffer<FlushForRendering>(StreamUniformBuffer);
2432
2433 UploadStagingBuffers();
2434 }
2435
2436 [[nodiscard]] bool PureMemoryFrame()
2437 {
2438 ExecuteMemoryCommandBuffer();
2439
2440 // reset streamed data
2441 UploadNonFlushedBuffers<false>();
2442
2443 ClearFrameMemoryUsage();
2444
2445 return true;
2446 }
2447
2448 [[nodiscard]] bool NextFrame()
2449 {
2450 if(!m_RenderingPaused)
2451 {
2452 if(!WaitFrame())
2453 return false;
2454 if(!PrepareFrame())
2455 return false;
2456 }
2457 // else only execute the memory command buffer
2458 else
2459 {
2460 if(!PureMemoryFrame())
2461 return false;
2462 }
2463
2464 return true;
2465 }
2466
2467 /************************
2468 * TEXTURES
2469 ************************/
2470
2471 size_t VulkanFormatToPixelSize(VkFormat Format)
2472 {
2473 if(Format == VK_FORMAT_R8G8B8_UNORM)
2474 return 3;
2475 else if(Format == VK_FORMAT_R8G8B8A8_UNORM)
2476 return 4;
2477 else if(Format == VK_FORMAT_R8_UNORM)
2478 return 1;
2479 return 4;
2480 }
2481
2482 [[nodiscard]] bool UpdateTexture(size_t TextureSlot, VkFormat Format, uint8_t *&pData, int64_t XOff, int64_t YOff, size_t Width, size_t Height)
2483 {
2484 const size_t ImageSize = Width * Height * VulkanFormatToPixelSize(Format);
2485 SMemoryBlock<STAGING_BUFFER_IMAGE_CACHE_ID> StagingBuffer;
2486 if(!GetStagingBufferImage(ResBlock&: StagingBuffer, pBufferData: pData, RequiredSize: ImageSize))
2487 return false;
2488
2489 auto &Tex = m_vTextures[TextureSlot];
2490
2491 if(Tex.m_RescaleCount > 0)
2492 {
2493 for(uint32_t i = 0; i < Tex.m_RescaleCount; ++i)
2494 {
2495 Width >>= 1;
2496 Height >>= 1;
2497
2498 XOff /= 2;
2499 YOff /= 2;
2500 }
2501
2502 uint8_t *pTmpData = ResizeImage(pImageData: pData, Width, Height, NewWidth: Width, NewHeight: Height, BPP: VulkanFormatToPixelSize(Format));
2503 free(ptr: pData);
2504 pData = pTmpData;
2505 }
2506
2507 if(!ImageBarrier(Image: Tex.m_Img, MipMapBase: 0, MipMapCount: Tex.m_MipMapCount, LayerBase: 0, LayerCount: 1, Format, OldLayout: VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, NewLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL))
2508 return false;
2509 if(!CopyBufferToImage(Buffer: StagingBuffer.m_Buffer, BufferOffset: StagingBuffer.m_HeapData.m_OffsetToAlign, Image: Tex.m_Img, X: XOff, Y: YOff, Width, Height, Depth: 1))
2510 return false;
2511
2512 if(Tex.m_MipMapCount > 1)
2513 {
2514 if(!BuildMipmaps(Image: Tex.m_Img, ImageFormat: Format, Width, Height, Depth: 1, MipMapLevelCount: Tex.m_MipMapCount))
2515 return false;
2516 }
2517 else
2518 {
2519 if(!ImageBarrier(Image: Tex.m_Img, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: 1, Format, OldLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, NewLayout: VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL))
2520 return false;
2521 }
2522
2523 UploadAndFreeStagingImageMemBlock(Block&: StagingBuffer);
2524
2525 return true;
2526 }
2527
2528 [[nodiscard]] bool CreateTextureCMD(
2529 int Slot,
2530 int Width,
2531 int Height,
2532 VkFormat Format,
2533 VkFormat StoreFormat,
2534 int Flags,
2535 uint8_t *&pData)
2536 {
2537 size_t ImageIndex = (size_t)Slot;
2538 const size_t PixelSize = VulkanFormatToPixelSize(Format);
2539
2540 while(ImageIndex >= m_vTextures.size())
2541 {
2542 m_vTextures.resize(new_size: (m_vTextures.size() * 2) + 1);
2543 }
2544
2545 // resample if needed
2546 uint32_t RescaleCount = 0;
2547 if((size_t)Width > m_MaxTextureSize || (size_t)Height > m_MaxTextureSize)
2548 {
2549 do
2550 {
2551 Width >>= 1;
2552 Height >>= 1;
2553 ++RescaleCount;
2554 } while((size_t)Width > m_MaxTextureSize || (size_t)Height > m_MaxTextureSize);
2555
2556 uint8_t *pTmpData = ResizeImage(pImageData: pData, Width, Height, NewWidth: Width, NewHeight: Height, BPP: PixelSize);
2557 free(ptr: pData);
2558 pData = pTmpData;
2559 }
2560
2561 bool Requires2DTexture = (Flags & TextureFlag::NO_2D_TEXTURE) == 0;
2562 bool Requires2DTextureArray = (Flags & TextureFlag::TO_2D_ARRAY_TEXTURE) != 0;
2563 bool RequiresMipMaps = (Flags & TextureFlag::NO_MIPMAPS) == 0;
2564 size_t MipMapLevelCount = 1;
2565 if(RequiresMipMaps)
2566 {
2567 VkExtent3D ImgSize{.width: (uint32_t)Width, .height: (uint32_t)Height, .depth: 1};
2568 MipMapLevelCount = ImageMipLevelCount(ImgExtent: ImgSize);
2569 if(!m_OptimalRGBAImageBlitting)
2570 MipMapLevelCount = 1;
2571 }
2572
2573 CTexture &Texture = m_vTextures[ImageIndex];
2574
2575 Texture.m_Width = Width;
2576 Texture.m_Height = Height;
2577 Texture.m_RescaleCount = RescaleCount;
2578 Texture.m_MipMapCount = MipMapLevelCount;
2579
2580 if(Requires2DTexture)
2581 {
2582 if(!CreateTextureImage(ImageIndex, NewImage&: Texture.m_Img, NewImgMem&: Texture.m_ImgMem, pData, Format, Width, Height, Depth: 1, PixelSize, MipMapLevelCount))
2583 return false;
2584 VkFormat ImgFormat = Format;
2585 VkImageView ImgView = CreateTextureImageView(TexImage: Texture.m_Img, ImgFormat, ViewType: VK_IMAGE_VIEW_TYPE_2D, Depth: 1, MipMapLevelCount);
2586 Texture.m_ImgView = ImgView;
2587 VkSampler ImgSampler = GetTextureSampler(SamplerType: SUPPORTED_SAMPLER_TYPE_REPEAT);
2588 Texture.m_aSamplers[0] = ImgSampler;
2589 ImgSampler = GetTextureSampler(SamplerType: SUPPORTED_SAMPLER_TYPE_CLAMP_TO_EDGE);
2590 Texture.m_aSamplers[1] = ImgSampler;
2591
2592 if(!CreateNewTexturedStandardDescriptorSets(TextureSlot: ImageIndex, DescrIndex: 0))
2593 return false;
2594 if(!CreateNewTexturedStandardDescriptorSets(TextureSlot: ImageIndex, DescrIndex: 1))
2595 return false;
2596 }
2597
2598 if(Requires2DTextureArray)
2599 {
2600 int Image3DWidth = Width;
2601 int Image3DHeight = Height;
2602
2603 int ConvertWidth = Width;
2604 int ConvertHeight = Height;
2605
2606 if(ConvertWidth == 0 || (ConvertWidth % 16) != 0 || ConvertHeight == 0 || (ConvertHeight % 16) != 0)
2607 {
2608 int NewWidth = maximum<int>(a: HighestBit(OfVar: ConvertWidth), b: 16);
2609 int NewHeight = maximum<int>(a: HighestBit(OfVar: ConvertHeight), b: 16);
2610 uint8_t *pNewTexData = ResizeImage(pImageData: pData, Width: ConvertWidth, Height: ConvertHeight, NewWidth, NewHeight, BPP: PixelSize);
2611 if(IsVerbose())
2612 {
2613 log_debug("gfx/vulkan", "3D/2D array texture was resized. Slot=%d Size=(%d, %d) Resized=(%d, %d)", Slot, ConvertWidth, ConvertHeight, NewWidth, NewHeight);
2614 }
2615
2616 ConvertWidth = NewWidth;
2617 ConvertHeight = NewHeight;
2618
2619 free(ptr: pData);
2620 pData = pNewTexData;
2621 }
2622
2623 bool Needs3DTexDel = false;
2624 uint8_t *pTexData3D = static_cast<uint8_t *>(malloc(size: (size_t)PixelSize * ConvertWidth * ConvertHeight));
2625 if(!Texture2DTo3D(pImageBuffer: pData, ImageWidth: ConvertWidth, ImageHeight: ConvertHeight, PixelSize, SplitCountWidth: 16, SplitCountHeight: 16, pTarget3DImageData: pTexData3D, Target3DImageWidth&: Image3DWidth, Target3DImageHeight&: Image3DHeight))
2626 {
2627 free(ptr: pTexData3D);
2628 pTexData3D = nullptr;
2629 }
2630 Needs3DTexDel = true;
2631
2632 if(pTexData3D != nullptr)
2633 {
2634 const size_t ImageDepth2DArray = (size_t)16 * 16;
2635 VkExtent3D ImgSize{.width: (uint32_t)Image3DWidth, .height: (uint32_t)Image3DHeight, .depth: 1};
2636 if(RequiresMipMaps)
2637 {
2638 MipMapLevelCount = ImageMipLevelCount(ImgExtent: ImgSize);
2639 if(!m_OptimalRGBAImageBlitting)
2640 MipMapLevelCount = 1;
2641 }
2642
2643 if(!CreateTextureImage(ImageIndex, NewImage&: Texture.m_Img3D, NewImgMem&: Texture.m_Img3DMem, pData: pTexData3D, Format, Width: Image3DWidth, Height: Image3DHeight, Depth: ImageDepth2DArray, PixelSize, MipMapLevelCount))
2644 return false;
2645 VkFormat ImgFormat = Format;
2646 VkImageView ImgView = CreateTextureImageView(TexImage: Texture.m_Img3D, ImgFormat, ViewType: VK_IMAGE_VIEW_TYPE_2D_ARRAY, Depth: ImageDepth2DArray, MipMapLevelCount);
2647 Texture.m_Img3DView = ImgView;
2648 VkSampler ImgSampler = GetTextureSampler(SamplerType: SUPPORTED_SAMPLER_TYPE_2D_TEXTURE_ARRAY);
2649 Texture.m_Sampler3D = ImgSampler;
2650
2651 if(!CreateNew3DTexturedStandardDescriptorSets(TextureSlot: ImageIndex))
2652 return false;
2653
2654 if(Needs3DTexDel)
2655 free(ptr: pTexData3D);
2656 }
2657 }
2658 return true;
2659 }
2660
 // Generates the full mip chain of Image on the GPU by blitting each level
 // i-1 (halved) into level i, then leaves every level in
 // SHADER_READ_ONLY_OPTIMAL layout. All levels are expected to be in
 // TRANSFER_DST_OPTIMAL layout on entry. Depth is the array-layer count.
 // Returns false only if no memory command buffer is available.
 // NOTE(review): ImageFormat is unused here — the aspect mask is fixed to
 // COLOR; confirm the parameter is kept only for interface symmetry.
 [[nodiscard]] bool BuildMipmaps(VkImage Image, VkFormat ImageFormat, size_t Width, size_t Height, size_t Depth, size_t MipMapLevelCount)
 {
  VkCommandBuffer *pMemCommandBuffer;
  if(!GetMemoryCommandBuffer(pMemCommandBuffer))
   return false;
  auto &MemCommandBuffer = *pMemCommandBuffer;

  // One barrier struct is reused for every transition; only the layouts,
  // access masks and the base mip level change per use.
  VkImageMemoryBarrier Barrier{};
  Barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  Barrier.image = Image;
  Barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  Barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  Barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  Barrier.subresourceRange.levelCount = 1;
  Barrier.subresourceRange.baseArrayLayer = 0;
  Barrier.subresourceRange.layerCount = Depth;

  // Current source-level extent; halved after each blit.
  int32_t TmpMipWidth = (int32_t)Width;
  int32_t TmpMipHeight = (int32_t)Height;

  for(size_t i = 1; i < MipMapLevelCount; ++i)
  {
   // Level i-1 was just written (upload or previous blit) — make it
   // readable as the blit source.
   Barrier.subresourceRange.baseMipLevel = i - 1;
   Barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
   Barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
   Barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
   Barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;

   vkCmdPipelineBarrier(commandBuffer: MemCommandBuffer, srcStageMask: VK_PIPELINE_STAGE_TRANSFER_BIT, dstStageMask: VK_PIPELINE_STAGE_TRANSFER_BIT, dependencyFlags: 0, memoryBarrierCount: 0, pMemoryBarriers: nullptr, bufferMemoryBarrierCount: 0, pBufferMemoryBarriers: nullptr, imageMemoryBarrierCount: 1, pImageMemoryBarriers: &Barrier);

   // Blit level i-1 -> level i at half size, clamping each axis to >= 1.
   VkImageBlit Blit{};
   Blit.srcOffsets[0] = {.x: 0, .y: 0, .z: 0};
   Blit.srcOffsets[1] = {.x: TmpMipWidth, .y: TmpMipHeight, .z: 1};
   Blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
   Blit.srcSubresource.mipLevel = i - 1;
   Blit.srcSubresource.baseArrayLayer = 0;
   Blit.srcSubresource.layerCount = Depth;
   Blit.dstOffsets[0] = {.x: 0, .y: 0, .z: 0};
   Blit.dstOffsets[1] = {.x: TmpMipWidth > 1 ? TmpMipWidth / 2 : 1, .y: TmpMipHeight > 1 ? TmpMipHeight / 2 : 1, .z: 1};
   Blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
   Blit.dstSubresource.mipLevel = i;
   Blit.dstSubresource.baseArrayLayer = 0;
   Blit.dstSubresource.layerCount = Depth;

   vkCmdBlitImage(commandBuffer: MemCommandBuffer,
    srcImage: Image, srcImageLayout: VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
    dstImage: Image, dstImageLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
    regionCount: 1, pRegions: &Blit,
    // Linear filtering only if the device supports it for this setup.
    filter: m_AllowsLinearBlitting ? VK_FILTER_LINEAR : VK_FILTER_NEAREST);

   // Level i-1 is final now — hand it over to fragment-shader reads.
   Barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
   Barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
   Barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
   Barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;

   vkCmdPipelineBarrier(commandBuffer: MemCommandBuffer,
    srcStageMask: VK_PIPELINE_STAGE_TRANSFER_BIT, dstStageMask: VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, dependencyFlags: 0,
    memoryBarrierCount: 0, pMemoryBarriers: nullptr,
    bufferMemoryBarrierCount: 0, pBufferMemoryBarriers: nullptr,
    imageMemoryBarrierCount: 1, pImageMemoryBarriers: &Barrier);

   if(TmpMipWidth > 1)
    TmpMipWidth /= 2;
   if(TmpMipHeight > 1)
    TmpMipHeight /= 2;
  }

  // The last level was only ever a blit destination; transition it to the
  // shader-readable layout as well.
  Barrier.subresourceRange.baseMipLevel = MipMapLevelCount - 1;
  Barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
  Barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  Barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  Barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;

  vkCmdPipelineBarrier(commandBuffer: MemCommandBuffer,
   srcStageMask: VK_PIPELINE_STAGE_TRANSFER_BIT, dstStageMask: VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, dependencyFlags: 0,
   memoryBarrierCount: 0, pMemoryBarriers: nullptr,
   bufferMemoryBarrierCount: 0, pBufferMemoryBarriers: nullptr,
   imageMemoryBarrierCount: 1, pImageMemoryBarriers: &Barrier);

  return true;
 }
2742
2743 [[nodiscard]] bool CreateTextureImage(size_t ImageIndex, VkImage &NewImage, SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> &NewImgMem, const uint8_t *pData, VkFormat Format, size_t Width, size_t Height, size_t Depth, size_t PixelSize, size_t MipMapLevelCount)
2744 {
2745 VkDeviceSize ImageSize = Width * Height * Depth * PixelSize;
2746
2747 SMemoryBlock<STAGING_BUFFER_IMAGE_CACHE_ID> StagingBuffer;
2748 if(!GetStagingBufferImage(ResBlock&: StagingBuffer, pBufferData: pData, RequiredSize: ImageSize))
2749 return false;
2750
2751 VkFormat ImgFormat = Format;
2752
2753 if(!CreateImage(Width, Height, Depth, MipMapLevelCount, Format: ImgFormat, Tiling: VK_IMAGE_TILING_OPTIMAL, Image&: NewImage, ImageMemory&: NewImgMem))
2754 return false;
2755
2756 if(!ImageBarrier(Image: NewImage, MipMapBase: 0, MipMapCount: MipMapLevelCount, LayerBase: 0, LayerCount: Depth, Format: ImgFormat, OldLayout: VK_IMAGE_LAYOUT_UNDEFINED, NewLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL))
2757 return false;
2758 if(!CopyBufferToImage(Buffer: StagingBuffer.m_Buffer, BufferOffset: StagingBuffer.m_HeapData.m_OffsetToAlign, Image: NewImage, X: 0, Y: 0, Width: static_cast<uint32_t>(Width), Height: static_cast<uint32_t>(Height), Depth))
2759 return false;
2760
2761 UploadAndFreeStagingImageMemBlock(Block&: StagingBuffer);
2762
2763 if(MipMapLevelCount > 1)
2764 {
2765 if(!BuildMipmaps(Image: NewImage, ImageFormat: ImgFormat, Width, Height, Depth, MipMapLevelCount))
2766 return false;
2767 }
2768 else
2769 {
2770 if(!ImageBarrier(Image: NewImage, MipMapBase: 0, MipMapCount: 1, LayerBase: 0, LayerCount: Depth, Format: ImgFormat, OldLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, NewLayout: VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL))
2771 return false;
2772 }
2773
2774 return true;
2775 }
2776
2777 VkImageView CreateTextureImageView(VkImage TexImage, VkFormat ImgFormat, VkImageViewType ViewType, size_t Depth, size_t MipMapLevelCount)
2778 {
2779 return CreateImageView(Image: TexImage, Format: ImgFormat, ViewType, Depth, MipMapLevelCount);
2780 }
2781
2782 [[nodiscard]] bool CreateTextureSamplersImpl(VkSampler &CreatedSampler, VkSamplerAddressMode AddrModeU, VkSamplerAddressMode AddrModeV, VkSamplerAddressMode AddrModeW)
2783 {
2784 VkSamplerCreateInfo SamplerInfo{};
2785 SamplerInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
2786 SamplerInfo.magFilter = VK_FILTER_LINEAR;
2787 SamplerInfo.minFilter = VK_FILTER_LINEAR;
2788 SamplerInfo.addressModeU = AddrModeU;
2789 SamplerInfo.addressModeV = AddrModeV;
2790 SamplerInfo.addressModeW = AddrModeW;
2791 SamplerInfo.anisotropyEnable = VK_FALSE;
2792 SamplerInfo.maxAnisotropy = m_MaxSamplerAnisotropy;
2793 SamplerInfo.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
2794 SamplerInfo.unnormalizedCoordinates = VK_FALSE;
2795 SamplerInfo.compareEnable = VK_FALSE;
2796 SamplerInfo.compareOp = VK_COMPARE_OP_ALWAYS;
2797 SamplerInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
2798 SamplerInfo.mipLodBias = (m_GlobalTextureLodBIAS / 1000.0f);
2799 SamplerInfo.minLod = -1000;
2800 SamplerInfo.maxLod = 1000;
2801
2802 if(vkCreateSampler(device: m_VKDevice, pCreateInfo: &SamplerInfo, pAllocator: nullptr, pSampler: &CreatedSampler) != VK_SUCCESS)
2803 {
2804 log_error("gfx/vulkan", "Failed to create texture sampler.");
2805 return false;
2806 }
2807 return true;
2808 }
2809
2810 [[nodiscard]] bool CreateTextureSamplers()
2811 {
2812 bool Ret = true;
2813 Ret &= CreateTextureSamplersImpl(CreatedSampler&: m_aSamplers[SUPPORTED_SAMPLER_TYPE_REPEAT], AddrModeU: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT, AddrModeV: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT, AddrModeW: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_REPEAT);
2814 Ret &= CreateTextureSamplersImpl(CreatedSampler&: m_aSamplers[SUPPORTED_SAMPLER_TYPE_CLAMP_TO_EDGE], AddrModeU: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, AddrModeV: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, AddrModeW: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE);
2815 Ret &= CreateTextureSamplersImpl(CreatedSampler&: m_aSamplers[SUPPORTED_SAMPLER_TYPE_2D_TEXTURE_ARRAY], AddrModeU: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, AddrModeV: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, AddrModeW: VkSamplerAddressMode::VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT);
2816 return Ret;
2817 }
2818
2819 void DestroyTextureSamplers()
2820 {
2821 vkDestroySampler(device: m_VKDevice, sampler: m_aSamplers[SUPPORTED_SAMPLER_TYPE_REPEAT], pAllocator: nullptr);
2822 vkDestroySampler(device: m_VKDevice, sampler: m_aSamplers[SUPPORTED_SAMPLER_TYPE_CLAMP_TO_EDGE], pAllocator: nullptr);
2823 vkDestroySampler(device: m_VKDevice, sampler: m_aSamplers[SUPPORTED_SAMPLER_TYPE_2D_TEXTURE_ARRAY], pAllocator: nullptr);
2824 }
2825
2826 VkSampler GetTextureSampler(ESupportedSamplerTypes SamplerType)
2827 {
2828 return m_aSamplers[SamplerType];
2829 }
2830
2831 VkImageView CreateImageView(VkImage Image, VkFormat Format, VkImageViewType ViewType, size_t Depth, size_t MipMapLevelCount)
2832 {
2833 VkImageViewCreateInfo ViewCreateInfo{};
2834 ViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
2835 ViewCreateInfo.image = Image;
2836 ViewCreateInfo.viewType = ViewType;
2837 ViewCreateInfo.format = Format;
2838 ViewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
2839 ViewCreateInfo.subresourceRange.baseMipLevel = 0;
2840 ViewCreateInfo.subresourceRange.levelCount = MipMapLevelCount;
2841 ViewCreateInfo.subresourceRange.baseArrayLayer = 0;
2842 ViewCreateInfo.subresourceRange.layerCount = Depth;
2843
2844 VkImageView ImageView;
2845 if(vkCreateImageView(device: m_VKDevice, pCreateInfo: &ViewCreateInfo, pAllocator: nullptr, pView: &ImageView) != VK_SUCCESS)
2846 {
2847 return VK_NULL_HANDLE;
2848 }
2849
2850 return ImageView;
2851 }
2852
2853 [[nodiscard]] bool CreateImage(uint32_t Width, uint32_t Height, uint32_t Depth, size_t MipMapLevelCount, VkFormat Format, VkImageTiling Tiling, VkImage &Image, SMemoryImageBlock<IMAGE_BUFFER_CACHE_ID> &ImageMemory, VkImageUsageFlags ImageUsage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT)
2854 {
2855 VkImageCreateInfo ImageInfo{};
2856 ImageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
2857 ImageInfo.imageType = VK_IMAGE_TYPE_2D;
2858 ImageInfo.extent.width = Width;
2859 ImageInfo.extent.height = Height;
2860 ImageInfo.extent.depth = 1;
2861 ImageInfo.mipLevels = MipMapLevelCount;
2862 ImageInfo.arrayLayers = Depth;
2863 ImageInfo.format = Format;
2864 ImageInfo.tiling = Tiling;
2865 ImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2866 ImageInfo.usage = ImageUsage;
2867 ImageInfo.samples = (ImageUsage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) == 0 ? VK_SAMPLE_COUNT_1_BIT : GetSampleCount();
2868 ImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
2869
2870 if(vkCreateImage(device: m_VKDevice, pCreateInfo: &ImageInfo, pAllocator: nullptr, pImage: &Image) != VK_SUCCESS)
2871 {
2872 log_error("gfx/vulkan", "Failed to create image.");
2873 return false;
2874 }
2875
2876 VkMemoryRequirements MemRequirements;
2877 vkGetImageMemoryRequirements(device: m_VKDevice, image: Image, pMemoryRequirements: &MemRequirements);
2878
2879 if(!GetImageMemory(RetBlock&: ImageMemory, RequiredSize: MemRequirements.size, RequiredAlignment: MemRequirements.alignment, RequiredMemoryTypeBits: MemRequirements.memoryTypeBits))
2880 return false;
2881
2882 vkBindImageMemory(device: m_VKDevice, image: Image, memory: ImageMemory.m_BufferMem.m_Mem, memoryOffset: ImageMemory.m_HeapData.m_OffsetToAlign);
2883
2884 return true;
2885 }
2886
 // Records an image layout transition (with the matching access masks and
 // pipeline stages) into the memory command buffer. Only the listed
 // old/new layout pairs are supported; anything else asserts.
 // Returns false only if no memory command buffer is available.
 // NOTE(review): Format is unused — the aspect mask is always COLOR;
 // confirm the parameter exists only for interface symmetry.
 [[nodiscard]] bool ImageBarrier(const VkImage &Image, size_t MipMapBase, size_t MipMapCount, size_t LayerBase, size_t LayerCount, VkFormat Format, VkImageLayout OldLayout, VkImageLayout NewLayout)
 {
  VkCommandBuffer *pMemCommandBuffer;
  if(!GetMemoryCommandBuffer(pMemCommandBuffer))
   return false;
  auto &MemCommandBuffer = *pMemCommandBuffer;

  VkImageMemoryBarrier Barrier{};
  Barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  Barrier.oldLayout = OldLayout;
  Barrier.newLayout = NewLayout;
  // No queue-family ownership transfer.
  Barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  Barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  Barrier.image = Image;
  Barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  Barrier.subresourceRange.baseMipLevel = MipMapBase;
  Barrier.subresourceRange.levelCount = MipMapCount;
  Barrier.subresourceRange.baseArrayLayer = LayerBase;
  Barrier.subresourceRange.layerCount = LayerCount;

  VkPipelineStageFlags SourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  VkPipelineStageFlags DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;

  // Fresh image about to receive its first transfer write.
  if(OldLayout == VK_IMAGE_LAYOUT_UNDEFINED && NewLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
  {
   Barrier.srcAccessMask = 0;
   Barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;

   SourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
   DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
  }
  // Upload finished -> hand over to fragment-shader sampling.
  else if(OldLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && NewLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
  {
   Barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
   Barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;

   SourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
   DestinationStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
  }
  // Sampled texture about to be (partially) re-uploaded.
  else if(OldLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL && NewLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
  {
   Barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
   Barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;

   SourceStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
   DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
  }
  // Swap-chain image: done being read (e.g. screenshot copy) -> present.
  else if(OldLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL && NewLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
  {
   Barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
   Barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;

   SourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
   DestinationStage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
  }
  // Swap-chain image: presentable -> readable as transfer source.
  else if(OldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR && NewLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
  {
   Barrier.srcAccessMask = VK_ACCESS_MEMORY_READ_BIT;
   Barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;

   SourceStage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
   DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
  }
  // Fresh image straight into the GENERAL layout.
  else if(OldLayout == VK_IMAGE_LAYOUT_UNDEFINED && NewLayout == VK_IMAGE_LAYOUT_GENERAL)
  {
   Barrier.srcAccessMask = 0;
   Barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;

   SourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
   DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
  }
  // GENERAL <-> TRANSFER_DST round trips used for transfer writes.
  else if(OldLayout == VK_IMAGE_LAYOUT_GENERAL && NewLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
  {
   Barrier.srcAccessMask = VK_ACCESS_MEMORY_READ_BIT;
   Barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;

   SourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
   DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
  }
  else if(OldLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && NewLayout == VK_IMAGE_LAYOUT_GENERAL)
  {
   Barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
   Barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;

   SourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
   DestinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
  }
  else
  {
   // Programmer error: an unsupported transition was requested.
   dbg_assert_failed("Unsupported layout transition. OldLayout=%d NewLayout=%d", (int)OldLayout, (int)NewLayout);
  }

  vkCmdPipelineBarrier(
   commandBuffer: MemCommandBuffer,
   srcStageMask: SourceStage, dstStageMask: DestinationStage,
   dependencyFlags: 0,
   memoryBarrierCount: 0, pMemoryBarriers: nullptr,
   bufferMemoryBarrierCount: 0, pBufferMemoryBarriers: nullptr,
   imageMemoryBarrierCount: 1, pImageMemoryBarriers: &Barrier);

  return true;
 }
2989
2990 [[nodiscard]] bool CopyBufferToImage(VkBuffer Buffer, VkDeviceSize BufferOffset, VkImage Image, int32_t X, int32_t Y, uint32_t Width, uint32_t Height, size_t Depth)
2991 {
2992 VkCommandBuffer *pCommandBuffer;
2993 if(!GetMemoryCommandBuffer(pMemCommandBuffer&: pCommandBuffer))
2994 return false;
2995 auto &CommandBuffer = *pCommandBuffer;
2996
2997 VkBufferImageCopy Region{};
2998 Region.bufferOffset = BufferOffset;
2999 Region.bufferRowLength = 0;
3000 Region.bufferImageHeight = 0;
3001 Region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
3002 Region.imageSubresource.mipLevel = 0;
3003 Region.imageSubresource.baseArrayLayer = 0;
3004 Region.imageSubresource.layerCount = Depth;
3005 Region.imageOffset = {.x: X, .y: Y, .z: 0};
3006 Region.imageExtent = {
3007 .width: Width,
3008 .height: Height,
3009 .depth: 1};
3010
3011 vkCmdCopyBufferToImage(commandBuffer: CommandBuffer, srcBuffer: Buffer, dstImage: Image, dstImageLayout: VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, regionCount: 1, pRegions: &Region);
3012
3013 return true;
3014 }
3015
3016 /************************
3017 * BUFFERS
3018 ************************/
3019
	// Creates the buffer object at index BufferIndex and fills it with
	// BufferDataSize bytes from pUploadData (zeroes when pUploadData is null).
	// Persistent buffers are uploaded through a staging buffer into vertex
	// memory; one-frame buffers are written into the main thread's stream
	// buffer and are only valid for the current frame.
	// @return false if any allocation, copy or barrier recording failed.
	[[nodiscard]] bool CreateBufferObject(size_t BufferIndex, const void *pUploadData, VkDeviceSize BufferDataSize, bool IsOneFrameBuffer)
	{
		std::vector<uint8_t> UploadDataTmp;
		if(pUploadData == nullptr)
		{
			// no data given: upload zero-initialized memory so the buffer contents are defined
			UploadDataTmp.resize(new_size: BufferDataSize);
			pUploadData = UploadDataTmp.data();
		}

		// grow the buffer object list geometrically until the index fits
		while(BufferIndex >= m_vBufferObjects.size())
		{
			m_vBufferObjects.resize(new_size: (m_vBufferObjects.size() * 2) + 1);
		}
		auto &BufferObject = m_vBufferObjects[BufferIndex];

		VkBuffer VertexBuffer;
		size_t BufferOffset = 0;
		if(!IsOneFrameBuffer)
		{
			// persistent buffer: stage the data, then copy it into vertex memory;
			// the two barriers order the transfer against vertex attribute reads
			// before and after the copy
			SMemoryBlock<STAGING_BUFFER_CACHE_ID> StagingBuffer;
			if(!GetStagingBuffer(ResBlock&: StagingBuffer, pBufferData: pUploadData, RequiredSize: BufferDataSize))
				return false;

			SMemoryBlock<VERTEX_BUFFER_CACHE_ID> Mem;
			if(!GetVertexBuffer(ResBlock&: Mem, RequiredSize: BufferDataSize))
				return false;

			BufferObject.m_BufferObject.m_Mem = Mem;
			VertexBuffer = Mem.m_Buffer;
			BufferOffset = Mem.m_HeapData.m_OffsetToAlign;

			if(!MemoryBarrier(Buffer: VertexBuffer, Offset: Mem.m_HeapData.m_OffsetToAlign, Size: BufferDataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: true))
				return false;
			if(!CopyBuffer(SrcBuffer: StagingBuffer.m_Buffer, DstBuffer: VertexBuffer, SrcOffset: StagingBuffer.m_HeapData.m_OffsetToAlign, DstOffset: Mem.m_HeapData.m_OffsetToAlign, CopySize: BufferDataSize))
				return false;
			if(!MemoryBarrier(Buffer: VertexBuffer, Offset: Mem.m_HeapData.m_OffsetToAlign, Size: BufferDataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: false))
				return false;
			// the staging block is reclaimed once the upload has executed
			UploadAndFreeStagingMemBlock(Block&: StagingBuffer);
		}
		else
		{
			// one-frame buffer: write directly into the stream vertex buffer
			SDeviceMemoryBlock VertexBufferMemory;
			if(!CreateStreamVertexBuffer(RenderThreadIndex: MAIN_THREAD_INDEX, NewBuffer&: VertexBuffer, NewBufferMem&: VertexBufferMemory, BufferOffset, pData: pUploadData, DataSize: BufferDataSize))
				return false;
		}
		BufferObject.m_IsStreamedBuffer = IsOneFrameBuffer;
		BufferObject.m_CurBuffer = VertexBuffer;
		BufferObject.m_CurBufferOffset = BufferOffset;

		return true;
	}
3071
3072 void DeleteBufferObject(size_t BufferIndex)
3073 {
3074 auto &BufferObject = m_vBufferObjects[BufferIndex];
3075 if(!BufferObject.m_IsStreamedBuffer)
3076 {
3077 FreeVertexMemBlock(Block&: BufferObject.m_BufferObject.m_Mem);
3078 }
3079 BufferObject = {};
3080 }
3081
3082 [[nodiscard]] bool CopyBuffer(VkBuffer SrcBuffer, VkBuffer DstBuffer, VkDeviceSize SrcOffset, VkDeviceSize DstOffset, VkDeviceSize CopySize)
3083 {
3084 VkCommandBuffer *pCommandBuffer;
3085 if(!GetMemoryCommandBuffer(pMemCommandBuffer&: pCommandBuffer))
3086 return false;
3087 auto &CommandBuffer = *pCommandBuffer;
3088 VkBufferCopy CopyRegion{};
3089 CopyRegion.srcOffset = SrcOffset;
3090 CopyRegion.dstOffset = DstOffset;
3091 CopyRegion.size = CopySize;
3092 vkCmdCopyBuffer(commandBuffer: CommandBuffer, srcBuffer: SrcBuffer, dstBuffer: DstBuffer, regionCount: 1, pRegions: &CopyRegion);
3093
3094 return true;
3095 }
3096
3097 /************************
3098 * RENDER STATES
3099 ************************/
3100
3101 void GetStateMatrix(const CCommandBuffer::SState &State, std::array<float, (size_t)4 * 2> &Matrix)
3102 {
3103 Matrix = {
3104 // column 1
3105 2.f / (State.m_ScreenBR.x - State.m_ScreenTL.x),
3106 0,
3107 // column 2
3108 0,
3109 2.f / (State.m_ScreenBR.y - State.m_ScreenTL.y),
3110 // column 3
3111 0,
3112 0,
3113 // column 4
3114 -((State.m_ScreenTL.x + State.m_ScreenBR.x) / (State.m_ScreenBR.x - State.m_ScreenTL.x)),
3115 -((State.m_ScreenTL.y + State.m_ScreenBR.y) / (State.m_ScreenBR.y - State.m_ScreenTL.y)),
3116 };
3117 }
3118
3119 [[nodiscard]] bool GetIsTextured(const CCommandBuffer::SState &State)
3120 {
3121 return State.m_Texture != -1;
3122 }
3123
3124 size_t GetAddressModeIndex(const CCommandBuffer::SState &State)
3125 {
3126 switch(State.m_WrapMode)
3127 {
3128 case EWrapMode::REPEAT:
3129 return VULKAN_BACKEND_ADDRESS_MODE_REPEAT;
3130 case EWrapMode::CLAMP:
3131 return VULKAN_BACKEND_ADDRESS_MODE_CLAMP_EDGES;
3132 default:
3133 dbg_assert_failed("Invalid wrap mode: %d", (int)State.m_WrapMode);
3134 };
3135 }
3136
3137 size_t GetBlendModeIndex(const CCommandBuffer::SState &State)
3138 {
3139 switch(State.m_BlendMode)
3140 {
3141 case EBlendMode::NONE:
3142 return VULKAN_BACKEND_BLEND_MODE_NONE;
3143 case EBlendMode::ALPHA:
3144 return VULKAN_BACKEND_BLEND_MODE_ALPHA;
3145 case EBlendMode::ADDITIVE:
3146 return VULKAN_BACKEND_BLEND_MODE_ADDITATIVE;
3147 default:
3148 dbg_assert_failed("Invalid blend mode: %d", (int)State.m_BlendMode);
3149 };
3150 }
3151
3152 size_t GetDynamicModeIndexFromState(const CCommandBuffer::SState &State) const
3153 {
3154 return (State.m_ClipEnable || m_HasDynamicViewport || m_VKSwapImgAndViewportExtent.m_HasForcedViewport) ? VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT : VULKAN_BACKEND_CLIP_MODE_NONE;
3155 }
3156
3157 size_t GetDynamicModeIndexFromExecBuffer(const SRenderCommandExecuteBuffer &ExecBuffer)
3158 {
3159 return (ExecBuffer.m_HasDynamicState) ? VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT : VULKAN_BACKEND_CLIP_MODE_NONE;
3160 }
3161
3162 VkPipeline &GetPipeline(SPipelineContainer &Container, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3163 {
3164 return Container.m_aaaPipelines[BlendModeIndex][DynamicIndex][(size_t)IsTextured];
3165 }
3166
3167 VkPipelineLayout &GetPipeLayout(SPipelineContainer &Container, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3168 {
3169 return Container.m_aaaPipelineLayouts[BlendModeIndex][DynamicIndex][(size_t)IsTextured];
3170 }
3171
3172 VkPipelineLayout &GetStandardPipeLayout(bool IsLineGeometry, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3173 {
3174 if(IsLineGeometry)
3175 return GetPipeLayout(Container&: m_StandardLinePipeline, IsTextured, BlendModeIndex, DynamicIndex);
3176 else
3177 return GetPipeLayout(Container&: m_StandardPipeline, IsTextured, BlendModeIndex, DynamicIndex);
3178 }
3179
3180 VkPipeline &GetStandardPipe(bool IsLineGeometry, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3181 {
3182 if(IsLineGeometry)
3183 return GetPipeline(Container&: m_StandardLinePipeline, IsTextured, BlendModeIndex, DynamicIndex);
3184 else
3185 return GetPipeline(Container&: m_StandardPipeline, IsTextured, BlendModeIndex, DynamicIndex);
3186 }
3187
3188 VkPipelineLayout &GetTileLayerPipeLayout(bool IsBorder, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3189 {
3190 if(!IsBorder)
3191 return GetPipeLayout(Container&: m_TilePipeline, IsTextured, BlendModeIndex, DynamicIndex);
3192 else
3193 return GetPipeLayout(Container&: m_TileBorderPipeline, IsTextured, BlendModeIndex, DynamicIndex);
3194 }
3195
3196 VkPipeline &GetTileLayerPipe(bool IsBorder, bool IsTextured, size_t BlendModeIndex, size_t DynamicIndex)
3197 {
3198 if(!IsBorder)
3199 return GetPipeline(Container&: m_TilePipeline, IsTextured, BlendModeIndex, DynamicIndex);
3200 else
3201 return GetPipeline(Container&: m_TileBorderPipeline, IsTextured, BlendModeIndex, DynamicIndex);
3202 }
3203
3204 void GetStateIndices(const SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SState &State, bool &IsTextured, size_t &BlendModeIndex, size_t &DynamicIndex, size_t &AddressModeIndex)
3205 {
3206 IsTextured = GetIsTextured(State);
3207 AddressModeIndex = GetAddressModeIndex(State);
3208 BlendModeIndex = GetBlendModeIndex(State);
3209 DynamicIndex = GetDynamicModeIndexFromExecBuffer(ExecBuffer);
3210 }
3211
	// Computes the dynamic viewport and scissor rectangle for the given state
	// and stores them in ExecBuffer; sets ExecBuffer.m_HasDynamicState so later
	// passes know whether vkCmdSetViewport/vkCmdSetScissor must be recorded.
	void ExecBufferFillDynamicStates(const CCommandBuffer::SState &State, SRenderCommandExecuteBuffer &ExecBuffer)
	{
		// Workaround for a bug in molten-vk: https://github.com/KhronosGroup/MoltenVK/issues/2304
#ifdef CONF_PLATFORM_MACOS
		auto HasDynamicState = true;
#else
		size_t DynamicStateIndex = GetDynamicModeIndexFromState(State);
		auto HasDynamicState = DynamicStateIndex == VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT;
#endif

		if(HasDynamicState)
		{
			// viewport priority: dynamic viewport, then forced viewport,
			// then the full swapchain image viewport
			VkViewport Viewport;
			if(m_HasDynamicViewport)
			{
				Viewport.x = (float)m_DynamicViewportOffset.x;
				Viewport.y = (float)m_DynamicViewportOffset.y;
				Viewport.width = (float)m_DynamicViewportSize.width;
				Viewport.height = (float)m_DynamicViewportSize.height;
				Viewport.minDepth = 0.0f;
				Viewport.maxDepth = 1.0f;
			}
			// else check if there is a forced viewport
			else if(m_VKSwapImgAndViewportExtent.m_HasForcedViewport)
			{
				Viewport.x = 0.0f;
				Viewport.y = 0.0f;
				Viewport.width = (float)m_VKSwapImgAndViewportExtent.m_ForcedViewport.width;
				Viewport.height = (float)m_VKSwapImgAndViewportExtent.m_ForcedViewport.height;
				Viewport.minDepth = 0.0f;
				Viewport.maxDepth = 1.0f;
			}
			else
			{
				Viewport.x = 0.0f;
				Viewport.y = 0.0f;
				Viewport.width = (float)m_VKSwapImgAndViewportExtent.m_SwapImageViewport.width;
				Viewport.height = (float)m_VKSwapImgAndViewportExtent.m_SwapImageViewport.height;
				Viewport.minDepth = 0.0f;
				Viewport.maxDepth = 1.0f;
			}

			VkRect2D Scissor;
			// convert from OGL to vulkan clip

			// the scissor always assumes the presented viewport, because the front-end keeps the calculation
			// for the forced viewport in sync
			auto ScissorViewport = m_VKSwapImgAndViewportExtent.GetPresentedImageViewport();
			if(State.m_ClipEnable)
			{
				// flip the Y origin: the clip rect comes in bottom-up (OGL style),
				// Vulkan scissor rects are top-down
				int32_t ScissorY = (int32_t)ScissorViewport.height - ((int32_t)State.m_ClipY + (int32_t)State.m_ClipH);
				uint32_t ScissorH = (int32_t)State.m_ClipH;
				Scissor.offset = {.x: (int32_t)State.m_ClipX, .y: ScissorY};
				Scissor.extent = {.width: (uint32_t)State.m_ClipW, .height: ScissorH};
			}
			else
			{
				// no clipping: scissor covers the whole presented viewport
				Scissor.offset = {.x: 0, .y: 0};
				Scissor.extent = {.width: ScissorViewport.width, .height: ScissorViewport.height};
			}

			// if there is a dynamic viewport make sure the scissor data is scaled down to that
			if(m_HasDynamicViewport)
			{
				Scissor.offset.x = (int32_t)(((float)Scissor.offset.x / (float)ScissorViewport.width) * (float)m_DynamicViewportSize.width) + m_DynamicViewportOffset.x;
				Scissor.offset.y = (int32_t)(((float)Scissor.offset.y / (float)ScissorViewport.height) * (float)m_DynamicViewportSize.height) + m_DynamicViewportOffset.y;
				Scissor.extent.width = (uint32_t)(((float)Scissor.extent.width / (float)ScissorViewport.width) * (float)m_DynamicViewportSize.width);
				Scissor.extent.height = (uint32_t)(((float)Scissor.extent.height / (float)ScissorViewport.height) * (float)m_DynamicViewportSize.height);
			}

			// clamp to non-negative values (the Y flip above can produce negative
			// coordinates; Vulkan requires non-negative scissor offsets)
			Viewport.x = std::clamp(val: Viewport.x, lo: 0.0f, hi: std::numeric_limits<decltype(Viewport.x)>::max());
			Viewport.y = std::clamp(val: Viewport.y, lo: 0.0f, hi: std::numeric_limits<decltype(Viewport.y)>::max());

			Scissor.offset.x = std::clamp(val: Scissor.offset.x, lo: 0, hi: std::numeric_limits<decltype(Scissor.offset.x)>::max());
			Scissor.offset.y = std::clamp(val: Scissor.offset.y, lo: 0, hi: std::numeric_limits<decltype(Scissor.offset.y)>::max());

			ExecBuffer.m_HasDynamicState = true;
			ExecBuffer.m_Viewport = Viewport;
			ExecBuffer.m_Scissor = Scissor;
		}
		else
		{
			ExecBuffer.m_HasDynamicState = false;
		}
	}
3297
3298 void BindPipeline(size_t RenderThreadIndex, VkCommandBuffer &CommandBuffer, SRenderCommandExecuteBuffer &ExecBuffer, VkPipeline &BindingPipe, const CCommandBuffer::SState &State)
3299 {
3300 if(m_vLastPipeline[RenderThreadIndex] != BindingPipe)
3301 {
3302 vkCmdBindPipeline(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline: BindingPipe);
3303 m_vLastPipeline[RenderThreadIndex] = BindingPipe;
3304 }
3305
3306 size_t DynamicStateIndex = GetDynamicModeIndexFromExecBuffer(ExecBuffer);
3307 if(DynamicStateIndex == VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT)
3308 {
3309 vkCmdSetViewport(commandBuffer: CommandBuffer, firstViewport: 0, viewportCount: 1, pViewports: &ExecBuffer.m_Viewport);
3310 vkCmdSetScissor(commandBuffer: CommandBuffer, firstScissor: 0, scissorCount: 1, pScissors: &ExecBuffer.m_Scissor);
3311 }
3312 }
3313
3314 /**************************
3315 * RENDERING IMPLEMENTATION
3316 ***************************/
3317
3318 void RenderTileLayer_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, size_t DrawCalls, const CCommandBuffer::SState &State, size_t BufferContainerIndex)
3319 {
3320 size_t BufferObjectIndex = (size_t)m_vBufferContainers[BufferContainerIndex].m_BufferObjectIndex;
3321 const auto &BufferObject = m_vBufferObjects[BufferObjectIndex];
3322
3323 ExecBuffer.m_Buffer = BufferObject.m_CurBuffer;
3324 ExecBuffer.m_BufferOff = BufferObject.m_CurBufferOffset;
3325
3326 bool IsTextured = GetIsTextured(State);
3327 if(IsTextured)
3328 {
3329 ExecBuffer.m_aDescriptors[0] = m_vTextures[State.m_Texture].m_VKStandard3DTexturedDescrSet;
3330 }
3331
3332 ExecBuffer.m_IndexBuffer = m_RenderIndexBuffer;
3333
3334 ExecBuffer.m_EstimatedRenderCallCount = DrawCalls;
3335
3336 ExecBufferFillDynamicStates(State, ExecBuffer);
3337 }
3338
	// Records the draw commands for a tile layer (or its border) on the
	// executing render thread's graphics command buffer.
	// @param IndicesDrawNum number of separate indexed draw calls
	// @param pIndicesOffsets per-draw byte offsets into the index buffer,
	//        encoded as pointer values
	// @param pDrawCount per-draw index counts
	// @return false if no graphics command buffer could be acquired.
	[[nodiscard]] bool RenderTileLayer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SState &State, bool IsBorder, const GL_SColorf &Color, const vec2 &Scale, const vec2 &Off, size_t IndicesDrawNum, char *const *pIndicesOffsets, const unsigned int *pDrawCount)
	{
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(State, Matrix&: m);

		// resolve the pipeline variant matching the current render state
		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		auto &PipeLayout = GetTileLayerPipeLayout(IsBorder, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = GetTileLayerPipe(IsBorder, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State);

		std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
		vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());

		if(IsTextured)
		{
			vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr)_
		}

		// push constants: the vertex stage gets the projection matrix (plus
		// scale/offset for borders), the fragment stage gets the layer color;
		// the fragment range always starts after the full border struct plus
		// alignment so the offset is the same for border and non-border draws
		SUniformTileGPosBorder VertexPushConstants;
		size_t VertexPushConstantSize = sizeof(SUniformTileGPos);
		SUniformTileGVertColor FragPushConstants;
		size_t FragPushConstantSize = sizeof(SUniformTileGVertColor);

		mem_copy(dest: VertexPushConstants.m_aPos, source: m.data(), size: m.size() * sizeof(float));
		FragPushConstants = Color;

		if(IsBorder)
		{
			VertexPushConstants.m_Scale = Scale;
			VertexPushConstants.m_Offset = Off;
			VertexPushConstantSize = sizeof(SUniformTileGPosBorder);
		}

		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: VertexPushConstantSize, pValues: &VertexPushConstants);
		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, offset: sizeof(SUniformTileGPosBorder) + sizeof(SUniformTileGVertColorAlign), size: FragPushConstantSize, pValues: &FragPushConstants);

		// each draw call's first index is the byte offset encoded in the pointer
		// value, converted to an index count (indices are uint32_t)
		size_t DrawCount = IndicesDrawNum;
		vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: 0, indexType: VK_INDEX_TYPE_UINT32);
		for(size_t i = 0; i < DrawCount; ++i)
		{
			VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)pIndicesOffsets[i] / sizeof(uint32_t));

			vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(pDrawCount[i]), instanceCount: 1, firstIndex: IndexOffset, vertexOffset: 0, firstInstance: 0);
		}

		return true;
	}
3397
	// Renders a stream of standard primitives (lines, triangles or quads).
	// The vertices are uploaded into the executing thread's stream vertex
	// buffer; quads are drawn indexed (6 indices per quad) via the shared
	// index buffer, lines and triangles are drawn non-indexed.
	// @tparam TName vertex type of pVertices
	// @tparam Is3DTextured selects the m_Standard3DPipeline variant
	// @return false if buffer or command buffer acquisition failed.
	template<typename TName, bool Is3DTextured>
	[[nodiscard]] bool RenderStandard(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SState &State, EPrimitiveType PrimType, const TName *pVertices, int PrimitiveCount)
	{
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(State, Matrix&: m);

		bool IsLineGeometry = PrimType == EPrimitiveType::LINES;

		// resolve the pipeline variant matching the current render state
		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		auto &PipeLayout = Is3DTextured ? GetPipeLayout(Container&: m_Standard3DPipeline, IsTextured, BlendModeIndex, DynamicIndex) : GetStandardPipeLayout(IsLineGeometry, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = Is3DTextured ? GetPipeline(Container&: m_Standard3DPipeline, IsTextured, BlendModeIndex, DynamicIndex) : GetStandardPipe(IsLineGeometry, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State);

		// vertices per primitive; only quads use the index buffer
		size_t VertPerPrim = 2;
		bool IsIndexed = false;
		if(PrimType == EPrimitiveType::QUADS)
		{
			VertPerPrim = 4;
			IsIndexed = true;
		}
		else if(PrimType == EPrimitiveType::TRIANGLES)
		{
			VertPerPrim = 3;
		}

		// upload the vertex stream for this frame
		VkBuffer VKBuffer;
		SDeviceMemoryBlock VKBufferMem;
		size_t BufferOff = 0;
		if(!CreateStreamVertexBuffer(RenderThreadIndex: ExecBuffer.m_ThreadIndex, NewBuffer&: VKBuffer, NewBufferMem&: VKBufferMem, BufferOffset&: BufferOff, pData: pVertices, DataSize: VertPerPrim * sizeof(TName) * PrimitiveCount))
			return false;

		std::array<VkBuffer, 1> aVertexBuffers = {VKBuffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)BufferOff};
		vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());

		if(IsIndexed)
			vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: 0, indexType: VK_INDEX_TYPE_UINT32);

		if(IsTextured)
		{
			vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
		}

		// the vertex stage only needs the projection matrix
		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(SUniformGPos), pValues: m.data());

		if(IsIndexed)
			vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(PrimitiveCount * 6), instanceCount: 1, firstIndex: 0, vertexOffset: 0, firstInstance: 0);
		else
			vkCmdDraw(commandBuffer: CommandBuffer, vertexCount: static_cast<uint32_t>(PrimitiveCount * VertPerPrim), instanceCount: 1, firstVertex: 0, firstInstance: 0);

		return true;
	}
3460
3461public:
	// Reserves the texture slot array up front so texture creation does not
	// trigger reallocations of the slot vector later on.
	CCommandProcessorFragment_Vulkan()
	{
		m_vTextures.reserve(n: CCommandBuffer::MAX_TEXTURES);
	}
3466
3467 /************************
3468 * VULKAN SETUP CODE
3469 ************************/
3470
3471 [[nodiscard]] bool GetVulkanExtensions(SDL_Window *pWindow, std::vector<std::string> &vVKExtensions)
3472 {
3473 unsigned int ExtCount = 0;
3474 if(!SDL_Vulkan_GetInstanceExtensions(window: pWindow, pCount: &ExtCount, pNames: nullptr))
3475 {
3476 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get instance extensions from SDL.");
3477 return false;
3478 }
3479
3480 std::vector<const char *> vExtensionList(ExtCount);
3481 if(!SDL_Vulkan_GetInstanceExtensions(window: pWindow, pCount: &ExtCount, pNames: vExtensionList.data()))
3482 {
3483 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get instance extensions from SDL.");
3484 return false;
3485 }
3486
3487 vVKExtensions.reserve(n: ExtCount);
3488 for(uint32_t i = 0; i < ExtCount; i++)
3489 {
3490 vVKExtensions.emplace_back(args&: vExtensionList[i]);
3491 }
3492
3493 return true;
3494 }
3495
3496 std::set<std::string> OurVKLayers()
3497 {
3498 std::set<std::string> OurLayers;
3499
3500 if(g_Config.m_DbgGfx == DEBUG_GFX_MODE_MINIMUM || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL)
3501 {
3502 OurLayers.emplace(args: "VK_LAYER_KHRONOS_validation");
3503 // deprecated, but VK_LAYER_KHRONOS_validation was released after vulkan 1.1
3504 OurLayers.emplace(args: "VK_LAYER_LUNARG_standard_validation");
3505 }
3506
3507 return OurLayers;
3508 }
3509
3510 std::set<std::string> OurDeviceExtensions()
3511 {
3512 std::set<std::string> OurExt;
3513 OurExt.emplace(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
3514 return OurExt;
3515 }
3516
3517 std::vector<VkImageUsageFlags> OurImageUsages()
3518 {
3519 std::vector<VkImageUsageFlags> vImgUsages;
3520
3521 vImgUsages.emplace_back(args: VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT);
3522 vImgUsages.emplace_back(args: VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
3523
3524 return vImgUsages;
3525 }
3526
3527 [[nodiscard]] bool GetVulkanLayers(std::vector<std::string> &vVKLayers)
3528 {
3529 uint32_t LayerCount = 0;
3530 VkResult Res = vkEnumerateInstanceLayerProperties(pPropertyCount: &LayerCount, NULL);
3531 if(Res != VK_SUCCESS)
3532 {
3533 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get Vulkan layers.");
3534 return false;
3535 }
3536
3537 std::vector<VkLayerProperties> vVKInstanceLayers(LayerCount);
3538 Res = vkEnumerateInstanceLayerProperties(pPropertyCount: &LayerCount, pProperties: vVKInstanceLayers.data());
3539 if(Res != VK_SUCCESS)
3540 {
3541 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get Vulkan layers.");
3542 return false;
3543 }
3544
3545 std::set<std::string> ReqLayerNames = OurVKLayers();
3546 vVKLayers.clear();
3547 for(const auto &LayerName : vVKInstanceLayers)
3548 {
3549 if(ReqLayerNames.contains(x: std::string(LayerName.layerName)))
3550 {
3551 vVKLayers.emplace_back(args: LayerName.layerName);
3552 }
3553 }
3554
3555 return true;
3556 }
3557
3558 bool IsGpuDenied(uint32_t Vendor, uint32_t DriverVersion, uint32_t ApiMajor, uint32_t ApiMinor, uint32_t ApiPatch)
3559 {
3560#ifdef CONF_FAMILY_WINDOWS
3561 // AMD
3562 if(0x1002 == Vendor)
3563 {
3564 auto Major = (DriverVersion >> 22);
3565 auto Minor = (DriverVersion >> 12) & 0x3ff;
3566 auto Patch = DriverVersion & 0xfff;
3567
3568 return Major == 2 && Minor == 0 && Patch > 137 && Patch < 220 && ((ApiMajor <= 1 && ApiMinor < 3) || (ApiMajor <= 1 && ApiMinor == 3 && ApiPatch < 206));
3569 }
3570#endif
3571 return false;
3572 }
3573
	// Creates the Vulkan instance (m_VKInstance) with the given layers and
	// extensions. When TryDebugExtensions is set and gfx debugging is enabled
	// in the config, the debug-utils extension and validation features are
	// additionally requested; if the driver rejects a layer or extension the
	// creation is retried once without the debug additions.
	// @return false only when a critical creation error occurred.
	[[nodiscard]] bool CreateVulkanInstance(const std::vector<std::string> &vVKLayers, const std::vector<std::string> &vVKExtensions, bool TryDebugExtensions)
	{
		// the Vulkan API wants raw C-string arrays
		std::vector<const char *> vLayersCStr;
		vLayersCStr.reserve(n: vVKLayers.size());
		for(const auto &Layer : vVKLayers)
			vLayersCStr.emplace_back(args: Layer.c_str());

		std::vector<const char *> vExtCStr;
		vExtCStr.reserve(n: vVKExtensions.size() + 1);
		for(const auto &Ext : vVKExtensions)
			vExtCStr.emplace_back(args: Ext.c_str());

#ifdef VK_EXT_debug_utils
		if(TryDebugExtensions && (g_Config.m_DbgGfx == DEBUG_GFX_MODE_MINIMUM || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL))
		{
			// debug message support
			vExtCStr.emplace_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
		}
#endif

		VkApplicationInfo VKAppInfo = {};
		VKAppInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
		VKAppInfo.pNext = NULL;
		VKAppInfo.pApplicationName = "DDNet";
		VKAppInfo.applicationVersion = 1;
		VKAppInfo.pEngineName = "DDNet-Vulkan";
		VKAppInfo.engineVersion = 1;
		VKAppInfo.apiVersion = VK_API_VERSION_1_1;

		// optionally chain validation features (sync validation, best practices)
		void *pExt = nullptr;
#if defined(VK_EXT_validation_features) && VK_EXT_VALIDATION_FEATURES_SPEC_VERSION >= 5
		VkValidationFeaturesEXT Features = {};
		std::array<VkValidationFeatureEnableEXT, 2> aEnables = {VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT, VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT};
		if(TryDebugExtensions && (g_Config.m_DbgGfx == DEBUG_GFX_MODE_AFFECTS_PERFORMANCE || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL))
		{
			Features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
			Features.enabledValidationFeatureCount = aEnables.size();
			Features.pEnabledValidationFeatures = aEnables.data();

			pExt = &Features;
		}
#endif

		VkInstanceCreateInfo VKInstanceInfo = {};
		VKInstanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
		VKInstanceInfo.pNext = pExt;
		VKInstanceInfo.flags = 0;
		VKInstanceInfo.pApplicationInfo = &VKAppInfo;
		VKInstanceInfo.enabledExtensionCount = static_cast<uint32_t>(vExtCStr.size());
		VKInstanceInfo.ppEnabledExtensionNames = vExtCStr.data();
		VKInstanceInfo.enabledLayerCount = static_cast<uint32_t>(vLayersCStr.size());
		VKInstanceInfo.ppEnabledLayerNames = vLayersCStr.data();

		bool TryAgain = false;

		VkResult Res = vkCreateInstance(pCreateInfo: &VKInstanceInfo, NULL, pInstance: &m_VKInstance);
		const char *pCritErrorMsg = CheckVulkanCriticalError(CallResult: Res);
		if(pCritErrorMsg != nullptr)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating instance failed.", pErrStrExtra: pCritErrorMsg);
			return false;
		}
		else if(Res == VK_ERROR_LAYER_NOT_PRESENT || Res == VK_ERROR_EXTENSION_NOT_PRESENT)
			TryAgain = true;

		// retry once without the debug layers/extensions when a layer or
		// extension was rejected
		// NOTE(review): if TryDebugExtensions is already false here while
		// TryAgain is set, the function falls through and returns true without
		// a successfully created instance — verify that CheckVulkanCriticalError
		// treating these two codes as non-critical is intentional in that case.
		if(TryAgain && TryDebugExtensions)
			return CreateVulkanInstance(vVKLayers, vVKExtensions, TryDebugExtensions: false);

		return true;
	}
3644
3645 STWGraphicGpu::ETWGraphicsGpuType VKGPUTypeToGraphicsGpuType(VkPhysicalDeviceType VKGPUType)
3646 {
3647 if(VKGPUType == VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU)
3648 return STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_DISCRETE;
3649 else if(VKGPUType == VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU)
3650 return STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_INTEGRATED;
3651 else if(VKGPUType == VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU)
3652 return STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_VIRTUAL;
3653 else if(VKGPUType == VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_CPU)
3654 return STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_CPU;
3655
3656 return STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_CPU;
3657 }
3658
3659 static void GetVendorString(uint32_t VendorId, char *pVendorStr, size_t Size)
3660 {
3661 switch(VendorId)
3662 {
3663 case 0x1002:
3664 case 0x1022:
3665 str_copy(dst: pVendorStr, src: "AMD", dst_size: Size);
3666 break;
3667 case 0x1010:
3668 str_copy(dst: pVendorStr, src: "ImgTec", dst_size: Size);
3669 break;
3670 case 0x106B:
3671 str_copy(dst: pVendorStr, src: "Apple", dst_size: Size);
3672 break;
3673 case 0x10DE:
3674 str_copy(dst: pVendorStr, src: "NVIDIA", dst_size: Size);
3675 break;
3676 case 0x13B5:
3677 str_copy(dst: pVendorStr, src: "ARM", dst_size: Size);
3678 break;
3679 case 0x5143:
3680 str_copy(dst: pVendorStr, src: "Qualcomm", dst_size: Size);
3681 break;
3682 case 0x8086:
3683 str_copy(dst: pVendorStr, src: "Intel", dst_size: Size);
3684 break;
3685 case 0x10005:
3686 str_copy(dst: pVendorStr, src: "Mesa", dst_size: Size);
3687 break;
3688 default:
3689 log_warn("gfx/vulkan", "Unknown GPU vendor ID %08X.", VendorId);
3690 str_format(buffer: pVendorStr, buffer_size: Size, format: "Unknown (%08X)", VendorId);
3691 break;
3692 }
3693 }
3694
3695 // from: https://github.com/SaschaWillems/vulkan.gpuinfo.org/blob/5c3986798afc39d736b825bf8a5fbf92b8d9ed49/includes/functions.php#L364
3696 void FormatDriverVersion(char (&aDriverVersion)[256], uint32_t DriverVersion, uint32_t VendorId)
3697 {
3698 if(VendorId == 0x10DE) // NVIDIA
3699 {
3700 str_format(buffer: aDriverVersion, buffer_size: std::size(aDriverVersion), format: "%d.%d.%d.%d",
3701 (DriverVersion >> 22) & 0x3ff,
3702 (DriverVersion >> 14) & 0x0ff,
3703 (DriverVersion >> 6) & 0x0ff,
3704 (DriverVersion) & 0x003f);
3705 }
3706#ifdef CONF_FAMILY_WINDOWS
3707 else if(VendorId == 0x8086) // Windows with Intel only
3708 {
3709 str_format(aDriverVersion, std::size(aDriverVersion),
3710 "%d.%d",
3711 (DriverVersion >> 14),
3712 (DriverVersion) & 0x3fff);
3713 }
3714#endif
3715 else
3716 {
3717 // Use Vulkan version conventions if vendor mapping is not available
3718 str_format(buffer: aDriverVersion, buffer_size: std::size(aDriverVersion),
3719 format: "%d.%d.%d",
3720 (DriverVersion >> 22),
3721 (DriverVersion >> 12) & 0x3ff,
3722 DriverVersion & 0xfff);
3723 }
3724 }
3725
 // Selects the physical GPU and the graphics queue family to use.
 //
 // Enumerates all Vulkan devices, fills m_pGpuList with every device that is
 // not denied and supports at least the backend's required Vulkan version,
 // decides which device the "auto" setting would pick, and honors an explicit
 // device name from g_Config.m_GfxGpuName. On success the renderer/vendor/
 // version output strings (each GPU_INFO_STRING_SIZE bytes) are filled,
 // important device limits are cached in members, and m_VKGPU /
 // m_VKGraphicsQueueIndex are set. Returns false after SetError()/SetWarning()
 // on failure.
 [[nodiscard]] bool SelectGpu(char *pRendererName, char *pVendorName, char *pVersionName)
 {
  uint32_t DevicesCount = 0;
  auto Res = vkEnumeratePhysicalDevices(instance: m_VKInstance, pPhysicalDeviceCount: &DevicesCount, pPhysicalDevices: nullptr);
  if(Res != VK_SUCCESS)
  {
   SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: CheckVulkanCriticalError(CallResult: Res));
   return false;
  }
  if(DevicesCount == 0)
  {
   SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "No Vulkan compatible devices found.");
   return false;
  }

  std::vector<VkPhysicalDevice> vDeviceList(DevicesCount);
  // VK_INCOMPLETE is tolerated here: we then simply work with fewer devices
  Res = vkEnumeratePhysicalDevices(instance: m_VKInstance, pPhysicalDeviceCount: &DevicesCount, pPhysicalDevices: vDeviceList.data());
  if(Res != VK_SUCCESS && Res != VK_INCOMPLETE)
  {
   SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: CheckVulkanCriticalError(CallResult: Res));
   return false;
  }
  if(DevicesCount == 0)
  {
   SetWarning(WarningType: EGfxWarningType::GFX_WARNING_TYPE_INIT_FAILED_MISSING_INTEGRATED_GPU_DRIVER, pWarning: "No Vulkan compatible devices found.");
   return false;
  }
  // make sure to use the correct amount of devices available
  // the amount of physical devices can be smaller than the amount of devices reported
  // see vkEnumeratePhysicalDevices for details
  vDeviceList.resize(new_size: DevicesCount);

  size_t Index = 0;
  std::vector<VkPhysicalDeviceProperties> vDevicePropList(vDeviceList.size());
  m_pGpuList->m_vGpus.reserve(n: vDeviceList.size());

  // index into vDeviceList/vDevicePropList of the device we end up using;
  // defaults to the first device if neither "auto" nor a name match applies
  size_t FoundDeviceIndex = 0;

  STWGraphicGpu::ETWGraphicsGpuType AutoGpuType = STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_INVALID;

  bool IsAutoGpu = str_comp(a: g_Config.m_GfxGpuName, b: "auto") == 0;

  bool UserSelectedGpuChosen = false;
  for(auto &CurDevice : vDeviceList)
  {
   vkGetPhysicalDeviceProperties(physicalDevice: CurDevice, pProperties: &(vDevicePropList[Index]));

   auto &DeviceProp = vDevicePropList[Index];

   STWGraphicGpu::ETWGraphicsGpuType GPUType = VKGPUTypeToGraphicsGpuType(VKGPUType: DeviceProp.deviceType);

   int DevApiMajor = (int)VK_API_VERSION_MAJOR(DeviceProp.apiVersion);
   int DevApiMinor = (int)VK_API_VERSION_MINOR(DeviceProp.apiVersion);
   int DevApiPatch = (int)VK_API_VERSION_PATCH(DeviceProp.apiVersion);

   // skip devices with drivers on the deny list
   auto IsDenied = CCommandProcessorFragment_Vulkan::IsGpuDenied(Vendor: DeviceProp.vendorID, DriverVersion: DeviceProp.driverVersion, ApiMajor: DevApiMajor, ApiMinor: DevApiMinor, ApiPatch: DevApiPatch);
   if((DevApiMajor > BACKEND_VULKAN_VERSION_MAJOR || (DevApiMajor == BACKEND_VULKAN_VERSION_MAJOR && DevApiMinor >= BACKEND_VULKAN_VERSION_MINOR)) && !IsDenied)
   {
    STWGraphicGpu::STWGraphicGpuItem NewGpu;
    str_copy(dst&: NewGpu.m_aName, src: DeviceProp.deviceName);
    NewGpu.m_GpuType = GPUType;
    m_pGpuList->m_vGpus.push_back(x: NewGpu);

    // We always decide what the 'auto' GPU would be, even if user is forcing a GPU by name in config
    // Reminder: A worse GPU enumeration has a higher value than a better GPU enumeration, thus the '>'
    if(AutoGpuType > STWGraphicGpu::ETWGraphicsGpuType::GRAPHICS_GPU_TYPE_INTEGRATED)
    {
     str_copy(dst&: m_pGpuList->m_AutoGpu.m_aName, src: DeviceProp.deviceName);
     m_pGpuList->m_AutoGpu.m_GpuType = GPUType;

     AutoGpuType = GPUType;

     if(IsAutoGpu)
      FoundDeviceIndex = Index;
    }
    // We only select the first GPU that matches, because it comes first in the enumeration array, it's preferred by the system
    // Reminder: We can't break the cycle here if the name matches because we need to choose the best GPU for 'auto' mode
    if(!IsAutoGpu && !UserSelectedGpuChosen && str_comp(a: DeviceProp.deviceName, b: g_Config.m_GfxGpuName) == 0)
    {
     FoundDeviceIndex = Index;
     UserSelectedGpuChosen = true;
    }
   }
   Index++;
  }

  if(m_pGpuList->m_vGpus.empty())
  {
   SetWarning(WarningType: EGfxWarningType::GFX_WARNING_TYPE_INIT_FAILED_NO_DEVICE_WITH_REQUIRED_VERSION, pWarning: "No devices with required Vulkan version found.");
   return false;
  }

  {
   auto &DeviceProp = vDevicePropList[FoundDeviceIndex];

   int DevApiMajor = (int)VK_API_VERSION_MAJOR(DeviceProp.apiVersion);
   int DevApiMinor = (int)VK_API_VERSION_MINOR(DeviceProp.apiVersion);
   int DevApiPatch = (int)VK_API_VERSION_PATCH(DeviceProp.apiVersion);

   // report name, vendor and driver version of the chosen device to the caller
   str_copy(dst: pRendererName, src: DeviceProp.deviceName, dst_size: GPU_INFO_STRING_SIZE);
   GetVendorString(VendorId: DeviceProp.vendorID, pVendorStr: pVendorName, Size: GPU_INFO_STRING_SIZE);
   char aDriverVersion[256];
   FormatDriverVersion(aDriverVersion, DriverVersion: DeviceProp.driverVersion, VendorId: DeviceProp.vendorID);
   str_format(buffer: pVersionName, buffer_size: GPU_INFO_STRING_SIZE, format: "Vulkan %d.%d.%d (driver: %s)",
    DevApiMajor, DevApiMinor, DevApiPatch, aDriverVersion);

   // get important device limits
   m_NonCoherentMemAlignment = DeviceProp.limits.nonCoherentAtomSize;
   m_OptimalImageCopyMemAlignment = DeviceProp.limits.optimalBufferCopyOffsetAlignment;
   m_MaxTextureSize = DeviceProp.limits.maxImageDimension2D;
   m_MaxSamplerAnisotropy = DeviceProp.limits.maxSamplerAnisotropy;

   m_MinUniformAlign = DeviceProp.limits.minUniformBufferOffsetAlignment;
   m_MaxMultiSample = DeviceProp.limits.framebufferColorSampleCounts;

   if(IsVerbose())
   {
    log_debug("gfx/vulkan", "Device prop: non-coherent align: %" PRIzu ", optimal image copy align: %" PRIzu ", max texture size: %u, max sampler anisotropy: %u",
     (size_t)m_NonCoherentMemAlignment, (size_t)m_OptimalImageCopyMemAlignment, m_MaxTextureSize, m_MaxSamplerAnisotropy);
    log_debug("gfx/vulkan", "Device prop: min uniform align: %u, multi sample: %u",
     m_MinUniformAlign, (uint32_t)m_MaxMultiSample);
   }
  }

  VkPhysicalDevice CurDevice = vDeviceList[FoundDeviceIndex];

  // find a queue family with graphics support; presentation support for it is
  // verified later in CreateSurface
  uint32_t FamQueueCount = 0;
  vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice: CurDevice, pQueueFamilyPropertyCount: &FamQueueCount, pQueueFamilyProperties: nullptr);
  if(FamQueueCount == 0)
  {
   SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "No Vulkan queue family properties found.");
   return false;
  }

  std::vector<VkQueueFamilyProperties> vQueuePropList(FamQueueCount);
  vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice: CurDevice, pQueueFamilyPropertyCount: &FamQueueCount, pQueueFamilyProperties: vQueuePropList.data());

  // note: the loop keeps overwriting, so the LAST graphics-capable family wins
  uint32_t QueueNodeIndex = std::numeric_limits<uint32_t>::max();
  for(uint32_t i = 0; i < FamQueueCount; i++)
  {
   if(vQueuePropList[i].queueCount > 0 && (vQueuePropList[i].queueFlags & VK_QUEUE_GRAPHICS_BIT))
   {
    QueueNodeIndex = i;
   }
   /*if(vQueuePropList[i].queueCount > 0 && (vQueuePropList[i].queueFlags & VK_QUEUE_COMPUTE_BIT))
   {
    QueueNodeIndex = i;
   }*/
  }

  if(QueueNodeIndex == std::numeric_limits<uint32_t>::max())
  {
   SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "No Vulkan queue found that matches the requirements: graphics queue.");
   return false;
  }

  m_VKGPU = CurDevice;
  m_VKGraphicsQueueIndex = QueueNodeIndex;
  return true;
 }
3886
3887 [[nodiscard]] bool CreateLogicalDevice(const std::vector<std::string> &vVKLayers)
3888 {
3889 std::vector<const char *> vLayerCNames;
3890 vLayerCNames.reserve(n: vVKLayers.size());
3891 for(const auto &Layer : vVKLayers)
3892 vLayerCNames.emplace_back(args: Layer.c_str());
3893
3894 uint32_t DevPropCount = 0;
3895 if(vkEnumerateDeviceExtensionProperties(physicalDevice: m_VKGPU, NULL, pPropertyCount: &DevPropCount, NULL) != VK_SUCCESS)
3896 {
3897 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Querying logical device extension properties failed.");
3898 return false;
3899 }
3900
3901 std::vector<VkExtensionProperties> vDevPropList(DevPropCount);
3902 if(vkEnumerateDeviceExtensionProperties(physicalDevice: m_VKGPU, NULL, pPropertyCount: &DevPropCount, pProperties: vDevPropList.data()) != VK_SUCCESS)
3903 {
3904 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Querying logical device extension properties failed.");
3905 return false;
3906 }
3907
3908 std::vector<const char *> vDevPropCNames;
3909 std::set<std::string> OurDevExt = OurDeviceExtensions();
3910
3911 for(const auto &CurExtProp : vDevPropList)
3912 {
3913 if(OurDevExt.contains(x: std::string(CurExtProp.extensionName)))
3914 {
3915 vDevPropCNames.emplace_back(args: CurExtProp.extensionName);
3916 }
3917 }
3918
3919 VkDeviceQueueCreateInfo VKQueueCreateInfo;
3920 VKQueueCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
3921 VKQueueCreateInfo.queueFamilyIndex = m_VKGraphicsQueueIndex;
3922 VKQueueCreateInfo.queueCount = 1;
3923 float QueuePrio = 1.0f;
3924 VKQueueCreateInfo.pQueuePriorities = &QueuePrio;
3925 VKQueueCreateInfo.pNext = NULL;
3926 VKQueueCreateInfo.flags = 0;
3927
3928 VkDeviceCreateInfo VKCreateInfo;
3929 VKCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
3930 VKCreateInfo.queueCreateInfoCount = 1;
3931 VKCreateInfo.pQueueCreateInfos = &VKQueueCreateInfo;
3932 VKCreateInfo.ppEnabledLayerNames = vLayerCNames.data();
3933 VKCreateInfo.enabledLayerCount = static_cast<uint32_t>(vLayerCNames.size());
3934 VKCreateInfo.ppEnabledExtensionNames = vDevPropCNames.data();
3935 VKCreateInfo.enabledExtensionCount = static_cast<uint32_t>(vDevPropCNames.size());
3936 VKCreateInfo.pNext = NULL;
3937 VKCreateInfo.pEnabledFeatures = NULL;
3938 VKCreateInfo.flags = 0;
3939
3940 if(vkCreateDevice(physicalDevice: m_VKGPU, pCreateInfo: &VKCreateInfo, pAllocator: nullptr, pDevice: &m_VKDevice) != VK_SUCCESS)
3941 {
3942 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Logical device could not be created.");
3943 return false;
3944 }
3945
3946 return true;
3947 }
3948
3949 [[nodiscard]] bool CreateSurface(SDL_Window *pWindow)
3950 {
3951 if(!SDL_Vulkan_CreateSurface(window: pWindow, instance: m_VKInstance, surface: &m_VKPresentSurface))
3952 {
3953 log_error("gfx/vulkan", "Failed to create surface. SDL error: %s", SDL_GetError());
3954 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating a Vulkan surface for the SDL window failed.");
3955 return false;
3956 }
3957
3958 VkBool32 IsSupported = false;
3959 vkGetPhysicalDeviceSurfaceSupportKHR(physicalDevice: m_VKGPU, queueFamilyIndex: m_VKGraphicsQueueIndex, surface: m_VKPresentSurface, pSupported: &IsSupported);
3960 if(!IsSupported)
3961 {
3962 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface does not support presenting the framebuffer to a screen. Maybe the wrong GPU was selected?");
3963 return false;
3964 }
3965
3966 return true;
3967 }
3968
 // Destroys the SDL-created window surface again; counterpart to CreateSurface.
 void DestroySurface()
 {
  vkDestroySurfaceKHR(instance: m_VKInstance, surface: m_VKPresentSurface, pAllocator: nullptr);
 }
3973
3974 [[nodiscard]] bool GetPresentationMode(VkPresentModeKHR &VKIOMode)
3975 {
3976 uint32_t PresentModeCount = 0;
3977 if(vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice: m_VKGPU, surface: m_VKPresentSurface, pPresentModeCount: &PresentModeCount, NULL) != VK_SUCCESS)
3978 {
3979 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface presentation modes could not be fetched.");
3980 return false;
3981 }
3982
3983 std::vector<VkPresentModeKHR> vPresentModeList(PresentModeCount);
3984 if(vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice: m_VKGPU, surface: m_VKPresentSurface, pPresentModeCount: &PresentModeCount, pPresentModes: vPresentModeList.data()) != VK_SUCCESS)
3985 {
3986 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface presentation modes could not be fetched.");
3987 return false;
3988 }
3989
3990 VKIOMode = g_Config.m_GfxVsync ? VK_PRESENT_MODE_FIFO_KHR : VK_PRESENT_MODE_IMMEDIATE_KHR;
3991 for(const auto &Mode : vPresentModeList)
3992 {
3993 if(Mode == VKIOMode)
3994 return true;
3995 }
3996
3997 log_warn("gfx/vulkan", "Requested presentation mode was not available. Falling back to mailbox / FIFO relaxed.");
3998 VKIOMode = g_Config.m_GfxVsync ? VK_PRESENT_MODE_FIFO_RELAXED_KHR : VK_PRESENT_MODE_MAILBOX_KHR;
3999 for(const auto &Mode : vPresentModeList)
4000 {
4001 if(Mode == VKIOMode)
4002 return true;
4003 }
4004
4005 log_warn("gfx/vulkan", "Requested presentation mode was not available. Using first available.");
4006 if(PresentModeCount > 0)
4007 VKIOMode = vPresentModeList[0];
4008
4009 return true;
4010 }
4011
4012 [[nodiscard]] bool GetSurfaceProperties(VkSurfaceCapabilitiesKHR &VKSurfCapabilities)
4013 {
4014 if(vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice: m_VKGPU, surface: m_VKPresentSurface, pSurfaceCapabilities: &VKSurfCapabilities) != VK_SUCCESS)
4015 {
4016 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface capabilities could not be fetched.");
4017 return false;
4018 }
4019 return true;
4020 }
4021
4022 uint32_t GetNumberOfSwapImages(const VkSurfaceCapabilitiesKHR &VKCapabilities)
4023 {
4024 uint32_t ImgNumber = VKCapabilities.minImageCount + 1;
4025 if(IsVerbose())
4026 {
4027 log_debug("gfx/vulkan", "Minimal swap image count: %u", VKCapabilities.minImageCount);
4028 }
4029 return (VKCapabilities.maxImageCount > 0 && ImgNumber > VKCapabilities.maxImageCount) ? VKCapabilities.maxImageCount : ImgNumber;
4030 }
4031
4032 SSwapImgViewportExtent GetSwapImageSize(const VkSurfaceCapabilitiesKHR &VKCapabilities)
4033 {
4034 VkExtent2D RetSize = {.width: m_CanvasWidth, .height: m_CanvasHeight};
4035
4036 if(VKCapabilities.currentExtent.width == std::numeric_limits<uint32_t>::max())
4037 {
4038 RetSize.width = std::clamp<uint32_t>(val: RetSize.width, lo: VKCapabilities.minImageExtent.width, hi: VKCapabilities.maxImageExtent.width);
4039 RetSize.height = std::clamp<uint32_t>(val: RetSize.height, lo: VKCapabilities.minImageExtent.height, hi: VKCapabilities.maxImageExtent.height);
4040 }
4041 else
4042 {
4043 RetSize = VKCapabilities.currentExtent;
4044 }
4045
4046 VkExtent2D AutoViewportExtent = RetSize;
4047 bool UsesForcedViewport = false;
4048 // keep this in sync with graphics_threaded AdjustViewport's check
4049 if(AutoViewportExtent.height > 4 * AutoViewportExtent.width / 5)
4050 {
4051 AutoViewportExtent.height = 4 * AutoViewportExtent.width / 5;
4052 UsesForcedViewport = true;
4053 }
4054
4055 SSwapImgViewportExtent Ext;
4056 Ext.m_SwapImageViewport = RetSize;
4057 Ext.m_ForcedViewport = AutoViewportExtent;
4058 Ext.m_HasForcedViewport = UsesForcedViewport;
4059
4060 return Ext;
4061 }
4062
4063 [[nodiscard]] bool GetImageUsage(const VkSurfaceCapabilitiesKHR &VKCapabilities, VkImageUsageFlags &VKOutUsage)
4064 {
4065 std::vector<VkImageUsageFlags> vOurImgUsages = OurImageUsages();
4066 if(vOurImgUsages.empty())
4067 {
4068 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Framebuffer image attachment types not supported.");
4069 return false;
4070 }
4071
4072 VKOutUsage = vOurImgUsages[0];
4073
4074 for(const auto &ImgUsage : vOurImgUsages)
4075 {
4076 VkImageUsageFlags ImgUsageFlags = ImgUsage & VKCapabilities.supportedUsageFlags;
4077 if(ImgUsageFlags != ImgUsage)
4078 {
4079 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Framebuffer image attachment types not supported.");
4080 return false;
4081 }
4082
4083 VKOutUsage = (VKOutUsage | ImgUsage);
4084 }
4085
4086 return true;
4087 }
4088
 // Chooses the surface pre-transform for the swap chain: identity if the
 // surface supports it, otherwise whatever transform is currently active.
 VkSurfaceTransformFlagBitsKHR GetTransform(const VkSurfaceCapabilitiesKHR &VKCapabilities)
 {
  if(VKCapabilities.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR)
   return VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
  return VKCapabilities.currentTransform;
 }
4095
 // Chooses the swap chain surface format (stored in m_VKSurfFormat).
 // Preference: B8G8R8A8_UNORM, then R8G8B8A8_UNORM, both with the sRGB
 // non-linear color space; otherwise falls back to the first reported format
 // with a warning.
 [[nodiscard]] bool GetFormat()
 {
  uint32_t SurfFormats = 0;
  VkResult Res = vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice: m_VKGPU, surface: m_VKPresentSurface, pSurfaceFormatCount: &SurfFormats, pSurfaceFormats: nullptr);
  if(Res != VK_SUCCESS && Res != VK_INCOMPLETE)
  {
   SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface format fetching failed.");
   return false;
  }

  std::vector<VkSurfaceFormatKHR> vSurfFormatList(SurfFormats);
  Res = vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice: m_VKGPU, surface: m_VKPresentSurface, pSurfaceFormatCount: &SurfFormats, pSurfaceFormats: vSurfFormatList.data());
  if(Res != VK_SUCCESS && Res != VK_INCOMPLETE)
  {
   SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "The device surface format fetching failed.");
   return false;
  }

  if(Res == VK_INCOMPLETE)
  {
   log_warn("gfx/vulkan", "Not all surface formats are requestable with your current settings.");
  }

  // a single VK_FORMAT_UNDEFINED entry means the driver lets us pick freely
  if(vSurfFormatList.size() == 1 && vSurfFormatList[0].format == VK_FORMAT_UNDEFINED)
  {
   m_VKSurfFormat.format = VK_FORMAT_B8G8R8A8_UNORM;
   m_VKSurfFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
   log_warn("gfx/vulkan", "Surface format was undefined. This can potentially cause bugs.");
   return true;
  }

  for(const auto &FindFormat : vSurfFormatList)
  {
   if(FindFormat.format == VK_FORMAT_B8G8R8A8_UNORM && FindFormat.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR)
   {
    m_VKSurfFormat = FindFormat;
    return true;
   }
   else if(FindFormat.format == VK_FORMAT_R8G8B8A8_UNORM && FindFormat.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR)
   {
    m_VKSurfFormat = FindFormat;
    return true;
   }
  }

  log_warn("gfx/vulkan", "Surface format was not RGBA (or variants of it). This can potentially cause weird looking images (too bright etc.).");
  m_VKSurfFormat = vSurfFormatList[0];
  return true;
 }
4145
 // (Re)creates the swap chain from the current surface capabilities,
 // presentation mode, image usage flags and surface format. The previous swap
 // chain handle is returned via OldSwapChain so the caller can destroy it
 // after the new one is up.
 [[nodiscard]] bool CreateSwapChain(VkSwapchainKHR &OldSwapChain)
 {
  VkSurfaceCapabilitiesKHR VKSurfCap;
  if(!GetSurfaceProperties(VKSurfCapabilities&: VKSurfCap))
   return false;

  VkPresentModeKHR PresentMode = VK_PRESENT_MODE_IMMEDIATE_KHR;
  if(!GetPresentationMode(VKIOMode&: PresentMode))
   return false;

  uint32_t SwapImgCount = GetNumberOfSwapImages(VKCapabilities: VKSurfCap);

  m_VKSwapImgAndViewportExtent = GetSwapImageSize(VKCapabilities: VKSurfCap);

  VkImageUsageFlags UsageFlags;
  if(!GetImageUsage(VKCapabilities: VKSurfCap, VKOutUsage&: UsageFlags))
   return false;

  VkSurfaceTransformFlagBitsKHR TransformFlagBits = GetTransform(VKCapabilities: VKSurfCap);

  if(!GetFormat())
   return false;

  // hand the old swap chain back to the caller; passing it as oldSwapchain
  // lets the driver reuse resources during recreation
  OldSwapChain = m_VKSwapChain;

  VkSwapchainCreateInfoKHR SwapInfo;
  SwapInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
  SwapInfo.pNext = nullptr;
  SwapInfo.flags = 0;
  SwapInfo.surface = m_VKPresentSurface;
  SwapInfo.minImageCount = SwapImgCount;
  SwapInfo.imageFormat = m_VKSurfFormat.format;
  SwapInfo.imageColorSpace = m_VKSurfFormat.colorSpace;
  SwapInfo.imageExtent = m_VKSwapImgAndViewportExtent.m_SwapImageViewport;
  SwapInfo.imageArrayLayers = 1;
  SwapInfo.imageUsage = UsageFlags;
  // only one queue family touches the images, so exclusive sharing is enough
  SwapInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
  SwapInfo.queueFamilyIndexCount = 0;
  SwapInfo.pQueueFamilyIndices = nullptr;
  SwapInfo.preTransform = TransformFlagBits;
  SwapInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
  SwapInfo.presentMode = PresentMode;
  SwapInfo.clipped = true;
  SwapInfo.oldSwapchain = OldSwapChain;

  m_VKSwapChain = VK_NULL_HANDLE;
  VkResult SwapchainCreateRes = vkCreateSwapchainKHR(device: m_VKDevice, pCreateInfo: &SwapInfo, pAllocator: nullptr, pSwapchain: &m_VKSwapChain);
  const char *pCritErrorMsg = CheckVulkanCriticalError(CallResult: SwapchainCreateRes);
  if(pCritErrorMsg != nullptr)
  {
   SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the swap chain failed.", pErrStrExtra: pCritErrorMsg);
   return false;
  }
  // non-critical failure: window already in use, caller may retry
  else if(SwapchainCreateRes == VK_ERROR_NATIVE_WINDOW_IN_USE_KHR)
   return false;

  return true;
 }
4204
 // Destroys the swap chain. When ForceDestroy is false this is a no-op, since
 // the handle may still be needed as oldSwapchain during recreation.
 void DestroySwapChain(bool ForceDestroy)
 {
  if(ForceDestroy)
  {
   vkDestroySwapchainKHR(device: m_VKDevice, swapchain: m_VKSwapChain, pAllocator: nullptr);
   m_VKSwapChain = VK_NULL_HANDLE;
  }
 }
4213
4214 [[nodiscard]] bool GetSwapChainImageHandles()
4215 {
4216 uint32_t ImgCount = 0;
4217 if(vkGetSwapchainImagesKHR(device: m_VKDevice, swapchain: m_VKSwapChain, pSwapchainImageCount: &ImgCount, pSwapchainImages: nullptr) != VK_SUCCESS)
4218 {
4219 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get swap chain images.");
4220 return false;
4221 }
4222
4223 m_SwapChainImageCount = ImgCount;
4224
4225 m_vSwapChainImages.resize(new_size: ImgCount);
4226 if(vkGetSwapchainImagesKHR(device: m_VKDevice, swapchain: m_VKSwapChain, pSwapchainImageCount: &ImgCount, pSwapchainImages: m_vSwapChainImages.data()) != VK_SUCCESS)
4227 {
4228 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not get swap chain images.");
4229 return false;
4230 }
4231
4232 return true;
4233 }
4234
 // Drops the cached swap chain image handles; the images themselves are owned
 // and destroyed by the swap chain.
 void ClearSwapChainImageHandles()
 {
  m_vSwapChainImages.clear();
 }
4239
 // Fetches the queue handles from the logical device. Graphics and present
 // queue both use queue 0 of the same family, so the two handles alias the
 // same underlying queue.
 void GetDeviceQueue()
 {
  vkGetDeviceQueue(device: m_VKDevice, queueFamilyIndex: m_VKGraphicsQueueIndex, queueIndex: 0, pQueue: &m_VKGraphicsQueue);
  vkGetDeviceQueue(device: m_VKDevice, queueFamilyIndex: m_VKGraphicsQueueIndex, queueIndex: 0, pQueue: &m_VKPresentQueue);
 }
4245
4246#ifdef VK_EXT_debug_utils
4247 static VKAPI_ATTR VkBool32 VKAPI_CALL VKDebugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT MessageSeverity, VkDebugUtilsMessageTypeFlagsEXT MessageType, const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, void *pUserData)
4248 {
4249 if((MessageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) != 0)
4250 {
4251 log_error("gfx/vulkan", "Validation error: %s", pCallbackData->pMessage);
4252 }
4253 else
4254 {
4255 log_info("gfx/vulkan", "Validation info: %s", pCallbackData->pMessage);
4256 }
4257
4258 return VK_FALSE;
4259 }
4260
4261 VkResult CreateDebugUtilsMessengerEXT(const VkDebugUtilsMessengerCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDebugUtilsMessengerEXT *pDebugMessenger)
4262 {
4263 auto pfnVulkanCreateDebugUtilsFunction = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance: m_VKInstance, pName: "vkCreateDebugUtilsMessengerEXT");
4264 if(pfnVulkanCreateDebugUtilsFunction != nullptr)
4265 {
4266 return pfnVulkanCreateDebugUtilsFunction(m_VKInstance, pCreateInfo, pAllocator, pDebugMessenger);
4267 }
4268 else
4269 {
4270 return VK_ERROR_EXTENSION_NOT_PRESENT;
4271 }
4272 }
4273
4274 void DestroyDebugUtilsMessengerEXT(VkDebugUtilsMessengerEXT &DebugMessenger)
4275 {
4276 auto pfnVulkanDestroyDebugUtilsFunction = (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance: m_VKInstance, pName: "vkDestroyDebugUtilsMessengerEXT");
4277 if(pfnVulkanDestroyDebugUtilsFunction != nullptr)
4278 {
4279 pfnVulkanDestroyDebugUtilsFunction(m_VKInstance, DebugMessenger, nullptr);
4280 }
4281 }
4282#endif
4283
 // Registers VKDebugCallback with the validation layers (if the debug utils
 // extension is compiled in). On failure m_DebugMessenger stays VK_NULL_HANDLE
 // and only a warning is logged — debug output is optional.
 void SetupDebugCallback()
 {
#ifdef VK_EXT_debug_utils
  VkDebugUtilsMessengerCreateInfoEXT CreateInfo = {};
  CreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
  CreateInfo.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
  CreateInfo.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT; // | VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT <- too annoying
  CreateInfo.pfnUserCallback = VKDebugCallback;

  if(CreateDebugUtilsMessengerEXT(pCreateInfo: &CreateInfo, pAllocator: nullptr, pDebugMessenger: &m_DebugMessenger) != VK_SUCCESS)
  {
   m_DebugMessenger = VK_NULL_HANDLE;
   log_warn("gfx/vulkan", "Could not find Vulkan debug layer.");
  }
  else
  {
   log_info("gfx/vulkan", "Enabled Vulkan debug context.");
  }
#endif
 }
4304
 // Unregisters the debug messenger set up by SetupDebugCallback, if any.
 void UnregisterDebugCallback()
 {
#ifdef VK_EXT_debug_utils
  if(m_DebugMessenger != VK_NULL_HANDLE)
   DestroyDebugUtilsMessengerEXT(DebugMessenger&: m_DebugMessenger);
#endif
 }
4312
4313 [[nodiscard]] bool CreateImageViews()
4314 {
4315 m_vSwapChainImageViewList.resize(new_size: m_SwapChainImageCount);
4316
4317 for(size_t i = 0; i < m_SwapChainImageCount; i++)
4318 {
4319 VkImageViewCreateInfo CreateInfo{};
4320 CreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
4321 CreateInfo.image = m_vSwapChainImages[i];
4322 CreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
4323 CreateInfo.format = m_VKSurfFormat.format;
4324 CreateInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
4325 CreateInfo.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
4326 CreateInfo.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
4327 CreateInfo.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
4328 CreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
4329 CreateInfo.subresourceRange.baseMipLevel = 0;
4330 CreateInfo.subresourceRange.levelCount = 1;
4331 CreateInfo.subresourceRange.baseArrayLayer = 0;
4332 CreateInfo.subresourceRange.layerCount = 1;
4333
4334 if(vkCreateImageView(device: m_VKDevice, pCreateInfo: &CreateInfo, pAllocator: nullptr, pView: &m_vSwapChainImageViewList[i]) != VK_SUCCESS)
4335 {
4336 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Could not create image views for the swap chain framebuffers.");
4337 return false;
4338 }
4339 }
4340
4341 return true;
4342 }
4343
4344 void DestroyImageViews()
4345 {
4346 for(auto &ImageView : m_vSwapChainImageViewList)
4347 {
4348 vkDestroyImageView(device: m_VKDevice, imageView: ImageView, pAllocator: nullptr);
4349 }
4350
4351 m_vSwapChainImageViewList.clear();
4352 }
4353
 // Creates one multi-sampling color target (image + memory + view) per swap
 // chain image, used as transient render target that is resolved into the
 // swap chain image. The vector is sized even when multi-sampling is off, so
 // CreateFramebuffers can index it unconditionally.
 [[nodiscard]] bool CreateMultiSamplerImageAttachments()
 {
  m_vSwapChainMultiSamplingImages.resize(new_size: m_SwapChainImageCount);
  if(HasMultiSampling())
  {
   for(size_t i = 0; i < m_SwapChainImageCount; ++i)
   {
    if(!CreateImage(Width: m_VKSwapImgAndViewportExtent.m_SwapImageViewport.width, Height: m_VKSwapImgAndViewportExtent.m_SwapImageViewport.height, Depth: 1, MipMapLevelCount: 1, Format: m_VKSurfFormat.format, Tiling: VK_IMAGE_TILING_OPTIMAL, Image&: m_vSwapChainMultiSamplingImages[i].m_Image, ImageMemory&: m_vSwapChainMultiSamplingImages[i].m_ImgMem, ImageUsage: VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
     return false;
    m_vSwapChainMultiSamplingImages[i].m_ImgView = CreateImageView(Image: m_vSwapChainMultiSamplingImages[i].m_Image, Format: m_VKSurfFormat.format, ViewType: VK_IMAGE_VIEW_TYPE_2D, Depth: 1, MipMapLevelCount: 1);
   }
  }

  return true;
 }
4369
 // Destroys the multi-sampling color targets created by
 // CreateMultiSamplerImageAttachments and clears the vector.
 void DestroyMultiSamplerImageAttachments()
 {
  if(HasMultiSampling())
  {
   // NOTE(review): the resize before destruction mirrors the create path; if
   // the vector were ever smaller than m_SwapChainImageCount this would
   // destroy default-constructed handles — presumably null and therefore
   // harmless, but worth confirming against SSwapChainMultiSampleImage.
   m_vSwapChainMultiSamplingImages.resize(new_size: m_SwapChainImageCount);
   for(size_t i = 0; i < m_SwapChainImageCount; ++i)
   {
    vkDestroyImage(device: m_VKDevice, image: m_vSwapChainMultiSamplingImages[i].m_Image, pAllocator: nullptr);
    vkDestroyImageView(device: m_VKDevice, imageView: m_vSwapChainMultiSamplingImages[i].m_ImgView, pAllocator: nullptr);
    FreeImageMemBlock(Block&: m_vSwapChainMultiSamplingImages[i].m_ImgMem);
   }
  }
  m_vSwapChainMultiSamplingImages.clear();
 }
4384
 // Creates the main render pass (m_VKRenderPass) with a single subpass.
 // Without multi-sampling: one presentable color attachment. With
 // multi-sampling: a transient multi-sample color attachment that is resolved
 // into the presentable one. ClearAttachments selects whether the first-used
 // color target is cleared or left undefined at load.
 [[nodiscard]] bool CreateRenderPass(bool ClearAttachments)
 {
  bool HasMultiSamplingTargets = HasMultiSampling();
  VkAttachmentDescription MultiSamplingColorAttachment{};
  MultiSamplingColorAttachment.format = m_VKSurfFormat.format;
  MultiSamplingColorAttachment.samples = GetSampleCount();
  MultiSamplingColorAttachment.loadOp = ClearAttachments ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  // contents are resolved into the single-sample attachment, no need to store
  MultiSamplingColorAttachment.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  MultiSamplingColorAttachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  MultiSamplingColorAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  MultiSamplingColorAttachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  MultiSamplingColorAttachment.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

  VkAttachmentDescription ColorAttachment{};
  ColorAttachment.format = m_VKSurfFormat.format;
  ColorAttachment.samples = VK_SAMPLE_COUNT_1_BIT;
  // with multi-sampling the clear happens on the multi-sample attachment
  ColorAttachment.loadOp = ClearAttachments && !HasMultiSamplingTargets ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  ColorAttachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
  ColorAttachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  ColorAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  ColorAttachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  // ends up ready for presentation
  ColorAttachment.finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;

  VkAttachmentReference MultiSamplingColorAttachmentRef{};
  MultiSamplingColorAttachmentRef.attachment = 0;
  MultiSamplingColorAttachmentRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

  // attachment indices must match the pAttachments array below
  VkAttachmentReference ColorAttachmentRef{};
  ColorAttachmentRef.attachment = HasMultiSamplingTargets ? 1 : 0;
  ColorAttachmentRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

  VkSubpassDescription Subpass{};
  Subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
  Subpass.colorAttachmentCount = 1;
  Subpass.pColorAttachments = HasMultiSamplingTargets ? &MultiSamplingColorAttachmentRef : &ColorAttachmentRef;
  Subpass.pResolveAttachments = HasMultiSamplingTargets ? &ColorAttachmentRef : nullptr;

  std::array<VkAttachmentDescription, 2> aAttachments;
  aAttachments[0] = MultiSamplingColorAttachment;
  aAttachments[1] = ColorAttachment;

  // wait for previous color output before this pass writes color
  VkSubpassDependency Dependency{};
  Dependency.srcSubpass = VK_SUBPASS_EXTERNAL;
  Dependency.dstSubpass = 0;
  Dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
  Dependency.srcAccessMask = 0;
  Dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
  Dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;

  VkRenderPassCreateInfo CreateRenderPassInfo{};
  CreateRenderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
  CreateRenderPassInfo.attachmentCount = HasMultiSamplingTargets ? 2 : 1;
  // without multi-sampling skip the first (multi-sample) attachment entry
  CreateRenderPassInfo.pAttachments = HasMultiSamplingTargets ? aAttachments.data() : aAttachments.data() + 1;
  CreateRenderPassInfo.subpassCount = 1;
  CreateRenderPassInfo.pSubpasses = &Subpass;
  CreateRenderPassInfo.dependencyCount = 1;
  CreateRenderPassInfo.pDependencies = &Dependency;

  if(vkCreateRenderPass(device: m_VKDevice, pCreateInfo: &CreateRenderPassInfo, pAllocator: nullptr, pRenderPass: &m_VKRenderPass) != VK_SUCCESS)
  {
   SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the render pass failed.");
   return false;
  }

  return true;
 }
4451
 // Destroys the render pass created by CreateRenderPass.
 void DestroyRenderPass()
 {
  vkDestroyRenderPass(device: m_VKDevice, renderPass: m_VKRenderPass, pAllocator: nullptr);
 }
4456
4457 [[nodiscard]] bool CreateFramebuffers()
4458 {
4459 m_vFramebufferList.resize(new_size: m_SwapChainImageCount);
4460
4461 for(size_t i = 0; i < m_SwapChainImageCount; i++)
4462 {
4463 std::array<VkImageView, 2> aAttachments = {
4464 m_vSwapChainMultiSamplingImages[i].m_ImgView,
4465 m_vSwapChainImageViewList[i]};
4466
4467 bool HasMultiSamplingTargets = HasMultiSampling();
4468
4469 VkFramebufferCreateInfo FramebufferInfo{};
4470 FramebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
4471 FramebufferInfo.renderPass = m_VKRenderPass;
4472 FramebufferInfo.attachmentCount = HasMultiSamplingTargets ? aAttachments.size() : aAttachments.size() - 1;
4473 FramebufferInfo.pAttachments = HasMultiSamplingTargets ? aAttachments.data() : aAttachments.data() + 1;
4474 FramebufferInfo.width = m_VKSwapImgAndViewportExtent.m_SwapImageViewport.width;
4475 FramebufferInfo.height = m_VKSwapImgAndViewportExtent.m_SwapImageViewport.height;
4476 FramebufferInfo.layers = 1;
4477
4478 if(vkCreateFramebuffer(device: m_VKDevice, pCreateInfo: &FramebufferInfo, pAllocator: nullptr, pFramebuffer: &m_vFramebufferList[i]) != VK_SUCCESS)
4479 {
4480 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the framebuffers failed.");
4481 return false;
4482 }
4483 }
4484
4485 return true;
4486 }
4487
4488 void DestroyFramebuffers()
4489 {
4490 for(auto &FrameBuffer : m_vFramebufferList)
4491 {
4492 vkDestroyFramebuffer(device: m_VKDevice, framebuffer: FrameBuffer, pAllocator: nullptr);
4493 }
4494
4495 m_vFramebufferList.clear();
4496 }
4497
4498 [[nodiscard]] bool CreateShaderModule(const std::vector<uint8_t> &vCode, VkShaderModule &ShaderModule)
4499 {
4500 VkShaderModuleCreateInfo CreateInfo{};
4501 CreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
4502 CreateInfo.codeSize = vCode.size();
4503 CreateInfo.pCode = (const uint32_t *)(vCode.data());
4504
4505 if(vkCreateShaderModule(device: m_VKDevice, pCreateInfo: &CreateInfo, pAllocator: nullptr, pShaderModule: &ShaderModule) != VK_SUCCESS)
4506 {
4507 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Shader module was not created.");
4508 return false;
4509 }
4510
4511 return true;
4512 }
4513
4514 [[nodiscard]] bool CreateDescriptorSetLayouts()
4515 {
4516 VkDescriptorSetLayoutBinding SamplerLayoutBinding{};
4517 SamplerLayoutBinding.binding = 0;
4518 SamplerLayoutBinding.descriptorCount = 1;
4519 SamplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
4520 SamplerLayoutBinding.pImmutableSamplers = nullptr;
4521 SamplerLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
4522
4523 std::array<VkDescriptorSetLayoutBinding, 1> aBindings = {SamplerLayoutBinding};
4524 VkDescriptorSetLayoutCreateInfo LayoutInfo{};
4525 LayoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
4526 LayoutInfo.bindingCount = aBindings.size();
4527 LayoutInfo.pBindings = aBindings.data();
4528
4529 if(vkCreateDescriptorSetLayout(device: m_VKDevice, pCreateInfo: &LayoutInfo, pAllocator: nullptr, pSetLayout: &m_StandardTexturedDescriptorSetLayout) != VK_SUCCESS)
4530 {
4531 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating descriptor layout failed.");
4532 return false;
4533 }
4534
4535 if(vkCreateDescriptorSetLayout(device: m_VKDevice, pCreateInfo: &LayoutInfo, pAllocator: nullptr, pSetLayout: &m_Standard3DTexturedDescriptorSetLayout) != VK_SUCCESS)
4536 {
4537 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating descriptor layout failed.");
4538 return false;
4539 }
4540 return true;
4541 }
4542
4543 void DestroyDescriptorSetLayouts()
4544 {
4545 vkDestroyDescriptorSetLayout(device: m_VKDevice, descriptorSetLayout: m_StandardTexturedDescriptorSetLayout, pAllocator: nullptr);
4546 vkDestroyDescriptorSetLayout(device: m_VKDevice, descriptorSetLayout: m_Standard3DTexturedDescriptorSetLayout, pAllocator: nullptr);
4547 }
4548
	// Loads a shader binary from storage and caches it in m_ShaderFiles, so
	// repeated loads of the same filename reuse the cached bytes.
	// On success, pvShaderData points at the cached binary (owned by the
	// cache, valid as long as the entry lives). Returns false only if the
	// file could not be read.
	[[nodiscard]] bool LoadShader(const char *pFilename, std::vector<uint8_t> *&pvShaderData)
	{
		auto ShaderFileIterator = m_ShaderFiles.find(x: pFilename);
		if(ShaderFileIterator == m_ShaderFiles.end())
		{
			// Cache miss: read the whole file into a byte vector.
			void *pShaderBuff;
			unsigned FileSize;
			if(!m_pStorage->ReadFile(pFilename, Type: IStorage::TYPE_ALL, ppResult: &pShaderBuff, pResultLen: &FileSize))
				return false;

			std::vector<uint8_t> vShaderBuff;
			vShaderBuff.resize(new_size: FileSize);
			mem_copy(dest: vShaderBuff.data(), source: pShaderBuff, size: FileSize);
			// ReadFile's buffer is released with free() — presumably it is
			// malloc-allocated; confirm against IStorage::ReadFile.
			free(ptr: pShaderBuff);

			ShaderFileIterator = m_ShaderFiles.insert(x: {pFilename, {.m_vBinary: std::move(vShaderBuff)}}).first;
		}

		pvShaderData = &ShaderFileIterator->second.m_vBinary;

		return true;
	}
4571
	// Loads the vertex and fragment shader binaries, creates a VkShaderModule
	// for each, and fills the two pipeline shader stage create infos
	// (stage 0 = vertex, stage 1 = fragment, entry point "main").
	// ShaderModule receives the created modules and the device handle —
	// presumably so its destructor can destroy the modules; confirm in
	// SShaderModule. Returns false (with an init error set) on any failure.
	[[nodiscard]] bool CreateShaders(const char *pVertName, const char *pFragName, VkPipelineShaderStageCreateInfo (&aShaderStages)[2], SShaderModule &ShaderModule)
	{
		bool ShaderLoaded = true;

		// Both loads are attempted even if the first fails; a single combined
		// error is reported below.
		std::vector<uint8_t> *pvVertBuff;
		std::vector<uint8_t> *pvFragBuff;
		ShaderLoaded &= LoadShader(pFilename: pVertName, pvShaderData&: pvVertBuff);
		ShaderLoaded &= LoadShader(pFilename: pFragName, pvShaderData&: pvFragBuff);

		ShaderModule.m_VKDevice = m_VKDevice;

		if(!ShaderLoaded)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "A shader file could not load correctly.");
			return false;
		}

		if(!CreateShaderModule(vCode: *pvVertBuff, ShaderModule&: ShaderModule.m_VertShaderModule))
			return false;

		if(!CreateShaderModule(vCode: *pvFragBuff, ShaderModule&: ShaderModule.m_FragShaderModule))
			return false;

		// Vertex stage description.
		VkPipelineShaderStageCreateInfo &VertShaderStageInfo = aShaderStages[0];
		VertShaderStageInfo = {};
		VertShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
		VertShaderStageInfo.stage = VK_SHADER_STAGE_VERTEX_BIT;
		VertShaderStageInfo.module = ShaderModule.m_VertShaderModule;
		VertShaderStageInfo.pName = "main";

		// Fragment stage description.
		VkPipelineShaderStageCreateInfo &FragShaderStageInfo = aShaderStages[1];
		FragShaderStageInfo = {};
		FragShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
		FragShaderStageInfo.stage = VK_SHADER_STAGE_FRAGMENT_BIT;
		FragShaderStageInfo.module = ShaderModule.m_FragShaderModule;
		FragShaderStageInfo.pName = "main";
		return true;
	}
4610
	// Fills the fixed-function state shared by all graphics pipelines:
	// triangle-list input assembly, viewport/scissor covering the full
	// swapchain image, fill-mode rasterization with no culling, sample count
	// from GetSampleCount(), and standard src-alpha / one-minus-src-alpha
	// blending on a single attachment.
	// The out-params must stay alive until the pipeline using them is
	// created (ViewportState/ColorBlending keep pointers into Viewport,
	// Scissor and ColorBlendAttachment). Always returns true.
	bool GetStandardPipelineInfo(VkPipelineInputAssemblyStateCreateInfo &InputAssembly,
		VkViewport &Viewport,
		VkRect2D &Scissor,
		VkPipelineViewportStateCreateInfo &ViewportState,
		VkPipelineRasterizationStateCreateInfo &Rasterizer,
		VkPipelineMultisampleStateCreateInfo &Multisampling,
		VkPipelineColorBlendAttachmentState &ColorBlendAttachment,
		VkPipelineColorBlendStateCreateInfo &ColorBlending) const
	{
		InputAssembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
		InputAssembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
		InputAssembly.primitiveRestartEnable = VK_FALSE;

		// Full-size viewport over the swapchain image.
		Viewport.x = 0.0f;
		Viewport.y = 0.0f;
		Viewport.width = (float)m_VKSwapImgAndViewportExtent.m_SwapImageViewport.width;
		Viewport.height = (float)m_VKSwapImgAndViewportExtent.m_SwapImageViewport.height;
		Viewport.minDepth = 0.0f;
		Viewport.maxDepth = 1.0f;

		Scissor.offset = {.x: 0, .y: 0};
		Scissor.extent = m_VKSwapImgAndViewportExtent.m_SwapImageViewport;

		ViewportState.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
		ViewportState.viewportCount = 1;
		ViewportState.pViewports = &Viewport;
		ViewportState.scissorCount = 1;
		ViewportState.pScissors = &Scissor;

		// 2D rendering: no depth clamping, no culling, solid fill.
		Rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
		Rasterizer.depthClampEnable = VK_FALSE;
		Rasterizer.rasterizerDiscardEnable = VK_FALSE;
		Rasterizer.polygonMode = VK_POLYGON_MODE_FILL;
		Rasterizer.lineWidth = 1.0f;
		Rasterizer.cullMode = VK_CULL_MODE_NONE;
		Rasterizer.frontFace = VK_FRONT_FACE_CLOCKWISE;
		Rasterizer.depthBiasEnable = VK_FALSE;

		Multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
		Multisampling.sampleShadingEnable = VK_FALSE;
		Multisampling.rasterizationSamples = GetSampleCount();

		ColorBlendAttachment.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
		ColorBlendAttachment.blendEnable = VK_TRUE;

		// Standard alpha blending, applied to color and alpha alike.
		ColorBlendAttachment.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
		ColorBlendAttachment.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
		ColorBlendAttachment.colorBlendOp = VK_BLEND_OP_ADD;
		ColorBlendAttachment.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
		ColorBlendAttachment.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
		ColorBlendAttachment.alphaBlendOp = VK_BLEND_OP_ADD;

		ColorBlending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
		ColorBlending.logicOpEnable = VK_FALSE;
		ColorBlending.logicOp = VK_LOGIC_OP_COPY;
		ColorBlending.attachmentCount = 1;
		ColorBlending.pAttachments = &ColorBlendAttachment;
		ColorBlending.blendConstants[0] = 0.0f;
		ColorBlending.blendConstants[1] = 0.0f;
		ColorBlending.blendConstants[2] = 0.0f;
		ColorBlending.blendConstants[3] = 0.0f;

		return true;
	}
4675
	// Creates one pipeline layout + graphics pipeline into the slot of
	// PipeContainer selected by (texture mode, blend mode, clip mode).
	// ForceRequireDescriptors attaches the descriptor set layouts even when
	// the pipeline is untextured (used by pipelines that bind uniform
	// buffers via descriptor sets). Returns false with an init error on any
	// Vulkan failure.
	template<bool ForceRequireDescriptors, size_t ArraySize, size_t DescrArraySize, size_t PushArraySize>
	[[nodiscard]] bool CreateGraphicsPipeline(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, uint32_t Stride, std::array<VkVertexInputAttributeDescription, ArraySize> &aInputAttr,
		std::array<VkDescriptorSetLayout, DescrArraySize> &aSetLayouts, std::array<VkPushConstantRange, PushArraySize> &aPushConstants, EVulkanBackendTextureModes TexMode,
		EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode, bool IsLinePrim = false)
	{
		// Module is local: the shader modules only need to exist until
		// vkCreateGraphicsPipelines below has run — presumably SShaderModule
		// releases them on destruction; confirm in its definition.
		VkPipelineShaderStageCreateInfo aShaderStages[2];
		SShaderModule Module;
		if(!CreateShaders(pVertName, pFragName, aShaderStages, ShaderModule&: Module))
			return false;

		bool HasSampler = TexMode == VULKAN_BACKEND_TEXTURE_MODE_TEXTURED;

		// Single vertex buffer binding with the caller-provided stride and attributes.
		VkPipelineVertexInputStateCreateInfo VertexInputInfo{};
		VertexInputInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
		VkVertexInputBindingDescription BindingDescription{};
		BindingDescription.binding = 0;
		BindingDescription.stride = Stride;
		BindingDescription.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;

		VertexInputInfo.vertexBindingDescriptionCount = 1;
		VertexInputInfo.vertexAttributeDescriptionCount = aInputAttr.size();
		VertexInputInfo.pVertexBindingDescriptions = &BindingDescription;
		VertexInputInfo.pVertexAttributeDescriptions = aInputAttr.data();

		VkPipelineInputAssemblyStateCreateInfo InputAssembly{};
		VkViewport Viewport{};
		VkRect2D Scissor{};
		VkPipelineViewportStateCreateInfo ViewportState{};
		VkPipelineRasterizationStateCreateInfo Rasterizer{};
		VkPipelineMultisampleStateCreateInfo Multisampling{};
		VkPipelineColorBlendAttachmentState ColorBlendAttachment{};
		VkPipelineColorBlendStateCreateInfo ColorBlending{};

		// Always succeeds (returns true unconditionally), so the result is ignored.
		GetStandardPipelineInfo(InputAssembly, Viewport, Scissor, ViewportState, Rasterizer, Multisampling, ColorBlendAttachment, ColorBlending);
		InputAssembly.topology = IsLinePrim ? VK_PRIMITIVE_TOPOLOGY_LINE_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;

		// Descriptor set layouts are only attached for textured pipelines,
		// unless the caller forces them (uniform-buffer pipelines).
		VkPipelineLayoutCreateInfo PipelineLayoutInfo{};
		PipelineLayoutInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
		PipelineLayoutInfo.setLayoutCount = (HasSampler || ForceRequireDescriptors) ? aSetLayouts.size() : 0;
		PipelineLayoutInfo.pSetLayouts = (HasSampler || ForceRequireDescriptors) && !aSetLayouts.empty() ? aSetLayouts.data() : nullptr;

		PipelineLayoutInfo.pushConstantRangeCount = aPushConstants.size();
		PipelineLayoutInfo.pPushConstantRanges = !aPushConstants.empty() ? aPushConstants.data() : nullptr;

		// References into the container slots: the created handles are stored there directly.
		VkPipelineLayout &PipeLayout = GetPipeLayout(Container&: PipeContainer, IsTextured: HasSampler, BlendModeIndex: size_t(BlendMode), DynamicIndex: size_t(DynamicMode));
		VkPipeline &Pipeline = GetPipeline(Container&: PipeContainer, IsTextured: HasSampler, BlendModeIndex: size_t(BlendMode), DynamicIndex: size_t(DynamicMode));

		if(vkCreatePipelineLayout(device: m_VKDevice, pCreateInfo: &PipelineLayoutInfo, pAllocator: nullptr, pPipelineLayout: &PipeLayout) != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating pipeline layout failed.");
			return false;
		}

		VkGraphicsPipelineCreateInfo PipelineInfo{};
		PipelineInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
		PipelineInfo.stageCount = 2;
		PipelineInfo.pStages = aShaderStages;
		PipelineInfo.pVertexInputState = &VertexInputInfo;
		PipelineInfo.pInputAssemblyState = &InputAssembly;
		PipelineInfo.pViewportState = &ViewportState;
		PipelineInfo.pRasterizationState = &Rasterizer;
		PipelineInfo.pMultisampleState = &Multisampling;
		PipelineInfo.pColorBlendState = &ColorBlending;
		PipelineInfo.layout = PipeLayout;
		PipelineInfo.renderPass = m_VKRenderPass;
		PipelineInfo.subpass = 0;
		PipelineInfo.basePipelineHandle = VK_NULL_HANDLE;

		std::array<VkDynamicState, 2> aDynamicStates = {
			VK_DYNAMIC_STATE_VIEWPORT,
			VK_DYNAMIC_STATE_SCISSOR,
		};

		VkPipelineDynamicStateCreateInfo DynamicStateCreate{};
		DynamicStateCreate.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
		DynamicStateCreate.dynamicStateCount = aDynamicStates.size();
		DynamicStateCreate.pDynamicStates = aDynamicStates.data();

		// Only the dynamic clip mode gets dynamic viewport/scissor state;
		// the other modes bake them into the pipeline.
		if(DynamicMode == VULKAN_BACKEND_CLIP_MODE_DYNAMIC_SCISSOR_AND_VIEWPORT)
		{
			PipelineInfo.pDynamicState = &DynamicStateCreate;
		}

		if(vkCreateGraphicsPipelines(device: m_VKDevice, VK_NULL_HANDLE, createInfoCount: 1, pCreateInfos: &PipelineInfo, pAllocator: nullptr, pPipelines: &Pipeline) != VK_SUCCESS)
		{
			SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the graphic pipeline failed.");
			return false;
		}

		return true;
	}
4767
	// Builds one standard 2D pipeline variant.
	// Vertex layout (stride 20 bytes): vec2 position, vec2 uv, rgba8 color.
	// One sampler set layout and a single SUniformGPos vertex push constant.
	[[nodiscard]] bool CreateStandardGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode, bool IsLinePrim)
	{
		std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};

		aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
		aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: sizeof(float) * 2};
		aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * (2 + 2)};

		std::array<VkDescriptorSetLayout, 1> aSetLayouts = {m_StandardTexturedDescriptorSetLayout};

		std::array<VkPushConstantRange, 1> aPushConstants{};
		aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: sizeof(SUniformGPos)};

		return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * (2 + 2) + sizeof(uint8_t) * 4, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode, IsLinePrim);
	}
4783
4784 [[nodiscard]] bool CreateStandardGraphicsPipeline(const char *pVertName, const char *pFragName, bool HasSampler, bool IsLinePipe)
4785 {
4786 bool Ret = true;
4787
4788 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
4789
4790 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
4791 {
4792 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
4793 {
4794 Ret &= CreateStandardGraphicsPipelineImpl(pVertName, pFragName, PipeContainer&: IsLinePipe ? m_StandardLinePipeline : m_StandardPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j), IsLinePrim: IsLinePipe);
4795 }
4796 }
4797
4798 return Ret;
4799 }
4800
	// Builds one 3D-textured pipeline variant.
	// Vertex layout: vec2 position, rgba8 color, vec3 texture coordinates
	// (the third component selects the texture array layer — presumably;
	// confirm against the 3D shaders). Uses the 3D texture set layout and a
	// single SUniformGPos vertex push constant.
	[[nodiscard]] bool CreateStandard3DGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
	{
		std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};

		aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
		aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * 2};
		aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R32G32B32_SFLOAT, .offset: sizeof(float) * 2 + sizeof(uint8_t) * 4};

		std::array<VkDescriptorSetLayout, 1> aSetLayouts = {m_Standard3DTexturedDescriptorSetLayout};

		std::array<VkPushConstantRange, 1> aPushConstants{};
		aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: sizeof(SUniformGPos)};

		return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * 2 + sizeof(uint8_t) * 4 + sizeof(float) * 3, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
	}
4816
4817 [[nodiscard]] bool CreateStandard3DGraphicsPipeline(const char *pVertName, const char *pFragName, bool HasSampler)
4818 {
4819 bool Ret = true;
4820
4821 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
4822
4823 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
4824 {
4825 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
4826 {
4827 Ret &= CreateStandard3DGraphicsPipelineImpl(pVertName, pFragName, PipeContainer&: m_Standard3DPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j));
4828 }
4829 }
4830
4831 return Ret;
4832 }
4833
4834 [[nodiscard]] bool CreateTextDescriptorSetLayout()
4835 {
4836 VkDescriptorSetLayoutBinding SamplerLayoutBinding{};
4837 SamplerLayoutBinding.binding = 0;
4838 SamplerLayoutBinding.descriptorCount = 1;
4839 SamplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
4840 SamplerLayoutBinding.pImmutableSamplers = nullptr;
4841 SamplerLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
4842
4843 auto SamplerLayoutBinding2 = SamplerLayoutBinding;
4844 SamplerLayoutBinding2.binding = 1;
4845
4846 std::array<VkDescriptorSetLayoutBinding, 2> aBindings = {SamplerLayoutBinding, SamplerLayoutBinding2};
4847 VkDescriptorSetLayoutCreateInfo LayoutInfo{};
4848 LayoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
4849 LayoutInfo.bindingCount = aBindings.size();
4850 LayoutInfo.pBindings = aBindings.data();
4851
4852 if(vkCreateDescriptorSetLayout(device: m_VKDevice, pCreateInfo: &LayoutInfo, pAllocator: nullptr, pSetLayout: &m_TextDescriptorSetLayout) != VK_SUCCESS)
4853 {
4854 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating descriptor layout failed.");
4855 return false;
4856 }
4857
4858 return true;
4859 }
4860
	// Destroys the two-sampler descriptor set layout used by text rendering.
	void DestroyTextDescriptorSetLayout()
	{
		vkDestroyDescriptorSetLayout(device: m_VKDevice, descriptorSetLayout: m_TextDescriptorSetLayout, pAllocator: nullptr);
	}
4865
	// Builds one text pipeline variant. Same 20-byte vertex layout as the
	// standard pipeline (vec2 pos, vec2 uv, rgba8 color) but with the
	// two-sampler text set layout. The fragment push-constant range is
	// offset past SUniformGTextPos plus an explicit alignment/offset struct
	// so vertex and fragment constants do not overlap.
	[[nodiscard]] bool CreateTextGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
	{
		std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
		aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
		aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: sizeof(float) * 2};
		aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * (2 + 2)};

		std::array<VkDescriptorSetLayout, 1> aSetLayouts = {m_TextDescriptorSetLayout};

		std::array<VkPushConstantRange, 2> aPushConstants{};
		aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: sizeof(SUniformGTextPos)};
		aPushConstants[1] = {.stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, .offset: sizeof(SUniformGTextPos) + sizeof(SUniformTextGFragmentOffset), .size: sizeof(SUniformTextGFragmentConstants)};

		return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * (2 + 2) + sizeof(uint8_t) * 4, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
	}
4881
4882 [[nodiscard]] bool CreateTextGraphicsPipeline(const char *pVertName, const char *pFragName)
4883 {
4884 bool Ret = true;
4885
4886 EVulkanBackendTextureModes TexMode = VULKAN_BACKEND_TEXTURE_MODE_TEXTURED;
4887
4888 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
4889 {
4890 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
4891 {
4892 Ret &= CreateTextGraphicsPipelineImpl(pVertName, pFragName, PipeContainer&: m_TextPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j));
4893 }
4894 }
4895
4896 return Ret;
4897 }
4898
4899 template<bool HasSampler>
4900 [[nodiscard]] bool CreateTileGraphicsPipelineImpl(const char *pVertName, const char *pFragName, bool IsBorder, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
4901 {
4902 std::array<VkVertexInputAttributeDescription, HasSampler ? 2 : 1> aAttributeDescriptions = {};
4903 aAttributeDescriptions[0] = {0, 0, VK_FORMAT_R32G32_SFLOAT, 0};
4904 if(HasSampler)
4905 aAttributeDescriptions[1] = {1, 0, VK_FORMAT_R8G8B8A8_UINT, sizeof(float) * 2};
4906
4907 std::array<VkDescriptorSetLayout, 1> aSetLayouts;
4908 aSetLayouts[0] = m_Standard3DTexturedDescriptorSetLayout;
4909
4910 uint32_t VertPushConstantSize = sizeof(SUniformTileGPos);
4911 if(IsBorder)
4912 VertPushConstantSize = sizeof(SUniformTileGPosBorder);
4913
4914 uint32_t FragPushConstantSize = sizeof(SUniformTileGVertColor);
4915
4916 std::array<VkPushConstantRange, 2> aPushConstants{};
4917 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: VertPushConstantSize};
4918 aPushConstants[1] = {.stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, .offset: sizeof(SUniformTileGPosBorder) + sizeof(SUniformTileGVertColorAlign), .size: FragPushConstantSize};
4919
4920 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, HasSampler ? (sizeof(float) * 2 + sizeof(uint8_t) * 4) : (sizeof(float) * 2), aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
4921 }
4922
4923 template<bool HasSampler>
4924 [[nodiscard]] bool CreateTileGraphicsPipeline(const char *pVertName, const char *pFragName, bool IsBorder)
4925 {
4926 bool Ret = true;
4927
4928 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
4929
4930 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
4931 {
4932 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
4933 {
4934 Ret &= CreateTileGraphicsPipelineImpl<HasSampler>(pVertName, pFragName, IsBorder, !IsBorder ? m_TilePipeline : m_TileBorderPipeline, TexMode, EVulkanBackendBlendModes(i), EVulkanBackendClipModes(j));
4935 }
4936 }
4937
4938 return Ret;
4939 }
4940
	// Builds one extended-primitive pipeline variant (rotating or
	// rotationless). Same 20-byte vertex layout as the standard pipeline.
	// Note: the fragment push-constant offset is always based on the full
	// (rotating) SUniformPrimExGPos struct plus its alignment struct, even
	// for the rotationless variant whose vertex range is smaller.
	[[nodiscard]] bool CreatePrimExGraphicsPipelineImpl(const char *pVertName, const char *pFragName, bool Rotationless, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
	{
		std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
		aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
		aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: sizeof(float) * 2};
		aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * (2 + 2)};

		std::array<VkDescriptorSetLayout, 1> aSetLayouts;
		aSetLayouts[0] = m_StandardTexturedDescriptorSetLayout;
		uint32_t VertPushConstantSize = sizeof(SUniformPrimExGPos);
		if(Rotationless)
			VertPushConstantSize = sizeof(SUniformPrimExGPosRotationless);

		uint32_t FragPushConstantSize = sizeof(SUniformPrimExGVertColor);

		std::array<VkPushConstantRange, 2> aPushConstants{};
		aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: VertPushConstantSize};
		aPushConstants[1] = {.stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, .offset: sizeof(SUniformPrimExGPos) + sizeof(SUniformPrimExGVertColorAlign), .size: FragPushConstantSize};

		return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * (2 + 2) + sizeof(uint8_t) * 4, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
	}
4962
4963 [[nodiscard]] bool CreatePrimExGraphicsPipeline(const char *pVertName, const char *pFragName, bool HasSampler, bool Rotationless)
4964 {
4965 bool Ret = true;
4966
4967 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
4968
4969 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
4970 {
4971 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
4972 {
4973 Ret &= CreatePrimExGraphicsPipelineImpl(pVertName, pFragName, Rotationless, PipeContainer&: Rotationless ? m_PrimExRotationlessPipeline : m_PrimExPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j));
4974 }
4975 }
4976
4977 return Ret;
4978 }
4979
4980 [[nodiscard]] bool CreateUniformDescriptorSetLayout(VkDescriptorSetLayout &SetLayout, VkShaderStageFlags StageFlags)
4981 {
4982 VkDescriptorSetLayoutBinding SamplerLayoutBinding{};
4983 SamplerLayoutBinding.binding = 1;
4984 SamplerLayoutBinding.descriptorCount = 1;
4985 SamplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
4986 SamplerLayoutBinding.pImmutableSamplers = nullptr;
4987 SamplerLayoutBinding.stageFlags = StageFlags;
4988
4989 std::array<VkDescriptorSetLayoutBinding, 1> aBindings = {SamplerLayoutBinding};
4990 VkDescriptorSetLayoutCreateInfo LayoutInfo{};
4991 LayoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
4992 LayoutInfo.bindingCount = aBindings.size();
4993 LayoutInfo.pBindings = aBindings.data();
4994
4995 if(vkCreateDescriptorSetLayout(device: m_VKDevice, pCreateInfo: &LayoutInfo, pAllocator: nullptr, pSetLayout: &SetLayout) != VK_SUCCESS)
4996 {
4997 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating descriptor layout failed.");
4998 return false;
4999 }
5000 return true;
5001 }
5002
	// Uniform layout for the multi-sprite renderer; read by the vertex stage only.
	[[nodiscard]] bool CreateSpriteMultiUniformDescriptorSetLayout()
	{
		return CreateUniformDescriptorSetLayout(SetLayout&: m_SpriteMultiUniformDescriptorSetLayout, StageFlags: VK_SHADER_STAGE_VERTEX_BIT);
	}
5007
	// Uniform layout for the quad renderer; read by both vertex and fragment stages.
	[[nodiscard]] bool CreateQuadUniformDescriptorSetLayout()
	{
		return CreateUniformDescriptorSetLayout(SetLayout&: m_QuadUniformDescriptorSetLayout, StageFlags: VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT);
	}
5012
5013 void DestroyUniformDescriptorSetLayouts()
5014 {
5015 vkDestroyDescriptorSetLayout(device: m_VKDevice, descriptorSetLayout: m_QuadUniformDescriptorSetLayout, pAllocator: nullptr);
5016 vkDestroyDescriptorSetLayout(device: m_VKDevice, descriptorSetLayout: m_SpriteMultiUniformDescriptorSetLayout, pAllocator: nullptr);
5017 }
5018
	// Allocates SetCount uniform-buffer descriptor sets from the given render
	// thread's pools and points set i at the buffer slice
	// [MemoryOffset + i * SingleBufferInstanceSize, +SingleBufferInstanceSize).
	// Returns false if no pool slot is available or an allocation fails.
	// NOTE(review): on mid-loop allocation failure, the sets allocated so far
	// are not freed here — presumably the caller or pool teardown reclaims
	// them; verify.
	[[nodiscard]] bool CreateUniformDescriptorSets(size_t RenderThreadIndex, VkDescriptorSetLayout &SetLayout, SDeviceDescriptorSet *pSets, size_t SetCount, VkBuffer BindBuffer, size_t SingleBufferInstanceSize, VkDeviceSize MemoryOffset)
	{
		// Reserves pool capacity and fills each set's pool back-reference.
		VkDescriptorPool RetDescr;
		if(!GetDescriptorPoolForAlloc(RetDescr, DescriptorPools&: m_vUniformBufferDescrPools[RenderThreadIndex], pSets, AllocNum: SetCount))
			return false;
		VkDescriptorSetAllocateInfo DesAllocInfo{};
		DesAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
		DesAllocInfo.descriptorSetCount = 1;
		DesAllocInfo.pSetLayouts = &SetLayout;
		for(size_t i = 0; i < SetCount; ++i)
		{
			// Each set may come from a different pool, so the pool handle is
			// re-read per iteration.
			DesAllocInfo.descriptorPool = pSets[i].m_pPools->m_vPools[pSets[i].m_PoolIndex].m_Pool;
			if(vkAllocateDescriptorSets(device: m_VKDevice, pAllocateInfo: &DesAllocInfo, pDescriptorSets: &pSets[i].m_Descriptor) != VK_SUCCESS)
			{
				return false;
			}

			// Bind the set to its slice of the shared uniform buffer.
			VkDescriptorBufferInfo BufferInfo{};
			BufferInfo.buffer = BindBuffer;
			BufferInfo.offset = MemoryOffset + SingleBufferInstanceSize * i;
			BufferInfo.range = SingleBufferInstanceSize;

			std::array<VkWriteDescriptorSet, 1> aDescriptorWrites{};

			aDescriptorWrites[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
			aDescriptorWrites[0].dstSet = pSets[i].m_Descriptor;
			aDescriptorWrites[0].dstBinding = 1;
			aDescriptorWrites[0].dstArrayElement = 0;
			aDescriptorWrites[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
			aDescriptorWrites[0].descriptorCount = 1;
			aDescriptorWrites[0].pBufferInfo = &BufferInfo;

			vkUpdateDescriptorSets(device: m_VKDevice, descriptorWriteCount: static_cast<uint32_t>(aDescriptorWrites.size()), pDescriptorWrites: aDescriptorWrites.data(), descriptorCopyCount: 0, pDescriptorCopies: nullptr);
		}

		return true;
	}
5056
5057 void DestroyUniformDescriptorSets(SDeviceDescriptorSet *pSets, size_t SetCount)
5058 {
5059 for(size_t i = 0; i < SetCount; ++i)
5060 {
5061 vkFreeDescriptorSets(device: m_VKDevice, descriptorPool: pSets[i].m_pPools->m_vPools[pSets[i].m_PoolIndex].m_Pool, descriptorSetCount: 1, pDescriptorSets: &pSets[i].m_Descriptor);
5062 pSets[i].m_Descriptor = VK_NULL_HANDLE;
5063 }
5064 }
5065
	// Builds one multi-sprite pipeline variant. Standard 20-byte vertex
	// layout; two descriptor set layouts (texture sampler at set 0, the
	// per-instance uniform buffer at set 1) plus split vertex/fragment
	// push-constant ranges.
	[[nodiscard]] bool CreateSpriteMultiGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
	{
		std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
		aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
		aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: sizeof(float) * 2};
		aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * (2 + 2)};

		std::array<VkDescriptorSetLayout, 2> aSetLayouts;
		aSetLayouts[0] = m_StandardTexturedDescriptorSetLayout;
		aSetLayouts[1] = m_SpriteMultiUniformDescriptorSetLayout;

		uint32_t VertPushConstantSize = sizeof(SUniformSpriteMultiGPos);
		uint32_t FragPushConstantSize = sizeof(SUniformSpriteMultiGVertColor);

		std::array<VkPushConstantRange, 2> aPushConstants{};
		aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: VertPushConstantSize};
		aPushConstants[1] = {.stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, .offset: sizeof(SUniformSpriteMultiGPos) + sizeof(SUniformSpriteMultiGVertColorAlign), .size: FragPushConstantSize};

		return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * (2 + 2) + sizeof(uint8_t) * 4, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
	}
5086
5087 [[nodiscard]] bool CreateSpriteMultiGraphicsPipeline(const char *pVertName, const char *pFragName)
5088 {
5089 bool Ret = true;
5090
5091 EVulkanBackendTextureModes TexMode = VULKAN_BACKEND_TEXTURE_MODE_TEXTURED;
5092
5093 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
5094 {
5095 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
5096 {
5097 Ret &= CreateSpriteMultiGraphicsPipelineImpl(pVertName, pFragName, PipeContainer&: m_SpriteMultiPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j));
5098 }
5099 }
5100
5101 return Ret;
5102 }
5103
	// Builds one push-constant-only multi-sprite pipeline variant: unlike
	// the uniform-buffer variant it uses a single (texture) set layout and
	// passes all per-draw data via push constants, with the fragment range
	// placed directly after the vertex struct (no alignment struct here).
	[[nodiscard]] bool CreateSpriteMultiPushGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
	{
		std::array<VkVertexInputAttributeDescription, 3> aAttributeDescriptions = {};
		aAttributeDescriptions[0] = {.location: 0, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: 0};
		aAttributeDescriptions[1] = {.location: 1, .binding: 0, .format: VK_FORMAT_R32G32_SFLOAT, .offset: sizeof(float) * 2};
		aAttributeDescriptions[2] = {.location: 2, .binding: 0, .format: VK_FORMAT_R8G8B8A8_UNORM, .offset: sizeof(float) * (2 + 2)};

		std::array<VkDescriptorSetLayout, 1> aSetLayouts;
		aSetLayouts[0] = m_StandardTexturedDescriptorSetLayout;

		uint32_t VertPushConstantSize = sizeof(SUniformSpriteMultiPushGPos);
		uint32_t FragPushConstantSize = sizeof(SUniformSpriteMultiPushGVertColor);

		std::array<VkPushConstantRange, 2> aPushConstants{};
		aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: VertPushConstantSize};
		aPushConstants[1] = {.stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, .offset: sizeof(SUniformSpriteMultiPushGPos), .size: FragPushConstantSize};

		return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, Stride: sizeof(float) * (2 + 2) + sizeof(uint8_t) * 4, aInputAttr&: aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
	}
5123
5124 [[nodiscard]] bool CreateSpriteMultiPushGraphicsPipeline(const char *pVertName, const char *pFragName)
5125 {
5126 bool Ret = true;
5127
5128 EVulkanBackendTextureModes TexMode = VULKAN_BACKEND_TEXTURE_MODE_TEXTURED;
5129
5130 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
5131 {
5132 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
5133 {
5134 Ret &= CreateSpriteMultiPushGraphicsPipelineImpl(pVertName, pFragName, PipeContainer&: m_SpriteMultiPushPipeline, TexMode, BlendMode: EVulkanBackendBlendModes(i), DynamicMode: EVulkanBackendClipModes(j));
5135 }
5136 }
5137
5138 return Ret;
5139 }
5140
5141 template<bool IsTextured>
5142 [[nodiscard]] bool CreateQuadGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
5143 {
5144 std::array<VkVertexInputAttributeDescription, IsTextured ? 3 : 2> aAttributeDescriptions = {};
5145 aAttributeDescriptions[0] = {0, 0, VK_FORMAT_R32G32B32A32_SFLOAT, 0};
5146 aAttributeDescriptions[1] = {1, 0, VK_FORMAT_R8G8B8A8_UNORM, sizeof(float) * 4};
5147 if(IsTextured)
5148 aAttributeDescriptions[2] = {2, 0, VK_FORMAT_R32G32_SFLOAT, sizeof(float) * 4 + sizeof(uint8_t) * 4};
5149
5150 std::array<VkDescriptorSetLayout, IsTextured ? 2 : 1> aSetLayouts;
5151 if(IsTextured)
5152 {
5153 aSetLayouts[0] = m_StandardTexturedDescriptorSetLayout;
5154 aSetLayouts[1] = m_QuadUniformDescriptorSetLayout;
5155 }
5156 else
5157 {
5158 aSetLayouts[0] = m_QuadUniformDescriptorSetLayout;
5159 }
5160
5161 uint32_t PushConstantSize = sizeof(SUniformQuadGPos);
5162
5163 std::array<VkPushConstantRange, 1> aPushConstants{};
5164 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT, .offset: 0, .size: PushConstantSize};
5165
5166 return CreateGraphicsPipeline<true>(pVertName, pFragName, PipeContainer, sizeof(float) * 4 + sizeof(uint8_t) * 4 + (IsTextured ? (sizeof(float) * 2) : 0), aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
5167 }
5168
5169 template<bool HasSampler>
5170 [[nodiscard]] bool CreateQuadGraphicsPipeline(const char *pVertName, const char *pFragName)
5171 {
5172 bool Ret = true;
5173
5174 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
5175
5176 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
5177 {
5178 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
5179 {
5180 Ret &= CreateQuadGraphicsPipelineImpl<HasSampler>(pVertName, pFragName, m_QuadPipeline, TexMode, EVulkanBackendBlendModes(i), EVulkanBackendClipModes(j));
5181 }
5182 }
5183
5184 return Ret;
5185 }
5186
5187 template<bool IsTextured>
5188 [[nodiscard]] bool CreateQuadGroupedGraphicsPipelineImpl(const char *pVertName, const char *pFragName, SPipelineContainer &PipeContainer, EVulkanBackendTextureModes TexMode, EVulkanBackendBlendModes BlendMode, EVulkanBackendClipModes DynamicMode)
5189 {
5190 std::array<VkVertexInputAttributeDescription, IsTextured ? 3 : 2> aAttributeDescriptions = {};
5191 aAttributeDescriptions[0] = {0, 0, VK_FORMAT_R32G32B32A32_SFLOAT, 0};
5192 aAttributeDescriptions[1] = {1, 0, VK_FORMAT_R8G8B8A8_UNORM, sizeof(float) * 4};
5193 if(IsTextured)
5194 aAttributeDescriptions[2] = {2, 0, VK_FORMAT_R32G32_SFLOAT, sizeof(float) * 4 + sizeof(uint8_t) * 4};
5195
5196 std::array<VkDescriptorSetLayout, 1> aSetLayouts;
5197 aSetLayouts[0] = m_StandardTexturedDescriptorSetLayout;
5198
5199 uint32_t PushConstantSize = sizeof(SUniformQuadGroupedGPos);
5200
5201 std::array<VkPushConstantRange, 1> aPushConstants{};
5202 aPushConstants[0] = {.stageFlags: VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT, .offset: 0, .size: PushConstantSize};
5203
5204 return CreateGraphicsPipeline<false>(pVertName, pFragName, PipeContainer, sizeof(float) * 4 + sizeof(uint8_t) * 4 + (IsTextured ? (sizeof(float) * 2) : 0), aAttributeDescriptions, aSetLayouts, aPushConstants, TexMode, BlendMode, DynamicMode);
5205 }
5206
5207 template<bool HasSampler>
5208 [[nodiscard]] bool CreateQuadGroupedGraphicsPipeline(const char *pVertName, const char *pFragName)
5209 {
5210 bool Ret = true;
5211
5212 EVulkanBackendTextureModes TexMode = HasSampler ? VULKAN_BACKEND_TEXTURE_MODE_TEXTURED : VULKAN_BACKEND_TEXTURE_MODE_NOT_TEXTURED;
5213
5214 for(size_t i = 0; i < VULKAN_BACKEND_BLEND_MODE_COUNT; ++i)
5215 {
5216 for(size_t j = 0; j < VULKAN_BACKEND_CLIP_MODE_COUNT; ++j)
5217 {
5218 Ret &= CreateQuadGroupedGraphicsPipelineImpl<HasSampler>(pVertName, pFragName, m_QuadGroupedPipeline, TexMode, EVulkanBackendBlendModes(i), EVulkanBackendClipModes(j));
5219 }
5220 }
5221
5222 return Ret;
5223 }
5224
5225 [[nodiscard]] bool CreateCommandPool()
5226 {
5227 VkCommandPoolCreateInfo CreatePoolInfo{};
5228 CreatePoolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
5229 CreatePoolInfo.queueFamilyIndex = m_VKGraphicsQueueIndex;
5230 CreatePoolInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
5231
5232 m_vCommandPools.resize(new_size: m_ThreadCount);
5233 for(size_t i = 0; i < m_ThreadCount; ++i)
5234 {
5235 if(vkCreateCommandPool(device: m_VKDevice, pCreateInfo: &CreatePoolInfo, pAllocator: nullptr, pCommandPool: &m_vCommandPools[i]) != VK_SUCCESS)
5236 {
5237 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the command pool failed.");
5238 return false;
5239 }
5240 }
5241 return true;
5242 }
5243
5244 void DestroyCommandPool()
5245 {
5246 for(size_t i = 0; i < m_ThreadCount; ++i)
5247 {
5248 vkDestroyCommandPool(device: m_VKDevice, commandPool: m_vCommandPools[i], pAllocator: nullptr);
5249 }
5250 }
5251
5252 [[nodiscard]] bool CreateCommandBuffers()
5253 {
5254 m_vMainDrawCommandBuffers.resize(new_size: m_SwapChainImageCount);
5255 if(m_ThreadCount > 1)
5256 {
5257 m_vvThreadDrawCommandBuffers.resize(new_size: m_ThreadCount);
5258 m_vvUsedThreadDrawCommandBuffer.resize(new_size: m_ThreadCount);
5259 m_vHelperThreadDrawCommandBuffers.resize(new_size: m_ThreadCount);
5260 for(auto &ThreadDrawCommandBuffers : m_vvThreadDrawCommandBuffers)
5261 {
5262 ThreadDrawCommandBuffers.resize(new_size: m_SwapChainImageCount);
5263 }
5264 for(auto &UsedThreadDrawCommandBuffer : m_vvUsedThreadDrawCommandBuffer)
5265 {
5266 UsedThreadDrawCommandBuffer.resize(new_size: m_SwapChainImageCount, x: false);
5267 }
5268 }
5269 m_vMemoryCommandBuffers.resize(new_size: m_SwapChainImageCount);
5270 m_vUsedMemoryCommandBuffer.resize(new_size: m_SwapChainImageCount, x: false);
5271
5272 VkCommandBufferAllocateInfo AllocInfo{};
5273 AllocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
5274 AllocInfo.commandPool = m_vCommandPools[0];
5275 AllocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
5276 AllocInfo.commandBufferCount = (uint32_t)m_vMainDrawCommandBuffers.size();
5277
5278 if(vkAllocateCommandBuffers(device: m_VKDevice, pAllocateInfo: &AllocInfo, pCommandBuffers: m_vMainDrawCommandBuffers.data()) != VK_SUCCESS)
5279 {
5280 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Allocating command buffers failed.");
5281 return false;
5282 }
5283
5284 AllocInfo.commandBufferCount = (uint32_t)m_vMemoryCommandBuffers.size();
5285
5286 if(vkAllocateCommandBuffers(device: m_VKDevice, pAllocateInfo: &AllocInfo, pCommandBuffers: m_vMemoryCommandBuffers.data()) != VK_SUCCESS)
5287 {
5288 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Allocating memory command buffers failed.");
5289 return false;
5290 }
5291
5292 if(m_ThreadCount > 1)
5293 {
5294 size_t Count = 0;
5295 for(auto &ThreadDrawCommandBuffers : m_vvThreadDrawCommandBuffers)
5296 {
5297 AllocInfo.commandPool = m_vCommandPools[Count];
5298 ++Count;
5299 AllocInfo.commandBufferCount = (uint32_t)ThreadDrawCommandBuffers.size();
5300 AllocInfo.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
5301 if(vkAllocateCommandBuffers(device: m_VKDevice, pAllocateInfo: &AllocInfo, pCommandBuffers: ThreadDrawCommandBuffers.data()) != VK_SUCCESS)
5302 {
5303 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Allocating thread command buffers failed.");
5304 return false;
5305 }
5306 }
5307 }
5308
5309 return true;
5310 }
5311
5312 void DestroyCommandBuffer()
5313 {
5314 if(m_ThreadCount > 1)
5315 {
5316 size_t Count = 0;
5317 for(auto &ThreadDrawCommandBuffers : m_vvThreadDrawCommandBuffers)
5318 {
5319 vkFreeCommandBuffers(device: m_VKDevice, commandPool: m_vCommandPools[Count], commandBufferCount: static_cast<uint32_t>(ThreadDrawCommandBuffers.size()), pCommandBuffers: ThreadDrawCommandBuffers.data());
5320 ++Count;
5321 }
5322 }
5323
5324 vkFreeCommandBuffers(device: m_VKDevice, commandPool: m_vCommandPools[0], commandBufferCount: static_cast<uint32_t>(m_vMemoryCommandBuffers.size()), pCommandBuffers: m_vMemoryCommandBuffers.data());
5325 vkFreeCommandBuffers(device: m_VKDevice, commandPool: m_vCommandPools[0], commandBufferCount: static_cast<uint32_t>(m_vMainDrawCommandBuffers.size()), pCommandBuffers: m_vMainDrawCommandBuffers.data());
5326
5327 m_vvThreadDrawCommandBuffers.clear();
5328 m_vvUsedThreadDrawCommandBuffer.clear();
5329 m_vHelperThreadDrawCommandBuffers.clear();
5330
5331 m_vMainDrawCommandBuffers.clear();
5332 m_vMemoryCommandBuffers.clear();
5333 m_vUsedMemoryCommandBuffer.clear();
5334 }
5335
5336 [[nodiscard]] bool CreateSyncObjects()
5337 {
5338 auto SyncObjectCount = m_SwapChainImageCount;
5339 m_vQueueSubmitSemaphores.resize(new_size: SyncObjectCount);
5340 m_vBusyAcquireImageSemaphores.resize(new_size: SyncObjectCount);
5341
5342 m_vQueueSubmitFences.resize(new_size: SyncObjectCount);
5343
5344 VkSemaphoreCreateInfo CreateSemaphoreInfo{};
5345 CreateSemaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
5346
5347 VkFenceCreateInfo FenceInfo{};
5348 FenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
5349 FenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
5350
5351 if(vkCreateSemaphore(device: m_VKDevice, pCreateInfo: &CreateSemaphoreInfo, pAllocator: nullptr, pSemaphore: &m_AcquireImageSemaphore) != VK_SUCCESS)
5352 {
5353 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating acquire next image semaphore failed.");
5354 return false;
5355 }
5356 for(size_t i = 0; i < SyncObjectCount; i++)
5357 {
5358 if(vkCreateSemaphore(device: m_VKDevice, pCreateInfo: &CreateSemaphoreInfo, pAllocator: nullptr, pSemaphore: &m_vQueueSubmitSemaphores[i]) != VK_SUCCESS ||
5359 vkCreateSemaphore(device: m_VKDevice, pCreateInfo: &CreateSemaphoreInfo, pAllocator: nullptr, pSemaphore: &m_vBusyAcquireImageSemaphores[i]) != VK_SUCCESS ||
5360 vkCreateFence(device: m_VKDevice, pCreateInfo: &FenceInfo, pAllocator: nullptr, pFence: &m_vQueueSubmitFences[i]) != VK_SUCCESS)
5361 {
5362 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating swap chain sync objects(fences, semaphores) failed.");
5363 return false;
5364 }
5365 }
5366
5367 return true;
5368 }
5369
5370 void DestroySyncObjects()
5371 {
5372 for(size_t i = 0; i < m_vBusyAcquireImageSemaphores.size(); i++)
5373 {
5374 vkDestroySemaphore(device: m_VKDevice, semaphore: m_vBusyAcquireImageSemaphores[i], pAllocator: nullptr);
5375 vkDestroySemaphore(device: m_VKDevice, semaphore: m_vQueueSubmitSemaphores[i], pAllocator: nullptr);
5376 vkDestroyFence(device: m_VKDevice, fence: m_vQueueSubmitFences[i], pAllocator: nullptr);
5377 }
5378 vkDestroySemaphore(device: m_VKDevice, semaphore: m_AcquireImageSemaphore, pAllocator: nullptr);
5379
5380 m_vBusyAcquireImageSemaphores.clear();
5381 m_vQueueSubmitSemaphores.clear();
5382
5383 m_vQueueSubmitFences.clear();
5384 }
5385
5386 void DestroyBufferOfFrame(size_t ImageIndex, SFrameBuffers &Buffer)
5387 {
5388 CleanBufferPair(ImageIndex, Buffer&: Buffer.m_Buffer, BufferMem&: Buffer.m_BufferMem);
5389 }
5390
5391 void DestroyUniBufferOfFrame(size_t ImageIndex, SFrameUniformBuffers &Buffer)
5392 {
5393 CleanBufferPair(ImageIndex, Buffer&: Buffer.m_Buffer, BufferMem&: Buffer.m_BufferMem);
5394 for(auto &DescrSet : Buffer.m_aUniformSets)
5395 {
5396 if(DescrSet.m_Descriptor != VK_NULL_HANDLE)
5397 {
5398 DestroyUniformDescriptorSets(pSets: &DescrSet, SetCount: 1);
5399 }
5400 }
5401 }
5402
5403 /*************
5404 * SWAP CHAIN
5405 **************/
5406
5407 void CleanupVulkanSwapChain(bool ForceSwapChainDestruct)
5408 {
5409 m_StandardPipeline.Destroy(Device&: m_VKDevice);
5410 m_StandardLinePipeline.Destroy(Device&: m_VKDevice);
5411 m_Standard3DPipeline.Destroy(Device&: m_VKDevice);
5412 m_TextPipeline.Destroy(Device&: m_VKDevice);
5413 m_TilePipeline.Destroy(Device&: m_VKDevice);
5414 m_TileBorderPipeline.Destroy(Device&: m_VKDevice);
5415 m_PrimExPipeline.Destroy(Device&: m_VKDevice);
5416 m_PrimExRotationlessPipeline.Destroy(Device&: m_VKDevice);
5417 m_SpriteMultiPipeline.Destroy(Device&: m_VKDevice);
5418 m_SpriteMultiPushPipeline.Destroy(Device&: m_VKDevice);
5419 m_QuadPipeline.Destroy(Device&: m_VKDevice);
5420 m_QuadGroupedPipeline.Destroy(Device&: m_VKDevice);
5421
5422 DestroyFramebuffers();
5423
5424 DestroyRenderPass();
5425
5426 DestroyMultiSamplerImageAttachments();
5427
5428 DestroyImageViews();
5429 ClearSwapChainImageHandles();
5430
5431 DestroySwapChain(ForceDestroy: ForceSwapChainDestruct);
5432
5433 m_SwapchainCreated = false;
5434 }
5435
5436 template<bool IsLastCleanup>
5437 void CleanupVulkan(size_t SwapchainCount)
5438 {
5439 if(IsLastCleanup)
5440 {
5441 if(m_SwapchainCreated)
5442 CleanupVulkanSwapChain(ForceSwapChainDestruct: true);
5443
5444 // clean all images, buffers, buffer containers
5445 for(auto &Texture : m_vTextures)
5446 {
5447 if(Texture.m_VKTextDescrSet.m_Descriptor != VK_NULL_HANDLE && IsVerbose())
5448 {
5449 log_warn("gfx/vulkan", "Text textures were not cleared over command.");
5450 }
5451 DestroyTexture(Texture);
5452 }
5453
5454 for(auto &BufferObject : m_vBufferObjects)
5455 {
5456 if(!BufferObject.m_IsStreamedBuffer)
5457 FreeVertexMemBlock(Block&: BufferObject.m_BufferObject.m_Mem);
5458 }
5459
5460 m_vBufferContainers.clear();
5461 }
5462
5463 m_vImageLastFrameCheck.clear();
5464
5465 m_vLastPipeline.clear();
5466
5467 for(size_t i = 0; i < m_ThreadCount; ++i)
5468 {
5469 m_vStreamedVertexBuffers[i].Destroy(DestroyBuffer: [&](size_t ImageIndex, SFrameBuffers &Buffer) { DestroyBufferOfFrame(ImageIndex, Buffer); });
5470 m_vStreamedUniformBuffers[i].Destroy(DestroyBuffer: [&](size_t ImageIndex, SFrameUniformBuffers &Buffer) { DestroyUniBufferOfFrame(ImageIndex, Buffer); });
5471 }
5472 m_vStreamedVertexBuffers.clear();
5473 m_vStreamedUniformBuffers.clear();
5474
5475 for(size_t i = 0; i < SwapchainCount; ++i)
5476 {
5477 ClearFrameData(FrameImageIndex: i);
5478 }
5479
5480 m_vvFrameDelayedBufferCleanup.clear();
5481 m_vvFrameDelayedTextureCleanup.clear();
5482 m_vvFrameDelayedTextTexturesCleanup.clear();
5483
5484 m_StagingBufferCache.DestroyFrameData(ImageCount: SwapchainCount);
5485 m_StagingBufferCacheImage.DestroyFrameData(ImageCount: SwapchainCount);
5486 m_VertexBufferCache.DestroyFrameData(ImageCount: SwapchainCount);
5487 for(auto &ImageBufferCache : m_ImageBufferCaches)
5488 ImageBufferCache.second.DestroyFrameData(ImageCount: SwapchainCount);
5489
5490 if(IsLastCleanup)
5491 {
5492 m_StagingBufferCache.Destroy(Device&: m_VKDevice);
5493 m_StagingBufferCacheImage.Destroy(Device&: m_VKDevice);
5494 m_VertexBufferCache.Destroy(Device&: m_VKDevice);
5495 for(auto &ImageBufferCache : m_ImageBufferCaches)
5496 ImageBufferCache.second.Destroy(Device&: m_VKDevice);
5497
5498 m_ImageBufferCaches.clear();
5499
5500 DestroyTextureSamplers();
5501 DestroyDescriptorPools();
5502
5503 DeletePresentedImageDataImage();
5504 }
5505
5506 DestroySyncObjects();
5507 DestroyCommandBuffer();
5508
5509 if(IsLastCleanup)
5510 {
5511 DestroyCommandPool();
5512 }
5513
5514 if(IsLastCleanup)
5515 {
5516 DestroyUniformDescriptorSetLayouts();
5517 DestroyTextDescriptorSetLayout();
5518 DestroyDescriptorSetLayouts();
5519 }
5520 }
5521
5522 void CleanupVulkanSDL()
5523 {
5524 if(m_VKInstance != VK_NULL_HANDLE)
5525 {
5526 DestroySurface();
5527 vkDestroyDevice(device: m_VKDevice, pAllocator: nullptr);
5528
5529 if(g_Config.m_DbgGfx == DEBUG_GFX_MODE_MINIMUM || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL)
5530 {
5531 UnregisterDebugCallback();
5532 }
5533 vkDestroyInstance(instance: m_VKInstance, pAllocator: nullptr);
5534 m_VKInstance = VK_NULL_HANDLE;
5535 }
5536 }
5537
5538 int RecreateSwapChain()
5539 {
5540 int Ret = 0;
5541 vkDeviceWaitIdle(device: m_VKDevice);
5542
5543 if(IsVerbose())
5544 {
5545 log_info("gfx/vulkan", "Recreating swap chain.");
5546 }
5547
5548 VkSwapchainKHR OldSwapChain = VK_NULL_HANDLE;
5549 uint32_t OldSwapChainImageCount = m_SwapChainImageCount;
5550
5551 if(m_SwapchainCreated)
5552 CleanupVulkanSwapChain(ForceSwapChainDestruct: false);
5553
5554 // set new multi sampling if it was requested
5555 if(m_NextMultiSamplingCount != std::numeric_limits<uint32_t>::max())
5556 {
5557 m_MultiSamplingCount = m_NextMultiSamplingCount;
5558 m_NextMultiSamplingCount = std::numeric_limits<uint32_t>::max();
5559 }
5560
5561 if(!m_SwapchainCreated)
5562 Ret = InitVulkanSwapChain(OldSwapChain);
5563
5564 if(OldSwapChainImageCount != m_SwapChainImageCount)
5565 {
5566 CleanupVulkan<false>(SwapchainCount: OldSwapChainImageCount);
5567 InitVulkan<false>();
5568 }
5569
5570 if(OldSwapChain != VK_NULL_HANDLE)
5571 {
5572 vkDestroySwapchainKHR(device: m_VKDevice, swapchain: OldSwapChain, pAllocator: nullptr);
5573 }
5574
5575 if(Ret != 0 && IsVerbose())
5576 {
5577 log_warn("gfx/vulkan", "Recreating swap chain failed.");
5578 }
5579
5580 return Ret;
5581 }
5582
5583 int InitVulkanSDL(SDL_Window *pWindow, uint32_t CanvasWidth, uint32_t CanvasHeight, char *pRendererString, char *pVendorString, char *pVersionString)
5584 {
5585 std::vector<std::string> vVKExtensions;
5586 std::vector<std::string> vVKLayers;
5587
5588 m_CanvasWidth = CanvasWidth;
5589 m_CanvasHeight = CanvasHeight;
5590
5591 if(!GetVulkanExtensions(pWindow, vVKExtensions))
5592 return -1;
5593
5594 if(!GetVulkanLayers(vVKLayers))
5595 return -1;
5596
5597 if(!CreateVulkanInstance(vVKLayers, vVKExtensions, TryDebugExtensions: true))
5598 return -1;
5599
5600 if(g_Config.m_DbgGfx == DEBUG_GFX_MODE_MINIMUM || g_Config.m_DbgGfx == DEBUG_GFX_MODE_ALL)
5601 {
5602 SetupDebugCallback();
5603
5604 for(auto &VKLayer : vVKLayers)
5605 {
5606 log_info("gfx/vulkan", "Validation layer: %s", VKLayer.c_str());
5607 }
5608 }
5609
5610 if(!SelectGpu(pRendererName: pRendererString, pVendorName: pVendorString, pVersionName: pVersionString))
5611 return -1;
5612
5613 if(!CreateLogicalDevice(vVKLayers))
5614 return -1;
5615
5616 GetDeviceQueue();
5617
5618 if(!CreateSurface(pWindow))
5619 return -1;
5620
5621 return 0;
5622 }
5623
5624 /************************
5625 * MEMORY MANAGEMENT
5626 ************************/
5627
5628 uint32_t FindMemoryType(VkPhysicalDevice PhyDevice, uint32_t TypeFilter, VkMemoryPropertyFlags Properties)
5629 {
5630 VkPhysicalDeviceMemoryProperties MemProperties;
5631 vkGetPhysicalDeviceMemoryProperties(physicalDevice: PhyDevice, pMemoryProperties: &MemProperties);
5632
5633 for(uint32_t i = 0; i < MemProperties.memoryTypeCount; i++)
5634 {
5635 if((TypeFilter & (1 << i)) && (MemProperties.memoryTypes[i].propertyFlags & Properties) == Properties)
5636 {
5637 return i;
5638 }
5639 }
5640
5641 return 0;
5642 }
5643
5644 [[nodiscard]] bool CreateBuffer(VkDeviceSize BufferSize, EMemoryBlockUsage MemUsage, VkBufferUsageFlags BufferUsage, VkMemoryPropertyFlags MemoryProperties, VkBuffer &VKBuffer, SDeviceMemoryBlock &VKBufferMemory)
5645 {
5646 VkBufferCreateInfo BufferInfo{};
5647 BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
5648 BufferInfo.size = BufferSize;
5649 BufferInfo.usage = BufferUsage;
5650 BufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
5651
5652 if(vkCreateBuffer(device: m_VKDevice, pCreateInfo: &BufferInfo, pAllocator: nullptr, pBuffer: &VKBuffer) != VK_SUCCESS)
5653 {
5654 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Buffer creation failed.");
5655 return false;
5656 }
5657
5658 VkMemoryRequirements MemRequirements;
5659 vkGetBufferMemoryRequirements(device: m_VKDevice, buffer: VKBuffer, pMemoryRequirements: &MemRequirements);
5660
5661 VkMemoryAllocateInfo MemAllocInfo{};
5662 MemAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
5663 MemAllocInfo.allocationSize = MemRequirements.size;
5664 MemAllocInfo.memoryTypeIndex = FindMemoryType(PhyDevice: m_VKGPU, TypeFilter: MemRequirements.memoryTypeBits, Properties: MemoryProperties);
5665
5666 VKBufferMemory.m_Size = MemRequirements.size;
5667
5668 if(MemUsage == MEMORY_BLOCK_USAGE_BUFFER)
5669 m_pBufferMemoryUsage->store(i: m_pBufferMemoryUsage->load(m: std::memory_order_relaxed) + MemRequirements.size, m: std::memory_order_relaxed);
5670 else if(MemUsage == MEMORY_BLOCK_USAGE_STAGING)
5671 m_pStagingMemoryUsage->store(i: m_pStagingMemoryUsage->load(m: std::memory_order_relaxed) + MemRequirements.size, m: std::memory_order_relaxed);
5672 else if(MemUsage == MEMORY_BLOCK_USAGE_STREAM)
5673 m_pStreamMemoryUsage->store(i: m_pStreamMemoryUsage->load(m: std::memory_order_relaxed) + MemRequirements.size, m: std::memory_order_relaxed);
5674
5675 if(IsVerbose())
5676 {
5677 VerboseAllocatedMemory(Size: MemRequirements.size, FrameImageIndex: m_CurImageIndex, MemUsage);
5678 }
5679
5680 if(!AllocateVulkanMemory(pAllocateInfo: &MemAllocInfo, pMemory: &VKBufferMemory.m_Mem))
5681 {
5682 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Allocation for buffer object failed.");
5683 return false;
5684 }
5685
5686 VKBufferMemory.m_UsageType = MemUsage;
5687
5688 if(vkBindBufferMemory(device: m_VKDevice, buffer: VKBuffer, memory: VKBufferMemory.m_Mem, memoryOffset: 0) != VK_SUCCESS)
5689 {
5690 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Binding memory to buffer failed.");
5691 return false;
5692 }
5693
5694 return true;
5695 }
5696
5697 [[nodiscard]] bool AllocateDescriptorPool(SDeviceDescriptorPools &DescriptorPools, size_t AllocPoolSize)
5698 {
5699 SDeviceDescriptorPool NewPool;
5700 NewPool.m_Size = AllocPoolSize;
5701
5702 VkDescriptorPoolSize PoolSize{};
5703 if(DescriptorPools.m_IsUniformPool)
5704 PoolSize.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
5705 else
5706 PoolSize.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
5707 PoolSize.descriptorCount = AllocPoolSize;
5708
5709 VkDescriptorPoolCreateInfo PoolInfo{};
5710 PoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
5711 PoolInfo.poolSizeCount = 1;
5712 PoolInfo.pPoolSizes = &PoolSize;
5713 PoolInfo.maxSets = AllocPoolSize;
5714 PoolInfo.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
5715
5716 if(vkCreateDescriptorPool(device: m_VKDevice, pCreateInfo: &PoolInfo, pAllocator: nullptr, pDescriptorPool: &NewPool.m_Pool) != VK_SUCCESS)
5717 {
5718 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_INIT, pErr: "Creating the descriptor pool failed.");
5719 return false;
5720 }
5721
5722 DescriptorPools.m_vPools.push_back(x: NewPool);
5723
5724 return true;
5725 }
5726
5727 [[nodiscard]] bool CreateDescriptorPools()
5728 {
5729 m_StandardTextureDescrPool.m_IsUniformPool = false;
5730 m_StandardTextureDescrPool.m_DefaultAllocSize = 1024;
5731 m_TextTextureDescrPool.m_IsUniformPool = false;
5732 m_TextTextureDescrPool.m_DefaultAllocSize = 8;
5733
5734 m_vUniformBufferDescrPools.resize(new_size: m_ThreadCount);
5735 for(auto &UniformBufferDescrPool : m_vUniformBufferDescrPools)
5736 {
5737 UniformBufferDescrPool.m_IsUniformPool = true;
5738 UniformBufferDescrPool.m_DefaultAllocSize = 512;
5739 }
5740
5741 bool Ret = AllocateDescriptorPool(DescriptorPools&: m_StandardTextureDescrPool, AllocPoolSize: CCommandBuffer::MAX_TEXTURES);
5742 Ret |= AllocateDescriptorPool(DescriptorPools&: m_TextTextureDescrPool, AllocPoolSize: 8);
5743
5744 for(auto &UniformBufferDescrPool : m_vUniformBufferDescrPools)
5745 {
5746 Ret |= AllocateDescriptorPool(DescriptorPools&: UniformBufferDescrPool, AllocPoolSize: 64);
5747 }
5748
5749 return Ret;
5750 }
5751
5752 void DestroyDescriptorPools()
5753 {
5754 for(auto &DescrPool : m_StandardTextureDescrPool.m_vPools)
5755 vkDestroyDescriptorPool(device: m_VKDevice, descriptorPool: DescrPool.m_Pool, pAllocator: nullptr);
5756 for(auto &DescrPool : m_TextTextureDescrPool.m_vPools)
5757 vkDestroyDescriptorPool(device: m_VKDevice, descriptorPool: DescrPool.m_Pool, pAllocator: nullptr);
5758
5759 for(auto &UniformBufferDescrPool : m_vUniformBufferDescrPools)
5760 {
5761 for(auto &DescrPool : UniformBufferDescrPool.m_vPools)
5762 vkDestroyDescriptorPool(device: m_VKDevice, descriptorPool: DescrPool.m_Pool, pAllocator: nullptr);
5763 }
5764 m_vUniformBufferDescrPools.clear();
5765 }
5766
5767 [[nodiscard]] bool GetDescriptorPoolForAlloc(VkDescriptorPool &RetDescr, SDeviceDescriptorPools &DescriptorPools, SDeviceDescriptorSet *pSets, size_t AllocNum)
5768 {
5769 size_t CurAllocNum = AllocNum;
5770 size_t CurAllocOffset = 0;
5771 RetDescr = VK_NULL_HANDLE;
5772
5773 while(CurAllocNum > 0)
5774 {
5775 size_t AllocatedInThisRun = 0;
5776
5777 bool Found = false;
5778 size_t DescriptorPoolIndex = std::numeric_limits<size_t>::max();
5779 for(size_t i = 0; i < DescriptorPools.m_vPools.size(); ++i)
5780 {
5781 auto &Pool = DescriptorPools.m_vPools[i];
5782 if(Pool.m_CurSize + CurAllocNum < Pool.m_Size)
5783 {
5784 AllocatedInThisRun = CurAllocNum;
5785 Pool.m_CurSize += CurAllocNum;
5786 Found = true;
5787 if(RetDescr == VK_NULL_HANDLE)
5788 RetDescr = Pool.m_Pool;
5789 DescriptorPoolIndex = i;
5790 break;
5791 }
5792 else
5793 {
5794 size_t RemainingPoolCount = Pool.m_Size - Pool.m_CurSize;
5795 if(RemainingPoolCount > 0)
5796 {
5797 AllocatedInThisRun = RemainingPoolCount;
5798 Pool.m_CurSize += RemainingPoolCount;
5799 Found = true;
5800 if(RetDescr == VK_NULL_HANDLE)
5801 RetDescr = Pool.m_Pool;
5802 DescriptorPoolIndex = i;
5803 break;
5804 }
5805 }
5806 }
5807
5808 if(!Found)
5809 {
5810 DescriptorPoolIndex = DescriptorPools.m_vPools.size();
5811
5812 if(!AllocateDescriptorPool(DescriptorPools, AllocPoolSize: DescriptorPools.m_DefaultAllocSize))
5813 return false;
5814
5815 AllocatedInThisRun = minimum(a: (size_t)DescriptorPools.m_DefaultAllocSize, b: CurAllocNum);
5816
5817 auto &Pool = DescriptorPools.m_vPools.back();
5818 Pool.m_CurSize += AllocatedInThisRun;
5819 if(RetDescr == VK_NULL_HANDLE)
5820 RetDescr = Pool.m_Pool;
5821 }
5822
5823 for(size_t i = CurAllocOffset; i < CurAllocOffset + AllocatedInThisRun; ++i)
5824 {
5825 pSets[i].m_pPools = &DescriptorPools;
5826 pSets[i].m_PoolIndex = DescriptorPoolIndex;
5827 }
5828 CurAllocOffset += AllocatedInThisRun;
5829 CurAllocNum -= AllocatedInThisRun;
5830 }
5831
5832 return true;
5833 }
5834
5835 void FreeDescriptorSetFromPool(SDeviceDescriptorSet &DescrSet)
5836 {
5837 if(DescrSet.m_PoolIndex != std::numeric_limits<size_t>::max())
5838 {
5839 vkFreeDescriptorSets(device: m_VKDevice, descriptorPool: DescrSet.m_pPools->m_vPools[DescrSet.m_PoolIndex].m_Pool, descriptorSetCount: 1, pDescriptorSets: &DescrSet.m_Descriptor);
5840 DescrSet.m_pPools->m_vPools[DescrSet.m_PoolIndex].m_CurSize -= 1;
5841 }
5842 }
5843
5844 [[nodiscard]] bool CreateNewTexturedStandardDescriptorSets(size_t TextureSlot, size_t DescrIndex)
5845 {
5846 auto &Texture = m_vTextures[TextureSlot];
5847
5848 auto &DescrSet = Texture.m_aVKStandardTexturedDescrSets[DescrIndex];
5849
5850 VkDescriptorSetAllocateInfo DesAllocInfo{};
5851 DesAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
5852 if(!GetDescriptorPoolForAlloc(RetDescr&: DesAllocInfo.descriptorPool, DescriptorPools&: m_StandardTextureDescrPool, pSets: &DescrSet, AllocNum: 1))
5853 return false;
5854 DesAllocInfo.descriptorSetCount = 1;
5855 DesAllocInfo.pSetLayouts = &m_StandardTexturedDescriptorSetLayout;
5856
5857 if(vkAllocateDescriptorSets(device: m_VKDevice, pAllocateInfo: &DesAllocInfo, pDescriptorSets: &DescrSet.m_Descriptor) != VK_SUCCESS)
5858 {
5859 return false;
5860 }
5861
5862 VkDescriptorImageInfo ImageInfo{};
5863 ImageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
5864 ImageInfo.imageView = Texture.m_ImgView;
5865 ImageInfo.sampler = Texture.m_aSamplers[DescrIndex];
5866
5867 std::array<VkWriteDescriptorSet, 1> aDescriptorWrites{};
5868
5869 aDescriptorWrites[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
5870 aDescriptorWrites[0].dstSet = DescrSet.m_Descriptor;
5871 aDescriptorWrites[0].dstBinding = 0;
5872 aDescriptorWrites[0].dstArrayElement = 0;
5873 aDescriptorWrites[0].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
5874 aDescriptorWrites[0].descriptorCount = 1;
5875 aDescriptorWrites[0].pImageInfo = &ImageInfo;
5876
5877 vkUpdateDescriptorSets(device: m_VKDevice, descriptorWriteCount: static_cast<uint32_t>(aDescriptorWrites.size()), pDescriptorWrites: aDescriptorWrites.data(), descriptorCopyCount: 0, pDescriptorCopies: nullptr);
5878
5879 return true;
5880 }
5881
5882 void DestroyTexturedStandardDescriptorSets(CTexture &Texture, size_t DescrIndex)
5883 {
5884 auto &DescrSet = Texture.m_aVKStandardTexturedDescrSets[DescrIndex];
5885 FreeDescriptorSetFromPool(DescrSet);
5886 DescrSet = {};
5887 }
5888
5889 [[nodiscard]] bool CreateNew3DTexturedStandardDescriptorSets(size_t TextureSlot)
5890 {
5891 auto &Texture = m_vTextures[TextureSlot];
5892
5893 auto &DescrSet = Texture.m_VKStandard3DTexturedDescrSet;
5894
5895 VkDescriptorSetAllocateInfo DesAllocInfo{};
5896 DesAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
5897 if(!GetDescriptorPoolForAlloc(RetDescr&: DesAllocInfo.descriptorPool, DescriptorPools&: m_StandardTextureDescrPool, pSets: &DescrSet, AllocNum: 1))
5898 return false;
5899 DesAllocInfo.descriptorSetCount = 1;
5900 DesAllocInfo.pSetLayouts = &m_Standard3DTexturedDescriptorSetLayout;
5901
5902 if(vkAllocateDescriptorSets(device: m_VKDevice, pAllocateInfo: &DesAllocInfo, pDescriptorSets: &DescrSet.m_Descriptor) != VK_SUCCESS)
5903 {
5904 return false;
5905 }
5906
5907 VkDescriptorImageInfo ImageInfo{};
5908 ImageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
5909 ImageInfo.imageView = Texture.m_Img3DView;
5910 ImageInfo.sampler = Texture.m_Sampler3D;
5911
5912 std::array<VkWriteDescriptorSet, 1> aDescriptorWrites{};
5913
5914 aDescriptorWrites[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
5915 aDescriptorWrites[0].dstSet = DescrSet.m_Descriptor;
5916 aDescriptorWrites[0].dstBinding = 0;
5917 aDescriptorWrites[0].dstArrayElement = 0;
5918 aDescriptorWrites[0].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
5919 aDescriptorWrites[0].descriptorCount = 1;
5920 aDescriptorWrites[0].pImageInfo = &ImageInfo;
5921
5922 vkUpdateDescriptorSets(device: m_VKDevice, descriptorWriteCount: static_cast<uint32_t>(aDescriptorWrites.size()), pDescriptorWrites: aDescriptorWrites.data(), descriptorCopyCount: 0, pDescriptorCopies: nullptr);
5923
5924 return true;
5925 }
5926
5927 void DestroyTextured3DStandardDescriptorSets(CTexture &Texture)
5928 {
5929 auto &DescrSet = Texture.m_VKStandard3DTexturedDescrSet;
5930 FreeDescriptorSetFromPool(DescrSet);
5931 }
5932
	// Allocates the combined text descriptor set for a text texture and its
	// outline texture: binding 0 samples the glyph image, binding 1 the outline
	// image. The set is stored only on the text texture (m_VKTextDescrSet).
	// Returns false if pool selection or descriptor set allocation fails.
	[[nodiscard]] bool CreateNewTextDescriptorSets(size_t Texture, size_t TextureOutline)
	{
		auto &TextureText = m_vTextures[Texture];
		auto &TextureTextOutline = m_vTextures[TextureOutline];
		auto &DescrSetText = TextureText.m_VKTextDescrSet;

		VkDescriptorSetAllocateInfo DesAllocInfo{};
		DesAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
		// Pick (or grow) a descriptor pool that has room for one more set.
		if(!GetDescriptorPoolForAlloc(RetDescr&: DesAllocInfo.descriptorPool, DescriptorPools&: m_TextTextureDescrPool, pSets: &DescrSetText, AllocNum: 1))
			return false;
		DesAllocInfo.descriptorSetCount = 1;
		DesAllocInfo.pSetLayouts = &m_TextDescriptorSetLayout;

		if(vkAllocateDescriptorSets(device: m_VKDevice, pAllocateInfo: &DesAllocInfo, pDescriptorSets: &DescrSetText.m_Descriptor) != VK_SUCCESS)
		{
			return false;
		}

		// Both images are bound as combined image samplers; sampler slot 0 is used.
		std::array<VkDescriptorImageInfo, 2> aImageInfo{};
		aImageInfo[0].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
		aImageInfo[0].imageView = TextureText.m_ImgView;
		aImageInfo[0].sampler = TextureText.m_aSamplers[0];
		aImageInfo[1].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
		aImageInfo[1].imageView = TextureTextOutline.m_ImgView;
		aImageInfo[1].sampler = TextureTextOutline.m_aSamplers[0];

		std::array<VkWriteDescriptorSet, 2> aDescriptorWrites{};

		aDescriptorWrites[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		aDescriptorWrites[0].dstSet = DescrSetText.m_Descriptor;
		aDescriptorWrites[0].dstBinding = 0;
		aDescriptorWrites[0].dstArrayElement = 0;
		aDescriptorWrites[0].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
		aDescriptorWrites[0].descriptorCount = 1;
		aDescriptorWrites[0].pImageInfo = aImageInfo.data();
		// The outline write reuses all settings of the first write; only the
		// binding index and the image info differ.
		aDescriptorWrites[1] = aDescriptorWrites[0];
		aDescriptorWrites[1].dstBinding = 1;
		aDescriptorWrites[1].pImageInfo = &aImageInfo[1];

		vkUpdateDescriptorSets(device: m_VKDevice, descriptorWriteCount: static_cast<uint32_t>(aDescriptorWrites.size()), pDescriptorWrites: aDescriptorWrites.data(), descriptorCopyCount: 0, pDescriptorCopies: nullptr);

		return true;
	}
5976
5977 void DestroyTextDescriptorSets(CTexture &Texture, CTexture &TextureOutline)
5978 {
5979 auto &DescrSet = Texture.m_VKTextDescrSet;
5980 FreeDescriptorSetFromPool(DescrSet);
5981 }
5982
5983 [[nodiscard]] bool HasMultiSampling() const
5984 {
5985 return GetSampleCount() != VK_SAMPLE_COUNT_1_BIT;
5986 }
5987
5988 VkSampleCountFlagBits GetMaxSampleCount() const
5989 {
5990 if(m_MaxMultiSample & VK_SAMPLE_COUNT_64_BIT)
5991 return VK_SAMPLE_COUNT_64_BIT;
5992 else if(m_MaxMultiSample & VK_SAMPLE_COUNT_32_BIT)
5993 return VK_SAMPLE_COUNT_32_BIT;
5994 else if(m_MaxMultiSample & VK_SAMPLE_COUNT_16_BIT)
5995 return VK_SAMPLE_COUNT_16_BIT;
5996 else if(m_MaxMultiSample & VK_SAMPLE_COUNT_8_BIT)
5997 return VK_SAMPLE_COUNT_8_BIT;
5998 else if(m_MaxMultiSample & VK_SAMPLE_COUNT_4_BIT)
5999 return VK_SAMPLE_COUNT_4_BIT;
6000 else if(m_MaxMultiSample & VK_SAMPLE_COUNT_2_BIT)
6001 return VK_SAMPLE_COUNT_2_BIT;
6002
6003 return VK_SAMPLE_COUNT_1_BIT;
6004 }
6005
6006 VkSampleCountFlagBits GetSampleCount() const
6007 {
6008 auto MaxSampleCount = GetMaxSampleCount();
6009 if(m_MultiSamplingCount >= 64 && MaxSampleCount >= VK_SAMPLE_COUNT_64_BIT)
6010 return VK_SAMPLE_COUNT_64_BIT;
6011 else if(m_MultiSamplingCount >= 32 && MaxSampleCount >= VK_SAMPLE_COUNT_32_BIT)
6012 return VK_SAMPLE_COUNT_32_BIT;
6013 else if(m_MultiSamplingCount >= 16 && MaxSampleCount >= VK_SAMPLE_COUNT_16_BIT)
6014 return VK_SAMPLE_COUNT_16_BIT;
6015 else if(m_MultiSamplingCount >= 8 && MaxSampleCount >= VK_SAMPLE_COUNT_8_BIT)
6016 return VK_SAMPLE_COUNT_8_BIT;
6017 else if(m_MultiSamplingCount >= 4 && MaxSampleCount >= VK_SAMPLE_COUNT_4_BIT)
6018 return VK_SAMPLE_COUNT_4_BIT;
6019 else if(m_MultiSamplingCount >= 2 && MaxSampleCount >= VK_SAMPLE_COUNT_2_BIT)
6020 return VK_SAMPLE_COUNT_2_BIT;
6021
6022 return VK_SAMPLE_COUNT_1_BIT;
6023 }
6024
	// Creates the swap chain and everything that depends on its images:
	// image views, multi-sample attachments, render pass, framebuffers and all
	// graphics pipelines. On success OldSwapChain holds the replaced swap chain
	// handle (VK_NULL_HANDLE on first creation). Returns 0 on success, -1 on
	// any failure (the order of the calls below matters; each step depends on
	// the previous ones).
	int InitVulkanSwapChain(VkSwapchainKHR &OldSwapChain)
	{
		OldSwapChain = VK_NULL_HANDLE;
		if(!CreateSwapChain(OldSwapChain))
			return -1;

		if(!GetSwapChainImageHandles())
			return -1;

		if(!CreateImageViews())
			return -1;

		if(!CreateMultiSamplerImageAttachments())
		{
			return -1;
		}

		// Invalidate the "last presented" index so the first present is not
		// mistaken for a repeat of an old image.
		m_LastPresentedSwapChainImageIndex = std::numeric_limits<decltype(m_LastPresentedSwapChainImageIndex)>::max();

		if(!CreateRenderPass(ClearAttachments: true))
			return -1;

		if(!CreateFramebuffers())
			return -1;

		// Standard 2D pipelines (untextured, textured, line variant).
		if(!CreateStandardGraphicsPipeline(pVertName: "shader/vulkan/prim.vert.spv", pFragName: "shader/vulkan/prim.frag.spv", HasSampler: false, IsLinePipe: false))
			return -1;

		if(!CreateStandardGraphicsPipeline(pVertName: "shader/vulkan/prim_textured.vert.spv", pFragName: "shader/vulkan/prim_textured.frag.spv", HasSampler: true, IsLinePipe: false))
			return -1;

		if(!CreateStandardGraphicsPipeline(pVertName: "shader/vulkan/prim.vert.spv", pFragName: "shader/vulkan/prim.frag.spv", HasSampler: false, IsLinePipe: true))
			return -1;

		// 3D (2D-array texture) pipelines.
		if(!CreateStandard3DGraphicsPipeline(pVertName: "shader/vulkan/prim3d.vert.spv", pFragName: "shader/vulkan/prim3d.frag.spv", HasSampler: false))
			return -1;

		if(!CreateStandard3DGraphicsPipeline(pVertName: "shader/vulkan/prim3d_textured.vert.spv", pFragName: "shader/vulkan/prim3d_textured.frag.spv", HasSampler: true))
			return -1;

		if(!CreateTextGraphicsPipeline(pVertName: "shader/vulkan/text.vert.spv", pFragName: "shader/vulkan/text.frag.spv"))
			return -1;

		// Tile pipelines: template parameter selects textured variant, the
		// boolean selects the border variant.
		if(!CreateTileGraphicsPipeline<false>(pVertName: "shader/vulkan/tile.vert.spv", pFragName: "shader/vulkan/tile.frag.spv", IsBorder: false))
			return -1;

		if(!CreateTileGraphicsPipeline<true>(pVertName: "shader/vulkan/tile_textured.vert.spv", pFragName: "shader/vulkan/tile_textured.frag.spv", IsBorder: false))
			return -1;

		if(!CreateTileGraphicsPipeline<false>(pVertName: "shader/vulkan/tile_border.vert.spv", pFragName: "shader/vulkan/tile_border.frag.spv", IsBorder: true))
			return -1;

		if(!CreateTileGraphicsPipeline<true>(pVertName: "shader/vulkan/tile_border_textured.vert.spv", pFragName: "shader/vulkan/tile_border_textured.frag.spv", IsBorder: true))
			return -1;

		// PrimEx pipelines (rotationless and rotating, with/without sampler).
		if(!CreatePrimExGraphicsPipeline(pVertName: "shader/vulkan/primex_rotationless.vert.spv", pFragName: "shader/vulkan/primex_rotationless.frag.spv", HasSampler: false, Rotationless: true))
			return -1;

		if(!CreatePrimExGraphicsPipeline(pVertName: "shader/vulkan/primex_tex_rotationless.vert.spv", pFragName: "shader/vulkan/primex_tex_rotationless.frag.spv", HasSampler: true, Rotationless: true))
			return -1;

		if(!CreatePrimExGraphicsPipeline(pVertName: "shader/vulkan/primex.vert.spv", pFragName: "shader/vulkan/primex.frag.spv", HasSampler: false, Rotationless: false))
			return -1;

		if(!CreatePrimExGraphicsPipeline(pVertName: "shader/vulkan/primex_tex.vert.spv", pFragName: "shader/vulkan/primex_tex.frag.spv", HasSampler: true, Rotationless: false))
			return -1;

		if(!CreateSpriteMultiGraphicsPipeline(pVertName: "shader/vulkan/spritemulti.vert.spv", pFragName: "shader/vulkan/spritemulti.frag.spv"))
			return -1;

		if(!CreateSpriteMultiPushGraphicsPipeline(pVertName: "shader/vulkan/spritemulti_push.vert.spv", pFragName: "shader/vulkan/spritemulti_push.frag.spv"))
			return -1;

		if(!CreateQuadGraphicsPipeline<false>(pVertName: "shader/vulkan/quad.vert.spv", pFragName: "shader/vulkan/quad.frag.spv"))
			return -1;

		if(!CreateQuadGraphicsPipeline<true>(pVertName: "shader/vulkan/quad_textured.vert.spv", pFragName: "shader/vulkan/quad_textured.frag.spv"))
			return -1;

		if(!CreateQuadGroupedGraphicsPipeline<false>(pVertName: "shader/vulkan/quad_grouped.vert.spv", pFragName: "shader/vulkan/quad_grouped.frag.spv"))
			return -1;

		if(!CreateQuadGroupedGraphicsPipeline<true>(pVertName: "shader/vulkan/quad_grouped_textured.vert.spv", pFragName: "shader/vulkan/quad_grouped_textured.frag.spv"))
			return -1;

		m_SwapchainCreated = true;
		return 0;
	}
6113
	// Initializes (or, for IsFirstInitialization == false, re-initializes after
	// e.g. a swap chain recreation) all per-swap-chain-image state: command
	// buffers, sync objects, streamed buffers and caches. First-time-only work
	// (descriptor set layouts, swap chain, command pool, descriptor pools,
	// samplers, format capability queries) is guarded by the template flag.
	// Returns 0 on success, -1 on failure.
	template<bool IsFirstInitialization>
	int InitVulkan()
	{
		if(IsFirstInitialization)
		{
			if(!CreateDescriptorSetLayouts())
				return -1;

			if(!CreateTextDescriptorSetLayout())
				return -1;

			if(!CreateSpriteMultiUniformDescriptorSetLayout())
				return -1;

			if(!CreateQuadUniformDescriptorSetLayout())
				return -1;

			VkSwapchainKHR OldSwapChain = VK_NULL_HANDLE;
			if(InitVulkanSwapChain(OldSwapChain) != 0)
				return -1;
		}

		if(IsFirstInitialization)
		{
			if(!CreateCommandPool())
				return -1;
		}

		if(!CreateCommandBuffers())
			return -1;

		if(!CreateSyncObjects())
			return -1;

		if(IsFirstInitialization)
		{
			if(!CreateDescriptorPools())
				return -1;

			if(!CreateTextureSamplers())
				return -1;
		}

		// One streamed vertex/uniform buffer set per render thread, each sized
		// for the number of swap chain images.
		m_vStreamedVertexBuffers.resize(new_size: m_ThreadCount);
		m_vStreamedUniformBuffers.resize(new_size: m_ThreadCount);
		for(size_t i = 0; i < m_ThreadCount; ++i)
		{
			m_vStreamedVertexBuffers[i].Init(FrameImageCount: m_SwapChainImageCount);
			m_vStreamedUniformBuffers[i].Init(FrameImageCount: m_SwapChainImageCount);
		}

		m_vLastPipeline.resize(new_size: m_ThreadCount, VK_NULL_HANDLE);

		// Deferred cleanup lists and memory caches are per swap chain image.
		m_vvFrameDelayedBufferCleanup.resize(new_size: m_SwapChainImageCount);
		m_vvFrameDelayedTextureCleanup.resize(new_size: m_SwapChainImageCount);
		m_vvFrameDelayedTextTexturesCleanup.resize(new_size: m_SwapChainImageCount);
		m_StagingBufferCache.Init(SwapChainImageCount: m_SwapChainImageCount);
		m_StagingBufferCacheImage.Init(SwapChainImageCount: m_SwapChainImageCount);
		m_VertexBufferCache.Init(SwapChainImageCount: m_SwapChainImageCount);
		for(auto &ImageBufferCache : m_ImageBufferCaches)
			ImageBufferCache.second.Init(SwapChainImageCount: m_SwapChainImageCount);

		m_vImageLastFrameCheck.resize(new_size: m_SwapChainImageCount, x: 0);

		if(IsFirstInitialization)
		{
			// check if image format supports linear blitting
			VkFormatProperties FormatProperties;
			vkGetPhysicalDeviceFormatProperties(physicalDevice: m_VKGPU, format: VK_FORMAT_R8G8B8A8_UNORM, pFormatProperties: &FormatProperties);
			if((FormatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) != 0)
			{
				m_AllowsLinearBlitting = true;
			}
			if((FormatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_SRC_BIT) != 0 && (FormatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_DST_BIT) != 0)
			{
				m_OptimalRGBAImageBlitting = true;
			}
			// check if image format supports blitting to linear tiled images
			if((FormatProperties.linearTilingFeatures & VK_FORMAT_FEATURE_BLIT_DST_BIT) != 0)
			{
				m_LinearRGBAImageBlitting = true;
			}

			// Also query whether the surface format can act as a blit source
			// (needed e.g. for screenshot readback).
			vkGetPhysicalDeviceFormatProperties(physicalDevice: m_VKGPU, format: m_VKSurfFormat.format, pFormatProperties: &FormatProperties);
			if((FormatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_SRC_BIT) != 0)
			{
				m_OptimalSwapChainImageBlitting = true;
			}
		}

		return 0;
	}
6206
6207 [[nodiscard]] bool GetMemoryCommandBuffer(VkCommandBuffer *&pMemCommandBuffer)
6208 {
6209 auto &MemCommandBuffer = m_vMemoryCommandBuffers[m_CurImageIndex];
6210 if(!m_vUsedMemoryCommandBuffer[m_CurImageIndex])
6211 {
6212 m_vUsedMemoryCommandBuffer[m_CurImageIndex] = true;
6213
6214 vkResetCommandBuffer(commandBuffer: MemCommandBuffer, flags: VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
6215
6216 VkCommandBufferBeginInfo BeginInfo{};
6217 BeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
6218 BeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
6219 if(vkBeginCommandBuffer(commandBuffer: MemCommandBuffer, pBeginInfo: &BeginInfo) != VK_SUCCESS)
6220 {
6221 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_RENDER_RECORDING, pErr: "Command buffer cannot be filled anymore.");
6222 return false;
6223 }
6224 }
6225 pMemCommandBuffer = &MemCommandBuffer;
6226 return true;
6227 }
6228
	// Hands out the draw command buffer for the given render thread. With a
	// single thread the main (primary) buffer is used directly; otherwise a
	// per-thread secondary command buffer is lazily begun with render pass
	// inheritance so it can be executed inside the main render pass.
	// Returns false if recording cannot be started (an error is set).
	[[nodiscard]] bool GetGraphicCommandBuffer(VkCommandBuffer *&pDrawCommandBuffer, size_t RenderThreadIndex)
	{
		if(m_ThreadCount < 2)
		{
			pDrawCommandBuffer = &m_vMainDrawCommandBuffers[m_CurImageIndex];
			return true;
		}
		else
		{
			auto &DrawCommandBuffer = m_vvThreadDrawCommandBuffers[RenderThreadIndex][m_CurImageIndex];
			if(!m_vvUsedThreadDrawCommandBuffer[RenderThreadIndex][m_CurImageIndex])
			{
				m_vvUsedThreadDrawCommandBuffer[RenderThreadIndex][m_CurImageIndex] = true;

				vkResetCommandBuffer(commandBuffer: DrawCommandBuffer, flags: VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);

				VkCommandBufferBeginInfo BeginInfo{};
				BeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
				// RENDER_PASS_CONTINUE marks this as a secondary buffer recorded
				// entirely inside the inherited render pass.
				BeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;

				VkCommandBufferInheritanceInfo InheritanceInfo{};
				InheritanceInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
				InheritanceInfo.framebuffer = m_vFramebufferList[m_CurImageIndex];
				InheritanceInfo.occlusionQueryEnable = VK_FALSE;
				InheritanceInfo.renderPass = m_VKRenderPass;
				InheritanceInfo.subpass = 0;

				BeginInfo.pInheritanceInfo = &InheritanceInfo;

				if(vkBeginCommandBuffer(commandBuffer: DrawCommandBuffer, pBeginInfo: &BeginInfo) != VK_SUCCESS)
				{
					SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_RENDER_RECORDING, pErr: "Thread draw command buffer cannot be filled anymore.");
					return false;
				}
			}
			pDrawCommandBuffer = &DrawCommandBuffer;
			return true;
		}
	}
6268
	// Returns the primary draw command buffer of the frame currently being
	// recorded (no lazy begin here — see GetGraphicCommandBuffer for that).
	VkCommandBuffer &GetMainGraphicCommandBuffer()
	{
		return m_vMainDrawCommandBuffers[m_CurImageIndex];
	}
6273
6274 /************************
6275 * STREAM BUFFERS SETUP
6276 ************************/
6277
6278 typedef std::function<bool(SFrameBuffers &, VkBuffer, VkDeviceSize)> TNewMemFunc;
6279
6280 // returns true, if the stream memory was just allocated
6281 template<typename TStreamMemName, typename TInstanceTypeName, size_t InstanceTypeCount, size_t BufferCreateCount, bool UsesCurrentCountOffset>
6282 [[nodiscard]] bool CreateStreamBuffer(TStreamMemName *&pBufferMem, TNewMemFunc &&NewMemFunc, SStreamMemory<TStreamMemName> &StreamUniformBuffer, VkBufferUsageFlagBits Usage, VkBuffer &NewBuffer, SDeviceMemoryBlock &NewBufferMem, size_t &BufferOffset, const void *pData, size_t DataSize)
6283 {
6284 VkBuffer Buffer = VK_NULL_HANDLE;
6285 SDeviceMemoryBlock BufferMem;
6286 size_t Offset = 0;
6287
6288 uint8_t *pMem = nullptr;
6289
6290 size_t BufferCountOffset = 0;
6291 if(UsesCurrentCountOffset)
6292 BufferCountOffset = StreamUniformBuffer.GetUsedCount(m_CurImageIndex);
6293 for(; BufferCountOffset < StreamUniformBuffer.GetBuffers(m_CurImageIndex).size(); ++BufferCountOffset)
6294 {
6295 auto &BufferOfFrame = StreamUniformBuffer.GetBuffers(m_CurImageIndex)[BufferCountOffset];
6296 if(BufferOfFrame.m_Size >= DataSize + BufferOfFrame.m_UsedSize)
6297 {
6298 if(BufferOfFrame.m_UsedSize == 0)
6299 StreamUniformBuffer.IncreaseUsedCount(m_CurImageIndex);
6300 Buffer = BufferOfFrame.m_Buffer;
6301 BufferMem = BufferOfFrame.m_BufferMem;
6302 Offset = BufferOfFrame.m_UsedSize;
6303 BufferOfFrame.m_UsedSize += DataSize;
6304 pMem = BufferOfFrame.m_pMappedBufferData;
6305 pBufferMem = &BufferOfFrame;
6306 break;
6307 }
6308 }
6309
6310 if(BufferMem.m_Mem == VK_NULL_HANDLE)
6311 {
6312 // create memory
6313 VkBuffer StreamBuffer;
6314 SDeviceMemoryBlock StreamBufferMemory;
6315 const VkDeviceSize NewBufferSingleSize = sizeof(TInstanceTypeName) * InstanceTypeCount;
6316 const VkDeviceSize NewBufferSize = NewBufferSingleSize * BufferCreateCount;
6317 if(!CreateBuffer(BufferSize: NewBufferSize, MemUsage: MEMORY_BLOCK_USAGE_STREAM, BufferUsage: Usage, MemoryProperties: VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT, VKBuffer&: StreamBuffer, VKBufferMemory&: StreamBufferMemory))
6318 return false;
6319
6320 void *pMappedData = nullptr;
6321 if(vkMapMemory(device: m_VKDevice, memory: StreamBufferMemory.m_Mem, offset: 0, VK_WHOLE_SIZE, flags: 0, ppData: &pMappedData) != VK_SUCCESS)
6322 return false;
6323
6324 size_t NewBufferIndex = StreamUniformBuffer.GetBuffers(m_CurImageIndex).size();
6325 for(size_t i = 0; i < BufferCreateCount; ++i)
6326 {
6327 StreamUniformBuffer.GetBuffers(m_CurImageIndex).push_back(TStreamMemName(StreamBuffer, StreamBufferMemory, NewBufferSingleSize * i, NewBufferSingleSize, 0, ((uint8_t *)pMappedData) + (NewBufferSingleSize * i)));
6328 StreamUniformBuffer.GetRanges(m_CurImageIndex).push_back({});
6329 if(!NewMemFunc(StreamUniformBuffer.GetBuffers(m_CurImageIndex).back(), StreamBuffer, NewBufferSingleSize * i))
6330 return false;
6331 }
6332 auto &NewStreamBuffer = StreamUniformBuffer.GetBuffers(m_CurImageIndex)[NewBufferIndex];
6333
6334 Buffer = StreamBuffer;
6335 BufferMem = StreamBufferMemory;
6336
6337 pBufferMem = &NewStreamBuffer;
6338 pMem = NewStreamBuffer.m_pMappedBufferData;
6339 Offset = NewStreamBuffer.m_OffsetInBuffer;
6340 NewStreamBuffer.m_UsedSize += DataSize;
6341
6342 StreamUniformBuffer.IncreaseUsedCount(m_CurImageIndex);
6343 }
6344
6345 // Offset here is the offset in the buffer
6346 if(BufferMem.m_Size - Offset < DataSize)
6347 {
6348 SetError(ErrType: EGfxErrorType::GFX_ERROR_TYPE_OUT_OF_MEMORY_BUFFER, pErr: "Stream buffers are limited to CCommandBuffer::MAX_VERTICES. Exceeding it is a bug in the high level code.");
6349 return false;
6350 }
6351
6352 {
6353 mem_copy(dest: pMem + Offset, source: pData, size: DataSize);
6354 }
6355
6356 NewBuffer = Buffer;
6357 NewBufferMem = BufferMem;
6358 BufferOffset = Offset;
6359
6360 return true;
6361 }
6362
6363 [[nodiscard]] bool CreateStreamVertexBuffer(size_t RenderThreadIndex, VkBuffer &NewBuffer, SDeviceMemoryBlock &NewBufferMem, size_t &BufferOffset, const void *pData, size_t DataSize)
6364 {
6365 SFrameBuffers *pStreamBuffer;
6366 return CreateStreamBuffer<SFrameBuffers, GL_SVertexTex3DStream, CCommandBuffer::MAX_VERTICES * 2, 1, false>(
6367 pBufferMem&: pStreamBuffer, NewMemFunc: [](SFrameBuffers &, VkBuffer, VkDeviceSize) { return true; }, StreamUniformBuffer&: m_vStreamedVertexBuffers[RenderThreadIndex], Usage: VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, NewBuffer, NewBufferMem, BufferOffset, pData, DataSize);
6368 }
6369
	// Allocates a uniform stream region for pData and returns the matching
	// descriptor set. Each newly created sub-buffer gets two uniform descriptor
	// sets written for it: index 0 for the sprite-multi layout, index 1 for the
	// quad layout (the latter is also the "shared stages" variant).
	template<typename TName, size_t InstanceMaxParticleCount, size_t MaxInstances>
	[[nodiscard]] bool GetUniformBufferObjectImpl(size_t RenderThreadIndex, bool RequiresSharedStagesDescriptor, SStreamMemory<SFrameUniformBuffers> &StreamUniformBuffer, SDeviceDescriptorSet &DescrSet, const void *pData, size_t DataSize)
	{
		VkBuffer NewBuffer;
		SDeviceMemoryBlock NewBufferMem;
		size_t BufferOffset;
		SFrameUniformBuffers *pMem;
		if(!CreateStreamBuffer<SFrameUniformBuffers, TName, InstanceMaxParticleCount, MaxInstances, true>(
			   pMem,
			   [this, RenderThreadIndex](SFrameBuffers &Mem, VkBuffer Buffer, VkDeviceSize MemOffset) {
				   // NOTE: the callback receives the base type; the cast back to
				   // SFrameUniformBuffers matches the TStreamMemName used above.
				   if(!CreateUniformDescriptorSets(RenderThreadIndex, SetLayout&: m_SpriteMultiUniformDescriptorSetLayout, pSets: ((SFrameUniformBuffers *)(&Mem))->m_aUniformSets.data(), SetCount: 1, BindBuffer: Buffer, SingleBufferInstanceSize: InstanceMaxParticleCount * sizeof(TName), MemoryOffset: MemOffset))
					   return false;
				   if(!CreateUniformDescriptorSets(RenderThreadIndex, SetLayout&: m_QuadUniformDescriptorSetLayout, pSets: &((SFrameUniformBuffers *)(&Mem))->m_aUniformSets[1], SetCount: 1, BindBuffer: Buffer, SingleBufferInstanceSize: InstanceMaxParticleCount * sizeof(TName), MemoryOffset: MemOffset))
					   return false;
				   return true;
			   },
			   StreamUniformBuffer, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, NewBuffer, NewBufferMem, BufferOffset, pData, DataSize))
			return false;

		DescrSet = pMem->m_aUniformSets[RequiresSharedStagesDescriptor ? 1 : 0];
		return true;
	}
6392
	// Public uniform buffer entry point; forwards to the implementation with
	// fixed capacities (512 instances per sub-buffer, 128 sub-buffers).
	// NOTE(review): ParticleCount is currently unused here — presumably the
	// fixed 512 already bounds it; confirm against callers.
	[[nodiscard]] bool GetUniformBufferObject(size_t RenderThreadIndex, bool RequiresSharedStagesDescriptor, SDeviceDescriptorSet &DescrSet, size_t ParticleCount, const void *pData, size_t DataSize)
	{
		return GetUniformBufferObjectImpl<IGraphics::SRenderSpriteInfo, 512, 128>(RenderThreadIndex, RequiresSharedStagesDescriptor, StreamUniformBuffer&: m_vStreamedUniformBuffers[RenderThreadIndex], DescrSet, pData, DataSize);
	}
6397
	// Creates a device-local index buffer and fills it with pData via a staging
	// buffer copy, fenced by memory barriers for index-read access. On success
	// Buffer/Memory receive the new handles. Returns false on any failure.
	[[nodiscard]] bool CreateIndexBuffer(void *pData, size_t DataSize, VkBuffer &Buffer, SDeviceMemoryBlock &Memory)
	{
		VkDeviceSize BufferDataSize = DataSize;

		// Stage the data in host-visible memory first.
		SMemoryBlock<STAGING_BUFFER_CACHE_ID> StagingBuffer;
		if(!GetStagingBuffer(ResBlock&: StagingBuffer, pBufferData: pData, RequiredSize: DataSize))
			return false;

		SDeviceMemoryBlock VertexBufferMemory;
		VkBuffer VertexBuffer;
		if(!CreateBuffer(BufferSize: BufferDataSize, MemUsage: MEMORY_BLOCK_USAGE_BUFFER, BufferUsage: VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT, MemoryProperties: VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, VKBuffer&: VertexBuffer, VKBufferMemory&: VertexBufferMemory))
			return false;

		// Barrier before and after the copy so index reads see the new data.
		if(!MemoryBarrier(Buffer: VertexBuffer, Offset: 0, Size: BufferDataSize, BufferAccessType: VK_ACCESS_INDEX_READ_BIT, BeforeCommand: true))
			return false;
		if(!CopyBuffer(SrcBuffer: StagingBuffer.m_Buffer, DstBuffer: VertexBuffer, SrcOffset: StagingBuffer.m_HeapData.m_OffsetToAlign, DstOffset: 0, CopySize: BufferDataSize))
			return false;
		if(!MemoryBarrier(Buffer: VertexBuffer, Offset: 0, Size: BufferDataSize, BufferAccessType: VK_ACCESS_INDEX_READ_BIT, BeforeCommand: false))
			return false;

		UploadAndFreeStagingMemBlock(Block&: StagingBuffer);

		Buffer = VertexBuffer;
		Memory = VertexBufferMemory;
		return true;
	}
6424
6425 void DestroyIndexBuffer(VkBuffer &Buffer, SDeviceMemoryBlock &Memory)
6426 {
6427 CleanBufferPair(ImageIndex: 0, Buffer, BufferMem&: Memory);
6428 }
6429
6430 /************************
6431 * COMMAND IMPLEMENTATION
6432 ************************/
6433 template<typename TName>
6434 [[nodiscard]] static bool IsInCommandRange(TName CMD, TName Min, TName Max)
6435 {
6436 return CMD >= Min && CMD < Max;
6437 }
6438
	// Central command dispatcher. Render-range commands are either executed
	// inline (thread 0) or queued to a worker render thread, chosen by evenly
	// spreading the commands of the current pipe across the thread count.
	// Backend control commands (init/shutdown/pre-init/post-shutdown) are
	// handled via the switch at the end. Returns the handled/unhandled/error
	// status for the caller.
	[[nodiscard]] ERunCommandReturnTypes RunCommand(const CCommandBuffer::SCommand *pBaseCommand) override
	{
		if(m_HasError)
		{
			// ignore all further commands
			return ERunCommandReturnTypes::RUN_COMMAND_COMMAND_ERROR;
		}

		if(IsInCommandRange<decltype(pBaseCommand->m_Cmd)>(CMD: pBaseCommand->m_Cmd, Min: CCommandBuffer::CMD_FIRST, Max: CCommandBuffer::CMD_COUNT))
		{
			auto &CallbackObj = m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: CCommandBuffer::ECommandBufferCMD(pBaseCommand->m_Cmd))];
			SRenderCommandExecuteBuffer Buffer;
			Buffer.m_Command = (CCommandBuffer::ECommandBufferCMD)pBaseCommand->m_Cmd;
			Buffer.m_pRawCommand = pBaseCommand;
			Buffer.m_ThreadIndex = 0;

			// Last command of the pipe: reset the thread index so the next pipe
			// starts single-threaded again (max() acts as the sentinel).
			if(m_CurCommandInPipe + 1 == m_CommandsInPipe)
			{
				m_LastCommandsInPipeThreadIndex = std::numeric_limits<decltype(m_LastCommandsInPipeThreadIndex)>::max();
			}

			bool CanStartThread = false;
			if(CallbackObj.m_IsRenderCommand)
			{
				bool ForceSingleThread = m_LastCommandsInPipeThreadIndex == std::numeric_limits<decltype(m_LastCommandsInPipeThreadIndex)>::max();

				// Spread the pipe's commands evenly over the worker threads;
				// advance to the next thread once this command's share passes it.
				size_t PotentiallyNextThread = (((m_CurCommandInPipe * (m_ThreadCount - 1)) / m_CommandsInPipe) + 1);
				if(PotentiallyNextThread - 1 > m_LastCommandsInPipeThreadIndex)
				{
					CanStartThread = true;
					m_LastCommandsInPipeThreadIndex = PotentiallyNextThread - 1;
				}
				Buffer.m_ThreadIndex = m_ThreadCount > 1 && !ForceSingleThread ? (m_LastCommandsInPipeThreadIndex + 1) : 0;
				CallbackObj.m_FillExecuteBuffer(Buffer, pBaseCommand);
				m_CurRenderCallCountInPipe += Buffer.m_EstimatedRenderCallCount;
			}
			bool Ret = true;
			// Thread index 0 (or non-render commands) execute inline; everything
			// else is deferred onto the matching worker thread's command list.
			if(!CallbackObj.m_IsRenderCommand || (Buffer.m_ThreadIndex == 0 && !m_RenderingPaused))
			{
				Ret = CallbackObj.m_CMDIsHandled;
				if(!CallbackObj.m_CommandCB(pBaseCommand, Buffer))
				{
					// an error occurred, stop this command and ignore all further commands
					return ERunCommandReturnTypes::RUN_COMMAND_COMMAND_ERROR;
				}
			}
			else if(!m_RenderingPaused)
			{
				if(CanStartThread)
				{
					StartRenderThread(ThreadIndex: m_LastCommandsInPipeThreadIndex - 1);
				}
				m_vvThreadCommandLists[Buffer.m_ThreadIndex - 1].push_back(x: Buffer);
			}

			++m_CurCommandInPipe;
			return Ret ? ERunCommandReturnTypes::RUN_COMMAND_COMMAND_HANDLED : ERunCommandReturnTypes::RUN_COMMAND_COMMAND_UNHANDLED;
		}

		// Non-render-range command: still advance the pipe bookkeeping.
		if(m_CurCommandInPipe + 1 == m_CommandsInPipe)
		{
			m_LastCommandsInPipeThreadIndex = std::numeric_limits<decltype(m_LastCommandsInPipeThreadIndex)>::max();
		}
		++m_CurCommandInPipe;

		switch(pBaseCommand->m_Cmd)
		{
		case CCommandProcessorFragment_GLBase::CMD_INIT:
			if(!Cmd_Init(pCommand: static_cast<const SCommand_Init *>(pBaseCommand)))
			{
				SetWarningPreMsg("Could not initialize Vulkan: ");
				return RUN_COMMAND_COMMAND_WARNING;
			}
			break;
		case CCommandProcessorFragment_GLBase::CMD_SHUTDOWN:
			if(!Cmd_Shutdown(pCommand: static_cast<const SCommand_Shutdown *>(pBaseCommand)))
			{
				SetWarningPreMsg("Could not shutdown Vulkan: ");
				return RUN_COMMAND_COMMAND_WARNING;
			}
			break;

		case CCommandProcessorFragment_GLBase::CMD_PRE_INIT:
			if(!Cmd_PreInit(pCommand: static_cast<const CCommandProcessorFragment_GLBase::SCommand_PreInit *>(pBaseCommand)))
			{
				SetWarningPreMsg("Could not initialize Vulkan: ");
				return RUN_COMMAND_COMMAND_WARNING;
			}
			break;
		case CCommandProcessorFragment_GLBase::CMD_POST_SHUTDOWN:
			if(!Cmd_PostShutdown(pCommand: static_cast<const CCommandProcessorFragment_GLBase::SCommand_PostShutdown *>(pBaseCommand)))
			{
				SetWarningPreMsg("Could not shutdown Vulkan: ");
				return RUN_COMMAND_COMMAND_WARNING;
			}
			break;
		default:
			return ERunCommandReturnTypes::RUN_COMMAND_COMMAND_UNHANDLED;
		}

		return ERunCommandReturnTypes::RUN_COMMAND_COMMAND_HANDLED;
	}
6541
	// Handles CMD_INIT: reports backend capabilities, wires up memory usage
	// counters and the readback callback, runs the full first-time Vulkan
	// initialization, and creates the shared quad index buffers. On failure
	// *m_pInitError is set to -2 and false is returned.
	[[nodiscard]] bool Cmd_Init(const SCommand_Init *pCommand)
	{
		pCommand->m_pCapabilities->m_TileBuffering = true;
		pCommand->m_pCapabilities->m_QuadBuffering = true;
		pCommand->m_pCapabilities->m_TextBuffering = true;
		pCommand->m_pCapabilities->m_QuadContainerBuffering = true;
		pCommand->m_pCapabilities->m_ShaderSupport = true;

		pCommand->m_pCapabilities->m_MipMapping = true;
		// 2D array textures replace true 3D textures in this backend.
		pCommand->m_pCapabilities->m_3DTextures = false;
		pCommand->m_pCapabilities->m_2DArrayTextures = true;
		pCommand->m_pCapabilities->m_NPOTTextures = true;

		pCommand->m_pCapabilities->m_ContextMajor = 1;
		pCommand->m_pCapabilities->m_ContextMinor = 1;
		pCommand->m_pCapabilities->m_ContextPatch = 0;

		pCommand->m_pCapabilities->m_TrianglesAsQuads = true;

		m_GlobalTextureLodBIAS = g_Config.m_GfxGLTextureLODBIAS;
		m_pTextureMemoryUsage = pCommand->m_pTextureMemoryUsage;
		m_pBufferMemoryUsage = pCommand->m_pBufferMemoryUsage;
		m_pStreamMemoryUsage = pCommand->m_pStreamMemoryUsage;
		m_pStagingMemoryUsage = pCommand->m_pStagingMemoryUsage;

		m_MultiSamplingCount = (g_Config.m_GfxFsaaSamples & 0xFFFFFFFE); // ignore the uneven bit, only even multi sampling works

		// Screenshot/readback hook used by the higher graphics layer.
		*pCommand->m_pReadPresentedImageDataFunc = [this](uint32_t &Width, uint32_t &Height, CImageInfo::EImageFormat &Format, std::vector<uint8_t> &vDstData) {
			return GetPresentedImageData(Width, Height, Format, vDstData);
		};

		m_pWindow = pCommand->m_pWindow;

		*pCommand->m_pInitError = m_VKInstance != VK_NULL_HANDLE ? 0 : -1;

		if(m_VKInstance == VK_NULL_HANDLE)
		{
			*pCommand->m_pInitError = -2;
			return false;
		}

		m_pStorage = pCommand->m_pStorage;
		if(InitVulkan<true>() != 0)
		{
			*pCommand->m_pInitError = -2;
			return false;
		}

		// Build the standard quad index pattern (two triangles per quad:
		// 0,1,2 and 0,2,3) for the maximum vertex count.
		std::array<uint32_t, (size_t)CCommandBuffer::MAX_VERTICES / 4 * 6> aIndices;
		int Primq = 0;
		for(int i = 0; i < CCommandBuffer::MAX_VERTICES / 4 * 6; i += 6)
		{
			aIndices[i] = Primq;
			aIndices[i + 1] = Primq + 1;
			aIndices[i + 2] = Primq + 2;
			aIndices[i + 3] = Primq;
			aIndices[i + 4] = Primq + 2;
			aIndices[i + 5] = Primq + 3;
			Primq += 4;
		}

		if(!PrepareFrame())
			return false;
		if(m_HasError)
		{
			*pCommand->m_pInitError = -2;
			return false;
		}

		if(!CreateIndexBuffer(pData: aIndices.data(), DataSize: sizeof(uint32_t) * aIndices.size(), Buffer&: m_IndexBuffer, Memory&: m_IndexBufferMemory))
		{
			*pCommand->m_pInitError = -2;
			return false;
		}
		if(!CreateIndexBuffer(pData: aIndices.data(), DataSize: sizeof(uint32_t) * aIndices.size(), Buffer&: m_RenderIndexBuffer, Memory&: m_RenderIndexBufferMemory))
		{
			*pCommand->m_pInitError = -2;
			return false;
		}
		m_CurRenderIndexPrimitiveCount = CCommandBuffer::MAX_VERTICES / 4;

		m_CanAssert = true;

		return true;
	}
6627
6628 [[nodiscard]] bool Cmd_Shutdown(const SCommand_Shutdown *pCommand)
6629 {
6630 vkDeviceWaitIdle(device: m_VKDevice);
6631
6632 DestroyIndexBuffer(Buffer&: m_IndexBuffer, Memory&: m_IndexBufferMemory);
6633 DestroyIndexBuffer(Buffer&: m_RenderIndexBuffer, Memory&: m_RenderIndexBufferMemory);
6634
6635 CleanupVulkan<true>(SwapchainCount: m_SwapChainImageCount);
6636
6637 return true;
6638 }
6639
6640 [[nodiscard]] bool Cmd_Texture_Destroy(const CCommandBuffer::SCommand_Texture_Destroy *pCommand)
6641 {
6642 size_t ImageIndex = (size_t)pCommand->m_Slot;
6643 auto &Texture = m_vTextures[ImageIndex];
6644
6645 m_vvFrameDelayedTextureCleanup[m_CurImageIndex].push_back(x: Texture);
6646
6647 Texture = CTexture{};
6648
6649 return true;
6650 }
6651
	// Handles texture creation: uploads the RGBA8 pixel data into a new texture
	// in the given slot and releases the command's pixel buffer on success.
	// NOTE(review): on the failure path pData is not freed here — presumably
	// the error path shuts the backend down anyway; confirm ownership with
	// CreateTextureCMD.
	[[nodiscard]] bool Cmd_Texture_Create(const CCommandBuffer::SCommand_Texture_Create *pCommand)
	{
		int Slot = pCommand->m_Slot;
		int Width = pCommand->m_Width;
		int Height = pCommand->m_Height;
		int Flags = pCommand->m_Flags;
		uint8_t *pData = pCommand->m_pData;

		if(!CreateTextureCMD(Slot, Width, Height, Format: VK_FORMAT_R8G8B8A8_UNORM, StoreFormat: VK_FORMAT_R8G8B8A8_UNORM, Flags, pData))
			return false;

		// The command owns the pixel data; release it after the upload.
		free(ptr: pData);

		return true;
	}
6667
	// Handles text texture creation: uploads the single-channel glyph and
	// outline images (no mipmaps) and builds their combined text descriptor
	// set. The command's pixel buffers are released on success.
	// NOTE(review): pTmpData/pTmpData2 are not freed on the failure paths;
	// CreateTextureCMD takes the pointers by reference, so ownership there is
	// unclear from this file — confirm before "fixing" a leak.
	[[nodiscard]] bool Cmd_TextTextures_Create(const CCommandBuffer::SCommand_TextTextures_Create *pCommand)
	{
		int Slot = pCommand->m_Slot;
		int SlotOutline = pCommand->m_SlotOutline;
		int Width = pCommand->m_Width;
		int Height = pCommand->m_Height;

		uint8_t *pTmpData = pCommand->m_pTextData;
		uint8_t *pTmpData2 = pCommand->m_pTextOutlineData;

		if(!CreateTextureCMD(Slot, Width, Height, Format: VK_FORMAT_R8_UNORM, StoreFormat: VK_FORMAT_R8_UNORM, Flags: TextureFlag::NO_MIPMAPS, pData&: pTmpData))
			return false;
		if(!CreateTextureCMD(Slot: SlotOutline, Width, Height, Format: VK_FORMAT_R8_UNORM, StoreFormat: VK_FORMAT_R8_UNORM, Flags: TextureFlag::NO_MIPMAPS, pData&: pTmpData2))
			return false;

		if(!CreateNewTextDescriptorSets(Texture: Slot, TextureOutline: SlotOutline))
			return false;

		free(ptr: pTmpData);
		free(ptr: pTmpData2);

		return true;
	}
6691
6692 [[nodiscard]] bool Cmd_TextTextures_Destroy(const CCommandBuffer::SCommand_TextTextures_Destroy *pCommand)
6693 {
6694 size_t ImageIndex = (size_t)pCommand->m_Slot;
6695 size_t ImageIndexOutline = (size_t)pCommand->m_SlotOutline;
6696 auto &Texture = m_vTextures[ImageIndex];
6697 auto &TextureOutline = m_vTextures[ImageIndexOutline];
6698
6699 m_vvFrameDelayedTextTexturesCleanup[m_CurImageIndex].emplace_back(args&: Texture, args&: TextureOutline);
6700
6701 Texture = {};
6702 TextureOutline = {};
6703
6704 return true;
6705 }
6706
	// Handles a partial update of a single-channel text texture with the
	// command's pixel data; the buffer is released on success.
	// NOTE(review): pData is not freed when UpdateTexture fails — confirm
	// whether the failure path is expected to tear down the backend.
	[[nodiscard]] bool Cmd_TextTexture_Update(const CCommandBuffer::SCommand_TextTexture_Update *pCommand)
	{
		size_t IndexTex = pCommand->m_Slot;
		uint8_t *pData = pCommand->m_pData;

		if(!UpdateTexture(TextureSlot: IndexTex, Format: VK_FORMAT_R8_UNORM, pData, XOff: pCommand->m_X, YOff: pCommand->m_Y, Width: pCommand->m_Width, Height: pCommand->m_Height))
			return false;

		free(ptr: pData);

		return true;
	}
6719
6720 void Cmd_Clear_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_Clear *pCommand)
6721 {
6722 if(!pCommand->m_ForceClear)
6723 {
6724 bool ColorChanged = m_aClearColor[0] != pCommand->m_Color.r || m_aClearColor[1] != pCommand->m_Color.g ||
6725 m_aClearColor[2] != pCommand->m_Color.b || m_aClearColor[3] != pCommand->m_Color.a;
6726 m_aClearColor[0] = pCommand->m_Color.r;
6727 m_aClearColor[1] = pCommand->m_Color.g;
6728 m_aClearColor[2] = pCommand->m_Color.b;
6729 m_aClearColor[3] = pCommand->m_Color.a;
6730 if(ColorChanged)
6731 ExecBuffer.m_ClearColorInRenderThread = true;
6732 }
6733 else
6734 {
6735 ExecBuffer.m_ClearColorInRenderThread = true;
6736 }
6737 ExecBuffer.m_EstimatedRenderCallCount = 0;
6738 }
6739
	// Emits a vkCmdClearAttachments over the whole swap image viewport when
	// the fill step requested an explicit clear; otherwise does nothing.
	[[nodiscard]] bool Cmd_Clear(const SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_Clear *pCommand)
	{
		if(ExecBuffer.m_ClearColorInRenderThread)
		{
			// Clear only the color aspect of attachment 0 with the command's color.
			std::array<VkClearAttachment, 1> aAttachments = {VkClearAttachment{.aspectMask: VK_IMAGE_ASPECT_COLOR_BIT, .colorAttachment: 0, .clearValue: VkClearValue{.color: VkClearColorValue{.float32: {pCommand->m_Color.r, pCommand->m_Color.g, pCommand->m_Color.b, pCommand->m_Color.a}}}}};
			// The clear rect covers the full swap image viewport, single layer.
			std::array<VkClearRect, 1> aClearRects = {VkClearRect{.rect: {.offset: {.x: 0, .y: 0}, .extent: m_VKSwapImgAndViewportExtent.m_SwapImageViewport}, .baseArrayLayer: 0, .layerCount: 1}};

			VkCommandBuffer *pCommandBuffer;
			if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
				return false;
			auto &CommandBuffer = *pCommandBuffer;
			vkCmdClearAttachments(commandBuffer: CommandBuffer, attachmentCount: aAttachments.size(), pAttachments: aAttachments.data(), rectCount: aClearRects.size(), pRects: aClearRects.data());
		}

		return true;
	}
6756
6757 void Cmd_Render_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_Render *pCommand)
6758 {
6759 bool IsTextured = GetIsTextured(State: pCommand->m_State);
6760 if(IsTextured)
6761 {
6762 size_t AddressModeIndex = GetAddressModeIndex(State: pCommand->m_State);
6763 ExecBuffer.m_aDescriptors[0] = m_vTextures[pCommand->m_State.m_Texture].m_aVKStandardTexturedDescrSets[AddressModeIndex];
6764 }
6765
6766 ExecBuffer.m_IndexBuffer = m_IndexBuffer;
6767
6768 ExecBuffer.m_EstimatedRenderCallCount = 1;
6769
6770 ExecBufferFillDynamicStates(State: pCommand->m_State, ExecBuffer);
6771 }
6772
6773 [[nodiscard]] bool Cmd_Render(const CCommandBuffer::SCommand_Render *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
6774 {
6775 return RenderStandard<CCommandBuffer::SVertex, false>(ExecBuffer, State: pCommand->m_State, PrimType: pCommand->m_PrimType, pVertices: pCommand->m_pVertices, PrimitiveCount: pCommand->m_PrimCount);
6776 }
6777
6778 [[nodiscard]] bool Cmd_ReadPixel(const CCommandBuffer::SCommand_TrySwapAndReadPixel *pCommand)
6779 {
6780 if(!*pCommand->m_pSwapped && !NextFrame())
6781 return false;
6782 *pCommand->m_pSwapped = true;
6783
6784 uint32_t Width;
6785 uint32_t Height;
6786 CImageInfo::EImageFormat Format;
6787 if(GetPresentedImageDataImpl(Width, Height, Format, vDstData&: m_vReadPixelHelper, ResetAlpha: false, PixelOffset: pCommand->m_Position))
6788 {
6789 *pCommand->m_pColor = ColorRGBA(m_vReadPixelHelper[0] / 255.0f, m_vReadPixelHelper[1] / 255.0f, m_vReadPixelHelper[2] / 255.0f, 1.0f);
6790 }
6791 else
6792 {
6793 *pCommand->m_pColor = ColorRGBA(1.0f, 1.0f, 1.0f, 1.0f);
6794 }
6795
6796 return true;
6797 }
6798
6799 [[nodiscard]] bool Cmd_Screenshot(const CCommandBuffer::SCommand_TrySwapAndScreenshot *pCommand)
6800 {
6801 if(!*pCommand->m_pSwapped && !NextFrame())
6802 return false;
6803 *pCommand->m_pSwapped = true;
6804
6805 uint32_t Width;
6806 uint32_t Height;
6807 CImageInfo::EImageFormat Format;
6808 if(GetPresentedImageDataImpl(Width, Height, Format, vDstData&: m_vScreenshotHelper, ResetAlpha: true, PixelOffset: {}))
6809 {
6810 const size_t ImgSize = (size_t)Width * (size_t)Height * CImageInfo::PixelSize(Format);
6811 pCommand->m_pImage->m_pData = static_cast<uint8_t *>(malloc(size: ImgSize));
6812 mem_copy(dest: pCommand->m_pImage->m_pData, source: m_vScreenshotHelper.data(), size: ImgSize);
6813 }
6814 else
6815 {
6816 pCommand->m_pImage->m_pData = nullptr;
6817 }
6818 pCommand->m_pImage->m_Width = (int)Width;
6819 pCommand->m_pImage->m_Height = (int)Height;
6820 pCommand->m_pImage->m_Format = Format;
6821
6822 return true;
6823 }
6824
6825 void Cmd_RenderTex3D_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderTex3D *pCommand)
6826 {
6827 bool IsTextured = GetIsTextured(State: pCommand->m_State);
6828 if(IsTextured)
6829 {
6830 ExecBuffer.m_aDescriptors[0] = m_vTextures[pCommand->m_State.m_Texture].m_VKStandard3DTexturedDescrSet;
6831 }
6832
6833 ExecBuffer.m_IndexBuffer = m_IndexBuffer;
6834
6835 ExecBuffer.m_EstimatedRenderCallCount = 1;
6836
6837 ExecBufferFillDynamicStates(State: pCommand->m_State, ExecBuffer);
6838 }
6839
6840 [[nodiscard]] bool Cmd_RenderTex3D(const CCommandBuffer::SCommand_RenderTex3D *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
6841 {
6842 return RenderStandard<CCommandBuffer::SVertexTex3DStream, true>(ExecBuffer, State: pCommand->m_State, PrimType: pCommand->m_PrimType, pVertices: pCommand->m_pVertices, PrimitiveCount: pCommand->m_PrimCount);
6843 }
6844
6845 void Cmd_Update_Viewport_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_Update_Viewport *pCommand)
6846 {
6847 ExecBuffer.m_EstimatedRenderCallCount = 0;
6848 }
6849
6850 [[nodiscard]] bool Cmd_Update_Viewport(const CCommandBuffer::SCommand_Update_Viewport *pCommand)
6851 {
6852 if(pCommand->m_ByResize)
6853 {
6854 if(IsVerbose())
6855 {
6856 log_debug("gfx/vulkan", "Got resize event.");
6857 }
6858 m_CanvasWidth = (uint32_t)pCommand->m_Width;
6859 m_CanvasHeight = (uint32_t)pCommand->m_Height;
6860#ifndef CONF_PLATFORM_MACOS
6861 m_RecreateSwapChain = true;
6862#endif
6863 }
6864 else
6865 {
6866 auto Viewport = m_VKSwapImgAndViewportExtent.GetPresentedImageViewport();
6867 if(pCommand->m_X != 0 || pCommand->m_Y != 0 || (uint32_t)pCommand->m_Width != Viewport.width || (uint32_t)pCommand->m_Height != Viewport.height)
6868 {
6869 m_HasDynamicViewport = true;
6870
6871 // convert viewport from OGL to vulkan
6872 int32_t ViewportY = (int32_t)Viewport.height - ((int32_t)pCommand->m_Y + (int32_t)pCommand->m_Height);
6873 uint32_t ViewportH = (int32_t)pCommand->m_Height;
6874 m_DynamicViewportOffset = {.x: (int32_t)pCommand->m_X, .y: ViewportY};
6875 m_DynamicViewportSize = {.width: (uint32_t)pCommand->m_Width, .height: ViewportH};
6876 }
6877 else
6878 {
6879 m_HasDynamicViewport = false;
6880 }
6881 }
6882
6883 return true;
6884 }
6885
6886 [[nodiscard]] bool Cmd_VSync(const CCommandBuffer::SCommand_VSync *pCommand)
6887 {
6888 if(IsVerbose())
6889 {
6890 log_info("gfx/vulkan", "Queueing swap chain recreation because V-Sync was changed.");
6891 }
6892 m_RecreateSwapChain = true;
6893 *pCommand->m_pRetOk = true;
6894
6895 return true;
6896 }
6897
6898 [[nodiscard]] bool Cmd_MultiSampling(const CCommandBuffer::SCommand_MultiSampling *pCommand)
6899 {
6900 if(IsVerbose())
6901 {
6902 log_info("gfx/vulkan", "Queueing swap chain recreation because multi sampling was changed.");
6903 }
6904 m_RecreateSwapChain = true;
6905
6906 uint32_t MSCount = (std::min(a: pCommand->m_RequestedMultiSamplingCount, b: (uint32_t)GetMaxSampleCount()) & 0xFFFFFFFE); // ignore the uneven bits
6907 m_NextMultiSamplingCount = MSCount;
6908
6909 *pCommand->m_pRetMultiSamplingCount = MSCount;
6910 *pCommand->m_pRetOk = true;
6911
6912 return true;
6913 }
6914
6915 [[nodiscard]] bool Cmd_Swap(const CCommandBuffer::SCommand_Swap *pCommand)
6916 {
6917 return NextFrame();
6918 }
6919
6920 [[nodiscard]] bool Cmd_CreateBufferObject(const CCommandBuffer::SCommand_CreateBufferObject *pCommand)
6921 {
6922 bool IsOneFrameBuffer = (pCommand->m_Flags & IGraphics::EBufferObjectCreateFlags::BUFFER_OBJECT_CREATE_FLAGS_ONE_TIME_USE_BIT) != 0;
6923 if(!CreateBufferObject(BufferIndex: (size_t)pCommand->m_BufferIndex, pUploadData: pCommand->m_pUploadData, BufferDataSize: (VkDeviceSize)pCommand->m_DataSize, IsOneFrameBuffer))
6924 return false;
6925 if(pCommand->m_DeletePointer)
6926 free(ptr: pCommand->m_pUploadData);
6927
6928 return true;
6929 }
6930
	// Updates a sub-range of an existing buffer object. The new data goes
	// through a staging buffer; the copy is fenced with memory barriers so
	// GPU vertex reads never overlap the transfer.
	[[nodiscard]] bool Cmd_UpdateBufferObject(const CCommandBuffer::SCommand_UpdateBufferObject *pCommand)
	{
		size_t BufferIndex = (size_t)pCommand->m_BufferIndex;
		bool DeletePointer = pCommand->m_DeletePointer;
		// The destination offset is transported as a pointer-sized value.
		VkDeviceSize Offset = (VkDeviceSize)((intptr_t)pCommand->m_pOffset);
		void *pUploadData = pCommand->m_pUploadData;
		VkDeviceSize DataSize = (VkDeviceSize)pCommand->m_DataSize;

		SMemoryBlock<STAGING_BUFFER_CACHE_ID> StagingBuffer;
		if(!GetStagingBuffer(ResBlock&: StagingBuffer, pBufferData: pUploadData, RequiredSize: DataSize))
			return false;

		const auto &MemBlock = m_vBufferObjects[BufferIndex].m_BufferObject.m_Mem;
		VkBuffer VertexBuffer = MemBlock.m_Buffer;
		// Barrier before the copy: wait for pending vertex reads on the range.
		if(!MemoryBarrier(Buffer: VertexBuffer, Offset: Offset + MemBlock.m_HeapData.m_OffsetToAlign, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: true))
			return false;
		if(!CopyBuffer(SrcBuffer: StagingBuffer.m_Buffer, DstBuffer: VertexBuffer, SrcOffset: StagingBuffer.m_HeapData.m_OffsetToAlign, DstOffset: Offset + MemBlock.m_HeapData.m_OffsetToAlign, CopySize: DataSize))
			return false;
		// Barrier after the copy: make the new data visible to vertex reads.
		if(!MemoryBarrier(Buffer: VertexBuffer, Offset: Offset + MemBlock.m_HeapData.m_OffsetToAlign, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: false))
			return false;

		// Hand the staging block back once the upload was recorded.
		UploadAndFreeStagingMemBlock(Block&: StagingBuffer);

		if(DeletePointer)
			free(ptr: pUploadData);

		return true;
	}
6959
6960 [[nodiscard]] bool Cmd_RecreateBufferObject(const CCommandBuffer::SCommand_RecreateBufferObject *pCommand)
6961 {
6962 DeleteBufferObject(BufferIndex: (size_t)pCommand->m_BufferIndex);
6963 bool IsOneFrameBuffer = (pCommand->m_Flags & IGraphics::EBufferObjectCreateFlags::BUFFER_OBJECT_CREATE_FLAGS_ONE_TIME_USE_BIT) != 0;
6964 return CreateBufferObject(BufferIndex: (size_t)pCommand->m_BufferIndex, pUploadData: pCommand->m_pUploadData, BufferDataSize: (VkDeviceSize)pCommand->m_DataSize, IsOneFrameBuffer);
6965 }
6966
	// Copies a range from one buffer object to another on the GPU. Both
	// buffers are fenced with barriers before and after the copy so the
	// transfer never overlaps vertex reads on either side.
	[[nodiscard]] bool Cmd_CopyBufferObject(const CCommandBuffer::SCommand_CopyBufferObject *pCommand)
	{
		size_t ReadBufferIndex = (size_t)pCommand->m_ReadBufferIndex;
		size_t WriteBufferIndex = (size_t)pCommand->m_WriteBufferIndex;
		auto &ReadMemBlock = m_vBufferObjects[ReadBufferIndex].m_BufferObject.m_Mem;
		auto &WriteMemBlock = m_vBufferObjects[WriteBufferIndex].m_BufferObject.m_Mem;
		VkBuffer ReadBuffer = ReadMemBlock.m_Buffer;
		VkBuffer WriteBuffer = WriteMemBlock.m_Buffer;

		// Offsets are relative to each block's aligned start inside its heap.
		VkDeviceSize DataSize = (VkDeviceSize)pCommand->m_CopySize;
		VkDeviceSize ReadOffset = (VkDeviceSize)pCommand->m_ReadOffset + ReadMemBlock.m_HeapData.m_OffsetToAlign;
		VkDeviceSize WriteOffset = (VkDeviceSize)pCommand->m_WriteOffset + WriteMemBlock.m_HeapData.m_OffsetToAlign;

		// Barriers before the copy: wait for vertex reads on both ranges.
		if(!MemoryBarrier(Buffer: ReadBuffer, Offset: ReadOffset, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: true))
			return false;
		if(!MemoryBarrier(Buffer: WriteBuffer, Offset: WriteOffset, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: true))
			return false;
		if(!CopyBuffer(SrcBuffer: ReadBuffer, DstBuffer: WriteBuffer, SrcOffset: ReadOffset, DstOffset: WriteOffset, CopySize: DataSize))
			return false;
		// Barriers after the copy: restore visibility for vertex reads.
		if(!MemoryBarrier(Buffer: WriteBuffer, Offset: WriteOffset, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: false))
			return false;
		if(!MemoryBarrier(Buffer: ReadBuffer, Offset: ReadOffset, Size: DataSize, BufferAccessType: VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, BeforeCommand: false))
			return false;

		return true;
	}
6993
6994 [[nodiscard]] bool Cmd_DeleteBufferObject(const CCommandBuffer::SCommand_DeleteBufferObject *pCommand)
6995 {
6996 size_t BufferIndex = (size_t)pCommand->m_BufferIndex;
6997 DeleteBufferObject(BufferIndex);
6998
6999 return true;
7000 }
7001
7002 [[nodiscard]] bool Cmd_CreateBufferContainer(const CCommandBuffer::SCommand_CreateBufferContainer *pCommand)
7003 {
7004 size_t ContainerIndex = (size_t)pCommand->m_BufferContainerIndex;
7005 while(ContainerIndex >= m_vBufferContainers.size())
7006 m_vBufferContainers.resize(new_size: (m_vBufferContainers.size() * 2) + 1);
7007
7008 m_vBufferContainers[ContainerIndex].m_BufferObjectIndex = pCommand->m_VertBufferBindingIndex;
7009
7010 return true;
7011 }
7012
7013 [[nodiscard]] bool Cmd_UpdateBufferContainer(const CCommandBuffer::SCommand_UpdateBufferContainer *pCommand)
7014 {
7015 size_t ContainerIndex = (size_t)pCommand->m_BufferContainerIndex;
7016 m_vBufferContainers[ContainerIndex].m_BufferObjectIndex = pCommand->m_VertBufferBindingIndex;
7017
7018 return true;
7019 }
7020
7021 [[nodiscard]] bool Cmd_DeleteBufferContainer(const CCommandBuffer::SCommand_DeleteBufferContainer *pCommand)
7022 {
7023 size_t ContainerIndex = (size_t)pCommand->m_BufferContainerIndex;
7024 bool DeleteAllBO = pCommand->m_DestroyAllBO;
7025 if(DeleteAllBO)
7026 {
7027 size_t BufferIndex = (size_t)m_vBufferContainers[ContainerIndex].m_BufferObjectIndex;
7028 DeleteBufferObject(BufferIndex);
7029 }
7030
7031 return true;
7032 }
7033
7034 [[nodiscard]] bool Cmd_IndicesRequiredNumNotify(const CCommandBuffer::SCommand_IndicesRequiredNumNotify *pCommand)
7035 {
7036 size_t IndicesCount = pCommand->m_RequiredIndicesNum;
7037 if(m_CurRenderIndexPrimitiveCount < IndicesCount / 6)
7038 {
7039 m_vvFrameDelayedBufferCleanup[m_CurImageIndex].push_back(x: {.m_Buffer: m_RenderIndexBuffer, .m_Mem: m_RenderIndexBufferMemory});
7040 std::vector<uint32_t> vIndices(IndicesCount);
7041 uint32_t Primq = 0;
7042 for(size_t i = 0; i < IndicesCount; i += 6)
7043 {
7044 vIndices[i] = Primq;
7045 vIndices[i + 1] = Primq + 1;
7046 vIndices[i + 2] = Primq + 2;
7047 vIndices[i + 3] = Primq;
7048 vIndices[i + 4] = Primq + 2;
7049 vIndices[i + 5] = Primq + 3;
7050 Primq += 4;
7051 }
7052 if(!CreateIndexBuffer(pData: vIndices.data(), DataSize: vIndices.size() * sizeof(uint32_t), Buffer&: m_RenderIndexBuffer, Memory&: m_RenderIndexBufferMemory))
7053 return false;
7054 m_CurRenderIndexPrimitiveCount = IndicesCount / 6;
7055 }
7056
7057 return true;
7058 }
7059
7060 void Cmd_RenderTileLayer_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderTileLayer *pCommand)
7061 {
7062 RenderTileLayer_FillExecuteBuffer(ExecBuffer, DrawCalls: pCommand->m_IndicesDrawNum, State: pCommand->m_State, BufferContainerIndex: pCommand->m_BufferContainerIndex);
7063 }
7064
7065 [[nodiscard]] bool Cmd_RenderTileLayer(const CCommandBuffer::SCommand_RenderTileLayer *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
7066 {
7067 vec2 Scale{};
7068 vec2 Off{};
7069 return RenderTileLayer(ExecBuffer, State: pCommand->m_State, IsBorder: false, Color: pCommand->m_Color, Scale, Off, IndicesDrawNum: (size_t)pCommand->m_IndicesDrawNum, pIndicesOffsets: pCommand->m_pIndicesOffsets, pDrawCount: pCommand->m_pDrawCount);
7070 }
7071
7072 void Cmd_RenderBorderTile_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderBorderTile *pCommand)
7073 {
7074 RenderTileLayer_FillExecuteBuffer(ExecBuffer, DrawCalls: 1, State: pCommand->m_State, BufferContainerIndex: pCommand->m_BufferContainerIndex);
7075 }
7076
7077 [[nodiscard]] bool Cmd_RenderBorderTile(const CCommandBuffer::SCommand_RenderBorderTile *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
7078 {
7079 vec2 Scale = pCommand->m_Scale;
7080 vec2 Off = pCommand->m_Offset;
7081 unsigned int DrawNum = pCommand->m_DrawNum * 6;
7082 return RenderTileLayer(ExecBuffer, State: pCommand->m_State, IsBorder: true, Color: pCommand->m_Color, Scale, Off, IndicesDrawNum: 1, pIndicesOffsets: &pCommand->m_pIndicesOffset, pDrawCount: &DrawNum);
7083 }
7084
7085 void Cmd_RenderQuadLayer_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderQuadLayer *pCommand)
7086 {
7087 size_t BufferContainerIndex = (size_t)pCommand->m_BufferContainerIndex;
7088 size_t BufferObjectIndex = (size_t)m_vBufferContainers[BufferContainerIndex].m_BufferObjectIndex;
7089 const auto &BufferObject = m_vBufferObjects[BufferObjectIndex];
7090
7091 ExecBuffer.m_Buffer = BufferObject.m_CurBuffer;
7092 ExecBuffer.m_BufferOff = BufferObject.m_CurBufferOffset;
7093
7094 bool IsTextured = GetIsTextured(State: pCommand->m_State);
7095 if(IsTextured)
7096 {
7097 size_t AddressModeIndex = GetAddressModeIndex(State: pCommand->m_State);
7098 ExecBuffer.m_aDescriptors[0] = m_vTextures[pCommand->m_State.m_Texture].m_aVKStandardTexturedDescrSets[AddressModeIndex];
7099 }
7100
7101 ExecBuffer.m_IndexBuffer = m_RenderIndexBuffer;
7102
7103 ExecBuffer.m_EstimatedRenderCallCount = ((pCommand->m_QuadNum - 1) / GRAPHICS_MAX_QUADS_RENDER_COUNT) + 1;
7104
7105 ExecBufferFillDynamicStates(State: pCommand->m_State, ExecBuffer);
7106 }
7107
	// Renders a quad layer. Two paths exist:
	// - "grouped": a single quad (or a pre-grouped command) pushes its quad
	//   info entirely via push constants — no uniform buffer needed.
	// - batched: quads are drawn in batches of GRAPHICS_MAX_QUADS_RENDER_COUNT,
	//   each batch uploading its quad infos into a per-batch uniform buffer.
	[[nodiscard]] bool Cmd_RenderQuadLayer(const CCommandBuffer::SCommand_RenderQuadLayer *pCommand, SRenderCommandExecuteBuffer &ExecBuffer, bool Grouped)
	{
		// 4x2 projection matrix derived from the command's screen state.
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(State: pCommand->m_State, Matrix&: m);

		bool CanBeGrouped = Grouped || pCommand->m_QuadNum == 1;

		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, State: pCommand->m_State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		auto &PipeLayout = GetPipeLayout(Container&: CanBeGrouped ? m_QuadGroupedPipeline : m_QuadPipeline, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = GetPipeline(Container&: CanBeGrouped ? m_QuadGroupedPipeline : m_QuadPipeline, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State: pCommand->m_State);

		std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
		vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());

		vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: 0, indexType: VK_INDEX_TYPE_UINT32);

		if(IsTextured)
		{
			// Texture sampler lives in descriptor set 0.
			vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
		}

		uint32_t DrawCount = (uint32_t)pCommand->m_QuadNum;

		if(CanBeGrouped)
		{
			// Grouped path: the single quad info fits into the push constants.
			SUniformQuadGroupedGPos PushConstantVertex;
			mem_copy(dest: &PushConstantVertex.m_BOPush, source: &pCommand->m_pQuadInfo[0], size: sizeof(PushConstantVertex.m_BOPush));

			mem_copy(dest: PushConstantVertex.m_aPos, source: m.data(), size: sizeof(PushConstantVertex.m_aPos));
			vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT, offset: 0, size: sizeof(SUniformQuadGroupedGPos), pValues: &PushConstantVertex);

			// 6 indices per quad.
			VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)(pCommand->m_QuadOffset) * 6);
			vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(DrawCount * 6), instanceCount: 1, firstIndex: IndexOffset, vertexOffset: 0, firstInstance: 0);
		}
		else
		{
			SUniformQuadGPos PushConstantVertex;
			mem_copy(dest: PushConstantVertex.m_aPos, source: m.data(), size: sizeof(PushConstantVertex.m_aPos));
			PushConstantVertex.m_QuadOffset = pCommand->m_QuadOffset;

			vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(PushConstantVertex), pValues: &PushConstantVertex);

			// Draw in batches, each with its own uniform buffer of quad infos.
			size_t RenderOffset = 0;
			while(DrawCount > 0)
			{
				uint32_t RealDrawCount = (DrawCount > GRAPHICS_MAX_QUADS_RENDER_COUNT ? GRAPHICS_MAX_QUADS_RENDER_COUNT : DrawCount);
				VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)(pCommand->m_QuadOffset + RenderOffset) * 6);

				// create uniform buffer
				SDeviceDescriptorSet UniDescrSet;
				if(!GetUniformBufferObject(RenderThreadIndex: ExecBuffer.m_ThreadIndex, RequiresSharedStagesDescriptor: true, DescrSet&: UniDescrSet, ParticleCount: RealDrawCount, pData: (const float *)(pCommand->m_pQuadInfo + RenderOffset), DataSize: RealDrawCount * sizeof(SQuadRenderInfo)))
					return false;

				// The uniform set follows the (optional) texture set.
				vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: IsTextured ? 1 : 0, descriptorSetCount: 1, pDescriptorSets: &UniDescrSet.m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
				if(RenderOffset > 0)
				{
					// Patch only the trailing m_QuadOffset member of the
					// already-pushed vertex constants for subsequent batches.
					int32_t QuadOffset = pCommand->m_QuadOffset + RenderOffset;
					vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: sizeof(SUniformQuadGPos) - sizeof(int32_t), size: sizeof(int32_t), pValues: &QuadOffset);
				}

				vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(RealDrawCount * 6), instanceCount: 1, firstIndex: IndexOffset, vertexOffset: 0, firstInstance: 0);
				RenderOffset += RealDrawCount;
				DrawCount -= RealDrawCount;
			}
		}

		return true;
	}
7188
7189 void Cmd_RenderText_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderText *pCommand)
7190 {
7191 size_t BufferContainerIndex = (size_t)pCommand->m_BufferContainerIndex;
7192 size_t BufferObjectIndex = (size_t)m_vBufferContainers[BufferContainerIndex].m_BufferObjectIndex;
7193 const auto &BufferObject = m_vBufferObjects[BufferObjectIndex];
7194
7195 ExecBuffer.m_Buffer = BufferObject.m_CurBuffer;
7196 ExecBuffer.m_BufferOff = BufferObject.m_CurBufferOffset;
7197
7198 ExecBuffer.m_aDescriptors[0] = m_vTextures[pCommand->m_TextTextureIndex].m_VKTextDescrSet;
7199
7200 ExecBuffer.m_IndexBuffer = m_RenderIndexBuffer;
7201
7202 ExecBuffer.m_EstimatedRenderCallCount = 1;
7203
7204 ExecBufferFillDynamicStates(State: pCommand->m_State, ExecBuffer);
7205 }
7206
	// Renders a text draw command with the text pipeline: vertex push
	// constants carry the projection and texture size, fragment push constants
	// carry text and outline colors at a fixed offset behind the vertex data.
	[[nodiscard]] bool Cmd_RenderText(const CCommandBuffer::SCommand_RenderText *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
	{
		// 4x2 projection matrix derived from the command's screen state.
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(State: pCommand->m_State, Matrix&: m);

		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, State: pCommand->m_State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		IsTextured = true; // text is always textured
		auto &PipeLayout = GetPipeLayout(Container&: m_TextPipeline, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = GetPipeline(Container&: m_TextPipeline, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State: pCommand->m_State);

		std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
		vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());

		vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: 0, indexType: VK_INDEX_TYPE_UINT32);

		// Glyph + outline textures were bound into one set by the fill step.
		vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);

		SUniformGTextPos PosTexSizeConstant;
		mem_copy(dest: PosTexSizeConstant.m_aPos, source: m.data(), size: m.size() * sizeof(float));
		PosTexSizeConstant.m_TextureSize = pCommand->m_TextureSize;

		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(SUniformGTextPos), pValues: &PosTexSizeConstant);

		SUniformTextFragment FragmentConstants;

		FragmentConstants.m_Constants.m_TextColor = pCommand->m_TextColor;
		FragmentConstants.m_Constants.m_TextOutlineColor = pCommand->m_TextOutlineColor;
		// The fragment constants start behind the vertex block plus its
		// alignment padding — the offset must match the pipeline layout.
		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, offset: sizeof(SUniformGTextPos) + sizeof(SUniformTextGFragmentOffset), size: sizeof(SUniformTextFragment), pValues: &FragmentConstants);

		vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(pCommand->m_DrawNum), instanceCount: 1, firstIndex: 0, vertexOffset: 0, firstInstance: 0);

		return true;
	}
7252
7253 void BufferContainer_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SState &State, size_t BufferContainerIndex, size_t DrawCalls)
7254 {
7255 size_t BufferObjectIndex = (size_t)m_vBufferContainers[BufferContainerIndex].m_BufferObjectIndex;
7256 const auto &BufferObject = m_vBufferObjects[BufferObjectIndex];
7257
7258 ExecBuffer.m_Buffer = BufferObject.m_CurBuffer;
7259 ExecBuffer.m_BufferOff = BufferObject.m_CurBufferOffset;
7260
7261 bool IsTextured = GetIsTextured(State);
7262 if(IsTextured)
7263 {
7264 size_t AddressModeIndex = GetAddressModeIndex(State);
7265 ExecBuffer.m_aDescriptors[0] = m_vTextures[State.m_Texture].m_aVKStandardTexturedDescrSets[AddressModeIndex];
7266 }
7267
7268 ExecBuffer.m_IndexBuffer = m_RenderIndexBuffer;
7269
7270 ExecBuffer.m_EstimatedRenderCallCount = DrawCalls;
7271
7272 ExecBufferFillDynamicStates(State, ExecBuffer);
7273 }
7274
7275 void Cmd_RenderQuadContainer_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderQuadContainer *pCommand)
7276 {
7277 BufferContainer_FillExecuteBuffer(ExecBuffer, State: pCommand->m_State, BufferContainerIndex: (size_t)pCommand->m_BufferContainerIndex, DrawCalls: 1);
7278 }
7279
	// Renders a quad container with the standard pipeline: binds the
	// container's vertex buffer and the shared index buffer (with the
	// command's byte offset) and issues a single indexed draw.
	[[nodiscard]] bool Cmd_RenderQuadContainer(const CCommandBuffer::SCommand_RenderQuadContainer *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
	{
		// 4x2 projection matrix derived from the command's screen state.
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(State: pCommand->m_State, Matrix&: m);

		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, State: pCommand->m_State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		auto &PipeLayout = GetStandardPipeLayout(IsLineGeometry: false, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = GetStandardPipe(IsLineGeometry: false, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State: pCommand->m_State);

		std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
		vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());

		// The command's offset pointer encodes the byte offset into the
		// shared index buffer.
		VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)pCommand->m_pOffset);

		vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: IndexOffset, indexType: VK_INDEX_TYPE_UINT32);

		if(IsTextured)
		{
			vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
		}

		// Only the projection matrix goes into the push constants.
		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(SUniformGPos), pValues: m.data());

		vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(pCommand->m_DrawNum), instanceCount: 1, firstIndex: 0, vertexOffset: 0, firstInstance: 0);

		return true;
	}
7319
7320 void Cmd_RenderQuadContainerEx_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderQuadContainerEx *pCommand)
7321 {
7322 BufferContainer_FillExecuteBuffer(ExecBuffer, State: pCommand->m_State, BufferContainerIndex: (size_t)pCommand->m_BufferContainerIndex, DrawCalls: 1);
7323 }
7324
	// Renders a quad container with extended attributes (vertex color and an
	// optional rotation around a center). The rotationless variant uses a
	// smaller vertex push-constant block; the fragment color is pushed at a
	// fixed offset behind the full (rotated) vertex block either way.
	[[nodiscard]] bool Cmd_RenderQuadContainerEx(const CCommandBuffer::SCommand_RenderQuadContainerEx *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
	{
		// 4x2 projection matrix derived from the command's screen state.
		std::array<float, (size_t)4 * 2> m;
		GetStateMatrix(State: pCommand->m_State, Matrix&: m);

		bool IsRotationless = !(pCommand->m_Rotation != 0);
		bool IsTextured;
		size_t BlendModeIndex;
		size_t DynamicIndex;
		size_t AddressModeIndex;
		GetStateIndices(ExecBuffer, State: pCommand->m_State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
		auto &PipeLayout = GetPipeLayout(Container&: IsRotationless ? m_PrimExRotationlessPipeline : m_PrimExPipeline, IsTextured, BlendModeIndex, DynamicIndex);
		auto &PipeLine = GetPipeline(Container&: IsRotationless ? m_PrimExRotationlessPipeline : m_PrimExPipeline, IsTextured, BlendModeIndex, DynamicIndex);

		VkCommandBuffer *pCommandBuffer;
		if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
			return false;
		auto &CommandBuffer = *pCommandBuffer;

		BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State: pCommand->m_State);

		std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
		std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
		vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());

		// The command's offset pointer encodes the byte offset into the
		// shared index buffer.
		VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)pCommand->m_pOffset);

		vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: IndexOffset, indexType: VK_INDEX_TYPE_UINT32);

		if(IsTextured)
		{
			vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
		}

		SUniformPrimExGVertColor PushConstantColor;
		SUniformPrimExGPos PushConstantVertex;
		size_t VertexPushConstantSize = sizeof(PushConstantVertex);

		PushConstantColor = pCommand->m_VertexColor;
		mem_copy(dest: PushConstantVertex.m_aPos, source: m.data(), size: sizeof(PushConstantVertex.m_aPos));

		if(!IsRotationless)
		{
			PushConstantVertex.m_Rotation = pCommand->m_Rotation;
			PushConstantVertex.m_Center = {pCommand->m_Center.x, pCommand->m_Center.y};
		}
		else
		{
			// The rotationless layout only consumes the matrix part.
			VertexPushConstantSize = sizeof(SUniformPrimExGPosRotationless);
		}

		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: VertexPushConstantSize, pValues: &PushConstantVertex);
		// Fragment color sits behind the full vertex block plus alignment,
		// matching the pipeline layout for both variants.
		vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, offset: sizeof(SUniformPrimExGPos) + sizeof(SUniformPrimExGVertColorAlign), size: sizeof(PushConstantColor), pValues: &PushConstantColor);

		vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(pCommand->m_DrawNum), instanceCount: 1, firstIndex: 0, vertexOffset: 0, firstInstance: 0);

		return true;
	}
7383
7384 void Cmd_RenderQuadContainerAsSpriteMultiple_FillExecuteBuffer(SRenderCommandExecuteBuffer &ExecBuffer, const CCommandBuffer::SCommand_RenderQuadContainerAsSpriteMultiple *pCommand)
7385 {
7386 BufferContainer_FillExecuteBuffer(ExecBuffer, State: pCommand->m_State, BufferContainerIndex: (size_t)pCommand->m_BufferContainerIndex, DrawCalls: ((pCommand->m_DrawCount - 1) / GRAPHICS_MAX_PARTICLES_RENDER_COUNT) + 1);
7387 }
7388
7389 [[nodiscard]] bool Cmd_RenderQuadContainerAsSpriteMultiple(const CCommandBuffer::SCommand_RenderQuadContainerAsSpriteMultiple *pCommand, SRenderCommandExecuteBuffer &ExecBuffer)
7390 {
7391 std::array<float, (size_t)4 * 2> m;
7392 GetStateMatrix(State: pCommand->m_State, Matrix&: m);
7393
7394 bool CanBePushed = pCommand->m_DrawCount <= 1;
7395
7396 bool IsTextured;
7397 size_t BlendModeIndex;
7398 size_t DynamicIndex;
7399 size_t AddressModeIndex;
7400 GetStateIndices(ExecBuffer, State: pCommand->m_State, IsTextured, BlendModeIndex, DynamicIndex, AddressModeIndex);
7401 auto &PipeLayout = GetPipeLayout(Container&: CanBePushed ? m_SpriteMultiPushPipeline : m_SpriteMultiPipeline, IsTextured, BlendModeIndex, DynamicIndex);
7402 auto &PipeLine = GetPipeline(Container&: CanBePushed ? m_SpriteMultiPushPipeline : m_SpriteMultiPipeline, IsTextured, BlendModeIndex, DynamicIndex);
7403
7404 VkCommandBuffer *pCommandBuffer;
7405 if(!GetGraphicCommandBuffer(pDrawCommandBuffer&: pCommandBuffer, RenderThreadIndex: ExecBuffer.m_ThreadIndex))
7406 return false;
7407 auto &CommandBuffer = *pCommandBuffer;
7408
7409 BindPipeline(RenderThreadIndex: ExecBuffer.m_ThreadIndex, CommandBuffer, ExecBuffer, BindingPipe&: PipeLine, State: pCommand->m_State);
7410
7411 std::array<VkBuffer, 1> aVertexBuffers = {ExecBuffer.m_Buffer};
7412 std::array<VkDeviceSize, 1> aOffsets = {(VkDeviceSize)ExecBuffer.m_BufferOff};
7413 vkCmdBindVertexBuffers(commandBuffer: CommandBuffer, firstBinding: 0, bindingCount: 1, pBuffers: aVertexBuffers.data(), pOffsets: aOffsets.data());
7414
7415 VkDeviceSize IndexOffset = (VkDeviceSize)((ptrdiff_t)pCommand->m_pOffset);
7416 vkCmdBindIndexBuffer(commandBuffer: CommandBuffer, buffer: ExecBuffer.m_IndexBuffer, offset: IndexOffset, indexType: VK_INDEX_TYPE_UINT32);
7417
7418 vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 0, descriptorSetCount: 1, pDescriptorSets: &ExecBuffer.m_aDescriptors[0].m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
7419
7420 if(CanBePushed)
7421 {
7422 SUniformSpriteMultiPushGVertColor PushConstantColor;
7423 SUniformSpriteMultiPushGPos PushConstantVertex;
7424
7425 PushConstantColor = pCommand->m_VertexColor;
7426
7427 mem_copy(dest: PushConstantVertex.m_aPos, source: m.data(), size: sizeof(PushConstantVertex.m_aPos));
7428 PushConstantVertex.m_Center = pCommand->m_Center;
7429
7430 for(size_t i = 0; i < pCommand->m_DrawCount; ++i)
7431 PushConstantVertex.m_aPSR[i] = vec4(pCommand->m_pRenderInfo[i].m_Pos.x, pCommand->m_pRenderInfo[i].m_Pos.y, pCommand->m_pRenderInfo[i].m_Scale, pCommand->m_pRenderInfo[i].m_Rotation);
7432
7433 vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(SUniformSpriteMultiPushGPosBase) + sizeof(vec4) * pCommand->m_DrawCount, pValues: &PushConstantVertex);
7434 vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, offset: sizeof(SUniformSpriteMultiPushGPos), size: sizeof(PushConstantColor), pValues: &PushConstantColor);
7435 }
7436 else
7437 {
7438 SUniformSpriteMultiGVertColor PushConstantColor;
7439 SUniformSpriteMultiGPos PushConstantVertex;
7440
7441 PushConstantColor = pCommand->m_VertexColor;
7442
7443 mem_copy(dest: PushConstantVertex.m_aPos, source: m.data(), size: sizeof(PushConstantVertex.m_aPos));
7444 PushConstantVertex.m_Center = pCommand->m_Center;
7445
7446 vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_VERTEX_BIT, offset: 0, size: sizeof(PushConstantVertex), pValues: &PushConstantVertex);
7447 vkCmdPushConstants(commandBuffer: CommandBuffer, layout: PipeLayout, stageFlags: VK_SHADER_STAGE_FRAGMENT_BIT, offset: sizeof(SUniformSpriteMultiGPos) + sizeof(SUniformSpriteMultiGVertColorAlign), size: sizeof(PushConstantColor), pValues: &PushConstantColor);
7448 }
7449
7450 const int RSPCount = 512;
7451 int DrawCount = pCommand->m_DrawCount;
7452 size_t RenderOffset = 0;
7453
7454 while(DrawCount > 0)
7455 {
7456 int UniformCount = (DrawCount > RSPCount ? RSPCount : DrawCount);
7457
7458 if(!CanBePushed)
7459 {
7460 // create uniform buffer
7461 SDeviceDescriptorSet UniDescrSet;
7462 if(!GetUniformBufferObject(RenderThreadIndex: ExecBuffer.m_ThreadIndex, RequiresSharedStagesDescriptor: false, DescrSet&: UniDescrSet, ParticleCount: UniformCount, pData: (const float *)(pCommand->m_pRenderInfo + RenderOffset), DataSize: UniformCount * sizeof(IGraphics::SRenderSpriteInfo)))
7463 return false;
7464
7465 vkCmdBindDescriptorSets(commandBuffer: CommandBuffer, pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS, layout: PipeLayout, firstSet: 1, descriptorSetCount: 1, pDescriptorSets: &UniDescrSet.m_Descriptor, dynamicOffsetCount: 0, pDynamicOffsets: nullptr);
7466 }
7467
7468 vkCmdDrawIndexed(commandBuffer: CommandBuffer, indexCount: static_cast<uint32_t>(pCommand->m_DrawNum), instanceCount: UniformCount, firstIndex: 0, vertexOffset: 0, firstInstance: 0);
7469
7470 RenderOffset += RSPCount;
7471 DrawCount -= RSPCount;
7472 }
7473
7474 return true;
7475 }
7476
7477 [[nodiscard]] bool Cmd_WindowCreateNtf(const CCommandBuffer::SCommand_WindowCreateNtf *pCommand)
7478 {
7479 if(IsVerbose())
7480 {
7481 log_debug("gfx/vulkan", "Creating new surface.");
7482 }
7483 m_pWindow = SDL_GetWindowFromID(id: pCommand->m_WindowId);
7484 if(m_RenderingPaused)
7485 {
7486#ifdef CONF_PLATFORM_ANDROID
7487 if(!CreateSurface(m_pWindow))
7488 return false;
7489 m_RecreateSwapChain = true;
7490#endif
7491 m_RenderingPaused = false;
7492 if(!PureMemoryFrame())
7493 return false;
7494 if(!PrepareFrame())
7495 return false;
7496 }
7497
7498 return true;
7499 }
7500
7501 [[nodiscard]] bool Cmd_WindowDestroyNtf(const CCommandBuffer::SCommand_WindowDestroyNtf *pCommand)
7502 {
7503 if(IsVerbose())
7504 {
7505 log_debug("gfx/vulkan", "Surface got destroyed.");
7506 }
7507 if(!m_RenderingPaused)
7508 {
7509 if(!WaitFrame())
7510 return false;
7511 m_RenderingPaused = true;
7512 vkDeviceWaitIdle(device: m_VKDevice);
7513#ifdef CONF_PLATFORM_ANDROID
7514 CleanupVulkanSwapChain(true);
7515#endif
7516 }
7517
7518 return true;
7519 }
7520
7521 [[nodiscard]] bool Cmd_PreInit(const CCommandProcessorFragment_GLBase::SCommand_PreInit *pCommand)
7522 {
7523 m_pGpuList = pCommand->m_pGpuList;
7524 if(InitVulkanSDL(pWindow: pCommand->m_pWindow, CanvasWidth: pCommand->m_Width, CanvasHeight: pCommand->m_Height, pRendererString: pCommand->m_pRendererString, pVendorString: pCommand->m_pVendorString, pVersionString: pCommand->m_pVersionString) != 0)
7525 {
7526 m_VKInstance = VK_NULL_HANDLE;
7527 }
7528
7529 RegisterCommands();
7530
7531 m_ThreadCount = g_Config.m_GfxRenderThreadCount;
7532 if(m_ThreadCount <= 1)
7533 m_ThreadCount = 1;
7534 else
7535 {
7536 m_ThreadCount = std::clamp<decltype(m_ThreadCount)>(val: m_ThreadCount, lo: 3, hi: std::max<decltype(m_ThreadCount)>(a: 3, b: std::thread::hardware_concurrency()));
7537 }
7538
7539 // start threads
7540 dbg_assert(m_ThreadCount != 2, "Either use 1 main thread or at least 2 extra rendering threads.");
7541 if(m_ThreadCount > 1)
7542 {
7543 m_vvThreadCommandLists.resize(new_size: m_ThreadCount - 1);
7544 m_vThreadHelperHadCommands.resize(new_size: m_ThreadCount - 1, x: false);
7545 for(auto &ThreadCommandList : m_vvThreadCommandLists)
7546 {
7547 ThreadCommandList.reserve(n: 256);
7548 }
7549
7550 m_vpRenderThreads.reserve(n: m_ThreadCount - 1);
7551 for(size_t i = 0; i < m_ThreadCount - 1; ++i)
7552 {
7553 auto *pRenderThread = new SRenderThread();
7554 std::unique_lock<std::mutex> Lock(pRenderThread->m_Mutex);
7555 m_vpRenderThreads.emplace_back(args&: pRenderThread);
7556 pRenderThread->m_Thread = std::thread([this, i]() { RunThread(ThreadIndex: i); });
7557 // wait until thread started
7558 pRenderThread->m_Cond.wait(lock&: Lock, p: [pRenderThread]() -> bool { return pRenderThread->m_Started; });
7559 }
7560 }
7561
7562 return true;
7563 }
7564
7565 [[nodiscard]] bool Cmd_PostShutdown(const CCommandProcessorFragment_GLBase::SCommand_PostShutdown *pCommand)
7566 {
7567 for(size_t i = 0; i < m_ThreadCount - 1; ++i)
7568 {
7569 auto *pThread = m_vpRenderThreads[i].get();
7570 {
7571 std::unique_lock<std::mutex> Lock(pThread->m_Mutex);
7572 pThread->m_Finished = true;
7573 pThread->m_Cond.notify_one();
7574 }
7575 pThread->m_Thread.join();
7576 }
7577 m_vpRenderThreads.clear();
7578 m_vvThreadCommandLists.clear();
7579 m_vThreadHelperHadCommands.clear();
7580
7581 m_ThreadCount = 1;
7582
7583 CleanupVulkanSDL();
7584
7585 return true;
7586 }
7587
7588 void StartCommands(size_t CommandCount, size_t EstimatedRenderCallCount) override
7589 {
7590 m_CommandsInPipe = CommandCount;
7591 m_RenderCallsInPipe = EstimatedRenderCallCount;
7592 m_CurCommandInPipe = 0;
7593 m_CurRenderCallCountInPipe = 0;
7594 }
7595
7596 void EndCommands() override
7597 {
7598 FinishRenderThreads();
7599 m_CommandsInPipe = 0;
7600 m_RenderCallsInPipe = 0;
7601 }
7602
7603 /****************
7604 * RENDER THREADS
7605 *****************/
7606
7607 void RunThread(size_t ThreadIndex)
7608 {
7609 auto *pThread = m_vpRenderThreads[ThreadIndex].get();
7610 std::unique_lock<std::mutex> Lock(pThread->m_Mutex);
7611 pThread->m_Started = true;
7612 pThread->m_Cond.notify_one();
7613
7614 while(!pThread->m_Finished)
7615 {
7616 pThread->m_Cond.wait(lock&: Lock, p: [pThread]() -> bool { return pThread->m_IsRendering || pThread->m_Finished; });
7617 pThread->m_Cond.notify_one();
7618
7619 // set this to true, if you want to benchmark the render thread times
7620 static constexpr bool BENCHMARK_RENDER_THREADS = false;
7621 std::chrono::nanoseconds ThreadRenderTime = 0ns;
7622 if(IsVerbose() && BENCHMARK_RENDER_THREADS)
7623 {
7624 ThreadRenderTime = time_get_nanoseconds();
7625 }
7626
7627 if(!pThread->m_Finished)
7628 {
7629 bool HasErrorFromCmd = false;
7630 for(auto &NextCmd : m_vvThreadCommandLists[ThreadIndex])
7631 {
7632 if(!m_aCommandCallbacks[CommandBufferCMDOff(CommandBufferCMD: NextCmd.m_Command)].m_CommandCB(NextCmd.m_pRawCommand, NextCmd))
7633 {
7634 // an error occurred, the thread will not continue execution
7635 HasErrorFromCmd = true;
7636 break;
7637 }
7638 }
7639 m_vvThreadCommandLists[ThreadIndex].clear();
7640
7641 if(!HasErrorFromCmd && m_vvUsedThreadDrawCommandBuffer[ThreadIndex + 1][m_CurImageIndex])
7642 {
7643 auto &GraphicThreadCommandBuffer = m_vvThreadDrawCommandBuffers[ThreadIndex + 1][m_CurImageIndex];
7644 vkEndCommandBuffer(commandBuffer: GraphicThreadCommandBuffer);
7645 }
7646 }
7647
7648 if(IsVerbose() && BENCHMARK_RENDER_THREADS)
7649 {
7650 log_debug("gfx/vulkan", "Render thread %" PRIzu " took %" PRId64 " ns to finish.", ThreadIndex, (int64_t)(time_get_nanoseconds() - ThreadRenderTime).count());
7651 }
7652
7653 pThread->m_IsRendering = false;
7654 }
7655 }
7656};
7657
7658CCommandProcessorFragment_GLBase *CreateVulkanCommandProcessorFragment()
7659{
7660 return new CCommandProcessorFragment_Vulkan();
7661}
7662
7663#endif
7664