blob: dd1988b2d9fcb914d00afdf36c5e4b88c936cb7b [file] [log] [blame]
Adam Sawickiae5c4662019-01-02 10:23:35 +01001//
Adam Sawicki50882502020-02-07 16:51:31 +01002// Copyright (c) 2017-2020 Advanced Micro Devices, Inc. All rights reserved.
Adam Sawickiae5c4662019-01-02 10:23:35 +01003//
4// Permission is hereby granted, free of charge, to any person obtaining a copy
5// of this software and associated documentation files (the "Software"), to deal
6// in the Software without restriction, including without limitation the rights
7// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8// copies of the Software, and to permit persons to whom the Software is
9// furnished to do so, subject to the following conditions:
10//
11// The above copyright notice and this permission notice shall be included in
12// all copies or substantial portions of the Software.
13//
14// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
20// THE SOFTWARE.
21//
22
Adam Sawickif1a793c2018-03-13 15:42:22 +010023#include "Tests.h"
24#include "VmaUsage.h"
25#include "Common.h"
Adam Sawickib8333fb2018-03-13 16:15:53 +010026#include <atomic>
27#include <thread>
28#include <mutex>
Adam Sawicki94ce3d72019-04-17 14:59:25 +020029#include <functional>
Adam Sawickif1a793c2018-03-13 15:42:22 +010030
#ifdef _WIN32

// Arbitrary label used by the tests (consumed elsewhere in this file; not in this section).
static const char* CODE_DESCRIPTION = "Foo";

// Shared state and helpers defined in another test translation unit.
extern VkCommandBuffer g_hTemporaryCommandBuffer;
extern const VkAllocationCallbacks* g_Allocs;
extern bool g_BufferDeviceAddressEnabled;
extern PFN_vkGetBufferDeviceAddressEXT g_vkGetBufferDeviceAddressEXT;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

// Fallback when the build configuration does not define a debug margin.
#ifndef VMA_DEBUG_MARGIN
    #define VMA_DEBUG_MARGIN 0
#endif
45
// Overall "heaviness" of a test run. Heavier configs exercise more variants
// (e.g. more allocation strategies — see GetAllocationStrategyCount).
enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

// Compile-time selection of the test heaviness.
static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;
Adam Sawicki0a607132018-08-24 11:18:41 +020057
// Order in which allocations are freed at the end of a test run.
enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

// Human-readable names, indexed by the numeric value of FREE_ORDER.
static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};
// Keep the name table in sync with the enum: one string per enumerator before COUNT.
static_assert(
    sizeof(FREE_ORDER_NAMES) / sizeof(FREE_ORDER_NAMES[0]) == (unsigned)FREE_ORDER::COUNT,
    "FREE_ORDER_NAMES must have exactly one entry per FREE_ORDER value.");
65
Adam Sawicki80927152018-09-07 17:27:23 +020066// Copy of internal VmaAlgorithmToStr.
67static const char* AlgorithmToStr(uint32_t algorithm)
68{
69 switch(algorithm)
70 {
71 case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
72 return "Linear";
73 case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
74 return "Buddy";
75 case 0:
76 return "Default";
77 default:
78 assert(0);
79 return "";
80 }
81}
82
// Describes one class of test resources. Exactly one of the buffer/image size
// ranges is expected to be non-zero (MainTest asserts the other range is unused).
struct AllocationSize
{
    // Relative weight when drawing a random entry from Config::AllocationSizes.
    uint32_t Probability;
    // Range for a buffer's size in bytes (0 if this entry describes images).
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    // Range for a 2D image's width and height (drawn independently).
    uint32_t ImageSizeMin, ImageSizeMax;
};
89
// Parameters of MainTest - the multithreaded allocator stress test.
struct Config
{
    uint32_t RandSeed;
    // Bytes to allocate up-front (split evenly across threads) before the random phase.
    VkDeviceSize BeginBytesToAllocate;
    // Number of random allocate/free operations (split evenly across threads).
    uint32_t AdditionalOperationCount;
    // Per-run byte budget (split evenly across threads) during the random phase.
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    // Percent chance an operation targets the shared cross-thread allocation
    // list instead of the thread-local one.
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};
103
// Aggregated timing and memory statistics produced by MainTest.
struct Result
{
    duration TotalTime;
    // Min/avg/max time of a single allocation (avg holds the sum until MainTest divides it).
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated;
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};
112
113void TestDefragmentationSimple();
114void TestDefragmentationFull();
115
116struct PoolTestConfig
117{
118 uint32_t RandSeed;
119 uint32_t ThreadCount;
120 VkDeviceSize PoolSize;
121 uint32_t FrameCount;
122 uint32_t TotalItemCount;
123 // Range for number of items used in each frame.
124 uint32_t UsedItemCountMin, UsedItemCountMax;
125 // Percent of items to make unused, and possibly make some others used in each frame.
126 uint32_t ItemsToMakeUnusedPercent;
127 std::vector<AllocationSize> AllocationSizes;
128
129 VkDeviceSize CalcAvgResourceSize() const
130 {
131 uint32_t probabilitySum = 0;
132 VkDeviceSize sizeSum = 0;
133 for(size_t i = 0; i < AllocationSizes.size(); ++i)
134 {
135 const AllocationSize& allocSize = AllocationSizes[i];
136 if(allocSize.BufferSizeMax > 0)
137 sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
138 else
139 {
140 const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
141 sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
142 }
143 probabilitySum += allocSize.Probability;
144 }
145 return sizeSum / probabilitySum;
146 }
147
148 bool UsesBuffers() const
149 {
150 for(size_t i = 0; i < AllocationSizes.size(); ++i)
151 if(AllocationSizes[i].BufferSizeMax > 0)
152 return true;
153 return false;
154 }
155
156 bool UsesImages() const
157 {
158 for(size_t i = 0; i < AllocationSizes.size(); ++i)
159 if(AllocationSizes[i].ImageSizeMax > 0)
160 return true;
161 return false;
162 }
163};
164
// Aggregated results of the custom-pool benchmark.
struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
173
// Assumed bytes per pixel when estimating an image's contribution to the
// per-thread byte budget in MainTest.
static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

// Global frame index (non-static: visible to other translation units).
uint32_t g_FrameIndex = 0;

// A buffer together with the VMA allocation backing it.
struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};
183
// Returns the index of the VkMemoryHeap that the given memory type belongs to,
// as reported by the global allocator's cached physical-device properties.
static uint32_t MemoryTypeToHeap(uint32_t memoryTypeIndex)
{
    const VkPhysicalDeviceMemoryProperties* props;
    vmaGetMemoryProperties(g_hAllocator, &props);
    return props->memoryTypes[memoryTypeIndex].heapIndex;
}
190
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +0200191static uint32_t GetAllocationStrategyCount()
192{
193 uint32_t strategyCount = 0;
194 switch(ConfigType)
195 {
196 case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
197 case CONFIG_TYPE_SMALL: strategyCount = 1; break;
198 case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
199 case CONFIG_TYPE_LARGE: strategyCount = 2; break;
200 case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
201 default: assert(0);
202 }
203 return strategyCount;
204}
205
206static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
207{
208 switch(allocStrategy)
209 {
210 case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
211 case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
212 case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
213 case 0: return "Default"; break;
214 default: assert(0); return "";
215 }
216}
217
Adam Sawickib8333fb2018-03-13 16:15:53 +0100218static void InitResult(Result& outResult)
219{
220 outResult.TotalTime = duration::zero();
221 outResult.AllocationTimeMin = duration::max();
222 outResult.AllocationTimeAvg = duration::zero();
223 outResult.AllocationTimeMax = duration::min();
224 outResult.DeallocationTimeMin = duration::max();
225 outResult.DeallocationTimeAvg = duration::zero();
226 outResult.DeallocationTimeMax = duration::min();
227 outResult.TotalMemoryAllocated = 0;
228 outResult.FreeRangeSizeAvg = 0;
229 outResult.FreeRangeSizeMax = 0;
230}
231
// RAII scope timer: measures wall-clock time from construction to destruction
// and folds the sample into caller-owned min / sum / max accumulators.
class TimeRegisterObj
{
public:
    // The referenced durations must outlive this object; timing starts here.
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    // Stops the clock and updates sum, min and max.
    ~TimeRegisterObj()
    {
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg;
};
257
// Per-thread timing and count accumulators for the custom-pool benchmark.
struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
266
// Scope timer feeding Result's allocation-time statistics. Note the sum is
// accumulated in AllocationTimeAvg; MainTest divides it by the allocation count.
class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};
275
// Scope timer feeding Result's deallocation-time statistics (sum kept in
// DeallocationTimeAvg until MainTest divides it by the allocation count).
class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};
284
// Scope timer feeding a pool-test thread's allocation-time statistics.
class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};
293
// Scope timer feeding a pool-test thread's deallocation-time statistics.
class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};
302
Adam Sawicki33d2ce72018-08-27 13:59:13 +0200303static void CurrentTimeToStr(std::string& out)
304{
305 time_t rawTime; time(&rawTime);
306 struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
307 char timeStr[128];
308 strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
309 out = timeStr;
310}
311
Adam Sawickib8333fb2018-03-13 16:15:53 +0100312VkResult MainTest(Result& outResult, const Config& config)
313{
314 assert(config.ThreadCount > 0);
315
316 InitResult(outResult);
317
318 RandomNumberGenerator mainRand{config.RandSeed};
319
320 time_point timeBeg = std::chrono::high_resolution_clock::now();
321
322 std::atomic<size_t> allocationCount = 0;
323 VkResult res = VK_SUCCESS;
324
325 uint32_t memUsageProbabilitySum =
326 config.MemUsageProbability[0] + config.MemUsageProbability[1] +
327 config.MemUsageProbability[2] + config.MemUsageProbability[3];
328 assert(memUsageProbabilitySum > 0);
329
330 uint32_t allocationSizeProbabilitySum = std::accumulate(
331 config.AllocationSizes.begin(),
332 config.AllocationSizes.end(),
333 0u,
334 [](uint32_t sum, const AllocationSize& allocSize) {
335 return sum + allocSize.Probability;
336 });
337
338 struct Allocation
339 {
340 VkBuffer Buffer;
341 VkImage Image;
342 VmaAllocation Alloc;
343 };
344
345 std::vector<Allocation> commonAllocations;
346 std::mutex commonAllocationsMutex;
347
348 auto Allocate = [&](
349 VkDeviceSize bufferSize,
350 const VkExtent2D imageExtent,
351 RandomNumberGenerator& localRand,
352 VkDeviceSize& totalAllocatedBytes,
353 std::vector<Allocation>& allocations) -> VkResult
354 {
355 assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));
356
357 uint32_t memUsageIndex = 0;
358 uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
359 while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
360 memUsageRand -= config.MemUsageProbability[memUsageIndex++];
361
362 VmaAllocationCreateInfo memReq = {};
363 memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
Adam Sawicki0667e332018-08-24 17:26:44 +0200364 memReq.flags |= config.AllocationStrategy;
Adam Sawickib8333fb2018-03-13 16:15:53 +0100365
366 Allocation allocation = {};
367 VmaAllocationInfo allocationInfo;
368
369 // Buffer
370 if(bufferSize > 0)
371 {
372 assert(imageExtent.width == 0);
373 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
374 bufferInfo.size = bufferSize;
375 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
376
377 {
378 AllocationTimeRegisterObj timeRegisterObj{outResult};
379 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
380 }
381 }
382 // Image
383 else
384 {
385 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
386 imageInfo.imageType = VK_IMAGE_TYPE_2D;
387 imageInfo.extent.width = imageExtent.width;
388 imageInfo.extent.height = imageExtent.height;
389 imageInfo.extent.depth = 1;
390 imageInfo.mipLevels = 1;
391 imageInfo.arrayLayers = 1;
392 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
393 imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
394 VK_IMAGE_TILING_OPTIMAL :
395 VK_IMAGE_TILING_LINEAR;
396 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
397 switch(memReq.usage)
398 {
399 case VMA_MEMORY_USAGE_GPU_ONLY:
400 switch(localRand.Generate() % 3)
401 {
402 case 0:
403 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
404 break;
405 case 1:
406 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
407 break;
408 case 2:
409 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
410 break;
411 }
412 break;
413 case VMA_MEMORY_USAGE_CPU_ONLY:
414 case VMA_MEMORY_USAGE_CPU_TO_GPU:
415 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
416 break;
417 case VMA_MEMORY_USAGE_GPU_TO_CPU:
418 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
419 break;
420 }
421 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
422 imageInfo.flags = 0;
423
424 {
425 AllocationTimeRegisterObj timeRegisterObj{outResult};
426 res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
427 }
428 }
429
430 if(res == VK_SUCCESS)
431 {
432 ++allocationCount;
433 totalAllocatedBytes += allocationInfo.size;
434 bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
435 if(useCommonAllocations)
436 {
437 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
438 commonAllocations.push_back(allocation);
439 }
440 else
441 allocations.push_back(allocation);
442 }
443 else
444 {
Adam Sawickib8d34d52018-10-03 17:41:20 +0200445 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100446 }
447 return res;
448 };
449
450 auto GetNextAllocationSize = [&](
451 VkDeviceSize& outBufSize,
452 VkExtent2D& outImageSize,
453 RandomNumberGenerator& localRand)
454 {
455 outBufSize = 0;
456 outImageSize = {0, 0};
457
458 uint32_t allocSizeIndex = 0;
459 uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
460 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
461 r -= config.AllocationSizes[allocSizeIndex++].Probability;
462
463 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
464 if(allocSize.BufferSizeMax > 0)
465 {
466 assert(allocSize.ImageSizeMax == 0);
467 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
468 outBufSize = allocSize.BufferSizeMin;
469 else
470 {
471 outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
472 outBufSize = outBufSize / 16 * 16;
473 }
474 }
475 else
476 {
477 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
478 outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
479 else
480 {
481 outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
482 outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
483 }
484 }
485 };
486
487 std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
488 HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
489
490 auto ThreadProc = [&](uint32_t randSeed) -> void
491 {
492 RandomNumberGenerator threadRand(randSeed);
493 VkDeviceSize threadTotalAllocatedBytes = 0;
494 std::vector<Allocation> threadAllocations;
495 VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
496 VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
497 uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;
498
499 // BEGIN ALLOCATIONS
500 for(;;)
501 {
502 VkDeviceSize bufferSize = 0;
503 VkExtent2D imageExtent = {};
504 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
505 if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
506 threadBeginBytesToAllocate)
507 {
508 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
509 break;
510 }
511 else
512 break;
513 }
514
515 // ADDITIONAL ALLOCATIONS AND FREES
516 for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
517 {
518 VkDeviceSize bufferSize = 0;
519 VkExtent2D imageExtent = {};
520 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
521
522 // true = allocate, false = free
523 bool allocate = threadRand.Generate() % 2 != 0;
524
525 if(allocate)
526 {
527 if(threadTotalAllocatedBytes +
528 bufferSize +
529 imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
530 threadMaxBytesToAllocate)
531 {
532 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
533 break;
534 }
535 }
536 else
537 {
538 bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
539 if(useCommonAllocations)
540 {
541 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
542 if(!commonAllocations.empty())
543 {
544 size_t indexToFree = threadRand.Generate() % commonAllocations.size();
545 VmaAllocationInfo allocationInfo;
546 vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
547 if(threadTotalAllocatedBytes >= allocationInfo.size)
548 {
549 DeallocationTimeRegisterObj timeRegisterObj{outResult};
550 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
551 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
552 else
553 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
554 threadTotalAllocatedBytes -= allocationInfo.size;
555 commonAllocations.erase(commonAllocations.begin() + indexToFree);
556 }
557 }
558 }
559 else
560 {
561 if(!threadAllocations.empty())
562 {
563 size_t indexToFree = threadRand.Generate() % threadAllocations.size();
564 VmaAllocationInfo allocationInfo;
565 vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
566 if(threadTotalAllocatedBytes >= allocationInfo.size)
567 {
568 DeallocationTimeRegisterObj timeRegisterObj{outResult};
569 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
570 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
571 else
572 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
573 threadTotalAllocatedBytes -= allocationInfo.size;
574 threadAllocations.erase(threadAllocations.begin() + indexToFree);
575 }
576 }
577 }
578 }
579 }
580
581 ++numThreadsReachedMaxAllocations;
582
583 WaitForSingleObject(threadsFinishEvent, INFINITE);
584
585 // DEALLOCATION
586 while(!threadAllocations.empty())
587 {
588 size_t indexToFree = 0;
589 switch(config.FreeOrder)
590 {
591 case FREE_ORDER::FORWARD:
592 indexToFree = 0;
593 break;
594 case FREE_ORDER::BACKWARD:
595 indexToFree = threadAllocations.size() - 1;
596 break;
597 case FREE_ORDER::RANDOM:
598 indexToFree = mainRand.Generate() % threadAllocations.size();
599 break;
600 }
601
602 {
603 DeallocationTimeRegisterObj timeRegisterObj{outResult};
604 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
605 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
606 else
607 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
608 }
609 threadAllocations.erase(threadAllocations.begin() + indexToFree);
610 }
611 };
612
613 uint32_t threadRandSeed = mainRand.Generate();
614 std::vector<std::thread> bkgThreads;
615 for(size_t i = 0; i < config.ThreadCount; ++i)
616 {
617 bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
618 }
619
620 // Wait for threads reached max allocations
621 while(numThreadsReachedMaxAllocations < config.ThreadCount)
622 Sleep(0);
623
624 // CALCULATE MEMORY STATISTICS ON FINAL USAGE
625 VmaStats vmaStats = {};
626 vmaCalculateStats(g_hAllocator, &vmaStats);
627 outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
628 outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
629 outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;
630
631 // Signal threads to deallocate
632 SetEvent(threadsFinishEvent);
633
634 // Wait for threads finished
635 for(size_t i = 0; i < bkgThreads.size(); ++i)
636 bkgThreads[i].join();
637 bkgThreads.clear();
638
639 CloseHandle(threadsFinishEvent);
640
641 // Deallocate remaining common resources
642 while(!commonAllocations.empty())
643 {
644 size_t indexToFree = 0;
645 switch(config.FreeOrder)
646 {
647 case FREE_ORDER::FORWARD:
648 indexToFree = 0;
649 break;
650 case FREE_ORDER::BACKWARD:
651 indexToFree = commonAllocations.size() - 1;
652 break;
653 case FREE_ORDER::RANDOM:
654 indexToFree = mainRand.Generate() % commonAllocations.size();
655 break;
656 }
657
658 {
659 DeallocationTimeRegisterObj timeRegisterObj{outResult};
660 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
661 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
662 else
663 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
664 }
665 commonAllocations.erase(commonAllocations.begin() + indexToFree);
666 }
667
668 if(allocationCount)
669 {
670 outResult.AllocationTimeAvg /= allocationCount;
671 outResult.DeallocationTimeAvg /= allocationCount;
672 }
673
674 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
675
676 return res;
677}
678
// Dumps the allocator's detailed statistics as a JSON string to the given file.
void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    wprintf(L"Saving JSON dump to file \"%s\"\n", filePath);
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    // String returned by vmaBuildStatsString must be released with vmaFreeStatsString.
    vmaFreeStatsString(g_hAllocator, stats);
}
687
// One test resource: either a buffer or an image (never both — Destroy asserts
// this), together with its VMA allocation and the data pattern used to fill and
// later validate its contents.
struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    VkImageLayout m_ImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    // First uint32_t written to the resource; subsequent values increase by 1.
    uint32_t m_StartValue = 0;
    // Only the member matching the resource kind (buffer vs. image) is valid.
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    // After defragmentation.
    VkBuffer m_NewBuffer = VK_NULL_HANDLE;
    VkImage m_NewImage = VK_NULL_HANDLE;

    // Creates the resource via the global allocator and remembers its create info.
    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void CreateImage(
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VkImageLayout layout);
    // Destroys whichever resource exists and frees the allocation; idempotent.
    void Destroy();
};
714
// Creates m_Buffer through the global allocator and stores bufCreateInfo for
// later reference. Fails the test (TEST) on any error.
void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}
// Creates m_Image through the global allocator, recording its create info and
// the layout the caller intends to transition it to. Fails the test on error.
void AllocInfo::CreateImage(
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VkImageLayout layout)
{
    m_ImageInfo = imageCreateInfo;
    m_ImageLayout = layout;
    VkResult res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &m_Image, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200733
// Releases whichever Vulkan resource this AllocInfo owns and frees its VMA
// allocation. Safe to call on a partially-initialized or already-destroyed
// object: every handle is null-checked and reset afterwards.
void AllocInfo::Destroy()
{
    if(m_Image)
    {
        // An AllocInfo never owns both an image and a buffer.
        assert(!m_Buffer);
        vkDestroyImage(g_hDevice, m_Image, g_Allocs);
        m_Image = VK_NULL_HANDLE;
    }
    if(m_Buffer)
    {
        assert(!m_Image);
        vkDestroyBuffer(g_hDevice, m_Buffer, g_Allocs);
        m_Buffer = VK_NULL_HANDLE;
    }
    if(m_Allocation)
    {
        vmaFreeMemory(g_hAllocator, m_Allocation);
        m_Allocation = VK_NULL_HANDLE;
    }
}
754
// Pool of persistently-mapped staging buffers reused across upload operations,
// capped at MAX_TOTAL_SIZE bytes in total.
class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    // Marks every buffer as available again; buffers are not destroyed.
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr;  // Persistently mapped (VMA_ALLOCATION_CREATE_MAPPED_BIT).
        bool Used = false;          // Currently handed out via AcquireBuffer.
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};
778
// Destroys all pooled staging buffers, used and unused alike.
StagingBufferCollection::~StagingBufferCollection()
{
    for(size_t i = m_Bufs.size(); i--; )
    {
        vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
    }
}
786
787bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
788{
789 assert(size <= MAX_TOTAL_SIZE);
790
791 // Try to find existing unused buffer with best size.
792 size_t bestIndex = SIZE_MAX;
793 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
794 {
795 BufInfo& currBufInfo = m_Bufs[i];
796 if(!currBufInfo.Used && currBufInfo.Size >= size &&
797 (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
798 {
799 bestIndex = i;
800 }
801 }
802
803 if(bestIndex != SIZE_MAX)
804 {
805 m_Bufs[bestIndex].Used = true;
806 outBuffer = m_Bufs[bestIndex].Buffer;
807 outMappedPtr = m_Bufs[bestIndex].MappedPtr;
808 return true;
809 }
810
811 // Allocate new buffer with requested size.
812 if(m_TotalSize + size <= MAX_TOTAL_SIZE)
813 {
814 BufInfo bufInfo;
815 bufInfo.Size = size;
816 bufInfo.Used = true;
817
818 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
819 bufCreateInfo.size = size;
820 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
821
822 VmaAllocationCreateInfo allocCreateInfo = {};
823 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
824 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
825
826 VmaAllocationInfo allocInfo;
827 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
828 bufInfo.MappedPtr = allocInfo.pMappedData;
829 TEST(res == VK_SUCCESS && bufInfo.MappedPtr);
830
831 outBuffer = bufInfo.Buffer;
832 outMappedPtr = bufInfo.MappedPtr;
833
834 m_Bufs.push_back(std::move(bufInfo));
835
836 m_TotalSize += size;
837
838 return true;
839 }
840
841 // There are some unused but smaller buffers: Free them and try again.
842 bool hasUnused = false;
843 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
844 {
845 if(!m_Bufs[i].Used)
846 {
847 hasUnused = true;
848 break;
849 }
850 }
851 if(hasUnused)
852 {
853 for(size_t i = m_Bufs.size(); i--; )
854 {
855 if(!m_Bufs[i].Used)
856 {
857 m_TotalSize -= m_Bufs[i].Size;
858 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
859 m_Bufs.erase(m_Bufs.begin() + i);
860 }
861 }
862
863 return AcquireBuffer(size, outBuffer, outMappedPtr);
864 }
865
866 return false;
867}
868
869void StagingBufferCollection::ReleaseAllBuffers()
870{
871 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
872 {
873 m_Bufs[i].Used = false;
874 }
875}
876
877static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
878{
879 StagingBufferCollection stagingBufs;
880
881 bool cmdBufferStarted = false;
882 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
883 {
884 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
885 if(currAllocInfo.m_Buffer)
886 {
887 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
888
889 VkBuffer stagingBuf = VK_NULL_HANDLE;
890 void* stagingBufMappedPtr = nullptr;
891 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
892 {
893 TEST(cmdBufferStarted);
894 EndSingleTimeCommands();
895 stagingBufs.ReleaseAllBuffers();
896 cmdBufferStarted = false;
897
898 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
899 TEST(ok);
900 }
901
902 // Fill staging buffer.
903 {
904 assert(size % sizeof(uint32_t) == 0);
905 uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
906 uint32_t val = currAllocInfo.m_StartValue;
907 for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
908 {
909 *stagingValPtr = val;
910 ++stagingValPtr;
911 ++val;
912 }
913 }
914
915 // Issue copy command from staging buffer to destination buffer.
916 if(!cmdBufferStarted)
917 {
918 cmdBufferStarted = true;
919 BeginSingleTimeCommands();
920 }
921
922 VkBufferCopy copy = {};
923 copy.srcOffset = 0;
924 copy.dstOffset = 0;
925 copy.size = size;
926 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
927 }
928 else
929 {
Adam Sawickia52012d2019-12-23 15:28:51 +0100930 TEST(currAllocInfo.m_ImageInfo.format == VK_FORMAT_R8G8B8A8_UNORM && "Only RGBA8 images are currently supported.");
931 TEST(currAllocInfo.m_ImageInfo.mipLevels == 1 && "Only single mip images are currently supported.");
932
Adam Sawickic467e282019-12-23 16:38:31 +0100933 const VkDeviceSize size = (VkDeviceSize)currAllocInfo.m_ImageInfo.extent.width * currAllocInfo.m_ImageInfo.extent.height * sizeof(uint32_t);
Adam Sawickia52012d2019-12-23 15:28:51 +0100934
935 VkBuffer stagingBuf = VK_NULL_HANDLE;
936 void* stagingBufMappedPtr = nullptr;
937 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
938 {
939 TEST(cmdBufferStarted);
940 EndSingleTimeCommands();
941 stagingBufs.ReleaseAllBuffers();
942 cmdBufferStarted = false;
943
944 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
945 TEST(ok);
946 }
947
948 // Fill staging buffer.
949 {
950 assert(size % sizeof(uint32_t) == 0);
951 uint32_t *stagingValPtr = (uint32_t *)stagingBufMappedPtr;
952 uint32_t val = currAllocInfo.m_StartValue;
953 for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
954 {
955 *stagingValPtr = val;
956 ++stagingValPtr;
957 ++val;
958 }
959 }
960
961 // Issue copy command from staging buffer to destination buffer.
962 if(!cmdBufferStarted)
963 {
964 cmdBufferStarted = true;
965 BeginSingleTimeCommands();
966 }
967
968
969 // Transfer to transfer dst layout
970 VkImageSubresourceRange subresourceRange = {
971 VK_IMAGE_ASPECT_COLOR_BIT,
972 0, VK_REMAINING_MIP_LEVELS,
973 0, VK_REMAINING_ARRAY_LAYERS
974 };
975
976 VkImageMemoryBarrier barrier = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
977 barrier.srcAccessMask = 0;
978 barrier.dstAccessMask = 0;
979 barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
980 barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
981 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
982 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
983 barrier.image = currAllocInfo.m_Image;
984 barrier.subresourceRange = subresourceRange;
985
986 vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0,
987 0, nullptr,
988 0, nullptr,
989 1, &barrier);
990
991 // Copy image date
992 VkBufferImageCopy copy = {};
993 copy.bufferOffset = 0;
994 copy.bufferRowLength = 0;
995 copy.bufferImageHeight = 0;
996 copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
997 copy.imageSubresource.layerCount = 1;
998 copy.imageExtent = currAllocInfo.m_ImageInfo.extent;
999
1000 vkCmdCopyBufferToImage(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy);
1001
1002 // Transfer to desired layout
1003 barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
1004 barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
1005 barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
1006 barrier.newLayout = currAllocInfo.m_ImageLayout;
1007
1008 vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0,
1009 0, nullptr,
1010 0, nullptr,
1011 1, &barrier);
Adam Sawickif2975342018-10-16 13:49:02 +02001012 }
1013 }
1014
1015 if(cmdBufferStarted)
1016 {
1017 EndSingleTimeCommands();
1018 stagingBufs.ReleaseAllBuffers();
1019 }
1020}
1021
1022static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
1023{
1024 StagingBufferCollection stagingBufs;
1025
1026 bool cmdBufferStarted = false;
1027 size_t validateAllocIndexOffset = 0;
1028 std::vector<void*> validateStagingBuffers;
1029 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
1030 {
1031 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
1032 if(currAllocInfo.m_Buffer)
1033 {
1034 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
1035
1036 VkBuffer stagingBuf = VK_NULL_HANDLE;
1037 void* stagingBufMappedPtr = nullptr;
1038 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
1039 {
1040 TEST(cmdBufferStarted);
1041 EndSingleTimeCommands();
1042 cmdBufferStarted = false;
1043
1044 for(size_t validateIndex = 0;
1045 validateIndex < validateStagingBuffers.size();
1046 ++validateIndex)
1047 {
1048 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
1049 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
1050 TEST(validateSize % sizeof(uint32_t) == 0);
1051 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
1052 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
1053 bool valid = true;
1054 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
1055 {
1056 if(*stagingValPtr != val)
1057 {
1058 valid = false;
1059 break;
1060 }
1061 ++stagingValPtr;
1062 ++val;
1063 }
1064 TEST(valid);
1065 }
1066
1067 stagingBufs.ReleaseAllBuffers();
1068
1069 validateAllocIndexOffset = allocInfoIndex;
1070 validateStagingBuffers.clear();
1071
1072 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
1073 TEST(ok);
1074 }
1075
1076 // Issue copy command from staging buffer to destination buffer.
1077 if(!cmdBufferStarted)
1078 {
1079 cmdBufferStarted = true;
1080 BeginSingleTimeCommands();
1081 }
1082
1083 VkBufferCopy copy = {};
1084 copy.srcOffset = 0;
1085 copy.dstOffset = 0;
1086 copy.size = size;
1087 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);
1088
1089 // Sava mapped pointer for later validation.
1090 validateStagingBuffers.push_back(stagingBufMappedPtr);
1091 }
1092 else
1093 {
1094 TEST(0 && "Images not currently supported.");
1095 }
1096 }
1097
1098 if(cmdBufferStarted)
1099 {
1100 EndSingleTimeCommands();
1101
1102 for(size_t validateIndex = 0;
1103 validateIndex < validateStagingBuffers.size();
1104 ++validateIndex)
1105 {
1106 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
1107 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
1108 TEST(validateSize % sizeof(uint32_t) == 0);
1109 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
1110 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
1111 bool valid = true;
1112 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
1113 {
1114 if(*stagingValPtr != val)
1115 {
1116 valid = false;
1117 break;
1118 }
1119 ++stagingValPtr;
1120 ++val;
1121 }
1122 TEST(valid);
1123 }
1124
1125 stagingBufs.ReleaseAllBuffers();
1126 }
1127}
1128
Adam Sawickib8333fb2018-03-13 16:15:53 +01001129static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
1130{
1131 outMemReq = {};
1132 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
1133 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
1134}
1135
1136static void CreateBuffer(
1137 VmaPool pool,
1138 const VkBufferCreateInfo& bufCreateInfo,
1139 bool persistentlyMapped,
1140 AllocInfo& outAllocInfo)
1141{
1142 outAllocInfo = {};
1143 outAllocInfo.m_BufferInfo = bufCreateInfo;
1144
1145 VmaAllocationCreateInfo allocCreateInfo = {};
1146 allocCreateInfo.pool = pool;
1147 if(persistentlyMapped)
1148 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1149
1150 VmaAllocationInfo vmaAllocInfo = {};
1151 ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );
1152
1153 // Setup StartValue and fill.
1154 {
1155 outAllocInfo.m_StartValue = (uint32_t)rand();
1156 uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001157 TEST((data != nullptr) == persistentlyMapped);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001158 if(!persistentlyMapped)
1159 {
1160 ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
1161 }
1162
1163 uint32_t value = outAllocInfo.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001164 TEST(bufCreateInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001165 for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
1166 data[i] = value++;
1167
1168 if(!persistentlyMapped)
1169 vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
1170 }
1171}
1172
// Creates a random resource with randomized size, allocates its memory with
// the parameters from GetMemReq(), and fills the whole allocation with the
// sequential pattern starting at a random m_StartValue.
// NOTE: the image path below is currently disabled (isBuffer is forced true),
// so only buffers are created. The order of rand() calls is significant for
// test reproducibility - do not reorder.
static void CreateAllocation(AllocInfo& outAllocation)
{
    outAllocation.m_Allocation = nullptr;
    outAllocation.m_Buffer = nullptr;
    outAllocation.m_Image = nullptr;
    outAllocation.m_StartValue = (uint32_t)rand();

    VmaAllocationCreateInfo vmaMemReq;
    GetMemReq(vmaMemReq);

    VmaAllocationInfo allocInfo;

    const bool isBuffer = true;//(rand() & 0x1) != 0;
    const bool isLarge = (rand() % 16) == 0;
    if(isBuffer)
    {
        const uint32_t bufferSize = isLarge ?
            (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
            (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB

        VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufferInfo.size = bufferSize;
        bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
    else
    {
        const uint32_t imageSizeX = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024
        const uint32_t imageSizeY = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024

        VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
        imageInfo.imageType = VK_IMAGE_TYPE_2D;
        imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
        imageInfo.extent.width = imageSizeX;
        imageInfo.extent.height = imageSizeY;
        imageInfo.extent.depth = 1;
        imageInfo.mipLevels = 1;
        imageInfo.arrayLayers = 1;
        imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
        imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }

    // Map (if not already persistently mapped) and fill the whole allocation,
    // not just the resource size, with the sequential pattern.
    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
        data[i] = value++;

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
}
1243
1244static void DestroyAllocation(const AllocInfo& allocation)
1245{
1246 if(allocation.m_Buffer)
1247 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1248 else
1249 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1250}
1251
1252static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1253{
1254 for(size_t i = allocations.size(); i--; )
1255 DestroyAllocation(allocations[i]);
1256 allocations.clear();
1257}
1258
1259static void ValidateAllocationData(const AllocInfo& allocation)
1260{
1261 VmaAllocationInfo allocInfo;
1262 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1263
1264 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1265 if(allocInfo.pMappedData == nullptr)
1266 {
1267 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001268 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001269 }
1270
1271 uint32_t value = allocation.m_StartValue;
1272 bool ok = true;
1273 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001274 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001275 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1276 {
1277 if(data[i] != value++)
1278 {
1279 ok = false;
1280 break;
1281 }
1282 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001283 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001284
1285 if(allocInfo.pMappedData == nullptr)
1286 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1287}
1288
1289static void RecreateAllocationResource(AllocInfo& allocation)
1290{
1291 VmaAllocationInfo allocInfo;
1292 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1293
1294 if(allocation.m_Buffer)
1295 {
Adam Sawicki1f84f622019-07-02 13:40:01 +02001296 vkDestroyBuffer(g_hDevice, allocation.m_Buffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001297
Adam Sawicki1f84f622019-07-02 13:40:01 +02001298 VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, g_Allocs, &allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001299 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001300
1301 // Just to silence validation layer warnings.
1302 VkMemoryRequirements vkMemReq;
1303 vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
Adam Sawicki2af57d72018-12-06 15:35:05 +01001304 TEST(vkMemReq.size >= allocation.m_BufferInfo.size);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001305
Adam Sawickiaf88c1b2019-07-02 12:34:26 +02001306 res = vmaBindBufferMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001307 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001308 }
1309 else
1310 {
Adam Sawicki1f84f622019-07-02 13:40:01 +02001311 vkDestroyImage(g_hDevice, allocation.m_Image, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001312
Adam Sawicki1f84f622019-07-02 13:40:01 +02001313 VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, g_Allocs, &allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001314 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001315
1316 // Just to silence validation layer warnings.
1317 VkMemoryRequirements vkMemReq;
1318 vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);
1319
Adam Sawickiaf88c1b2019-07-02 12:34:26 +02001320 res = vmaBindImageMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001321 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001322 }
1323}
1324
1325static void Defragment(AllocInfo* allocs, size_t allocCount,
1326 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1327 VmaDefragmentationStats* defragmentationStats = nullptr)
1328{
1329 std::vector<VmaAllocation> vmaAllocs(allocCount);
1330 for(size_t i = 0; i < allocCount; ++i)
1331 vmaAllocs[i] = allocs[i].m_Allocation;
1332
1333 std::vector<VkBool32> allocChanged(allocCount);
1334
1335 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1336 defragmentationInfo, defragmentationStats) );
1337
1338 for(size_t i = 0; i < allocCount; ++i)
1339 {
1340 if(allocChanged[i])
1341 {
1342 RecreateAllocationResource(allocs[i]);
1343 }
1344 }
1345}
1346
1347static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1348{
1349 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1350 ValidateAllocationData(allocInfo);
1351 });
1352}
1353
1354void TestDefragmentationSimple()
1355{
1356 wprintf(L"Test defragmentation simple\n");
1357
1358 RandomNumberGenerator rand(667);
1359
1360 const VkDeviceSize BUF_SIZE = 0x10000;
1361 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1362
1363 const VkDeviceSize MIN_BUF_SIZE = 32;
1364 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1365 auto RandomBufSize = [&]() -> VkDeviceSize {
1366 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1367 };
1368
1369 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1370 bufCreateInfo.size = BUF_SIZE;
1371 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1372
1373 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1374 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1375
1376 uint32_t memTypeIndex = UINT32_MAX;
1377 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1378
1379 VmaPoolCreateInfo poolCreateInfo = {};
1380 poolCreateInfo.blockSize = BLOCK_SIZE;
1381 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1382
1383 VmaPool pool;
1384 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1385
Adam Sawickie1681912018-11-23 17:50:12 +01001386 // Defragmentation of empty pool.
1387 {
1388 VmaDefragmentationInfo2 defragInfo = {};
1389 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1390 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1391 defragInfo.poolCount = 1;
1392 defragInfo.pPools = &pool;
1393
1394 VmaDefragmentationStats defragStats = {};
1395 VmaDefragmentationContext defragCtx = nullptr;
1396 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
1397 TEST(res >= VK_SUCCESS);
1398 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1399 TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
1400 defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
1401 }
1402
Adam Sawickib8333fb2018-03-13 16:15:53 +01001403 std::vector<AllocInfo> allocations;
1404
1405 // persistentlyMappedOption = 0 - not persistently mapped.
1406 // persistentlyMappedOption = 1 - persistently mapped.
1407 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1408 {
1409 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1410 const bool persistentlyMapped = persistentlyMappedOption != 0;
1411
1412 // # Test 1
1413 // Buffers of fixed size.
1414 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1415 // Expected result: at least 1 block freed.
1416 {
1417 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1418 {
1419 AllocInfo allocInfo;
1420 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1421 allocations.push_back(allocInfo);
1422 }
1423
1424 for(size_t i = 1; i < allocations.size(); ++i)
1425 {
1426 DestroyAllocation(allocations[i]);
1427 allocations.erase(allocations.begin() + i);
1428 }
1429
1430 VmaDefragmentationStats defragStats;
1431 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001432 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
1433 TEST(defragStats.deviceMemoryBlocksFreed >= 1);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001434
1435 ValidateAllocationsData(allocations.data(), allocations.size());
1436
1437 DestroyAllAllocations(allocations);
1438 }
1439
1440 // # Test 2
1441 // Buffers of fixed size.
1442 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at time.
1443 // Expected result: Each of 4 interations makes some progress.
1444 {
1445 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1446 {
1447 AllocInfo allocInfo;
1448 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1449 allocations.push_back(allocInfo);
1450 }
1451
1452 for(size_t i = 1; i < allocations.size(); ++i)
1453 {
1454 DestroyAllocation(allocations[i]);
1455 allocations.erase(allocations.begin() + i);
1456 }
1457
1458 VmaDefragmentationInfo defragInfo = {};
1459 defragInfo.maxAllocationsToMove = 1;
1460 defragInfo.maxBytesToMove = BUF_SIZE;
1461
1462 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1463 {
1464 VmaDefragmentationStats defragStats;
1465 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001466 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001467 }
1468
1469 ValidateAllocationsData(allocations.data(), allocations.size());
1470
1471 DestroyAllAllocations(allocations);
1472 }
1473
1474 // # Test 3
1475 // Buffers of variable size.
1476 // Create a number of buffers. Remove some percent of them.
1477 // Defragment while having some percent of them unmovable.
1478 // Expected result: Just simple validation.
1479 {
1480 for(size_t i = 0; i < 100; ++i)
1481 {
1482 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1483 localBufCreateInfo.size = RandomBufSize();
1484
1485 AllocInfo allocInfo;
1486 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1487 allocations.push_back(allocInfo);
1488 }
1489
1490 const uint32_t percentToDelete = 60;
1491 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1492 for(size_t i = 0; i < numberToDelete; ++i)
1493 {
1494 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1495 DestroyAllocation(allocations[indexToDelete]);
1496 allocations.erase(allocations.begin() + indexToDelete);
1497 }
1498
1499 // Non-movable allocations will be at the beginning of allocations array.
1500 const uint32_t percentNonMovable = 20;
1501 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1502 for(size_t i = 0; i < numberNonMovable; ++i)
1503 {
1504 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1505 if(indexNonMovable != i)
1506 std::swap(allocations[i], allocations[indexNonMovable]);
1507 }
1508
1509 VmaDefragmentationStats defragStats;
1510 Defragment(
1511 allocations.data() + numberNonMovable,
1512 allocations.size() - numberNonMovable,
1513 nullptr, &defragStats);
1514
1515 ValidateAllocationsData(allocations.data(), allocations.size());
1516
1517 DestroyAllAllocations(allocations);
1518 }
1519 }
1520
Adam Sawicki647cf242018-11-23 17:58:00 +01001521 /*
1522 Allocation that must be move to an overlapping place using memmove().
1523 Create 2 buffers, second slightly bigger than the first. Delete first. Then defragment.
1524 */
Adam Sawickibdb89a92018-12-13 11:56:30 +01001525 if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN disabled.
Adam Sawicki647cf242018-11-23 17:58:00 +01001526 {
1527 AllocInfo allocInfo[2];
1528
1529 bufCreateInfo.size = BUF_SIZE;
1530 CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
1531 const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
1532 bufCreateInfo.size = biggerBufSize;
1533 CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);
1534
1535 DestroyAllocation(allocInfo[0]);
1536
1537 VmaDefragmentationStats defragStats;
1538 Defragment(&allocInfo[1], 1, nullptr, &defragStats);
1539 // If this fails, it means we couldn't do memmove with overlapping regions.
1540 TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);
1541
1542 ValidateAllocationsData(&allocInfo[1], 1);
1543 DestroyAllocation(allocInfo[1]);
1544 }
1545
Adam Sawickib8333fb2018-03-13 16:15:53 +01001546 vmaDestroyPool(g_hAllocator, pool);
1547}
1548
// Checks that defragmenting a whole pool (VmaDefragmentationInfo2::pPools)
// produces exactly the same statistics as passing the explicit list of all of
// that pool's allocations (pAllocations). Both cases run the same scenario:
// fill 2 blocks with fixed-size buffers, delete every second one, defragment.
void TestDefragmentationWholePool()
{
    wprintf(L"Test defragmentation whole pool\n");

    RandomNumberGenerator rand(668);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    // caseIndex == 0: defragment by pool; caseIndex == 1: by allocation list.
    VmaDefragmentationStats defragStats[2];
    for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
    {
        VmaPool pool;
        ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

        std::vector<AllocInfo> allocations;

        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment all of them.
        for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
        {
            AllocInfo allocInfo;
            CreateBuffer(pool, bufCreateInfo, false, allocInfo);
            allocations.push_back(allocInfo);
        }

        // Erasing at i and then incrementing skips one element, so this
        // removes every second allocation.
        for(size_t i = 1; i < allocations.size(); ++i)
        {
            DestroyAllocation(allocations[i]);
            allocations.erase(allocations.begin() + i);
        }

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        std::vector<VmaAllocation> allocationsToDefrag;
        if(caseIndex == 0)
        {
            // Defragment everything that lives in the pool.
            defragInfo.poolCount = 1;
            defragInfo.pPools = &pool;
        }
        else
        {
            // Defragment an explicitly listed set of allocations - here the
            // full content of the pool, so results must match case 0.
            const size_t allocCount = allocations.size();
            allocationsToDefrag.resize(allocCount);
            std::transform(
                allocations.begin(), allocations.end(),
                allocationsToDefrag.begin(),
                [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
            defragInfo.allocationCount = (uint32_t)allocCount;
            defragInfo.pAllocations = allocationsToDefrag.data();
        }

        VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);

        TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);

        ValidateAllocationsData(allocations.data(), allocations.size());

        DestroyAllAllocations(allocations);

        vmaDestroyPool(g_hAllocator, pool);
    }

    // Both defragmentation modes must have produced identical statistics.
    TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
    TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
    TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
    TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
}
1635
// Stress test: creates 400 random allocations in default pools, deletes 80% of
// them at random, defragments everything with no limits, recreates the Vulkan
// resources of moved allocations, and validates all data before and after.
void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        // Currently 0: all allocations are movable. Raising this removes a
        // random subset from the list passed to vmaDefragment.
        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            // NOTE(review): defragmentationInfo is not zero-initialized; this
            // relies on maxAllocationsToMove/maxBytesToMove being its only
            // members - confirm if VmaDefragmentationInfo ever grows.
            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            // Moved allocations need their buffer/image recreated and rebound.
            // NOTE(review): indexing allocations[i] with the index of
            // vmaAllocations[i] is only valid while nonMovablePercent == 0
            // (the two arrays stay parallel) - confirm before raising it.
            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    RecreateAllocationResource(allocations[i]);
                }
            }

            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}
1719
// Tests the non-incremental GPU defragmentation path of VMA:
// fills ~3 blocks of 256 MB with buffers, frees most of them, then runs
// vmaDefragmentationBegin/End with copies recorded into g_hTemporaryCommandBuffer.
// Allocation pUserData is used as a tag: 1 = movable, 2 = non-movable.
static void TestDefragmentationGpu()
{
    wprintf(L"Test defragmentation GPU\n");

    std::vector<AllocInfo> allocations;

    // Create that many allocations to surely fill 3 new blocks of 256 MB.
    const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
    const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
    const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
    const size_t bufCount = (size_t)(totalSize / bufSizeMin);
    const size_t percentToLeave = 30;
    const size_t percentNonMovable = 3;
    // Fixed seed so the test is deterministic across runs.
    RandomNumberGenerator rand = { 234522 };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = 0;

    // Create all intended buffers.
    for(size_t i = 0; i < bufCount; ++i)
    {
        // Random size in [bufSizeMin, bufSizeMax), aligned up to 32 bytes.
        bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);

        if(rand.Generate() % 100 < percentNonMovable)
        {
            // Tagged non-movable via pUserData == 2; excluded from defragmentation below.
            bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
                VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            allocCreateInfo.pUserData = (void*)(uintptr_t)2;
        }
        else
        {
            // Different usage just to see different color in output from VmaDumpVis.
            bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
                VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            // And in JSON dump.
            allocCreateInfo.pUserData = (void*)(uintptr_t)1;
        }

        AllocInfo alloc;
        alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
        alloc.m_StartValue = rand.Generate();
        allocations.push_back(alloc);
    }

    // Destroy some percentage of them to create holes that defragmentation can compact.
    {
        const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
        for(size_t i = 0; i < buffersToDestroy; ++i)
        {
            const size_t index = rand.Generate() % allocations.size();
            allocations[index].Destroy();
            allocations.erase(allocations.begin() + index);
        }
    }

    // Fill them with meaningful data.
    UploadGpuData(allocations.data(), allocations.size());

    wchar_t fileName[MAX_PATH];
    swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
    SaveAllocatorStatsToFile(fileName);

    // Defragment using GPU only.
    {
        const size_t allocCount = allocations.size();

        std::vector<VmaAllocation> allocationPtrs;
        std::vector<VkBool32> allocationChanged;
        std::vector<size_t> allocationOriginalIndex;

        // Gather only the allocations tagged movable (pUserData == 1),
        // remembering their index in `allocations` for the fix-up pass below.
        for(size_t i = 0; i < allocCount; ++i)
        {
            VmaAllocationInfo allocInfo = {};
            vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
            if((uintptr_t)allocInfo.pUserData == 1) // Movable
            {
                allocationPtrs.push_back(allocations[i].m_Allocation);
                allocationChanged.push_back(VK_FALSE);
                allocationOriginalIndex.push_back(i);
            }
        }

        const size_t movableAllocCount = allocationPtrs.size();

        // VMA records the GPU copy commands into g_hTemporaryCommandBuffer;
        // EndSingleTimeCommands() submits them and waits for completion.
        BeginSingleTimeCommands();

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.flags = 0;
        defragInfo.allocationCount = (uint32_t)movableAllocCount;
        defragInfo.pAllocations = allocationPtrs.data();
        defragInfo.pAllocationsChanged = allocationChanged.data();
        defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
        defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
        defragInfo.commandBuffer = g_hTemporaryCommandBuffer;

        VmaDefragmentationStats stats = {};
        VmaDefragmentationContext ctx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
        TEST(res >= VK_SUCCESS);

        EndSingleTimeCommands();

        vmaDefragmentationEnd(g_hAllocator, ctx);

        // Allocations whose memory moved need their VkBuffer recreated and
        // rebound at the new location.
        for(size_t i = 0; i < movableAllocCount; ++i)
        {
            if(allocationChanged[i])
            {
                const size_t origAllocIndex = allocationOriginalIndex[i];
                RecreateAllocationResource(allocations[origAllocIndex]);
            }
        }

        // If corruption detection is enabled, GPU defragmentation may not work on
        // memory types that have this detection active, e.g. on Intel.
        #if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
        TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
        TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
        #endif
    }

    // Verify the data survived the moves intact.
    ValidateGpuData(allocations.data(), allocations.size());

    swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
    SaveAllocatorStatsToFile(fileName);

    // Destroy all remaining buffers.
    for(size_t i = allocations.size(); i--; )
    {
        allocations[i].Destroy();
    }
}
1857
// Records into g_hTemporaryCommandBuffer the GPU work needed to carry out one
// incremental defragmentation pass described by stepInfo:
//  1. For every move, creates a new VkBuffer/VkImage and binds it to the
//     destination memory/offset; the new handle is stashed in
//     AllocInfo::m_NewBuffer / m_NewImage (the caller destroys the old one
//     after vmaEndDefragmentationPass).
//  2. Emits a "begin" pipeline barrier (layout transitions to TRANSFER_SRC/DST
//     for images, a global memory barrier for buffers).
//  3. Emits vkCmdCopyImage / vkCmdCopyBuffer for the actual data transfer.
//  4. Emits a "finalize" barrier restoring each image to its original layout
//     and making the copied data visible to subsequent reads.
// The caller is responsible for submitting the command buffer.
static void ProcessDefragmentationStepInfo(VmaDefragmentationPassInfo &stepInfo)
{
    std::vector<VkImageMemoryBarrier> beginImageBarriers;
    std::vector<VkImageMemoryBarrier> finalizeImageBarriers;

    VkPipelineStageFlags beginSrcStageMask = 0;
    VkPipelineStageFlags beginDstStageMask = VK_PIPELINE_STAGE_TRANSFER_BIT;

    VkPipelineStageFlags finalizeSrcStageMask = VK_PIPELINE_STAGE_TRANSFER_BIT;
    VkPipelineStageFlags finalizeDstStageMask = 0;

    // Set when at least one buffer move requires a global VkMemoryBarrier.
    bool wantsMemoryBarrier = false;

    VkMemoryBarrier beginMemoryBarrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER };
    VkMemoryBarrier finalizeMemoryBarrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER };

    // First pass: create destination resources and collect barriers.
    for(uint32_t i = 0; i < stepInfo.moveCount; ++i)
    {
        VmaAllocationInfo info;
        vmaGetAllocationInfo(g_hAllocator, stepInfo.pMoves[i].allocation, &info);

        // pUserData was set by the test to point back at the owning AllocInfo.
        AllocInfo *allocInfo = (AllocInfo *)info.pUserData;

        if(allocInfo->m_Image)
        {
            VkImage newImage;

            const VkResult result = vkCreateImage(g_hDevice, &allocInfo->m_ImageInfo, g_Allocs, &newImage);
            TEST(result >= VK_SUCCESS);

            // Bind the new image at the move's destination memory/offset.
            vkBindImageMemory(g_hDevice, newImage, stepInfo.pMoves[i].memory, stepInfo.pMoves[i].offset);
            allocInfo->m_NewImage = newImage;

            // Keep track of our pipeline stages that we need to wait/signal on
            beginSrcStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
            finalizeDstStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;

            // We need one pipeline barrier and two image layout transitions here
            // First we'll have to turn our newly created image into VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
            // And the second one is turning the old image into VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL

            VkImageSubresourceRange subresourceRange = {
                VK_IMAGE_ASPECT_COLOR_BIT,
                0, VK_REMAINING_MIP_LEVELS,
                0, VK_REMAINING_ARRAY_LAYERS
            };

            VkImageMemoryBarrier barrier = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
            barrier.srcAccessMask = 0;
            barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
            barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
            barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
            barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
            barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
            barrier.image = newImage;
            barrier.subresourceRange = subresourceRange;

            beginImageBarriers.push_back(barrier);

            // Second barrier to convert the existing image. This one actually needs a real barrier
            barrier.srcAccessMask = VK_ACCESS_MEMORY_WRITE_BIT;
            barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
            barrier.oldLayout = allocInfo->m_ImageLayout;
            barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
            barrier.image = allocInfo->m_Image;

            beginImageBarriers.push_back(barrier);

            // And lastly we need a barrier that turns our new image into the layout of the old one
            barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
            barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
            barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
            barrier.newLayout = allocInfo->m_ImageLayout;
            barrier.image = newImage;

            finalizeImageBarriers.push_back(barrier);
        }
        else if(allocInfo->m_Buffer)
        {
            VkBuffer newBuffer;

            const VkResult result = vkCreateBuffer(g_hDevice, &allocInfo->m_BufferInfo, g_Allocs, &newBuffer);
            TEST(result >= VK_SUCCESS);

            // Bind the new buffer at the move's destination memory/offset.
            vkBindBufferMemory(g_hDevice, newBuffer, stepInfo.pMoves[i].memory, stepInfo.pMoves[i].offset);
            allocInfo->m_NewBuffer = newBuffer;

            // Keep track of our pipeline stages that we need to wait/signal on
            beginSrcStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
            finalizeDstStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;

            // Buffers need no layout transitions, only a global memory barrier
            // around the transfer.
            beginMemoryBarrier.srcAccessMask |= VK_ACCESS_MEMORY_WRITE_BIT;
            beginMemoryBarrier.dstAccessMask |= VK_ACCESS_TRANSFER_READ_BIT;

            finalizeMemoryBarrier.srcAccessMask |= VK_ACCESS_TRANSFER_WRITE_BIT;
            finalizeMemoryBarrier.dstAccessMask |= VK_ACCESS_MEMORY_READ_BIT;

            wantsMemoryBarrier = true;
        }
    }

    // Single "begin" barrier covering all moves before the copies start.
    if(!beginImageBarriers.empty() || wantsMemoryBarrier)
    {
        const uint32_t memoryBarrierCount = wantsMemoryBarrier ? 1 : 0;

        vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, beginSrcStageMask, beginDstStageMask, 0,
            memoryBarrierCount, &beginMemoryBarrier,
            0, nullptr,
            (uint32_t)beginImageBarriers.size(), beginImageBarriers.data());
    }

    // Second pass: record the actual copy commands.
    for(uint32_t i = 0; i < stepInfo.moveCount; ++ i)
    {
        VmaAllocationInfo info;
        vmaGetAllocationInfo(g_hAllocator, stepInfo.pMoves[i].allocation, &info);

        AllocInfo *allocInfo = (AllocInfo *)info.pUserData;

        if(allocInfo->m_Image)
        {
            std::vector<VkImageCopy> imageCopies;

            // Copy all mips of the source image into the target image
            VkOffset3D offset = { 0, 0, 0 };
            VkExtent3D extent = allocInfo->m_ImageInfo.extent;

            VkImageSubresourceLayers subresourceLayers = {
                VK_IMAGE_ASPECT_COLOR_BIT,
                0,
                0, 1
            };

            for(uint32_t mip = 0; mip < allocInfo->m_ImageInfo.mipLevels; ++ mip)
            {
                subresourceLayers.mipLevel = mip;

                VkImageCopy imageCopy{
                    subresourceLayers,
                    offset,
                    subresourceLayers,
                    offset,
                    extent
                };

                imageCopies.push_back(imageCopy);

                // Halve each dimension per mip level, clamped to at least 1.
                extent.width = std::max(uint32_t(1), extent.width >> 1);
                extent.height = std::max(uint32_t(1), extent.height >> 1);
                extent.depth = std::max(uint32_t(1), extent.depth >> 1);
            }

            vkCmdCopyImage(
                g_hTemporaryCommandBuffer,
                allocInfo->m_Image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                allocInfo->m_NewImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                (uint32_t)imageCopies.size(), imageCopies.data());
        }
        else if(allocInfo->m_Buffer)
        {
            // Whole-buffer copy: src offset 0, dst offset 0, full size.
            VkBufferCopy region = {
                0,
                0,
                allocInfo->m_BufferInfo.size };

            vkCmdCopyBuffer(g_hTemporaryCommandBuffer,
                allocInfo->m_Buffer, allocInfo->m_NewBuffer,
                1, &region);
        }
    }

    // Single "finalize" barrier: restore image layouts and make transfer
    // writes visible to later reads.
    if(!finalizeImageBarriers.empty() || wantsMemoryBarrier)
    {
        const uint32_t memoryBarrierCount = wantsMemoryBarrier ? 1 : 0;

        vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, finalizeSrcStageMask, finalizeDstStageMask, 0,
            memoryBarrierCount, &finalizeMemoryBarrier,
            0, nullptr,
            (uint32_t)finalizeImageBarriers.size(), finalizeImageBarriers.data());
    }
}
2038
2039
2040static void TestDefragmentationIncrementalBasic()
2041{
2042 wprintf(L"Test defragmentation incremental basic\n");
Adam Sawickia52012d2019-12-23 15:28:51 +01002043
2044 std::vector<AllocInfo> allocations;
2045
2046 // Create that many allocations to surely fill 3 new blocks of 256 MB.
2047 const std::array<uint32_t, 3> imageSizes = { 256, 512, 1024 };
2048 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
2049 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
2050 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic467e282019-12-23 16:38:31 +01002051 const size_t imageCount = totalSize / ((size_t)imageSizes[0] * imageSizes[0] * 4) / 2;
Adam Sawickia52012d2019-12-23 15:28:51 +01002052 const size_t bufCount = (size_t)(totalSize / bufSizeMin) / 2;
2053 const size_t percentToLeave = 30;
2054 RandomNumberGenerator rand = { 234522 };
2055
2056 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
2057 imageInfo.imageType = VK_IMAGE_TYPE_2D;
2058 imageInfo.extent.depth = 1;
2059 imageInfo.mipLevels = 1;
2060 imageInfo.arrayLayers = 1;
2061 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
2062 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
2063 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
2064 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
2065 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
2066
2067 VmaAllocationCreateInfo allocCreateInfo = {};
2068 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2069 allocCreateInfo.flags = 0;
2070
2071 // Create all intended images.
2072 for(size_t i = 0; i < imageCount; ++i)
2073 {
2074 const uint32_t size = imageSizes[rand.Generate() % 3];
2075
2076 imageInfo.extent.width = size;
2077 imageInfo.extent.height = size;
2078
2079 AllocInfo alloc;
2080 alloc.CreateImage(imageInfo, allocCreateInfo, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
2081 alloc.m_StartValue = 0;
2082
2083 allocations.push_back(alloc);
2084 }
2085
2086 // And all buffers
2087 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2088
2089 for(size_t i = 0; i < bufCount; ++i)
2090 {
2091 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
2092 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2093
2094 AllocInfo alloc;
2095 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
2096 alloc.m_StartValue = 0;
2097
2098 allocations.push_back(alloc);
2099 }
2100
2101 // Destroy some percentage of them.
2102 {
2103 const size_t allocationsToDestroy = round_div<size_t>((imageCount + bufCount) * (100 - percentToLeave), 100);
2104 for(size_t i = 0; i < allocationsToDestroy; ++i)
2105 {
2106 const size_t index = rand.Generate() % allocations.size();
2107 allocations[index].Destroy();
2108 allocations.erase(allocations.begin() + index);
2109 }
2110 }
2111
2112 {
2113 // Set our user data pointers. A real application should probably be more clever here
2114 const size_t allocationCount = allocations.size();
2115 for(size_t i = 0; i < allocationCount; ++i)
2116 {
2117 AllocInfo &alloc = allocations[i];
2118 vmaSetAllocationUserData(g_hAllocator, alloc.m_Allocation, &alloc);
2119 }
2120 }
2121
2122 // Fill them with meaningful data.
2123 UploadGpuData(allocations.data(), allocations.size());
2124
2125 wchar_t fileName[MAX_PATH];
2126 swprintf_s(fileName, L"GPU_defragmentation_incremental_basic_A_before.json");
2127 SaveAllocatorStatsToFile(fileName);
2128
2129 // Defragment using GPU only.
2130 {
2131 const size_t allocCount = allocations.size();
2132
2133 std::vector<VmaAllocation> allocationPtrs;
2134
2135 for(size_t i = 0; i < allocCount; ++i)
2136 {
Adam Sawickia52012d2019-12-23 15:28:51 +01002137 allocationPtrs.push_back(allocations[i].m_Allocation);
2138 }
2139
2140 const size_t movableAllocCount = allocationPtrs.size();
2141
2142 VmaDefragmentationInfo2 defragInfo = {};
2143 defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_INCREMENTAL;
2144 defragInfo.allocationCount = (uint32_t)movableAllocCount;
2145 defragInfo.pAllocations = allocationPtrs.data();
2146 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
2147 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
2148
2149 VmaDefragmentationStats stats = {};
2150 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
2151 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
2152 TEST(res >= VK_SUCCESS);
2153
2154 res = VK_NOT_READY;
2155
Adam Sawickic467e282019-12-23 16:38:31 +01002156 std::vector<VmaDefragmentationPassMoveInfo> moveInfo;
Adam Sawickia52012d2019-12-23 15:28:51 +01002157 moveInfo.resize(movableAllocCount);
2158
2159 while(res == VK_NOT_READY)
2160 {
Adam Sawickic467e282019-12-23 16:38:31 +01002161 VmaDefragmentationPassInfo stepInfo = {};
Adam Sawickia52012d2019-12-23 15:28:51 +01002162 stepInfo.pMoves = moveInfo.data();
2163 stepInfo.moveCount = (uint32_t)moveInfo.size();
2164
Adam Sawickic467e282019-12-23 16:38:31 +01002165 res = vmaBeginDefragmentationPass(g_hAllocator, ctx, &stepInfo);
Adam Sawickia52012d2019-12-23 15:28:51 +01002166 TEST(res >= VK_SUCCESS);
2167
2168 BeginSingleTimeCommands();
Adam Sawickic467e282019-12-23 16:38:31 +01002169 std::vector<void*> newHandles;
Adam Sawickia52012d2019-12-23 15:28:51 +01002170 ProcessDefragmentationStepInfo(stepInfo);
2171 EndSingleTimeCommands();
2172
Adam Sawickic467e282019-12-23 16:38:31 +01002173 res = vmaEndDefragmentationPass(g_hAllocator, ctx);
2174
2175 // Destroy old buffers/images and replace them with new handles.
2176 for(size_t i = 0; i < stepInfo.moveCount; ++i)
2177 {
2178 VmaAllocation const alloc = stepInfo.pMoves[i].allocation;
2179 VmaAllocationInfo vmaAllocInfo;
2180 vmaGetAllocationInfo(g_hAllocator, alloc, &vmaAllocInfo);
2181 AllocInfo* allocInfo = (AllocInfo*)vmaAllocInfo.pUserData;
2182 if(allocInfo->m_Buffer)
2183 {
2184 assert(allocInfo->m_NewBuffer && !allocInfo->m_Image && !allocInfo->m_NewImage);
2185 vkDestroyBuffer(g_hDevice, allocInfo->m_Buffer, g_Allocs);
2186 allocInfo->m_Buffer = allocInfo->m_NewBuffer;
2187 allocInfo->m_NewBuffer = VK_NULL_HANDLE;
2188 }
2189 else if(allocInfo->m_Image)
2190 {
2191 assert(allocInfo->m_NewImage && !allocInfo->m_Buffer && !allocInfo->m_NewBuffer);
2192 vkDestroyImage(g_hDevice, allocInfo->m_Image, g_Allocs);
2193 allocInfo->m_Image = allocInfo->m_NewImage;
2194 allocInfo->m_NewImage = VK_NULL_HANDLE;
2195 }
2196 else
2197 assert(0);
2198 }
Adam Sawickia52012d2019-12-23 15:28:51 +01002199 }
2200
2201 TEST(res >= VK_SUCCESS);
2202 vmaDefragmentationEnd(g_hAllocator, ctx);
2203
2204 // If corruption detection is enabled, GPU defragmentation may not work on
2205 // memory types that have this detection active, e.g. on Intel.
2206#if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
2207 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
2208 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
2209#endif
2210 }
2211
2212 //ValidateGpuData(allocations.data(), allocations.size());
2213
2214 swprintf_s(fileName, L"GPU_defragmentation_incremental_basic_B_after.json");
2215 SaveAllocatorStatsToFile(fileName);
2216
Adam Sawickic467e282019-12-23 16:38:31 +01002217 // Destroy all remaining buffers and images.
Adam Sawickia52012d2019-12-23 15:28:51 +01002218 for(size_t i = allocations.size(); i--; )
2219 {
2220 allocations[i].Destroy();
2221 }
Adam Sawickia52012d2019-12-23 15:28:51 +01002222}
2223
2224void TestDefragmentationIncrementalComplex()
2225{
2226 wprintf(L"Test defragmentation incremental complex\n");
Adam Sawickidb4c1632020-07-16 16:41:53 +02002227
Adam Sawickia52012d2019-12-23 15:28:51 +01002228 std::vector<AllocInfo> allocations;
2229
2230 // Create that many allocations to surely fill 3 new blocks of 256 MB.
2231 const std::array<uint32_t, 3> imageSizes = { 256, 512, 1024 };
2232 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
2233 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
2234 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
2235 const size_t imageCount = (size_t)(totalSize / (imageSizes[0] * imageSizes[0] * 4)) / 2;
2236 const size_t bufCount = (size_t)(totalSize / bufSizeMin) / 2;
2237 const size_t percentToLeave = 30;
2238 RandomNumberGenerator rand = { 234522 };
2239
2240 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
2241 imageInfo.imageType = VK_IMAGE_TYPE_2D;
2242 imageInfo.extent.depth = 1;
2243 imageInfo.mipLevels = 1;
2244 imageInfo.arrayLayers = 1;
2245 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
2246 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
2247 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
2248 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
2249 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
2250
2251 VmaAllocationCreateInfo allocCreateInfo = {};
2252 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2253 allocCreateInfo.flags = 0;
2254
2255 // Create all intended images.
2256 for(size_t i = 0; i < imageCount; ++i)
2257 {
2258 const uint32_t size = imageSizes[rand.Generate() % 3];
2259
2260 imageInfo.extent.width = size;
2261 imageInfo.extent.height = size;
2262
2263 AllocInfo alloc;
2264 alloc.CreateImage(imageInfo, allocCreateInfo, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
2265 alloc.m_StartValue = 0;
2266
2267 allocations.push_back(alloc);
2268 }
2269
2270 // And all buffers
2271 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2272
2273 for(size_t i = 0; i < bufCount; ++i)
2274 {
2275 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
2276 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2277
2278 AllocInfo alloc;
2279 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
2280 alloc.m_StartValue = 0;
2281
2282 allocations.push_back(alloc);
2283 }
2284
2285 // Destroy some percentage of them.
2286 {
2287 const size_t allocationsToDestroy = round_div<size_t>((imageCount + bufCount) * (100 - percentToLeave), 100);
2288 for(size_t i = 0; i < allocationsToDestroy; ++i)
2289 {
2290 const size_t index = rand.Generate() % allocations.size();
2291 allocations[index].Destroy();
2292 allocations.erase(allocations.begin() + index);
2293 }
2294 }
2295
2296 {
2297 // Set our user data pointers. A real application should probably be more clever here
2298 const size_t allocationCount = allocations.size();
2299 for(size_t i = 0; i < allocationCount; ++i)
2300 {
2301 AllocInfo &alloc = allocations[i];
2302 vmaSetAllocationUserData(g_hAllocator, alloc.m_Allocation, &alloc);
2303 }
2304 }
2305
2306 // Fill them with meaningful data.
2307 UploadGpuData(allocations.data(), allocations.size());
2308
2309 wchar_t fileName[MAX_PATH];
2310 swprintf_s(fileName, L"GPU_defragmentation_incremental_complex_A_before.json");
2311 SaveAllocatorStatsToFile(fileName);
2312
2313 std::vector<AllocInfo> additionalAllocations;
2314
2315#define MakeAdditionalAllocation() \
2316 do { \
2317 { \
2318 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16); \
2319 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT; \
2320 \
2321 AllocInfo alloc; \
2322 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo); \
2323 \
2324 additionalAllocations.push_back(alloc); \
2325 } \
2326 } while(0)
2327
2328 // Defragment using GPU only.
2329 {
2330 const size_t allocCount = allocations.size();
2331
2332 std::vector<VmaAllocation> allocationPtrs;
2333
2334 for(size_t i = 0; i < allocCount; ++i)
2335 {
2336 VmaAllocationInfo allocInfo = {};
2337 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
2338
2339 allocationPtrs.push_back(allocations[i].m_Allocation);
2340 }
2341
2342 const size_t movableAllocCount = allocationPtrs.size();
2343
2344 VmaDefragmentationInfo2 defragInfo = {};
2345 defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_INCREMENTAL;
2346 defragInfo.allocationCount = (uint32_t)movableAllocCount;
2347 defragInfo.pAllocations = allocationPtrs.data();
2348 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
2349 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
2350
2351 VmaDefragmentationStats stats = {};
2352 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
2353 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
2354 TEST(res >= VK_SUCCESS);
2355
2356 res = VK_NOT_READY;
2357
Adam Sawickic467e282019-12-23 16:38:31 +01002358 std::vector<VmaDefragmentationPassMoveInfo> moveInfo;
Adam Sawickia52012d2019-12-23 15:28:51 +01002359 moveInfo.resize(movableAllocCount);
2360
2361 MakeAdditionalAllocation();
2362
2363 while(res == VK_NOT_READY)
2364 {
Adam Sawickic467e282019-12-23 16:38:31 +01002365 VmaDefragmentationPassInfo stepInfo = {};
Adam Sawickia52012d2019-12-23 15:28:51 +01002366 stepInfo.pMoves = moveInfo.data();
2367 stepInfo.moveCount = (uint32_t)moveInfo.size();
2368
Adam Sawickic467e282019-12-23 16:38:31 +01002369 res = vmaBeginDefragmentationPass(g_hAllocator, ctx, &stepInfo);
Adam Sawickia52012d2019-12-23 15:28:51 +01002370 TEST(res >= VK_SUCCESS);
2371
2372 MakeAdditionalAllocation();
2373
2374 BeginSingleTimeCommands();
2375 ProcessDefragmentationStepInfo(stepInfo);
2376 EndSingleTimeCommands();
2377
Adam Sawickic467e282019-12-23 16:38:31 +01002378 res = vmaEndDefragmentationPass(g_hAllocator, ctx);
2379
2380 // Destroy old buffers/images and replace them with new handles.
2381 for(size_t i = 0; i < stepInfo.moveCount; ++i)
2382 {
2383 VmaAllocation const alloc = stepInfo.pMoves[i].allocation;
2384 VmaAllocationInfo vmaAllocInfo;
2385 vmaGetAllocationInfo(g_hAllocator, alloc, &vmaAllocInfo);
2386 AllocInfo* allocInfo = (AllocInfo*)vmaAllocInfo.pUserData;
2387 if(allocInfo->m_Buffer)
2388 {
2389 assert(allocInfo->m_NewBuffer && !allocInfo->m_Image && !allocInfo->m_NewImage);
2390 vkDestroyBuffer(g_hDevice, allocInfo->m_Buffer, g_Allocs);
2391 allocInfo->m_Buffer = allocInfo->m_NewBuffer;
2392 allocInfo->m_NewBuffer = VK_NULL_HANDLE;
2393 }
2394 else if(allocInfo->m_Image)
2395 {
2396 assert(allocInfo->m_NewImage && !allocInfo->m_Buffer && !allocInfo->m_NewBuffer);
2397 vkDestroyImage(g_hDevice, allocInfo->m_Image, g_Allocs);
2398 allocInfo->m_Image = allocInfo->m_NewImage;
2399 allocInfo->m_NewImage = VK_NULL_HANDLE;
2400 }
2401 else
2402 assert(0);
2403 }
Adam Sawickia52012d2019-12-23 15:28:51 +01002404
2405 MakeAdditionalAllocation();
2406 }
2407
2408 TEST(res >= VK_SUCCESS);
2409 vmaDefragmentationEnd(g_hAllocator, ctx);
2410
2411 // If corruption detection is enabled, GPU defragmentation may not work on
2412 // memory types that have this detection active, e.g. on Intel.
2413#if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
2414 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
2415 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
2416#endif
2417 }
2418
2419 //ValidateGpuData(allocations.data(), allocations.size());
2420
2421 swprintf_s(fileName, L"GPU_defragmentation_incremental_complex_B_after.json");
2422 SaveAllocatorStatsToFile(fileName);
2423
2424 // Destroy all remaining buffers.
2425 for(size_t i = allocations.size(); i--; )
2426 {
2427 allocations[i].Destroy();
2428 }
2429
2430 for(size_t i = additionalAllocations.size(); i--; )
2431 {
2432 additionalAllocations[i].Destroy();
2433 }
Adam Sawickia52012d2019-12-23 15:28:51 +01002434}
2435
2436
Adam Sawickib8333fb2018-03-13 16:15:53 +01002437static void TestUserData()
2438{
2439 VkResult res;
2440
2441 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2442 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
2443 bufCreateInfo.size = 0x10000;
2444
2445 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
2446 {
2447 // Opaque pointer
2448 {
2449
2450 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
2451 void* pointerToSomething = &res;
2452
2453 VmaAllocationCreateInfo allocCreateInfo = {};
2454 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2455 allocCreateInfo.pUserData = numberAsPointer;
2456 if(testIndex == 1)
2457 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2458
2459 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
2460 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002461 TEST(res == VK_SUCCESS);
2462 TEST(allocInfo.pUserData = numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002463
2464 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002465 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002466
2467 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
2468 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002469 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002470
2471 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2472 }
2473
2474 // String
2475 {
2476 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
2477 const char* name2 = "2";
2478 const size_t name1Len = strlen(name1);
2479
2480 char* name1Buf = new char[name1Len + 1];
2481 strcpy_s(name1Buf, name1Len + 1, name1);
2482
2483 VmaAllocationCreateInfo allocCreateInfo = {};
2484 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2485 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
2486 allocCreateInfo.pUserData = name1Buf;
2487 if(testIndex == 1)
2488 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2489
2490 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
2491 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002492 TEST(res == VK_SUCCESS);
2493 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
2494 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002495
2496 delete[] name1Buf;
2497
2498 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002499 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002500
2501 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
2502 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002503 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002504
2505 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
2506 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002507 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002508
2509 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2510 }
2511 }
2512}
2513
Adam Sawicki370ab182018-11-08 16:31:00 +01002514static void TestInvalidAllocations()
2515{
2516 VkResult res;
2517
2518 VmaAllocationCreateInfo allocCreateInfo = {};
2519 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2520
2521 // Try to allocate 0 bytes.
2522 {
2523 VkMemoryRequirements memReq = {};
2524 memReq.size = 0; // !!!
2525 memReq.alignment = 4;
2526 memReq.memoryTypeBits = UINT32_MAX;
2527 VmaAllocation alloc = VK_NULL_HANDLE;
2528 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
2529 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
2530 }
2531
2532 // Try to create buffer with size = 0.
2533 {
2534 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2535 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2536 bufCreateInfo.size = 0; // !!!
2537 VkBuffer buf = VK_NULL_HANDLE;
2538 VmaAllocation alloc = VK_NULL_HANDLE;
2539 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
2540 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
2541 }
2542
2543 // Try to create image with one dimension = 0.
2544 {
2545 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2546 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
2547 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
2548 imageCreateInfo.extent.width = 128;
2549 imageCreateInfo.extent.height = 0; // !!!
2550 imageCreateInfo.extent.depth = 1;
2551 imageCreateInfo.mipLevels = 1;
2552 imageCreateInfo.arrayLayers = 1;
2553 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
2554 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
2555 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
2556 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
2557 VkImage image = VK_NULL_HANDLE;
2558 VmaAllocation alloc = VK_NULL_HANDLE;
2559 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
2560 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
2561 }
2562}
2563
Adam Sawickib8333fb2018-03-13 16:15:53 +01002564static void TestMemoryRequirements()
2565{
2566 VkResult res;
2567 VkBuffer buf;
2568 VmaAllocation alloc;
2569 VmaAllocationInfo allocInfo;
2570
2571 const VkPhysicalDeviceMemoryProperties* memProps;
2572 vmaGetMemoryProperties(g_hAllocator, &memProps);
2573
2574 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2575 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2576 bufInfo.size = 128;
2577
2578 VmaAllocationCreateInfo allocCreateInfo = {};
2579
2580 // No requirements.
2581 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002582 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002583 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2584
2585 // Usage.
2586 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2587 allocCreateInfo.requiredFlags = 0;
2588 allocCreateInfo.preferredFlags = 0;
2589 allocCreateInfo.memoryTypeBits = UINT32_MAX;
2590
2591 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002592 TEST(res == VK_SUCCESS);
2593 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002594 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2595
2596 // Required flags, preferred flags.
2597 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
2598 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
2599 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
2600 allocCreateInfo.memoryTypeBits = 0;
2601
2602 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002603 TEST(res == VK_SUCCESS);
2604 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2605 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002606 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2607
2608 // memoryTypeBits.
2609 const uint32_t memType = allocInfo.memoryType;
2610 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2611 allocCreateInfo.requiredFlags = 0;
2612 allocCreateInfo.preferredFlags = 0;
2613 allocCreateInfo.memoryTypeBits = 1u << memType;
2614
2615 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002616 TEST(res == VK_SUCCESS);
2617 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002618 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2619
2620}
2621
Adam Sawickia1d992f2020-03-02 15:32:10 +01002622static void TestGetAllocatorInfo()
2623{
2624 wprintf(L"Test vnaGetAllocatorInfo\n");
2625
2626 VmaAllocatorInfo allocInfo = {};
2627 vmaGetAllocatorInfo(g_hAllocator, &allocInfo);
2628 TEST(allocInfo.instance == g_hVulkanInstance);
2629 TEST(allocInfo.physicalDevice == g_hPhysicalDevice);
2630 TEST(allocInfo.device == g_hDevice);
2631}
2632
Adam Sawickib8333fb2018-03-13 16:15:53 +01002633static void TestBasics()
2634{
Adam Sawickiaaa1a562020-06-24 17:41:09 +02002635 wprintf(L"Test basics\n");
2636
Adam Sawickib8333fb2018-03-13 16:15:53 +01002637 VkResult res;
2638
Adam Sawickia1d992f2020-03-02 15:32:10 +01002639 TestGetAllocatorInfo();
2640
Adam Sawickib8333fb2018-03-13 16:15:53 +01002641 TestMemoryRequirements();
2642
2643 // Lost allocation
2644 {
2645 VmaAllocation alloc = VK_NULL_HANDLE;
2646 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002647 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002648
2649 VmaAllocationInfo allocInfo;
2650 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002651 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
2652 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002653
2654 vmaFreeMemory(g_hAllocator, alloc);
2655 }
2656
2657 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
2658 {
2659 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2660 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
2661 bufCreateInfo.size = 128;
2662
2663 VmaAllocationCreateInfo allocCreateInfo = {};
2664 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2665 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
2666
2667 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
2668 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002669 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002670
2671 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2672
2673 // Same with OWN_MEMORY.
2674 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2675
2676 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002677 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002678
2679 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2680 }
2681
2682 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01002683
2684 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01002685}
2686
Adam Sawickiaaa1a562020-06-24 17:41:09 +02002687static void TestAllocationVersusResourceSize()
2688{
2689 wprintf(L"Test allocation versus resource size\n");
2690
2691 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2692 bufCreateInfo.size = 22921; // Prime number
2693 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
2694
2695 VmaAllocationCreateInfo allocCreateInfo = {};
2696 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2697
2698 for(uint32_t i = 0; i < 2; ++i)
2699 {
2700 allocCreateInfo.flags = (i == 1) ? VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT : 0;
2701
2702 AllocInfo info;
2703 info.CreateBuffer(bufCreateInfo, allocCreateInfo);
2704
2705 VmaAllocationInfo allocInfo = {};
2706 vmaGetAllocationInfo(g_hAllocator, info.m_Allocation, &allocInfo);
2707 //wprintf(L" Buffer size = %llu, allocation size = %llu\n", bufCreateInfo.size, allocInfo.size);
2708
2709 // Map and test accessing entire area of the allocation, not only the buffer.
2710 void* mappedPtr = nullptr;
2711 VkResult res = vmaMapMemory(g_hAllocator, info.m_Allocation, &mappedPtr);
2712 TEST(res == VK_SUCCESS);
2713
2714 memset(mappedPtr, 0xCC, (size_t)allocInfo.size);
2715
2716 vmaUnmapMemory(g_hAllocator, info.m_Allocation);
2717
2718 info.Destroy();
2719 }
2720}
2721
Adam Sawickiddcbf8c2019-11-22 15:22:42 +01002722static void TestPool_MinBlockCount()
2723{
2724#if defined(VMA_DEBUG_MARGIN) && VMA_DEBUG_MARGIN > 0
2725 return;
2726#endif
2727
2728 wprintf(L"Test Pool MinBlockCount\n");
2729 VkResult res;
2730
2731 static const VkDeviceSize ALLOC_SIZE = 512ull * 1024;
2732 static const VkDeviceSize BLOCK_SIZE = ALLOC_SIZE * 2; // Each block can fit 2 allocations.
2733
2734 VmaAllocationCreateInfo allocCreateInfo = {};
2735 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_COPY;
2736
2737 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2738 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2739 bufCreateInfo.size = ALLOC_SIZE;
2740
2741 VmaPoolCreateInfo poolCreateInfo = {};
2742 poolCreateInfo.blockSize = BLOCK_SIZE;
2743 poolCreateInfo.minBlockCount = 2; // At least 2 blocks always present.
2744 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
2745 TEST(res == VK_SUCCESS);
2746
2747 VmaPool pool = VK_NULL_HANDLE;
2748 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
2749 TEST(res == VK_SUCCESS && pool != VK_NULL_HANDLE);
2750
2751 // Check that there are 2 blocks preallocated as requested.
2752 VmaPoolStats begPoolStats = {};
2753 vmaGetPoolStats(g_hAllocator, pool, &begPoolStats);
2754 TEST(begPoolStats.blockCount == 2 && begPoolStats.allocationCount == 0 && begPoolStats.size == BLOCK_SIZE * 2);
2755
2756 // Allocate 5 buffers to create 3 blocks.
2757 static const uint32_t BUF_COUNT = 5;
2758 allocCreateInfo.pool = pool;
2759 std::vector<AllocInfo> allocs(BUF_COUNT);
2760 for(uint32_t i = 0; i < BUF_COUNT; ++i)
2761 {
2762 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &allocs[i].m_Buffer, &allocs[i].m_Allocation, nullptr);
2763 TEST(res == VK_SUCCESS && allocs[i].m_Buffer != VK_NULL_HANDLE && allocs[i].m_Allocation != VK_NULL_HANDLE);
2764 }
2765
2766 // Check that there are really 3 blocks.
2767 VmaPoolStats poolStats2 = {};
2768 vmaGetPoolStats(g_hAllocator, pool, &poolStats2);
2769 TEST(poolStats2.blockCount == 3 && poolStats2.allocationCount == BUF_COUNT && poolStats2.size == BLOCK_SIZE * 3);
2770
2771 // Free two first allocations to make one block empty.
2772 allocs[0].Destroy();
2773 allocs[1].Destroy();
2774
2775 // Check that there are still 3 blocks due to hysteresis.
2776 VmaPoolStats poolStats3 = {};
2777 vmaGetPoolStats(g_hAllocator, pool, &poolStats3);
2778 TEST(poolStats3.blockCount == 3 && poolStats3.allocationCount == BUF_COUNT - 2 && poolStats2.size == BLOCK_SIZE * 3);
2779
2780 // Free the last allocation to make second block empty.
2781 allocs[BUF_COUNT - 1].Destroy();
2782
2783 // Check that there are now 2 blocks only.
2784 VmaPoolStats poolStats4 = {};
2785 vmaGetPoolStats(g_hAllocator, pool, &poolStats4);
2786 TEST(poolStats4.blockCount == 2 && poolStats4.allocationCount == BUF_COUNT - 3 && poolStats4.size == BLOCK_SIZE * 2);
2787
2788 // Cleanup.
2789 for(size_t i = allocs.size(); i--; )
2790 {
2791 allocs[i].Destroy();
2792 }
2793 vmaDestroyPool(g_hAllocator, pool);
2794}
2795
// Verifies the pHeapSizeLimit feature: creates a private allocator whose every
// heap is capped at HEAP_SIZE_LIMIT, fills that budget, then checks that one
// more allocation fails with VK_ERROR_OUT_OF_DEVICE_MEMORY.
void TestHeapSizeLimit()
{
    const VkDeviceSize HEAP_SIZE_LIMIT = 100ull * 1024 * 1024; // 100 MB
    const VkDeviceSize BLOCK_SIZE = 10ull * 1024 * 1024; // 10 MB

    // Apply the same limit to every heap slot; unused slots are harmless.
    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    {
        heapSizeLimit[i] = HEAP_SIZE_LIMIT;
    }

    // A dedicated allocator is needed because the limit is fixed at creation.
    VmaAllocatorCreateInfo allocatorCreateInfo = {};
    allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
    allocatorCreateInfo.device = g_hDevice;
    allocatorCreateInfo.instance = g_hVulkanInstance;
    allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;

    VmaAllocator hAllocator;
    VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
    TEST(res == VK_SUCCESS);

    // Buffer + allocation pair tracked for final cleanup.
    struct Item
    {
        VkBuffer hBuf;
        VmaAllocation hAlloc;
    };
    std::vector<Item> items;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    // 1. Allocate two blocks of dedicated memory, half the size of BLOCK_SIZE.
    // dedicatedAllocInfo also tells us which memory type these landed in.
    VmaAllocationInfo dedicatedAllocInfo;
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        bufCreateInfo.size = BLOCK_SIZE / 2;

        for(size_t i = 0; i < 2; ++i)
        {
            Item item;
            res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &dedicatedAllocInfo);
            TEST(res == VK_SUCCESS);
            items.push_back(item);
        }
    }

    // Create pool to make sure allocations must be out of this memory type.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = dedicatedAllocInfo.memoryType;
    poolCreateInfo.blockSize = BLOCK_SIZE;

    VmaPool hPool;
    res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
    TEST(res == VK_SUCCESS);

    // 2. Allocate normal buffers from all the remaining memory.
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.pool = hPool;

        bufCreateInfo.size = BLOCK_SIZE / 2;

        // Two half-block buffers per remaining block; one BLOCK_SIZE worth of
        // budget is already consumed by the dedicated allocations above.
        const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
        for(size_t i = 0; i < bufCount; ++i)
        {
            Item item;
            res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
            TEST(res == VK_SUCCESS);
            items.push_back(item);
        }
    }

    // 3. Allocation of one more (even small) buffer should fail.
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.pool = hPool;

        bufCreateInfo.size = 128;

        VkBuffer hBuf;
        VmaAllocation hAlloc;
        res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
        TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
    }

    // Destroy everything.
    for(size_t i = items.size(); i--; )
    {
        vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
    }

    vmaDestroyPool(hAllocator, hPool);

    vmaDestroyAllocator(hAllocator);
}
2894
#if VMA_DEBUG_MARGIN
// Verifies that VMA_DEBUG_MARGIN bytes are reserved before each allocation and
// between neighboring allocations, and that vmaCheckCorruption passes.
static void TestDebugMargin()
{
    // Redundant under #if VMA_DEBUG_MARGIN, kept as a safety net.
    if(VMA_DEBUG_MARGIN == 0)
    {
        return;
    }

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    // Create few buffers of different size.
    const size_t BUF_COUNT = 10;
    BufferInfo buffers[BUF_COUNT];
    VmaAllocationInfo allocInfo[BUF_COUNT];
    for(size_t i = 0; i < BUF_COUNT; ++i) // Fixed: loop bound was hard-coded 10.
    {
        bufInfo.size = (VkDeviceSize)(i + 1) * 64;
        // Last one will be mapped.
        allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
        TEST(res == VK_SUCCESS);
        // Margin is preserved also at the beginning of a block.
        TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);

        if(i == BUF_COUNT - 1)
        {
            // Fill with data.
            TEST(allocInfo[i].pMappedData != nullptr);
            // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
            memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
        }
    }

    // Check if their offsets preserve margin between them.
    std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
    {
        if(lhs.deviceMemory != rhs.deviceMemory)
        {
            return lhs.deviceMemory < rhs.deviceMemory;
        }
        return lhs.offset < rhs.offset;
    });
    for(size_t i = 1; i < BUF_COUNT; ++i)
    {
        if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
        {
            TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
        }
    }

    VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
    TEST(res == VK_SUCCESS);

    // Destroy all buffers.
    for(size_t i = BUF_COUNT; i--; )
    {
        vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
    }
}
#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002960
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002961static void TestLinearAllocator()
2962{
2963 wprintf(L"Test linear allocator\n");
2964
2965 RandomNumberGenerator rand{645332};
2966
2967 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2968 sampleBufCreateInfo.size = 1024; // Whatever.
2969 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2970
2971 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2972 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2973
2974 VmaPoolCreateInfo poolCreateInfo = {};
2975 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002976 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002977
Adam Sawickiee082772018-06-20 17:45:49 +02002978 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002979 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2980 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2981
2982 VmaPool pool = nullptr;
2983 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002984 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002985
2986 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2987
2988 VmaAllocationCreateInfo allocCreateInfo = {};
2989 allocCreateInfo.pool = pool;
2990
2991 constexpr size_t maxBufCount = 100;
2992 std::vector<BufferInfo> bufInfo;
2993
2994 constexpr VkDeviceSize bufSizeMin = 16;
2995 constexpr VkDeviceSize bufSizeMax = 1024;
2996 VmaAllocationInfo allocInfo;
2997 VkDeviceSize prevOffset = 0;
2998
2999 // Test one-time free.
3000 for(size_t i = 0; i < 2; ++i)
3001 {
3002 // Allocate number of buffers of varying size that surely fit into this block.
3003 VkDeviceSize bufSumSize = 0;
3004 for(size_t i = 0; i < maxBufCount; ++i)
3005 {
Adam Sawickifd366b62019-01-24 15:26:43 +01003006 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003007 BufferInfo newBufInfo;
3008 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3009 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003010 TEST(res == VK_SUCCESS);
3011 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003012 bufInfo.push_back(newBufInfo);
3013 prevOffset = allocInfo.offset;
3014 bufSumSize += bufCreateInfo.size;
3015 }
3016
3017 // Validate pool stats.
3018 VmaPoolStats stats;
3019 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003020 TEST(stats.size == poolCreateInfo.blockSize);
3021 TEST(stats.unusedSize = poolCreateInfo.blockSize - bufSumSize);
3022 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003023
3024 // Destroy the buffers in random order.
3025 while(!bufInfo.empty())
3026 {
3027 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
3028 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
3029 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3030 bufInfo.erase(bufInfo.begin() + indexToDestroy);
3031 }
3032 }
3033
3034 // Test stack.
3035 {
3036 // Allocate number of buffers of varying size that surely fit into this block.
3037 for(size_t i = 0; i < maxBufCount; ++i)
3038 {
Adam Sawickifd366b62019-01-24 15:26:43 +01003039 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003040 BufferInfo newBufInfo;
3041 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3042 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003043 TEST(res == VK_SUCCESS);
3044 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003045 bufInfo.push_back(newBufInfo);
3046 prevOffset = allocInfo.offset;
3047 }
3048
3049 // Destroy few buffers from top of the stack.
3050 for(size_t i = 0; i < maxBufCount / 5; ++i)
3051 {
3052 const BufferInfo& currBufInfo = bufInfo.back();
3053 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3054 bufInfo.pop_back();
3055 }
3056
3057 // Create some more
3058 for(size_t i = 0; i < maxBufCount / 5; ++i)
3059 {
Adam Sawickifd366b62019-01-24 15:26:43 +01003060 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003061 BufferInfo newBufInfo;
3062 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3063 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003064 TEST(res == VK_SUCCESS);
3065 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003066 bufInfo.push_back(newBufInfo);
3067 prevOffset = allocInfo.offset;
3068 }
3069
3070 // Destroy the buffers in reverse order.
3071 while(!bufInfo.empty())
3072 {
3073 const BufferInfo& currBufInfo = bufInfo.back();
3074 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3075 bufInfo.pop_back();
3076 }
3077 }
3078
Adam Sawickiee082772018-06-20 17:45:49 +02003079 // Test ring buffer.
3080 {
3081 // Allocate number of buffers that surely fit into this block.
3082 bufCreateInfo.size = bufSizeMax;
3083 for(size_t i = 0; i < maxBufCount; ++i)
3084 {
3085 BufferInfo newBufInfo;
3086 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3087 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003088 TEST(res == VK_SUCCESS);
3089 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02003090 bufInfo.push_back(newBufInfo);
3091 prevOffset = allocInfo.offset;
3092 }
3093
3094 // Free and allocate new buffers so many times that we make sure we wrap-around at least once.
3095 const size_t buffersPerIter = maxBufCount / 10 - 1;
3096 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
3097 for(size_t iter = 0; iter < iterCount; ++iter)
3098 {
3099 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
3100 {
3101 const BufferInfo& currBufInfo = bufInfo.front();
3102 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3103 bufInfo.erase(bufInfo.begin());
3104 }
3105 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
3106 {
3107 BufferInfo newBufInfo;
3108 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3109 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003110 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02003111 bufInfo.push_back(newBufInfo);
3112 }
3113 }
3114
3115 // Allocate buffers until we reach out-of-memory.
3116 uint32_t debugIndex = 0;
3117 while(res == VK_SUCCESS)
3118 {
3119 BufferInfo newBufInfo;
3120 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3121 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3122 if(res == VK_SUCCESS)
3123 {
3124 bufInfo.push_back(newBufInfo);
3125 }
3126 else
3127 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003128 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02003129 }
3130 ++debugIndex;
3131 }
3132
3133 // Destroy the buffers in random order.
3134 while(!bufInfo.empty())
3135 {
3136 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
3137 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
3138 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3139 bufInfo.erase(bufInfo.begin() + indexToDestroy);
3140 }
3141 }
3142
Adam Sawicki680b2252018-08-22 14:47:32 +02003143 // Test double stack.
3144 {
3145 // Allocate number of buffers of varying size that surely fit into this block, alternate from bottom/top.
3146 VkDeviceSize prevOffsetLower = 0;
3147 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
3148 for(size_t i = 0; i < maxBufCount; ++i)
3149 {
3150 const bool upperAddress = (i % 2) != 0;
3151 if(upperAddress)
3152 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3153 else
3154 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01003155 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02003156 BufferInfo newBufInfo;
3157 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3158 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003159 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02003160 if(upperAddress)
3161 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003162 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003163 prevOffsetUpper = allocInfo.offset;
3164 }
3165 else
3166 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003167 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02003168 prevOffsetLower = allocInfo.offset;
3169 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02003170 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003171 bufInfo.push_back(newBufInfo);
3172 }
3173
3174 // Destroy few buffers from top of the stack.
3175 for(size_t i = 0; i < maxBufCount / 5; ++i)
3176 {
3177 const BufferInfo& currBufInfo = bufInfo.back();
3178 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3179 bufInfo.pop_back();
3180 }
3181
3182 // Create some more
3183 for(size_t i = 0; i < maxBufCount / 5; ++i)
3184 {
3185 const bool upperAddress = (i % 2) != 0;
3186 if(upperAddress)
3187 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3188 else
3189 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01003190 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02003191 BufferInfo newBufInfo;
3192 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3193 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003194 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02003195 bufInfo.push_back(newBufInfo);
3196 }
3197
3198 // Destroy the buffers in reverse order.
3199 while(!bufInfo.empty())
3200 {
3201 const BufferInfo& currBufInfo = bufInfo.back();
3202 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3203 bufInfo.pop_back();
3204 }
3205
3206 // Create buffers on both sides until we reach out of memory.
3207 prevOffsetLower = 0;
3208 prevOffsetUpper = poolCreateInfo.blockSize;
3209 res = VK_SUCCESS;
3210 for(size_t i = 0; res == VK_SUCCESS; ++i)
3211 {
3212 const bool upperAddress = (i % 2) != 0;
3213 if(upperAddress)
3214 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3215 else
3216 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01003217 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02003218 BufferInfo newBufInfo;
3219 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3220 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3221 if(res == VK_SUCCESS)
3222 {
3223 if(upperAddress)
3224 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003225 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003226 prevOffsetUpper = allocInfo.offset;
3227 }
3228 else
3229 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003230 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02003231 prevOffsetLower = allocInfo.offset;
3232 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02003233 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003234 bufInfo.push_back(newBufInfo);
3235 }
3236 }
3237
3238 // Destroy the buffers in random order.
3239 while(!bufInfo.empty())
3240 {
3241 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
3242 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
3243 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3244 bufInfo.erase(bufInfo.begin() + indexToDestroy);
3245 }
3246
3247 // Create buffers on upper side only, constant size, until we reach out of memory.
3248 prevOffsetUpper = poolCreateInfo.blockSize;
3249 res = VK_SUCCESS;
3250 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3251 bufCreateInfo.size = bufSizeMax;
3252 for(size_t i = 0; res == VK_SUCCESS; ++i)
3253 {
3254 BufferInfo newBufInfo;
3255 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3256 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3257 if(res == VK_SUCCESS)
3258 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003259 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003260 prevOffsetUpper = allocInfo.offset;
3261 bufInfo.push_back(newBufInfo);
3262 }
3263 }
3264
3265 // Destroy the buffers in reverse order.
3266 while(!bufInfo.empty())
3267 {
3268 const BufferInfo& currBufInfo = bufInfo.back();
3269 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3270 bufInfo.pop_back();
3271 }
3272 }
3273
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003274 // Test ring buffer with lost allocations.
3275 {
3276 // Allocate number of buffers until pool is full.
3277 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
3278 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
3279 res = VK_SUCCESS;
3280 for(size_t i = 0; res == VK_SUCCESS; ++i)
3281 {
3282 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3283
Adam Sawickifd366b62019-01-24 15:26:43 +01003284 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003285
3286 BufferInfo newBufInfo;
3287 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3288 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3289 if(res == VK_SUCCESS)
3290 bufInfo.push_back(newBufInfo);
3291 }
3292
3293 // Free first half of it.
3294 {
3295 const size_t buffersToDelete = bufInfo.size() / 2;
3296 for(size_t i = 0; i < buffersToDelete; ++i)
3297 {
3298 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
3299 }
3300 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
3301 }
3302
3303 // Allocate number of buffers until pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003304 // This way we make sure the ring buffer wraps around, front is in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003305 res = VK_SUCCESS;
3306 for(size_t i = 0; res == VK_SUCCESS; ++i)
3307 {
3308 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3309
Adam Sawickifd366b62019-01-24 15:26:43 +01003310 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003311
3312 BufferInfo newBufInfo;
3313 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3314 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3315 if(res == VK_SUCCESS)
3316 bufInfo.push_back(newBufInfo);
3317 }
3318
3319 VkDeviceSize firstNewOffset;
3320 {
3321 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3322
3323 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
3324 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3325 bufCreateInfo.size = bufSizeMax;
3326
3327 BufferInfo newBufInfo;
3328 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3329 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003330 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003331 bufInfo.push_back(newBufInfo);
3332 firstNewOffset = allocInfo.offset;
3333
3334 // Make sure at least one buffer from the beginning became lost.
3335 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003336 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003337 }
3338
Adam Sawickifd366b62019-01-24 15:26:43 +01003339#if 0 // TODO Fix and uncomment. Failing on Intel.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003340 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
3341 size_t newCount = 1;
3342 for(;;)
3343 {
3344 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3345
Adam Sawickifd366b62019-01-24 15:26:43 +01003346 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003347
3348 BufferInfo newBufInfo;
3349 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3350 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickifd366b62019-01-24 15:26:43 +01003351
Adam Sawickib8d34d52018-10-03 17:41:20 +02003352 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003353 bufInfo.push_back(newBufInfo);
3354 ++newCount;
3355 if(allocInfo.offset < firstNewOffset)
3356 break;
3357 }
Adam Sawickifd366b62019-01-24 15:26:43 +01003358#endif
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003359
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003360 // Delete buffers that are lost.
3361 for(size_t i = bufInfo.size(); i--; )
3362 {
3363 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
3364 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3365 {
3366 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
3367 bufInfo.erase(bufInfo.begin() + i);
3368 }
3369 }
3370
3371 // Test vmaMakePoolAllocationsLost
3372 {
3373 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3374
Adam Sawicki4d35a5d2019-01-24 15:51:59 +01003375 size_t lostAllocCount = 0;
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003376 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003377 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003378
3379 size_t realLostAllocCount = 0;
3380 for(size_t i = 0; i < bufInfo.size(); ++i)
3381 {
3382 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
3383 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3384 ++realLostAllocCount;
3385 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02003386 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003387 }
3388
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003389 // Destroy all the buffers in forward order.
3390 for(size_t i = 0; i < bufInfo.size(); ++i)
3391 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
3392 bufInfo.clear();
3393 }
3394
Adam Sawicki70a683e2018-08-24 15:36:32 +02003395 vmaDestroyPool(g_hAllocator, pool);
3396}
Adam Sawickif799c4f2018-08-23 10:40:30 +02003397
// Tests a custom pool created with VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT and
// default min/max block counts, i.e. a linear pool that is allowed to grow to
// multiple VkDeviceMemory blocks. Verifies block count via vmaGetPoolStats()
// in two scenarios: one-time free (destroy in random order) and stack usage
// (destroy LIFO).
static void TestLinearAllocatorMultiBlock()
{
    wprintf(L"Test linear allocator multi block\n");

    RandomNumberGenerator rand{345673};

    // Sample buffer: 1 MiB transfer source, host-visible (CPU_ONLY) memory.
    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024 * 1024;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    // Linear pool. blockSize / minBlockCount / maxBlockCount are left at their
    // defaults, so the pool may allocate more than one block.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    std::vector<BufferInfo> bufInfo;
    VmaAllocationInfo allocInfo;

    // Test one-time free.
    {
        // Allocate buffers until we move to a second block, detected by a
        // change in allocInfo.deviceMemory between consecutive allocations.
        VkDeviceMemory lastMem = VK_NULL_HANDLE;
        for(uint32_t i = 0; ; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
            if(lastMem && allocInfo.deviceMemory != lastMem)
            {
                break;
            }
            lastMem = allocInfo.deviceMemory;
        }

        TEST(bufInfo.size() > 2);

        // Make sure that pool has now two blocks.
        VmaPoolStats poolStats = {};
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 2);

        // Destroy all the buffers in random order.
        while(!bufInfo.empty())
        {
            const size_t indexToDestroy = rand.Generate() % bufInfo.size();
            const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.erase(bufInfo.begin() + indexToDestroy);
        }

        // Make sure that pool has now at most one block: emptied blocks beyond
        // the first one should have been released.
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount <= 1);
    }

    // Test stack.
    {
        // Allocate buffers until we move to a second block (same detection as above).
        VkDeviceMemory lastMem = VK_NULL_HANDLE;
        for(uint32_t i = 0; ; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
            if(lastMem && allocInfo.deviceMemory != lastMem)
            {
                break;
            }
            lastMem = allocInfo.deviceMemory;
        }

        TEST(bufInfo.size() > 2);

        // Add few more buffers.
        for(uint32_t i = 0; i < 5; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
        }

        // Make sure that pool has now two blocks.
        VmaPoolStats poolStats = {};
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 2);

        // Delete half of buffers, LIFO.
        for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
        {
            const BufferInfo& currBufInfo = bufInfo.back();
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.pop_back();
        }

        // Add one more buffer. It should reuse space in the first block rather
        // than require a second one.
        BufferInfo newBufInfo;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        // Make sure that pool has now one block.
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 1);

        // Delete all the remaining buffers, LIFO.
        while(!bufInfo.empty())
        {
            const BufferInfo& currBufInfo = bufInfo.back();
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.pop_back();
        }
    }

    vmaDestroyPool(g_hAllocator, pool);
}
3532
// Creates a small (10 KiB, exactly one block) linear-algorithm pool and
// exercises it as a double stack: three buffers allocated from the lower end,
// then three from the upper end (VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT).
// This test is intended for MANUAL inspection in a debugger, not automated
// verification: put a breakpoint at `int I = 0;` and compare currStats
// against origStats, and inspect poolStats / statsStr.
static void ManuallyTestLinearAllocator()
{
    // Snapshot of global allocator stats taken before the test, for manual comparison.
    VmaStats origStats;
    vmaCalculateStats(g_hAllocator, &origStats);

    wprintf(L"Manually test linear allocator\n");

    RandomNumberGenerator rand{645332};

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024; // Whatever.
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Single fixed-size 10 KiB block so every allocation lands in one linear block.
    poolCreateInfo.blockSize = 10 * 1024;
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    std::vector<BufferInfo> bufInfo;
    VmaAllocationInfo allocInfo;
    BufferInfo newBufInfo;

    // Test double stack.
    {
        /*
        Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
        Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B

        Totally:
        1 block allocated
        10240 Vulkan bytes
        6 new allocations
        2256 bytes in allocations
        */

        // Lower stack: 32 B, 1024 B, 32 B.
        bufCreateInfo.size = 32;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        bufCreateInfo.size = 1024;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        bufCreateInfo.size = 32;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        // Remaining allocations go to the upper end of the block (double stack).
        allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;

        // Upper stack: 128 B, 1024 B, 16 B.
        bufCreateInfo.size = 128;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        bufCreateInfo.size = 1024;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        bufCreateInfo.size = 16;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        // Gather stats for manual inspection.
        VmaStats currStats;
        vmaCalculateStats(g_hAllocator, &currStats);
        VmaPoolStats poolStats;
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);

        char* statsStr = nullptr;
        vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);

        // PUT BREAKPOINT HERE TO CHECK.
        // Inspect: currStats versus origStats, poolStats, statsStr.
        // `I` exists only as a breakpoint anchor; it is intentionally unused.
        int I = 0;

        vmaFreeStatsString(g_hAllocator, statsStr);

        // Destroy the buffers in reverse order.
        while(!bufInfo.empty())
        {
            const BufferInfo& currBufInfo = bufInfo.back();
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.pop_back();
        }
    }

    vmaDestroyPool(g_hAllocator, pool);
}
3646
// Benchmarks one configuration of pool algorithm / allocation strategy /
// free order. Times vmaAllocateMemory/vmaFreeMemory over `iterationCount`
// rounds of `allocCount` random-sized allocations in a single-block pool,
// then prints the results and optionally appends a CSV row to `file`.
//
// @param file          Optional CSV output (may be null).
// @param algorithm     VMA_POOL_CREATE_*_ALGORITHM_BIT, or 0 for the default algorithm.
// @param empty         If false, the pool is pre-fragmented: filled to ~1/3
//                      capacity and then half of those allocations are freed randomly.
// @param allocStrategy VMA_ALLOCATION_CREATE_STRATEGY_* flags passed to each allocation.
// @param freeOrder     Order in which each round's allocations are freed.
static void BenchmarkAlgorithmsCase(FILE* file,
    uint32_t algorithm,
    bool empty,
    VmaAllocationCreateFlags allocStrategy,
    FREE_ORDER freeOrder)
{
    RandomNumberGenerator rand{16223};

    const VkDeviceSize bufSizeMin = 32;
    const VkDeviceSize bufSizeMax = 1024;
    const size_t maxBufCapacity = 10000;
    const uint32_t iterationCount = 10;

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = bufSizeMax;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Exactly one block, sized to hold maxBufCapacity maximum-size buffers,
    // so no block creation/destruction is included in the timings.
    poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
    poolCreateInfo.flags |= algorithm;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Buffer created just to get memory requirements. Never bound to any memory.
    VkBuffer dummyBuffer = VK_NULL_HANDLE;
    res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, g_Allocs, &dummyBuffer);
    TEST(res == VK_SUCCESS && dummyBuffer);

    VkMemoryRequirements memReq = {};
    vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);

    vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    allocCreateInfo.flags = allocStrategy;

    VmaAllocation alloc;
    std::vector<VmaAllocation> baseAllocations;

    if(!empty)
    {
        // Make allocations up to 1/3 of pool size.
        VkDeviceSize totalSize = 0;
        while(totalSize < poolCreateInfo.blockSize / 3)
        {
            // This test intentionally allows sizes that are aligned to 4 or 16 bytes.
            // This is theoretically allowed and already uncovered one bug.
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            baseAllocations.push_back(alloc);
            totalSize += memReq.size;
        }

        // Delete half of them, choose randomly - leaves the pool fragmented.
        size_t allocsToDelete = baseAllocations.size() / 2;
        for(size_t i = 0; i < allocsToDelete; ++i)
        {
            const size_t index = (size_t)rand.Generate() % baseAllocations.size();
            vmaFreeMemory(g_hAllocator, baseAllocations[index]);
            baseAllocations.erase(baseAllocations.begin() + index);
        }
    }

    // BENCHMARK
    const size_t allocCount = maxBufCapacity / 3;
    std::vector<VmaAllocation> testAllocations;
    testAllocations.reserve(allocCount);
    duration allocTotalDuration = duration::zero();
    duration freeTotalDuration = duration::zero();
    for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
    {
        // Allocations
        time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
        {
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            testAllocations.push_back(alloc);
        }
        allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;

        // Deallocations - reorder first according to freeOrder.
        // (Reordering happens outside the timed section.)
        switch(freeOrder)
        {
        case FREE_ORDER::FORWARD:
            // Leave testAllocations unchanged.
            break;
        case FREE_ORDER::BACKWARD:
            std::reverse(testAllocations.begin(), testAllocations.end());
            break;
        case FREE_ORDER::RANDOM:
            std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
            break;
        default: assert(0);
        }

        time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
            vmaFreeMemory(g_hAllocator, testAllocations[i]);
        freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;

        testAllocations.clear();
    }

    // Delete baseAllocations
    while(!baseAllocations.empty())
    {
        vmaFreeMemory(g_hAllocator, baseAllocations.back());
        baseAllocations.pop_back();
    }

    vmaDestroyPool(g_hAllocator, pool);

    const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
    const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);

    printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
        AlgorithmToStr(algorithm),
        empty ? "Empty" : "Not empty",
        GetAllocationStrategyName(allocStrategy),
        FREE_ORDER_NAMES[(size_t)freeOrder],
        allocTotalSeconds,
        freeTotalSeconds);

    if(file)
    {
        std::string currTime;
        CurrentTimeToStr(currTime);

        // CSV row matching the header written by BenchmarkAlgorithms().
        fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
            CODE_DESCRIPTION, currTime.c_str(),
            AlgorithmToStr(algorithm),
            empty ? 1 : 0,
            GetAllocationStrategyName(allocStrategy),
            FREE_ORDER_NAMES[(uint32_t)freeOrder],
            allocTotalSeconds,
            freeTotalSeconds);
    }
}
3798
Adam Sawickie73e9882020-03-20 18:05:42 +01003799static void TestBufferDeviceAddress()
3800{
3801 wprintf(L"Test buffer device address\n");
3802
3803 assert(g_BufferDeviceAddressEnabled);
3804
3805 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3806 bufCreateInfo.size = 0x10000;
3807 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
3808 VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT; // !!!
3809
3810 VmaAllocationCreateInfo allocCreateInfo = {};
3811 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3812
3813 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
3814 {
3815 // 1st is placed, 2nd is dedicated.
3816 if(testIndex == 1)
3817 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3818
3819 BufferInfo bufInfo = {};
3820 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3821 &bufInfo.Buffer, &bufInfo.Allocation, nullptr);
3822 TEST(res == VK_SUCCESS);
3823
3824 VkBufferDeviceAddressInfoEXT bufferDeviceAddressInfo = { VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT };
3825 bufferDeviceAddressInfo.buffer = bufInfo.Buffer;
3826 //assert(g_vkGetBufferDeviceAddressEXT != nullptr);
3827 if(g_vkGetBufferDeviceAddressEXT != nullptr)
3828 {
3829 VkDeviceAddress addr = g_vkGetBufferDeviceAddressEXT(g_hDevice, &bufferDeviceAddressInfo);
3830 TEST(addr != 0);
3831 }
3832
3833 vmaDestroyBuffer(g_hAllocator, bufInfo.Buffer, bufInfo.Allocation);
3834 }
3835}
3836
Adam Sawicki80927152018-09-07 17:27:23 +02003837static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02003838{
Adam Sawicki80927152018-09-07 17:27:23 +02003839 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02003840
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003841 if(file)
3842 {
3843 fprintf(file,
3844 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02003845 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003846 "Allocation time (s),Deallocation time (s)\n");
3847 }
3848
Adam Sawicki0a607132018-08-24 11:18:41 +02003849 uint32_t freeOrderCount = 1;
3850 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
3851 freeOrderCount = 3;
3852 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
3853 freeOrderCount = 2;
3854
3855 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003856 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02003857
3858 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
3859 {
3860 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
3861 switch(freeOrderIndex)
3862 {
3863 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
3864 case 1: freeOrder = FREE_ORDER::FORWARD; break;
3865 case 2: freeOrder = FREE_ORDER::RANDOM; break;
3866 default: assert(0);
3867 }
3868
3869 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
3870 {
Adam Sawicki80927152018-09-07 17:27:23 +02003871 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02003872 {
Adam Sawicki80927152018-09-07 17:27:23 +02003873 uint32_t algorithm = 0;
3874 switch(algorithmIndex)
3875 {
3876 case 0:
3877 break;
3878 case 1:
3879 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
3880 break;
3881 case 2:
3882 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3883 break;
3884 default:
3885 assert(0);
3886 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003887
Adam Sawicki80927152018-09-07 17:27:23 +02003888 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003889 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3890 {
3891 VmaAllocatorCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003892 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003893 {
3894 switch(allocStrategyIndex)
3895 {
3896 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3897 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3898 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3899 default: assert(0);
3900 }
3901 }
3902
Adam Sawicki80927152018-09-07 17:27:23 +02003903 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003904 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003905 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003906 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003907 strategy,
3908 freeOrder); // freeOrder
3909 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003910 }
3911 }
3912 }
3913}
3914
Adam Sawickib8333fb2018-03-13 16:15:53 +01003915static void TestPool_SameSize()
3916{
3917 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3918 const size_t BUF_COUNT = 100;
3919 VkResult res;
3920
3921 RandomNumberGenerator rand{123};
3922
3923 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3924 bufferInfo.size = BUF_SIZE;
3925 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3926
3927 uint32_t memoryTypeBits = UINT32_MAX;
3928 {
3929 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003930 res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003931 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003932
3933 VkMemoryRequirements memReq;
3934 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3935 memoryTypeBits = memReq.memoryTypeBits;
3936
Adam Sawicki1f84f622019-07-02 13:40:01 +02003937 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003938 }
3939
3940 VmaAllocationCreateInfo poolAllocInfo = {};
3941 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3942 uint32_t memTypeIndex;
3943 res = vmaFindMemoryTypeIndex(
3944 g_hAllocator,
3945 memoryTypeBits,
3946 &poolAllocInfo,
3947 &memTypeIndex);
3948
3949 VmaPoolCreateInfo poolCreateInfo = {};
3950 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3951 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3952 poolCreateInfo.minBlockCount = 1;
3953 poolCreateInfo.maxBlockCount = 4;
3954 poolCreateInfo.frameInUseCount = 0;
3955
3956 VmaPool pool;
3957 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003958 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003959
Adam Sawickia020fb82019-11-02 14:43:06 +01003960 // Test pool name
3961 {
3962 static const char* const POOL_NAME = "Pool name";
3963 vmaSetPoolName(g_hAllocator, pool, POOL_NAME);
3964
3965 const char* fetchedPoolName = nullptr;
3966 vmaGetPoolName(g_hAllocator, pool, &fetchedPoolName);
3967 TEST(strcmp(fetchedPoolName, POOL_NAME) == 0);
3968
Adam Sawickia020fb82019-11-02 14:43:06 +01003969 vmaSetPoolName(g_hAllocator, pool, nullptr);
3970 }
3971
Adam Sawickib8333fb2018-03-13 16:15:53 +01003972 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3973
3974 VmaAllocationCreateInfo allocInfo = {};
3975 allocInfo.pool = pool;
3976 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3977 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3978
3979 struct BufItem
3980 {
3981 VkBuffer Buf;
3982 VmaAllocation Alloc;
3983 };
3984 std::vector<BufItem> items;
3985
3986 // Fill entire pool.
3987 for(size_t i = 0; i < BUF_COUNT; ++i)
3988 {
3989 BufItem item;
3990 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003991 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003992 items.push_back(item);
3993 }
3994
3995 // Make sure that another allocation would fail.
3996 {
3997 BufItem item;
3998 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003999 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004000 }
4001
4002 // Validate that no buffer is lost. Also check that they are not mapped.
4003 for(size_t i = 0; i < items.size(); ++i)
4004 {
4005 VmaAllocationInfo allocInfo;
4006 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004007 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
4008 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004009 }
4010
4011 // Free some percent of random items.
4012 {
4013 const size_t PERCENT_TO_FREE = 10;
4014 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
4015 for(size_t i = 0; i < itemsToFree; ++i)
4016 {
4017 size_t index = (size_t)rand.Generate() % items.size();
4018 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
4019 items.erase(items.begin() + index);
4020 }
4021 }
4022
4023 // Randomly allocate and free items.
4024 {
4025 const size_t OPERATION_COUNT = BUF_COUNT;
4026 for(size_t i = 0; i < OPERATION_COUNT; ++i)
4027 {
4028 bool allocate = rand.Generate() % 2 != 0;
4029 if(allocate)
4030 {
4031 if(items.size() < BUF_COUNT)
4032 {
4033 BufItem item;
4034 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004035 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004036 items.push_back(item);
4037 }
4038 }
4039 else // Free
4040 {
4041 if(!items.empty())
4042 {
4043 size_t index = (size_t)rand.Generate() % items.size();
4044 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
4045 items.erase(items.begin() + index);
4046 }
4047 }
4048 }
4049 }
4050
4051 // Allocate up to maximum.
4052 while(items.size() < BUF_COUNT)
4053 {
4054 BufItem item;
4055 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004056 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004057 items.push_back(item);
4058 }
4059
4060 // Validate that no buffer is lost.
4061 for(size_t i = 0; i < items.size(); ++i)
4062 {
4063 VmaAllocationInfo allocInfo;
4064 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004065 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004066 }
4067
4068 // Next frame.
4069 vmaSetCurrentFrameIndex(g_hAllocator, 2);
4070
4071 // Allocate another BUF_COUNT buffers.
4072 for(size_t i = 0; i < BUF_COUNT; ++i)
4073 {
4074 BufItem item;
4075 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004076 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004077 items.push_back(item);
4078 }
4079
4080 // Make sure the first BUF_COUNT is lost. Delete them.
4081 for(size_t i = 0; i < BUF_COUNT; ++i)
4082 {
4083 VmaAllocationInfo allocInfo;
4084 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004085 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004086 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4087 }
4088 items.erase(items.begin(), items.begin() + BUF_COUNT);
4089
4090 // Validate that no buffer is lost.
4091 for(size_t i = 0; i < items.size(); ++i)
4092 {
4093 VmaAllocationInfo allocInfo;
4094 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004095 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004096 }
4097
4098 // Free one item.
4099 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
4100 items.pop_back();
4101
4102 // Validate statistics.
4103 {
4104 VmaPoolStats poolStats = {};
4105 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004106 TEST(poolStats.allocationCount == items.size());
4107 TEST(poolStats.size = BUF_COUNT * BUF_SIZE);
4108 TEST(poolStats.unusedRangeCount == 1);
4109 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
4110 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004111 }
4112
4113 // Free all remaining items.
4114 for(size_t i = items.size(); i--; )
4115 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4116 items.clear();
4117
4118 // Allocate maximum items again.
4119 for(size_t i = 0; i < BUF_COUNT; ++i)
4120 {
4121 BufItem item;
4122 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004123 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004124 items.push_back(item);
4125 }
4126
4127 // Delete every other item.
4128 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
4129 {
4130 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4131 items.erase(items.begin() + i);
4132 }
4133
4134 // Defragment!
4135 {
4136 std::vector<VmaAllocation> allocationsToDefragment(items.size());
4137 for(size_t i = 0; i < items.size(); ++i)
4138 allocationsToDefragment[i] = items[i].Alloc;
4139
4140 VmaDefragmentationStats defragmentationStats;
4141 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004142 TEST(res == VK_SUCCESS);
4143 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004144 }
4145
4146 // Free all remaining items.
4147 for(size_t i = items.size(); i--; )
4148 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4149 items.clear();
4150
4151 ////////////////////////////////////////////////////////////////////////////////
4152 // Test for vmaMakePoolAllocationsLost
4153
4154 // Allocate 4 buffers on frame 10.
4155 vmaSetCurrentFrameIndex(g_hAllocator, 10);
4156 for(size_t i = 0; i < 4; ++i)
4157 {
4158 BufItem item;
4159 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004160 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004161 items.push_back(item);
4162 }
4163
4164 // Touch first 2 of them on frame 11.
4165 vmaSetCurrentFrameIndex(g_hAllocator, 11);
4166 for(size_t i = 0; i < 2; ++i)
4167 {
4168 VmaAllocationInfo allocInfo;
4169 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
4170 }
4171
4172 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
4173 size_t lostCount = 0xDEADC0DE;
4174 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004175 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004176
4177 // Make another call. Now 0 should be lost.
4178 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004179 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004180
4181 // Make another call, with null count. Should not crash.
4182 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
4183
4184 // END: Free all remaining items.
4185 for(size_t i = items.size(); i--; )
4186 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4187
4188 items.clear();
4189
Adam Sawickid2924172018-06-11 12:48:46 +02004190 ////////////////////////////////////////////////////////////////////////////////
4191 // Test for allocation too large for pool
4192
4193 {
4194 VmaAllocationCreateInfo allocCreateInfo = {};
4195 allocCreateInfo.pool = pool;
4196
4197 VkMemoryRequirements memReq;
4198 memReq.memoryTypeBits = UINT32_MAX;
4199 memReq.alignment = 1;
4200 memReq.size = poolCreateInfo.blockSize + 4;
4201
4202 VmaAllocation alloc = nullptr;
4203 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004204 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02004205 }
4206
Adam Sawickib8333fb2018-03-13 16:15:53 +01004207 vmaDestroyPool(g_hAllocator, pool);
4208}
4209
Adam Sawickie44c6262018-06-15 14:30:39 +02004210static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
4211{
4212 const uint8_t* pBytes = (const uint8_t*)pMemory;
4213 for(size_t i = 0; i < size; ++i)
4214 {
4215 if(pBytes[i] != pattern)
4216 {
4217 return false;
4218 }
4219 }
4220 return true;
4221}
4222
// Verifies that allocation memory is filled with a debug bit pattern:
// 0xDC right after the allocation is created and 0xEF right after it is freed
// (as asserted via ValidatePattern below).
// NOTE(review): presumably requires the library to be built with
// VMA_DEBUG_INITIALIZE_ALLOCATIONS enabled - confirm in the build configuration.
static void TestAllocationsInitialization()
{
    VkResult res;

    const size_t BUF_SIZE = 1024;

    // Create pool.

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = BUF_SIZE;
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    // Host-visible memory is required so the fill pattern can be inspected from CPU.
    VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
    dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BUF_SIZE * 10;
    poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
    poolCreateInfo.maxBlockCount = 1;
    res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // The pool handle is stored directly in the allocation-create-info reused
    // for every buffer below.
    VmaAllocationCreateInfo bufAllocCreateInfo = {};
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
    TEST(res == VK_SUCCESS);

    // Create one persistently mapped buffer to keep memory of this block mapped,
    // so that pointer to mapped data will remain (more or less...) valid even
    // after destruction of other allocations.

    bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
    VkBuffer firstBuf;
    VmaAllocation firstAlloc;
    res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
    TEST(res == VK_SUCCESS);

    // Test buffers.

    // Iteration 0 uses a persistently mapped allocation; iteration 1 maps and
    // unmaps explicitly via vmaMapMemory/vmaUnmapMemory.
    for(uint32_t i = 0; i < 2; ++i)
    {
        const bool persistentlyMapped = i == 0;
        bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
        VkBuffer buf;
        VmaAllocation alloc;
        VmaAllocationInfo allocInfo;
        res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
        TEST(res == VK_SUCCESS);

        void* pMappedData;
        if(!persistentlyMapped)
        {
            res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
            TEST(res == VK_SUCCESS);
        }
        else
        {
            pMappedData = allocInfo.pMappedData;
        }

        // Validate initialized content
        bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
        TEST(valid);

        if(!persistentlyMapped)
        {
            vmaUnmapMemory(g_hAllocator, alloc);
        }

        vmaDestroyBuffer(g_hAllocator, buf, alloc);

        // Validate freed content
        // Reading through pMappedData after vmaDestroyBuffer is intentional:
        // the block stays mapped thanks to firstBuf created above, so the
        // destroy-time fill pattern can still be observed.
        valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
        TEST(valid);
    }

    vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
    vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
}
4301
Adam Sawickib8333fb2018-03-13 16:15:53 +01004302static void TestPool_Benchmark(
4303 PoolTestResult& outResult,
4304 const PoolTestConfig& config)
4305{
Adam Sawickib8d34d52018-10-03 17:41:20 +02004306 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004307
4308 RandomNumberGenerator mainRand{config.RandSeed};
4309
4310 uint32_t allocationSizeProbabilitySum = std::accumulate(
4311 config.AllocationSizes.begin(),
4312 config.AllocationSizes.end(),
4313 0u,
4314 [](uint32_t sum, const AllocationSize& allocSize) {
4315 return sum + allocSize.Probability;
4316 });
4317
4318 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4319 bufferInfo.size = 256; // Whatever.
4320 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4321
4322 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4323 imageInfo.imageType = VK_IMAGE_TYPE_2D;
4324 imageInfo.extent.width = 256; // Whatever.
4325 imageInfo.extent.height = 256; // Whatever.
4326 imageInfo.extent.depth = 1;
4327 imageInfo.mipLevels = 1;
4328 imageInfo.arrayLayers = 1;
4329 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4330 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
4331 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
4332 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
4333 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4334
4335 uint32_t bufferMemoryTypeBits = UINT32_MAX;
4336 {
4337 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02004338 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004339 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004340
4341 VkMemoryRequirements memReq;
4342 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
4343 bufferMemoryTypeBits = memReq.memoryTypeBits;
4344
Adam Sawicki1f84f622019-07-02 13:40:01 +02004345 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004346 }
4347
4348 uint32_t imageMemoryTypeBits = UINT32_MAX;
4349 {
4350 VkImage dummyImage;
Adam Sawicki1f84f622019-07-02 13:40:01 +02004351 VkResult res = vkCreateImage(g_hDevice, &imageInfo, g_Allocs, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004352 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004353
4354 VkMemoryRequirements memReq;
4355 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
4356 imageMemoryTypeBits = memReq.memoryTypeBits;
4357
Adam Sawicki1f84f622019-07-02 13:40:01 +02004358 vkDestroyImage(g_hDevice, dummyImage, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004359 }
4360
4361 uint32_t memoryTypeBits = 0;
4362 if(config.UsesBuffers() && config.UsesImages())
4363 {
4364 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
4365 if(memoryTypeBits == 0)
4366 {
4367 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
4368 return;
4369 }
4370 }
4371 else if(config.UsesBuffers())
4372 memoryTypeBits = bufferMemoryTypeBits;
4373 else if(config.UsesImages())
4374 memoryTypeBits = imageMemoryTypeBits;
4375 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004376 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004377
4378 VmaPoolCreateInfo poolCreateInfo = {};
4379 poolCreateInfo.memoryTypeIndex = 0;
4380 poolCreateInfo.minBlockCount = 1;
4381 poolCreateInfo.maxBlockCount = 1;
4382 poolCreateInfo.blockSize = config.PoolSize;
4383 poolCreateInfo.frameInUseCount = 1;
4384
4385 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
4386 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4387 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
4388
4389 VmaPool pool;
4390 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004391 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004392
4393 // Start time measurement - after creating pool and initializing data structures.
4394 time_point timeBeg = std::chrono::high_resolution_clock::now();
4395
4396 ////////////////////////////////////////////////////////////////////////////////
4397 // ThreadProc
4398 auto ThreadProc = [&](
4399 PoolTestThreadResult* outThreadResult,
4400 uint32_t randSeed,
4401 HANDLE frameStartEvent,
4402 HANDLE frameEndEvent) -> void
4403 {
4404 RandomNumberGenerator threadRand{randSeed};
4405
4406 outThreadResult->AllocationTimeMin = duration::max();
4407 outThreadResult->AllocationTimeSum = duration::zero();
4408 outThreadResult->AllocationTimeMax = duration::min();
4409 outThreadResult->DeallocationTimeMin = duration::max();
4410 outThreadResult->DeallocationTimeSum = duration::zero();
4411 outThreadResult->DeallocationTimeMax = duration::min();
4412 outThreadResult->AllocationCount = 0;
4413 outThreadResult->DeallocationCount = 0;
4414 outThreadResult->LostAllocationCount = 0;
4415 outThreadResult->LostAllocationTotalSize = 0;
4416 outThreadResult->FailedAllocationCount = 0;
4417 outThreadResult->FailedAllocationTotalSize = 0;
4418
4419 struct Item
4420 {
4421 VkDeviceSize BufferSize;
4422 VkExtent2D ImageSize;
4423 VkBuffer Buf;
4424 VkImage Image;
4425 VmaAllocation Alloc;
4426
4427 VkDeviceSize CalcSizeBytes() const
4428 {
4429 return BufferSize +
4430 ImageSize.width * ImageSize.height * 4;
4431 }
4432 };
4433 std::vector<Item> unusedItems, usedItems;
4434
4435 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
4436
4437 // Create all items - all unused, not yet allocated.
4438 for(size_t i = 0; i < threadTotalItemCount; ++i)
4439 {
4440 Item item = {};
4441
4442 uint32_t allocSizeIndex = 0;
4443 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
4444 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
4445 r -= config.AllocationSizes[allocSizeIndex++].Probability;
4446
4447 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
4448 if(allocSize.BufferSizeMax > 0)
4449 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004450 TEST(allocSize.BufferSizeMin > 0);
4451 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004452 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
4453 item.BufferSize = allocSize.BufferSizeMin;
4454 else
4455 {
4456 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
4457 item.BufferSize = item.BufferSize / 16 * 16;
4458 }
4459 }
4460 else
4461 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004462 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004463 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
4464 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
4465 else
4466 {
4467 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
4468 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
4469 }
4470 }
4471
4472 unusedItems.push_back(item);
4473 }
4474
4475 auto Allocate = [&](Item& item) -> VkResult
4476 {
4477 VmaAllocationCreateInfo allocCreateInfo = {};
4478 allocCreateInfo.pool = pool;
4479 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
4480 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
4481
4482 if(item.BufferSize)
4483 {
4484 bufferInfo.size = item.BufferSize;
4485 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
4486 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
4487 }
4488 else
4489 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004490 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004491
4492 imageInfo.extent.width = item.ImageSize.width;
4493 imageInfo.extent.height = item.ImageSize.height;
4494 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
4495 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
4496 }
4497 };
4498
4499 ////////////////////////////////////////////////////////////////////////////////
4500 // Frames
4501 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
4502 {
4503 WaitForSingleObject(frameStartEvent, INFINITE);
4504
4505 // Always make some percent of used bufs unused, to choose different used ones.
4506 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
4507 for(size_t i = 0; i < bufsToMakeUnused; ++i)
4508 {
4509 size_t index = threadRand.Generate() % usedItems.size();
4510 unusedItems.push_back(usedItems[index]);
4511 usedItems.erase(usedItems.begin() + index);
4512 }
4513
4514 // Determine which bufs we want to use in this frame.
4515 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
4516 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004517 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01004518 // Move some used to unused.
4519 while(usedBufCount < usedItems.size())
4520 {
4521 size_t index = threadRand.Generate() % usedItems.size();
4522 unusedItems.push_back(usedItems[index]);
4523 usedItems.erase(usedItems.begin() + index);
4524 }
4525 // Move some unused to used.
4526 while(usedBufCount > usedItems.size())
4527 {
4528 size_t index = threadRand.Generate() % unusedItems.size();
4529 usedItems.push_back(unusedItems[index]);
4530 unusedItems.erase(unusedItems.begin() + index);
4531 }
4532
4533 uint32_t touchExistingCount = 0;
4534 uint32_t touchLostCount = 0;
4535 uint32_t createSucceededCount = 0;
4536 uint32_t createFailedCount = 0;
4537
4538 // Touch all used bufs. If not created or lost, allocate.
4539 for(size_t i = 0; i < usedItems.size(); ++i)
4540 {
4541 Item& item = usedItems[i];
4542 // Not yet created.
4543 if(item.Alloc == VK_NULL_HANDLE)
4544 {
4545 res = Allocate(item);
4546 ++outThreadResult->AllocationCount;
4547 if(res != VK_SUCCESS)
4548 {
4549 item.Alloc = VK_NULL_HANDLE;
4550 item.Buf = VK_NULL_HANDLE;
4551 ++outThreadResult->FailedAllocationCount;
4552 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
4553 ++createFailedCount;
4554 }
4555 else
4556 ++createSucceededCount;
4557 }
4558 else
4559 {
4560 // Touch.
4561 VmaAllocationInfo allocInfo;
4562 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
4563 // Lost.
4564 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
4565 {
4566 ++touchLostCount;
4567
4568 // Destroy.
4569 {
4570 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
4571 if(item.Buf)
4572 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
4573 else
4574 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
4575 ++outThreadResult->DeallocationCount;
4576 }
4577 item.Alloc = VK_NULL_HANDLE;
4578 item.Buf = VK_NULL_HANDLE;
4579
4580 ++outThreadResult->LostAllocationCount;
4581 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
4582
4583 // Recreate.
4584 res = Allocate(item);
4585 ++outThreadResult->AllocationCount;
4586 // Creation failed.
4587 if(res != VK_SUCCESS)
4588 {
4589 ++outThreadResult->FailedAllocationCount;
4590 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
4591 ++createFailedCount;
4592 }
4593 else
4594 ++createSucceededCount;
4595 }
4596 else
4597 ++touchExistingCount;
4598 }
4599 }
4600
4601 /*
4602 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
4603 randSeed, frameIndex,
4604 touchExistingCount, touchLostCount,
4605 createSucceededCount, createFailedCount);
4606 */
4607
4608 SetEvent(frameEndEvent);
4609 }
4610
4611 // Free all remaining items.
4612 for(size_t i = usedItems.size(); i--; )
4613 {
4614 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
4615 if(usedItems[i].Buf)
4616 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
4617 else
4618 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
4619 ++outThreadResult->DeallocationCount;
4620 }
4621 for(size_t i = unusedItems.size(); i--; )
4622 {
4623 PoolDeallocationTimeRegisterObj timeRegisterOb(*outThreadResult);
4624 if(unusedItems[i].Buf)
4625 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
4626 else
4627 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
4628 ++outThreadResult->DeallocationCount;
4629 }
4630 };
4631
4632 // Launch threads.
4633 uint32_t threadRandSeed = mainRand.Generate();
4634 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
4635 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
4636 std::vector<std::thread> bkgThreads;
4637 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
4638 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
4639 {
4640 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
4641 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
4642 bkgThreads.emplace_back(std::bind(
4643 ThreadProc,
4644 &threadResults[threadIndex],
4645 threadRandSeed + threadIndex,
4646 frameStartEvents[threadIndex],
4647 frameEndEvents[threadIndex]));
4648 }
4649
4650 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02004651 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004652 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
4653 {
4654 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
4655 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
4656 SetEvent(frameStartEvents[threadIndex]);
4657 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
4658 }
4659
4660 // Wait for threads finished
4661 for(size_t i = 0; i < bkgThreads.size(); ++i)
4662 {
4663 bkgThreads[i].join();
4664 CloseHandle(frameEndEvents[i]);
4665 CloseHandle(frameStartEvents[i]);
4666 }
4667 bkgThreads.clear();
4668
4669 // Finish time measurement - before destroying pool.
4670 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
4671
4672 vmaDestroyPool(g_hAllocator, pool);
4673
4674 outResult.AllocationTimeMin = duration::max();
4675 outResult.AllocationTimeAvg = duration::zero();
4676 outResult.AllocationTimeMax = duration::min();
4677 outResult.DeallocationTimeMin = duration::max();
4678 outResult.DeallocationTimeAvg = duration::zero();
4679 outResult.DeallocationTimeMax = duration::min();
4680 outResult.LostAllocationCount = 0;
4681 outResult.LostAllocationTotalSize = 0;
4682 outResult.FailedAllocationCount = 0;
4683 outResult.FailedAllocationTotalSize = 0;
4684 size_t allocationCount = 0;
4685 size_t deallocationCount = 0;
4686 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
4687 {
4688 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
4689 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
4690 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
4691 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
4692 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
4693 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
4694 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
4695 allocationCount += threadResult.AllocationCount;
4696 deallocationCount += threadResult.DeallocationCount;
4697 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
4698 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
4699 outResult.LostAllocationCount += threadResult.LostAllocationCount;
4700 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
4701 }
4702 if(allocationCount)
4703 outResult.AllocationTimeAvg /= allocationCount;
4704 if(deallocationCount)
4705 outResult.DeallocationTimeAvg /= deallocationCount;
4706}
4707
// Returns true if the half-open byte ranges [ptr1, ptr1+size1) and
// [ptr2, ptr2+size2) intersect. Equal start addresses always count as an
// overlap, even for zero-sized regions.
static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
{
    if(ptr1 == ptr2)
        return true;
    // Otherwise the lower region overlaps iff it extends past the start of
    // the higher one.
    char* const lo = ptr1 < ptr2 ? ptr1 : ptr2;
    char* const hi = ptr1 < ptr2 ? ptr2 : ptr1;
    const size_t loSize = ptr1 < ptr2 ? size1 : size2;
    return lo + loSize > hi;
}
4717
Adam Sawickiefa88c42019-11-18 16:33:56 +01004718static void TestMemoryUsage()
4719{
4720 wprintf(L"Testing memory usage:\n");
4721
Adam Sawicki69185552019-11-18 17:03:34 +01004722 static const VmaMemoryUsage lastUsage = VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED;
Adam Sawickiefa88c42019-11-18 16:33:56 +01004723 for(uint32_t usage = 0; usage <= lastUsage; ++usage)
4724 {
4725 switch(usage)
4726 {
4727 case VMA_MEMORY_USAGE_UNKNOWN: printf(" VMA_MEMORY_USAGE_UNKNOWN:\n"); break;
4728 case VMA_MEMORY_USAGE_GPU_ONLY: printf(" VMA_MEMORY_USAGE_GPU_ONLY:\n"); break;
4729 case VMA_MEMORY_USAGE_CPU_ONLY: printf(" VMA_MEMORY_USAGE_CPU_ONLY:\n"); break;
4730 case VMA_MEMORY_USAGE_CPU_TO_GPU: printf(" VMA_MEMORY_USAGE_CPU_TO_GPU:\n"); break;
4731 case VMA_MEMORY_USAGE_GPU_TO_CPU: printf(" VMA_MEMORY_USAGE_GPU_TO_CPU:\n"); break;
4732 case VMA_MEMORY_USAGE_CPU_COPY: printf(" VMA_MEMORY_USAGE_CPU_COPY:\n"); break;
Adam Sawicki69185552019-11-18 17:03:34 +01004733 case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED: printf(" VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:\n"); break;
Adam Sawickiefa88c42019-11-18 16:33:56 +01004734 default: assert(0);
4735 }
4736
4737 auto printResult = [](const char* testName, VkResult res, uint32_t memoryTypeBits, uint32_t memoryTypeIndex)
4738 {
4739 if(res == VK_SUCCESS)
4740 printf(" %s: memoryTypeBits=0x%X, memoryTypeIndex=%u\n", testName, memoryTypeBits, memoryTypeIndex);
4741 else
4742 printf(" %s: memoryTypeBits=0x%X, FAILED with res=%d\n", testName, memoryTypeBits, (int32_t)res);
4743 };
4744
4745 // 1: Buffer for copy
4746 {
4747 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4748 bufCreateInfo.size = 65536;
4749 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4750
4751 VkBuffer buf = VK_NULL_HANDLE;
4752 VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
4753 TEST(res == VK_SUCCESS && buf != VK_NULL_HANDLE);
4754
4755 VkMemoryRequirements memReq = {};
4756 vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
4757
4758 VmaAllocationCreateInfo allocCreateInfo = {};
4759 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4760 VmaAllocation alloc = VK_NULL_HANDLE;
4761 VmaAllocationInfo allocInfo = {};
4762 res = vmaAllocateMemoryForBuffer(g_hAllocator, buf, &allocCreateInfo, &alloc, &allocInfo);
4763 if(res == VK_SUCCESS)
4764 {
4765 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4766 res = vkBindBufferMemory(g_hDevice, buf, allocInfo.deviceMemory, allocInfo.offset);
4767 TEST(res == VK_SUCCESS);
4768 }
4769 printResult("Buffer TRANSFER_DST + TRANSFER_SRC", res, memReq.memoryTypeBits, allocInfo.memoryType);
4770 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4771 }
4772
4773 // 2: Vertex buffer
4774 {
4775 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4776 bufCreateInfo.size = 65536;
4777 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4778
4779 VkBuffer buf = VK_NULL_HANDLE;
4780 VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
4781 TEST(res == VK_SUCCESS && buf != VK_NULL_HANDLE);
4782
4783 VkMemoryRequirements memReq = {};
4784 vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
4785
4786 VmaAllocationCreateInfo allocCreateInfo = {};
4787 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4788 VmaAllocation alloc = VK_NULL_HANDLE;
4789 VmaAllocationInfo allocInfo = {};
4790 res = vmaAllocateMemoryForBuffer(g_hAllocator, buf, &allocCreateInfo, &alloc, &allocInfo);
4791 if(res == VK_SUCCESS)
4792 {
4793 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4794 res = vkBindBufferMemory(g_hDevice, buf, allocInfo.deviceMemory, allocInfo.offset);
4795 TEST(res == VK_SUCCESS);
4796 }
4797 printResult("Buffer TRANSFER_DST + VERTEX_BUFFER", res, memReq.memoryTypeBits, allocInfo.memoryType);
4798 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4799 }
4800
4801 // 3: Image for copy, OPTIMAL
4802 {
4803 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4804 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4805 imgCreateInfo.extent.width = 256;
4806 imgCreateInfo.extent.height = 256;
4807 imgCreateInfo.extent.depth = 1;
4808 imgCreateInfo.mipLevels = 1;
4809 imgCreateInfo.arrayLayers = 1;
4810 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4811 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4812 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4813 imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
4814 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4815
4816 VkImage img = VK_NULL_HANDLE;
4817 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4818 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4819
4820 VkMemoryRequirements memReq = {};
4821 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4822
4823 VmaAllocationCreateInfo allocCreateInfo = {};
4824 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4825 VmaAllocation alloc = VK_NULL_HANDLE;
4826 VmaAllocationInfo allocInfo = {};
4827 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4828 if(res == VK_SUCCESS)
4829 {
4830 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4831 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4832 TEST(res == VK_SUCCESS);
4833 }
4834 printResult("Image OPTIMAL TRANSFER_DST + TRANSFER_SRC", res, memReq.memoryTypeBits, allocInfo.memoryType);
4835
4836 vmaDestroyImage(g_hAllocator, img, alloc);
4837 }
4838
4839 // 4: Image SAMPLED, OPTIMAL
4840 {
4841 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4842 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4843 imgCreateInfo.extent.width = 256;
4844 imgCreateInfo.extent.height = 256;
4845 imgCreateInfo.extent.depth = 1;
4846 imgCreateInfo.mipLevels = 1;
4847 imgCreateInfo.arrayLayers = 1;
4848 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4849 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4850 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4851 imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
4852 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4853
4854 VkImage img = VK_NULL_HANDLE;
4855 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4856 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4857
4858 VkMemoryRequirements memReq = {};
4859 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4860
4861 VmaAllocationCreateInfo allocCreateInfo = {};
4862 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4863 VmaAllocation alloc = VK_NULL_HANDLE;
4864 VmaAllocationInfo allocInfo = {};
4865 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4866 if(res == VK_SUCCESS)
4867 {
4868 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4869 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4870 TEST(res == VK_SUCCESS);
4871 }
4872 printResult("Image OPTIMAL TRANSFER_DST + SAMPLED", res, memReq.memoryTypeBits, allocInfo.memoryType);
4873 vmaDestroyImage(g_hAllocator, img, alloc);
4874 }
4875
4876 // 5: Image COLOR_ATTACHMENT, OPTIMAL
4877 {
4878 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4879 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4880 imgCreateInfo.extent.width = 256;
4881 imgCreateInfo.extent.height = 256;
4882 imgCreateInfo.extent.depth = 1;
4883 imgCreateInfo.mipLevels = 1;
4884 imgCreateInfo.arrayLayers = 1;
4885 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4886 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4887 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4888 imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
4889 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4890
4891 VkImage img = VK_NULL_HANDLE;
4892 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4893 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4894
4895 VkMemoryRequirements memReq = {};
4896 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4897
4898 VmaAllocationCreateInfo allocCreateInfo = {};
4899 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4900 VmaAllocation alloc = VK_NULL_HANDLE;
4901 VmaAllocationInfo allocInfo = {};
4902 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4903 if(res == VK_SUCCESS)
4904 {
4905 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4906 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4907 TEST(res == VK_SUCCESS);
4908 }
4909 printResult("Image OPTIMAL SAMPLED + COLOR_ATTACHMENT", res, memReq.memoryTypeBits, allocInfo.memoryType);
4910 vmaDestroyImage(g_hAllocator, img, alloc);
4911 }
4912 }
4913}
4914
Adam Sawicki50882502020-02-07 16:51:31 +01004915static uint32_t FindDeviceCoherentMemoryTypeBits()
4916{
4917 VkPhysicalDeviceMemoryProperties memProps;
4918 vkGetPhysicalDeviceMemoryProperties(g_hPhysicalDevice, &memProps);
4919
4920 uint32_t memTypeBits = 0;
4921 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
4922 {
4923 if(memProps.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD)
4924 memTypeBits |= 1u << i;
4925 }
4926 return memTypeBits;
4927}
4928
4929static void TestDeviceCoherentMemory()
4930{
4931 if(!VK_AMD_device_coherent_memory_enabled)
4932 return;
4933
4934 uint32_t deviceCoherentMemoryTypeBits = FindDeviceCoherentMemoryTypeBits();
4935 // Extension is enabled, feature is enabled, and the device still doesn't support any such memory type?
4936 // OK then, so it's just fake!
4937 if(deviceCoherentMemoryTypeBits == 0)
4938 return;
4939
4940 wprintf(L"Testing device coherent memory...\n");
4941
4942 // 1. Try to allocate buffer from a memory type that is DEVICE_COHERENT.
4943
4944 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4945 bufCreateInfo.size = 0x10000;
4946 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4947
4948 VmaAllocationCreateInfo allocCreateInfo = {};
4949 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4950 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD;
4951
4952 AllocInfo alloc = {};
4953 VmaAllocationInfo allocInfo = {};
4954 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &alloc.m_Buffer, &alloc.m_Allocation, &allocInfo);
4955
4956 // Make sure it succeeded and was really created in such memory type.
4957 TEST(res == VK_SUCCESS);
4958 TEST((1u << allocInfo.memoryType) & deviceCoherentMemoryTypeBits);
4959
4960 alloc.Destroy();
4961
4962 // 2. Try to create a pool in such memory type.
4963 {
4964 VmaPoolCreateInfo poolCreateInfo = {};
4965
4966 res = vmaFindMemoryTypeIndex(g_hAllocator, UINT32_MAX, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
4967 TEST(res == VK_SUCCESS);
4968 TEST((1u << poolCreateInfo.memoryTypeIndex) & deviceCoherentMemoryTypeBits);
4969
4970 VmaPool pool = VK_NULL_HANDLE;
4971 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
4972 TEST(res == VK_SUCCESS);
4973
4974 vmaDestroyPool(g_hAllocator, pool);
4975 }
4976
4977 // 3. Try the same with a local allocator created without VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT.
4978
4979 VmaAllocatorCreateInfo allocatorCreateInfo = {};
4980 SetAllocatorCreateInfo(allocatorCreateInfo);
4981 allocatorCreateInfo.flags &= ~VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT;
4982
4983 VmaAllocator localAllocator = VK_NULL_HANDLE;
4984 res = vmaCreateAllocator(&allocatorCreateInfo, &localAllocator);
4985 TEST(res == VK_SUCCESS && localAllocator);
4986
4987 res = vmaCreateBuffer(localAllocator, &bufCreateInfo, &allocCreateInfo, &alloc.m_Buffer, &alloc.m_Allocation, &allocInfo);
4988
4989 // Make sure it failed.
4990 TEST(res != VK_SUCCESS && !alloc.m_Buffer && !alloc.m_Allocation);
4991
4992 // 4. Try to find memory type.
4993 {
4994 uint32_t memTypeIndex = UINT_MAX;
4995 res = vmaFindMemoryTypeIndex(localAllocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);
4996 TEST(res != VK_SUCCESS);
4997 }
4998
4999 vmaDestroyAllocator(localAllocator);
5000}
5001
Adam Sawicki40ffe982019-10-11 15:56:02 +02005002static void TestBudget()
5003{
5004 wprintf(L"Testing budget...\n");
5005
Adam Sawicki6a93b8a2020-03-09 16:58:18 +01005006 static const VkDeviceSize BUF_SIZE = 10ull * 1024 * 1024;
Adam Sawicki353e3672019-11-02 14:12:05 +01005007 static const uint32_t BUF_COUNT = 4;
Adam Sawicki40ffe982019-10-11 15:56:02 +02005008
Adam Sawicki6a93b8a2020-03-09 16:58:18 +01005009 const VkPhysicalDeviceMemoryProperties* memProps = {};
5010 vmaGetMemoryProperties(g_hAllocator, &memProps);
5011
Adam Sawicki40ffe982019-10-11 15:56:02 +02005012 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
5013 {
Adam Sawicki353e3672019-11-02 14:12:05 +01005014 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
5015
5016 VmaBudget budgetBeg[VK_MAX_MEMORY_HEAPS] = {};
5017 vmaGetBudget(g_hAllocator, budgetBeg);
Adam Sawicki40ffe982019-10-11 15:56:02 +02005018
Adam Sawicki6a93b8a2020-03-09 16:58:18 +01005019 for(uint32_t i = 0; i < memProps->memoryHeapCount; ++i)
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01005020 {
Adam Sawicki6a93b8a2020-03-09 16:58:18 +01005021 TEST(budgetBeg[i].budget > 0);
5022 TEST(budgetBeg[i].budget <= memProps->memoryHeaps[i].size);
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01005023 TEST(budgetBeg[i].allocationBytes <= budgetBeg[i].blockBytes);
5024 }
5025
Adam Sawicki40ffe982019-10-11 15:56:02 +02005026 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5027 bufInfo.size = BUF_SIZE;
5028 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
5029
5030 VmaAllocationCreateInfo allocCreateInfo = {};
5031 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5032 if(testIndex == 0)
5033 {
5034 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
5035 }
5036
5037 // CREATE BUFFERS
5038 uint32_t heapIndex = 0;
5039 BufferInfo bufInfos[BUF_COUNT] = {};
5040 for(uint32_t bufIndex = 0; bufIndex < BUF_COUNT; ++bufIndex)
5041 {
5042 VmaAllocationInfo allocInfo;
5043 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
5044 &bufInfos[bufIndex].Buffer, &bufInfos[bufIndex].Allocation, &allocInfo);
5045 TEST(res == VK_SUCCESS);
5046 if(bufIndex == 0)
5047 {
5048 heapIndex = MemoryTypeToHeap(allocInfo.memoryType);
5049 }
5050 else
5051 {
5052 // All buffers need to fall into the same heap.
5053 TEST(MemoryTypeToHeap(allocInfo.memoryType) == heapIndex);
5054 }
5055 }
5056
Adam Sawicki353e3672019-11-02 14:12:05 +01005057 VmaBudget budgetWithBufs[VK_MAX_MEMORY_HEAPS] = {};
5058 vmaGetBudget(g_hAllocator, budgetWithBufs);
Adam Sawicki40ffe982019-10-11 15:56:02 +02005059
5060 // DESTROY BUFFERS
5061 for(size_t bufIndex = BUF_COUNT; bufIndex--; )
5062 {
5063 vmaDestroyBuffer(g_hAllocator, bufInfos[bufIndex].Buffer, bufInfos[bufIndex].Allocation);
5064 }
5065
Adam Sawicki353e3672019-11-02 14:12:05 +01005066 VmaBudget budgetEnd[VK_MAX_MEMORY_HEAPS] = {};
5067 vmaGetBudget(g_hAllocator, budgetEnd);
Adam Sawicki40ffe982019-10-11 15:56:02 +02005068
5069 // CHECK
Adam Sawicki6a93b8a2020-03-09 16:58:18 +01005070 for(uint32_t i = 0; i < memProps->memoryHeapCount; ++i)
Adam Sawicki40ffe982019-10-11 15:56:02 +02005071 {
Adam Sawicki353e3672019-11-02 14:12:05 +01005072 TEST(budgetEnd[i].allocationBytes <= budgetEnd[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02005073 if(i == heapIndex)
5074 {
Adam Sawicki353e3672019-11-02 14:12:05 +01005075 TEST(budgetEnd[i].allocationBytes == budgetBeg[i].allocationBytes);
5076 TEST(budgetWithBufs[i].allocationBytes == budgetBeg[i].allocationBytes + BUF_SIZE * BUF_COUNT);
5077 TEST(budgetWithBufs[i].blockBytes >= budgetEnd[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02005078 }
5079 else
5080 {
Adam Sawicki353e3672019-11-02 14:12:05 +01005081 TEST(budgetEnd[i].allocationBytes == budgetEnd[i].allocationBytes &&
5082 budgetEnd[i].allocationBytes == budgetWithBufs[i].allocationBytes);
5083 TEST(budgetEnd[i].blockBytes == budgetEnd[i].blockBytes &&
5084 budgetEnd[i].blockBytes == budgetWithBufs[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02005085 }
5086 }
5087 }
5088}
5089
// Tests vmaMapMemory/vmaUnmapMemory reference counting and persistently mapped
// allocations, repeated for three configurations: default allocation, custom
// pool, and dedicated memory. Verifies that pMappedData reflects the current
// map count and that mappings of different buffers don't overlap.
static void TestMapping()
{
    wprintf(L"Testing mapping...\n");

    VkResult res;
    // Memory type of the first CPU_ONLY allocation; reused to create the
    // custom pool in the TEST_POOL iteration (so TEST_NORMAL must run first).
    uint32_t memTypeIndex = UINT32_MAX;

    enum TEST
    {
        TEST_NORMAL,
        TEST_POOL,
        TEST_DEDICATED,
        TEST_COUNT
    };
    for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
    {
        VmaPool pool = nullptr;
        if(testIndex == TEST_POOL)
        {
            // memTypeIndex was recorded during the previous iteration.
            TEST(memTypeIndex != UINT32_MAX);
            VmaPoolCreateInfo poolInfo = {};
            poolInfo.memoryTypeIndex = memTypeIndex;
            res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
            TEST(res == VK_SUCCESS);
        }

        VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufInfo.size = 0x10000;
        bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.pool = pool; // nullptr except in the TEST_POOL iteration.
        if(testIndex == TEST_DEDICATED)
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        VmaAllocationInfo allocInfo;

        // Mapped manually

        // Create 2 buffers.
        BufferInfo bufferInfos[3];
        for(size_t i = 0; i < 2; ++i)
        {
            res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
                &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            // Not created with MAPPED_BIT, so must not be mapped yet.
            TEST(allocInfo.pMappedData == nullptr);
            memTypeIndex = allocInfo.memoryType;
        }

        // Map buffer 0.
        char* data00 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
        TEST(res == VK_SUCCESS && data00 != nullptr);
        // Touch first and last byte of the mapped range to catch bad mappings.
        data00[0xFFFF] = data00[0];

        // Map buffer 0 second time.
        char* data01 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
        // A second map of the same allocation must return the same pointer.
        TEST(res == VK_SUCCESS && data01 == data00);

        // Map buffer 1.
        char* data1 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
        TEST(res == VK_SUCCESS && data1 != nullptr);
        // Distinct allocations must map to disjoint address ranges.
        TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
        data1[0xFFFF] = data1[0];

        // Unmap buffer 0 two times.
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
        // Map count dropped back to zero, so the allocation reports unmapped.
        TEST(allocInfo.pMappedData == nullptr);

        // Unmap buffer 1.
        vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Create 3rd buffer - persistently mapped.
        allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
        res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
            &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
        TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);

        // Map buffer 2.
        char* data2 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
        // Manual map of a persistently mapped allocation returns the same pointer.
        TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
        data2[0xFFFF] = data2[0];

        // Unmap buffer 2.
        vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
        // Still mapped: the persistent mapping from MAPPED_BIT remains.
        TEST(allocInfo.pMappedData == data2);

        // Destroy all buffers.
        for(size_t i = 3; i--; )
            vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);

        vmaDestroyPool(g_hAllocator, pool);
    }
}
5194
Adam Sawickidaa6a552019-06-25 15:26:37 +02005195// Test CREATE_MAPPED with required DEVICE_LOCAL. There was a bug with it.
5196static void TestDeviceLocalMapped()
5197{
5198 VkResult res;
5199
5200 for(uint32_t testIndex = 0; testIndex < 3; ++testIndex)
5201 {
5202 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5203 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
5204 bufCreateInfo.size = 4096;
5205
5206 VmaPool pool = VK_NULL_HANDLE;
5207 VmaAllocationCreateInfo allocCreateInfo = {};
5208 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
5209 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
5210 if(testIndex == 2)
5211 {
5212 VmaPoolCreateInfo poolCreateInfo = {};
5213 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
5214 TEST(res == VK_SUCCESS);
5215 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
5216 TEST(res == VK_SUCCESS);
5217 allocCreateInfo.pool = pool;
5218 }
5219 else if(testIndex == 1)
5220 {
5221 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
5222 }
5223
5224 VkBuffer buf = VK_NULL_HANDLE;
5225 VmaAllocation alloc = VK_NULL_HANDLE;
5226 VmaAllocationInfo allocInfo = {};
5227 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
5228 TEST(res == VK_SUCCESS && alloc);
5229
5230 VkMemoryPropertyFlags memTypeFlags = 0;
5231 vmaGetMemoryTypeProperties(g_hAllocator, allocInfo.memoryType, &memTypeFlags);
5232 const bool shouldBeMapped = (memTypeFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
5233 TEST((allocInfo.pMappedData != nullptr) == shouldBeMapped);
5234
5235 vmaDestroyBuffer(g_hAllocator, buf, alloc);
5236 vmaDestroyPool(g_hAllocator, pool);
5237 }
5238}
5239
// Stress-tests vmaMapMemory/vmaUnmapMemory from 16 concurrent threads, each
// creating 64 buffers with a randomly chosen mapping mode, again across three
// configurations (default, custom pool, dedicated). Verifies pointer identity
// on double map, pMappedData state after each unmap, and that concurrent
// map/unmap doesn't corrupt or crash.
static void TestMappingMultithreaded()
{
    wprintf(L"Testing mapping multithreaded...\n");

    static const uint32_t threadCount = 16;
    static const uint32_t bufferCount = 1024;
    static const uint32_t threadBufferCount = bufferCount / threadCount;

    VkResult res;
    // Shared across worker threads (captured by reference below); first thread
    // to allocate records the memory type, later used for the TEST_POOL pool.
    // NOTE(review): volatile is not a synchronization primitive — racy writes
    // here appear tolerated because all threads write the same value; confirm.
    volatile uint32_t memTypeIndex = UINT32_MAX;

    enum TEST
    {
        TEST_NORMAL,
        TEST_POOL,
        TEST_DEDICATED,
        TEST_COUNT
    };
    for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
    {
        VmaPool pool = nullptr;
        if(testIndex == TEST_POOL)
        {
            // memTypeIndex was filled in by threads of a previous iteration.
            TEST(memTypeIndex != UINT32_MAX);
            VmaPoolCreateInfo poolInfo = {};
            poolInfo.memoryTypeIndex = memTypeIndex;
            res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
            TEST(res == VK_SUCCESS);
        }

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = 0x10000;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.pool = pool;
        if(testIndex == TEST_DEDICATED)
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        std::thread threads[threadCount];
        for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
        {
            // Copy-capture the create-infos; memTypeIndex by reference (shared).
            threads[threadIndex] = std::thread([=, &memTypeIndex](){
                // ======== THREAD FUNCTION ========

                // Seeded per thread so each thread gets a distinct but
                // reproducible sequence of modes.
                RandomNumberGenerator rand{threadIndex};

                enum class MODE
                {
                    // Don't map this buffer at all.
                    DONT_MAP,
                    // Map and quickly unmap.
                    MAP_FOR_MOMENT,
                    // Map and unmap before destruction.
                    MAP_FOR_LONGER,
                    // Map two times. Quickly unmap, second unmap before destruction.
                    MAP_TWO_TIMES,
                    // Create this buffer as persistently mapped.
                    PERSISTENTLY_MAPPED,
                    COUNT
                };
                std::vector<BufferInfo> bufInfos{threadBufferCount};
                std::vector<MODE> bufModes{threadBufferCount};

                for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
                {
                    BufferInfo& bufInfo = bufInfos[bufferIndex];
                    const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
                    bufModes[bufferIndex] = mode;

                    VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
                    if(mode == MODE::PERSISTENTLY_MAPPED)
                        localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;

                    VmaAllocationInfo allocInfo;
                    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
                        &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
                    TEST(res == VK_SUCCESS);

                    // Publish the memory type for the TEST_POOL iteration.
                    if(memTypeIndex == UINT32_MAX)
                        memTypeIndex = allocInfo.memoryType;

                    char* data = nullptr;

                    if(mode == MODE::PERSISTENTLY_MAPPED)
                    {
                        data = (char*)allocInfo.pMappedData;
                        TEST(data != nullptr);
                    }
                    else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
                        mode == MODE::MAP_TWO_TIMES)
                    {
                        TEST(data == nullptr);
                        res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
                        TEST(res == VK_SUCCESS && data != nullptr);

                        if(mode == MODE::MAP_TWO_TIMES)
                        {
                            // Second map must return the same pointer.
                            char* data2 = nullptr;
                            res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
                            TEST(res == VK_SUCCESS && data2 == data);
                        }
                    }
                    else if(mode == MODE::DONT_MAP)
                    {
                        TEST(allocInfo.pMappedData == nullptr);
                    }
                    else
                        TEST(0);

                    // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
                    if(data)
                        data[0xFFFF] = data[0];

                    if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
                    {
                        vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);

                        // After one unmap: MAP_FOR_MOMENT is fully unmapped,
                        // MAP_TWO_TIMES still holds one map reference.
                        VmaAllocationInfo allocInfo;
                        vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
                        if(mode == MODE::MAP_FOR_MOMENT)
                            TEST(allocInfo.pMappedData == nullptr);
                        else
                            TEST(allocInfo.pMappedData == data);
                    }

                    // Randomly yield/sleep to shuffle thread interleavings.
                    switch(rand.Generate() % 3)
                    {
                    case 0: Sleep(0); break; // Yield.
                    case 1: Sleep(10); break; // 10 ms
                    // default: No sleep.
                    }

                    // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
                    if(data)
                        data[0xFFFF] = data[0];
                }

                // Release remaining map references, then destroy in reverse order.
                for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
                {
                    if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
                        bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
                    {
                        vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);

                        VmaAllocationInfo allocInfo;
                        vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
                        TEST(allocInfo.pMappedData == nullptr);
                    }

                    vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
                }
            });
        }

        for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
            threads[threadIndex].join();

        vmaDestroyPool(g_hAllocator, pool);
    }
}
5402
// Writes the CSV header row for main-test results to `file`.
// The column order must stay in sync with WriteMainTestResult().
static void WriteMainTestResultHeader(FILE* file)
{
    static const char* const HEADER =
        "Code,Time,"
        "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Total Memory Allocated (B),"
        "Free Range Size Avg (B),"
        "Free Range Size Max (B)\n";
    fputs(HEADER, file);
}
5419
5420static void WriteMainTestResult(
5421 FILE* file,
5422 const char* codeDescription,
5423 const char* testDescription,
5424 const Config& config, const Result& result)
5425{
5426 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
5427 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
5428 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
5429 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
5430 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
5431 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
5432 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
5433
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005434 std::string currTime;
5435 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01005436
5437 fprintf(file,
5438 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01005439 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
5440 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005441 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02005442 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01005443 totalTimeSeconds * 1e6f,
5444 allocationTimeMinSeconds * 1e6f,
5445 allocationTimeAvgSeconds * 1e6f,
5446 allocationTimeMaxSeconds * 1e6f,
5447 deallocationTimeMinSeconds * 1e6f,
5448 deallocationTimeAvgSeconds * 1e6f,
5449 deallocationTimeMaxSeconds * 1e6f,
5450 result.TotalMemoryAllocated,
5451 result.FreeRangeSizeAvg,
5452 result.FreeRangeSizeMax);
5453}
5454
// Writes the CSV header row for pool-test results to `file`.
// The column order must stay in sync with WritePoolTestResult().
static void WritePoolTestResultHeader(FILE* file)
{
    static const char* const HEADER =
        "Code,Test,Time,"
        "Config,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Lost Allocation Count,"
        "Lost Allocation Total Size (B),"
        "Failed Allocation Count,"
        "Failed Allocation Total Size (B)\n";
    fputs(HEADER, file);
}
5472
5473static void WritePoolTestResult(
5474 FILE* file,
5475 const char* codeDescription,
5476 const char* testDescription,
5477 const PoolTestConfig& config,
5478 const PoolTestResult& result)
5479{
5480 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
5481 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
5482 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
5483 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
5484 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
5485 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
5486 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
5487
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005488 std::string currTime;
5489 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01005490
5491 fprintf(file,
5492 "%s,%s,%s,"
5493 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
5494 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
5495 // General
5496 codeDescription,
5497 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005498 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01005499 // Config
5500 config.ThreadCount,
5501 (unsigned long long)config.PoolSize,
5502 config.FrameCount,
5503 config.TotalItemCount,
5504 config.UsedItemCountMin,
5505 config.UsedItemCountMax,
5506 config.ItemsToMakeUnusedPercent,
5507 // Results
5508 totalTimeSeconds * 1e6f,
5509 allocationTimeMinSeconds * 1e6f,
5510 allocationTimeAvgSeconds * 1e6f,
5511 allocationTimeMaxSeconds * 1e6f,
5512 deallocationTimeMinSeconds * 1e6f,
5513 deallocationTimeAvgSeconds * 1e6f,
5514 deallocationTimeMaxSeconds * 1e6f,
5515 result.LostAllocationCount,
5516 result.LostAllocationTotalSize,
5517 result.FailedAllocationCount,
5518 result.FailedAllocationTotalSize);
5519}
5520
5521static void PerformCustomMainTest(FILE* file)
5522{
5523 Config config{};
5524 config.RandSeed = 65735476;
5525 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
5526 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
5527 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
5528 config.FreeOrder = FREE_ORDER::FORWARD;
5529 config.ThreadCount = 16;
5530 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02005531 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01005532
5533 // Buffers
5534 //config.AllocationSizes.push_back({4, 16, 1024});
5535 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
5536
5537 // Images
5538 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
5539 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
5540
5541 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
5542 config.AdditionalOperationCount = 1024;
5543
5544 Result result{};
5545 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005546 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01005547 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
5548}
5549
5550static void PerformCustomPoolTest(FILE* file)
5551{
5552 PoolTestConfig config;
5553 config.PoolSize = 100 * 1024 * 1024;
5554 config.RandSeed = 2345764;
5555 config.ThreadCount = 1;
5556 config.FrameCount = 200;
5557 config.ItemsToMakeUnusedPercent = 2;
5558
5559 AllocationSize allocSize = {};
5560 allocSize.BufferSizeMin = 1024;
5561 allocSize.BufferSizeMax = 1024 * 1024;
5562 allocSize.Probability = 1;
5563 config.AllocationSizes.push_back(allocSize);
5564
5565 allocSize.BufferSizeMin = 0;
5566 allocSize.BufferSizeMax = 0;
5567 allocSize.ImageSizeMin = 128;
5568 allocSize.ImageSizeMax = 1024;
5569 allocSize.Probability = 1;
5570 config.AllocationSizes.push_back(allocSize);
5571
5572 config.PoolSize = config.CalcAvgResourceSize() * 200;
5573 config.UsedItemCountMax = 160;
5574 config.TotalItemCount = config.UsedItemCountMax * 10;
5575 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
5576
Adam Sawickib8333fb2018-03-13 16:15:53 +01005577 PoolTestResult result = {};
5578 TestPool_Benchmark(result, config);
Adam Sawickib8333fb2018-03-13 16:15:53 +01005579
5580 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
5581}
5582
// Sweeps the full cartesian product of benchmark dimensions (thread count,
// buffers vs. images, small vs. large sizes, varying vs. constant sizes,
// initial allocation percentage, allocation strategy) and runs MainTest for
// each combination. Each run is written as one CSV row to `file` (skipped
// when `file` is null). The number of variants per dimension scales with the
// global ConfigType.
static void PerformMainTests(FILE* file)
{
    uint32_t repeatCount = 1;
    if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;

    // Single Config instance is reused and mutated across all loop iterations.
    Config config{};
    config.RandSeed = 65735476;
    config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
    config.FreeOrder = FREE_ORDER::FORWARD;

    // Number of threading configurations to sweep depends on global ConfigType.
    size_t threadCountCount = 1;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
    case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
    case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
    case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
    case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
    default: assert(0);
    }

    // Allocation strategies (best/worst/first fit) are swept in the innermost loop.
    const size_t strategyCount = GetAllocationStrategyCount();

    for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
    {
        // desc1..desc6 accumulate a human-readable description of the current
        // combination, used as the test name in the CSV output.
        std::string desc1;

        switch(threadCountIndex)
        {
        case 0:
            desc1 += "1_thread";
            config.ThreadCount = 1;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 1:
            desc1 += "16_threads+0%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 2:
            desc1 += "16_threads+50%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
            break;
        case 3:
            desc1 += "16_threads+100%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
            break;
        case 4:
            desc1 += "2_threads+0%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 5:
            desc1 += "2_threads+50%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
            break;
        case 6:
            desc1 += "2_threads+100%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
            break;
        default:
            assert(0);
        }

        // 0 = buffers, 1 = images, 2 = buffers and images
        size_t buffersVsImagesCount = 2;
        if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
        for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
        {
            std::string desc2 = desc1;
            switch(buffersVsImagesIndex)
            {
            case 0: desc2 += ",Buffers"; break;
            case 1: desc2 += ",Images"; break;
            case 2: desc2 += ",Buffers+Images"; break;
            default: assert(0);
            }

            // 0 = small, 1 = large, 2 = small and large
            size_t smallVsLargeCount = 2;
            if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
            for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
            {
                std::string desc3 = desc2;
                switch(smallVsLargeIndex)
                {
                case 0: desc3 += ",Small"; break;
                case 1: desc3 += ",Large"; break;
                case 2: desc3 += ",Small+Large"; break;
                default: assert(0);
                }

                // Large allocations get a 4 GB budget, small ones only 4 MB.
                if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                    config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
                else
                    config.MaxBytesToAllocate = 4ull * 1024 * 1024;

                // 0 = varying sizes min...max, 1 = set of constant sizes
                size_t constantSizesCount = 1;
                if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
                for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
                {
                    std::string desc4 = desc3;
                    switch(constantSizesIndex)
                    {
                    case 0: desc4 += " Varying_sizes"; break;
                    case 1: desc4 += " Constant_sizes"; break;
                    default: assert(0);
                    }

                    // Rebuild the size classes for the current combination.
                    config.AllocationSizes.clear();
                    // Buffers present
                    if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 16, 1024});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 16, 16});
                                config.AllocationSizes.push_back({1, 64, 64});
                                config.AllocationSizes.push_back({1, 256, 256});
                                config.AllocationSizes.push_back({1, 1024, 1024});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0x10000, 0x10000});
                                config.AllocationSizes.push_back({1, 0x80000, 0x80000});
                                config.AllocationSizes.push_back({1, 0x200000, 0x200000});
                                config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
                            }
                        }
                    }
                    // Images present
                    if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 4, 32});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 4, 4});
                                config.AllocationSizes.push_back({1, 0, 0, 8, 8});
                                config.AllocationSizes.push_back({1, 0, 0, 16, 16});
                                config.AllocationSizes.push_back({1, 0, 0, 32, 32});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 256, 256});
                                config.AllocationSizes.push_back({1, 0, 0, 512, 512});
                                config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
                                config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
                            }
                        }
                    }

                    // 0 = 100%, additional_operations = 0, 1 = 50%, 2 = 5%, 3 = 95% additional_operations = a lot
                    size_t beginBytesToAllocateCount = 1;
                    if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
                    for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
                    {
                        std::string desc5 = desc4;

                        switch(beginBytesToAllocateIndex)
                        {
                        case 0:
                            desc5 += ",Allocate_100%";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate;
                            config.AdditionalOperationCount = 0;
                            break;
                        case 1:
                            desc5 += ",Allocate_50%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 2:
                            desc5 += ",Allocate_5%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 3:
                            desc5 += ",Allocate_95%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        default:
                            assert(0);
                        }

                        for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
                        {
                            std::string desc6 = desc5;
                            switch(strategyIndex)
                            {
                            case 0:
                                desc6 += ",BestFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
                                break;
                            case 1:
                                desc6 += ",WorstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
                                break;
                            case 2:
                                desc6 += ",FirstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
                                break;
                            default:
                                assert(0);
                            }

                            desc6 += ',';
                            desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];

                            const char* testDescription = desc6.c_str();

                            // Run the fully-configured test, repeating for stability
                            // at the highest ConfigType.
                            for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                            {
                                printf("%s #%u\n", testDescription, (uint32_t)repeat);

                                Result result{};
                                VkResult res = MainTest(result, config);
                                TEST(res == VK_SUCCESS);
                                if(file)
                                {
                                    WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
5847
5848static void PerformPoolTests(FILE* file)
5849{
5850 const size_t AVG_RESOURCES_PER_POOL = 300;
5851
5852 uint32_t repeatCount = 1;
5853 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
5854
5855 PoolTestConfig config{};
5856 config.RandSeed = 2346343;
5857 config.FrameCount = 200;
5858 config.ItemsToMakeUnusedPercent = 2;
5859
5860 size_t threadCountCount = 1;
5861 switch(ConfigType)
5862 {
5863 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
5864 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
5865 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
5866 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
5867 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
5868 default: assert(0);
5869 }
5870 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
5871 {
5872 std::string desc1;
5873
5874 switch(threadCountIndex)
5875 {
5876 case 0:
5877 desc1 += "1_thread";
5878 config.ThreadCount = 1;
5879 break;
5880 case 1:
5881 desc1 += "16_threads";
5882 config.ThreadCount = 16;
5883 break;
5884 case 2:
5885 desc1 += "2_threads";
5886 config.ThreadCount = 2;
5887 break;
5888 default:
5889 assert(0);
5890 }
5891
5892 // 0 = buffers, 1 = images, 2 = buffers and images
5893 size_t buffersVsImagesCount = 2;
5894 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
5895 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
5896 {
5897 std::string desc2 = desc1;
5898 switch(buffersVsImagesIndex)
5899 {
5900 case 0: desc2 += " Buffers"; break;
5901 case 1: desc2 += " Images"; break;
5902 case 2: desc2 += " Buffers+Images"; break;
5903 default: assert(0);
5904 }
5905
5906 // 0 = small, 1 = large, 2 = small and large
5907 size_t smallVsLargeCount = 2;
5908 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
5909 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
5910 {
5911 std::string desc3 = desc2;
5912 switch(smallVsLargeIndex)
5913 {
5914 case 0: desc3 += " Small"; break;
5915 case 1: desc3 += " Large"; break;
5916 case 2: desc3 += " Small+Large"; break;
5917 default: assert(0);
5918 }
5919
5920 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5921 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
5922 else
5923 config.PoolSize = 4ull * 1024 * 1024;
5924
5925 // 0 = varying sizes min...max, 1 = set of constant sizes
5926 size_t constantSizesCount = 1;
5927 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
5928 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
5929 {
5930 std::string desc4 = desc3;
5931 switch(constantSizesIndex)
5932 {
5933 case 0: desc4 += " Varying_sizes"; break;
5934 case 1: desc4 += " Constant_sizes"; break;
5935 default: assert(0);
5936 }
5937
5938 config.AllocationSizes.clear();
5939 // Buffers present
5940 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
5941 {
5942 // Small
5943 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
5944 {
5945 // Varying size
5946 if(constantSizesIndex == 0)
5947 config.AllocationSizes.push_back({4, 16, 1024});
5948 // Constant sizes
5949 else
5950 {
5951 config.AllocationSizes.push_back({1, 16, 16});
5952 config.AllocationSizes.push_back({1, 64, 64});
5953 config.AllocationSizes.push_back({1, 256, 256});
5954 config.AllocationSizes.push_back({1, 1024, 1024});
5955 }
5956 }
5957 // Large
5958 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5959 {
5960 // Varying size
5961 if(constantSizesIndex == 0)
5962 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
5963 // Constant sizes
5964 else
5965 {
5966 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
5967 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
5968 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
5969 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
5970 }
5971 }
5972 }
5973 // Images present
5974 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
5975 {
5976 // Small
5977 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
5978 {
5979 // Varying size
5980 if(constantSizesIndex == 0)
5981 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
5982 // Constant sizes
5983 else
5984 {
5985 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
5986 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
5987 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
5988 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
5989 }
5990 }
5991 // Large
5992 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5993 {
5994 // Varying size
5995 if(constantSizesIndex == 0)
5996 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
5997 // Constant sizes
5998 else
5999 {
6000 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
6001 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
6002 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
6003 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
6004 }
6005 }
6006 }
6007
6008 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
6009 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
6010
6011 // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
6012 size_t subscriptionModeCount;
6013 switch(ConfigType)
6014 {
6015 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
6016 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
6017 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
6018 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
6019 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
6020 default: assert(0);
6021 }
6022 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
6023 {
6024 std::string desc5 = desc4;
6025
6026 switch(subscriptionModeIndex)
6027 {
6028 case 0:
6029 desc5 += " Subscription_66%";
6030 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
6031 break;
6032 case 1:
6033 desc5 += " Subscription_133%";
6034 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
6035 break;
6036 case 2:
6037 desc5 += " Subscription_100%";
6038 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
6039 break;
6040 case 3:
6041 desc5 += " Subscription_33%";
6042 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
6043 break;
6044 case 4:
6045 desc5 += " Subscription_166%";
6046 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
6047 break;
6048 default:
6049 assert(0);
6050 }
6051
6052 config.TotalItemCount = config.UsedItemCountMax * 5;
6053 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
6054
6055 const char* testDescription = desc5.c_str();
6056
6057 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
6058 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02006059 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01006060
6061 PoolTestResult result{};
Adam Sawickib8333fb2018-03-13 16:15:53 +01006062 TestPool_Benchmark(result, config);
Adam Sawickib8333fb2018-03-13 16:15:53 +01006063 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
6064 }
6065 }
6066 }
6067 }
6068 }
6069 }
6070}
6071
Adam Sawickia83793a2018-09-03 13:40:42 +02006072static void BasicTestBuddyAllocator()
6073{
6074 wprintf(L"Basic test buddy allocator\n");
6075
6076 RandomNumberGenerator rand{76543};
6077
6078 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
6079 sampleBufCreateInfo.size = 1024; // Whatever.
6080 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
6081
6082 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
6083 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
6084
6085 VmaPoolCreateInfo poolCreateInfo = {};
6086 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006087 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02006088
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02006089 // Deliberately adding 1023 to test usable size smaller than memory block size.
6090 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02006091 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02006092 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02006093
6094 VmaPool pool = nullptr;
6095 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006096 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02006097
6098 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
6099
6100 VmaAllocationCreateInfo allocCreateInfo = {};
6101 allocCreateInfo.pool = pool;
6102
6103 std::vector<BufferInfo> bufInfo;
6104 BufferInfo newBufInfo;
6105 VmaAllocationInfo allocInfo;
6106
6107 bufCreateInfo.size = 1024 * 256;
6108 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
6109 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006110 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02006111 bufInfo.push_back(newBufInfo);
6112
6113 bufCreateInfo.size = 1024 * 512;
6114 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
6115 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006116 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02006117 bufInfo.push_back(newBufInfo);
6118
6119 bufCreateInfo.size = 1024 * 128;
6120 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
6121 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006122 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02006123 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02006124
6125 // Test very small allocation, smaller than minimum node size.
6126 bufCreateInfo.size = 1;
6127 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
6128 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006129 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02006130 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02006131
Adam Sawicki9933c5c2018-09-21 14:57:24 +02006132 // Test some small allocation with alignment requirement.
6133 {
6134 VkMemoryRequirements memReq;
6135 memReq.alignment = 256;
6136 memReq.memoryTypeBits = UINT32_MAX;
6137 memReq.size = 32;
6138
6139 newBufInfo.Buffer = VK_NULL_HANDLE;
6140 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
6141 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006142 TEST(res == VK_SUCCESS);
6143 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02006144 bufInfo.push_back(newBufInfo);
6145 }
6146
6147 //SaveAllocatorStatsToFile(L"TEST.json");
6148
Adam Sawicki21017c62018-09-07 15:26:59 +02006149 VmaPoolStats stats = {};
6150 vmaGetPoolStats(g_hAllocator, pool, &stats);
6151 int DBG = 0; // Set breakpoint here to inspect `stats`.
6152
Adam Sawicki80927152018-09-07 17:27:23 +02006153 // Allocate enough new buffers to surely fall into second block.
6154 for(uint32_t i = 0; i < 32; ++i)
6155 {
6156 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
6157 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
6158 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006159 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02006160 bufInfo.push_back(newBufInfo);
6161 }
6162
6163 SaveAllocatorStatsToFile(L"BuddyTest01.json");
6164
Adam Sawickia83793a2018-09-03 13:40:42 +02006165 // Destroy the buffers in random order.
6166 while(!bufInfo.empty())
6167 {
6168 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
6169 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
6170 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
6171 bufInfo.erase(bufInfo.begin() + indexToDestroy);
6172 }
6173
6174 vmaDestroyPool(g_hAllocator, pool);
6175}
6176
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006177static void BasicTestAllocatePages()
6178{
6179 wprintf(L"Basic test allocate pages\n");
6180
6181 RandomNumberGenerator rand{765461};
6182
6183 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
6184 sampleBufCreateInfo.size = 1024; // Whatever.
6185 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
6186
6187 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
6188 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
6189
6190 VmaPoolCreateInfo poolCreateInfo = {};
6191 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickia7d77692018-10-03 16:15:27 +02006192 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006193
6194 // 1 block of 1 MB.
6195 poolCreateInfo.blockSize = 1024 * 1024;
6196 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
6197
6198 // Create pool.
6199 VmaPool pool = nullptr;
6200 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickia7d77692018-10-03 16:15:27 +02006201 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006202
6203 // Make 100 allocations of 4 KB - they should fit into the pool.
6204 VkMemoryRequirements memReq;
6205 memReq.memoryTypeBits = UINT32_MAX;
6206 memReq.alignment = 4 * 1024;
6207 memReq.size = 4 * 1024;
6208
6209 VmaAllocationCreateInfo allocCreateInfo = {};
6210 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
6211 allocCreateInfo.pool = pool;
6212
6213 constexpr uint32_t allocCount = 100;
6214
6215 std::vector<VmaAllocation> alloc{allocCount};
6216 std::vector<VmaAllocationInfo> allocInfo{allocCount};
6217 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02006218 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006219 for(uint32_t i = 0; i < allocCount; ++i)
6220 {
Adam Sawickia7d77692018-10-03 16:15:27 +02006221 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006222 allocInfo[i].pMappedData != nullptr &&
6223 allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
6224 allocInfo[i].memoryType == allocInfo[0].memoryType);
6225 }
6226
6227 // Free the allocations.
6228 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
6229 std::fill(alloc.begin(), alloc.end(), nullptr);
6230 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
6231
6232 // Try to make 100 allocations of 100 KB. This call should fail due to not enough memory.
6233 // Also test optional allocationInfo = null.
6234 memReq.size = 100 * 1024;
6235 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
Adam Sawickia7d77692018-10-03 16:15:27 +02006236 TEST(res != VK_SUCCESS);
6237 TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006238
6239 // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
6240 memReq.size = 4 * 1024;
6241 memReq.alignment = 128 * 1024;
6242 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02006243 TEST(res != VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006244
6245 // Make 100 dedicated allocations of 4 KB.
6246 memReq.alignment = 4 * 1024;
6247 memReq.size = 4 * 1024;
6248
6249 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
6250 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
6251 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
6252 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02006253 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006254 for(uint32_t i = 0; i < allocCount; ++i)
6255 {
Adam Sawickia7d77692018-10-03 16:15:27 +02006256 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006257 allocInfo[i].pMappedData != nullptr &&
6258 allocInfo[i].memoryType == allocInfo[0].memoryType &&
6259 allocInfo[i].offset == 0);
6260 if(i > 0)
6261 {
Adam Sawickia7d77692018-10-03 16:15:27 +02006262 TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006263 }
6264 }
6265
6266 // Free the allocations.
6267 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
6268 std::fill(alloc.begin(), alloc.end(), nullptr);
6269 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
6270
6271 vmaDestroyPool(g_hAllocator, pool);
6272}
6273
Adam Sawickif2975342018-10-16 13:49:02 +02006274// Test the testing environment.
6275static void TestGpuData()
6276{
6277 RandomNumberGenerator rand = { 53434 };
6278
6279 std::vector<AllocInfo> allocInfo;
6280
6281 for(size_t i = 0; i < 100; ++i)
6282 {
6283 AllocInfo info = {};
6284
6285 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
6286 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
6287 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
6288 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
6289 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
6290
6291 VmaAllocationCreateInfo allocCreateInfo = {};
6292 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
6293
6294 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
6295 TEST(res == VK_SUCCESS);
6296
6297 info.m_StartValue = rand.Generate();
6298
6299 allocInfo.push_back(std::move(info));
6300 }
6301
6302 UploadGpuData(allocInfo.data(), allocInfo.size());
6303
6304 ValidateGpuData(allocInfo.data(), allocInfo.size());
6305
6306 DestroyAllAllocations(allocInfo);
6307}
6308
// Runs the whole test suite: quick functional tests first, then benchmark
// suites whose results are written to CSV files (Algorithms.csv, Results.csv).
// Some tests are compiled in/out depending on VMA_DEBUG_* configuration macros.
void Test()
{
    wprintf(L"TESTING:\n");

    // Scratch area: flip the condition to true to run only temporarily
    // inserted custom tests and skip the rest of the suite.
    if(false)
    {
        ////////////////////////////////////////////////////////////////////////////////
        // Temporarily insert custom tests here:
        return;
    }

    // # Simple tests

    TestBasics();
    TestAllocationVersusResourceSize();
    //TestGpuData(); // Not calling this because it's just testing the testing environment.
#if VMA_DEBUG_MARGIN
    TestDebugMargin();
#else
    // These tests rely on exact block sizes, so they run only without debug margin.
    TestPool_SameSize();
    TestPool_MinBlockCount();
    TestHeapSizeLimit();
#endif
#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
    TestAllocationsInitialization();
#endif
    TestMemoryUsage();
    TestDeviceCoherentMemory();
    TestBudget();
    TestMapping();
    TestDeviceLocalMapped();
    TestMappingMultithreaded();
    TestLinearAllocator();
    ManuallyTestLinearAllocator();
    TestLinearAllocatorMultiBlock();

    BasicTestBuddyAllocator();
    BasicTestAllocatePages();

    if(g_BufferDeviceAddressEnabled)
        TestBufferDeviceAddress();

    // Benchmark of allocation algorithms, results written to Algorithms.csv.
    {
        FILE* file;
        fopen_s(&file, "Algorithms.csv", "w");
        // NOTE(review): in a release build this assert compiles out and a failed
        // fopen_s would pass a null FILE* on - confirm whether that is acceptable.
        assert(file != NULL);
        BenchmarkAlgorithms(file);
        fclose(file);
    }

    TestDefragmentationSimple();
    TestDefragmentationFull();
    TestDefragmentationWholePool();
    TestDefragmentationGpu();
    TestDefragmentationIncrementalBasic();
    TestDefragmentationIncrementalComplex();

    // # Detailed tests
    // Main and pool benchmark sweeps, results written to Results.csv.
    FILE* file;
    fopen_s(&file, "Results.csv", "w");
    assert(file != NULL);

    WriteMainTestResultHeader(file);
    PerformMainTests(file);
    //PerformCustomMainTest(file);

    WritePoolTestResultHeader(file);
    PerformPoolTests(file);
    //PerformCustomPoolTest(file);

    fclose(file);

    wprintf(L"Done.\n");
}
6383
Adam Sawickif1a793c2018-03-13 15:42:22 +01006384#endif // #ifdef _WIN32