#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <thread>
#include <mutex>

#ifdef _WIN32

static const char* CODE_DESCRIPTION = "Foo";

extern VkCommandBuffer g_hTemporaryCommandBuffer;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;

enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};

// Copy of internal VmaAlgorithmToStr.
static const char* AlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        assert(0);
        return "";
    }
}

struct AllocationSize
{
    uint32_t Probability;
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};

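// Parameters of a single MainTest() run: random seed, allocation budget, thread count, free order, and allocation strategy.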
struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate;
    uint32_t AdditionalOperationCount;
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};

struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated;
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};

void TestDefragmentationSimple();
void TestDefragmentationFull();

struct PoolTestConfig
{
    uint32_t RandSeed;
    uint32_t ThreadCount;
    VkDeviceSize PoolSize;
    uint32_t FrameCount;
    uint32_t TotalItemCount;
    // Range for number of items used in each frame.
    uint32_t UsedItemCountMin, UsedItemCountMax;
    // Percent of items to make unused, and possibly make some others used in each frame.
    uint32_t ItemsToMakeUnusedPercent;
    std::vector<AllocationSize> AllocationSizes;

    VkDeviceSize CalcAvgResourceSize() const
    {
        uint32_t probabilitySum = 0;
        VkDeviceSize sizeSum = 0;
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
        {
            const AllocationSize& allocSize = AllocationSizes[i];
            if(allocSize.BufferSizeMax > 0)
                sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
            else
            {
                const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
                sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
            }
            probabilitySum += allocSize.Probability;
        }
        return sizeSum / probabilitySum;
    }

    bool UsesBuffers() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].BufferSizeMax > 0)
                return true;
        return false;
    }

    bool UsesImages() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].ImageSizeMax > 0)
                return true;
        return false;
    }
};

struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

static uint32_t g_FrameIndex = 0;

struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};

static uint32_t GetAllocationStrategyCount()
{
    uint32_t strategyCount = 0;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
    case CONFIG_TYPE_SMALL: strategyCount = 1; break;
    case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
    case CONFIG_TYPE_LARGE: strategyCount = 2; break;
    case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
    default: assert(0);
    }
    return strategyCount;
}

static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
{
    switch(allocStrategy)
    {
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
    case 0: return "Default"; break;
    default: assert(0); return "";
    }
}

static void InitResult(Result& outResult)
{
    outResult.TotalTime = duration::zero();
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.TotalMemoryAllocated = 0;
    outResult.FreeRangeSizeAvg = 0;
    outResult.FreeRangeSizeMax = 0;
}

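// RAII timer: accumulates the time between construction and destruction into the given min/sum/max counters.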
class TimeRegisterObj
{
public:
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    ~TimeRegisterObj()
    {
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg;
};

struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};

class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};

class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};

class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};

static void CurrentTimeToStr(std::string& out)
{
    time_t rawTime; time(&rawTime);
    struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
    char timeStr[128];
    strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
    out = timeStr;
}

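// Multithreaded allocation stress test: each thread creates random buffers and images up to the configured limits,
// then frees them in the configured order. Timings and memory statistics are gathered into outResult.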
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            TEST(0);
        }
        return res;
    };

    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait until all threads have reached their maximum number of allocations.
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate
    SetEvent(threadsFinishEvent);

    // Wait for threads to finish.
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}

static void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}

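// A test resource (buffer or image) together with its VMA allocation and the start value of the data pattern written to it.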
struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    uint32_t m_StartValue = 0;
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void Destroy();
};

void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}

void AllocInfo::Destroy()
{
    if(m_Image)
    {
        vkDestroyImage(g_hDevice, m_Image, nullptr);
    }
    if(m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, m_Buffer, nullptr);
    }
    if(m_Allocation)
    {
        vmaFreeMemory(g_hAllocator, m_Allocation);
    }
}

class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr;
        bool Used = false;
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};

StagingBufferCollection::~StagingBufferCollection()
{
    for(size_t i = m_Bufs.size(); i--; )
    {
        vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
    }
}

bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // There are some unused but smaller buffers: Free them and try again.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}

void StagingBufferCollection::ReleaseAllBuffers()
{
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        m_Bufs[i].Used = false;
    }
}

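// Fills each destination buffer with its sequential test pattern via staging buffers and GPU copy commands.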
static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}

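// Copies each buffer back to staging memory and checks that it still contains its sequential test pattern.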
static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    size_t validateAllocIndexOffset = 0;
    std::vector<void*> validateStagingBuffers;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                cmdBufferStarted = false;

                for(size_t validateIndex = 0;
                    validateIndex < validateStagingBuffers.size();
                    ++validateIndex)
                {
                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
                    TEST(validateSize % sizeof(uint32_t) == 0);
                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
                    bool valid = true;
                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
                    {
                        if(*stagingValPtr != val)
                        {
                            valid = false;
                            break;
                        }
                        ++stagingValPtr;
                        ++val;
                    }
                    TEST(valid);
                }

                stagingBufs.ReleaseAllBuffers();

                validateAllocIndexOffset = allocInfoIndex;
                validateStagingBuffers.clear();

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Issue copy command from destination buffer to staging buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);

            // Save mapped pointer for later validation.
            validateStagingBuffers.push_back(stagingBufMappedPtr);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();

        for(size_t validateIndex = 0;
            validateIndex < validateStagingBuffers.size();
            ++validateIndex)
        {
            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
            TEST(validateSize % sizeof(uint32_t) == 0);
            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
            bool valid = true;
            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
            {
                if(*stagingValPtr != val)
                {
                    valid = false;
                    break;
                }
                ++stagingValPtr;
                ++val;
            }
            TEST(valid);
        }

        stagingBufs.ReleaseAllBuffers();
    }
}

static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
{
    outMemReq = {};
    outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
    //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
}

static void CreateBuffer(
    VmaPool pool,
    const VkBufferCreateInfo& bufCreateInfo,
    bool persistentlyMapped,
    AllocInfo& outAllocInfo)
{
    outAllocInfo = {};
    outAllocInfo.m_BufferInfo = bufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    if(persistentlyMapped)
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VmaAllocationInfo vmaAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );

    // Setup StartValue and fill.
    {
        outAllocInfo.m_StartValue = (uint32_t)rand();
        uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
        TEST((data != nullptr) == persistentlyMapped);
        if(!persistentlyMapped)
        {
            ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
        }

        uint32_t value = outAllocInfo.m_StartValue;
        TEST(bufCreateInfo.size % 4 == 0);
        for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
            data[i] = value++;

        if(!persistentlyMapped)
            vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
    }
}

static void CreateAllocation(AllocInfo& outAllocation)
{
    outAllocation.m_Allocation = nullptr;
    outAllocation.m_Buffer = nullptr;
    outAllocation.m_Image = nullptr;
    outAllocation.m_StartValue = (uint32_t)rand();

    VmaAllocationCreateInfo vmaMemReq;
    GetMemReq(vmaMemReq);

    VmaAllocationInfo allocInfo;

    const bool isBuffer = true;//(rand() & 0x1) != 0;
    const bool isLarge = (rand() % 16) == 0;
    if(isBuffer)
    {
        const uint32_t bufferSize = isLarge ?
            (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
            (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB

        VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufferInfo.size = bufferSize;
        bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
    else
    {
        const uint32_t imageSizeX = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024
        const uint32_t imageSizeY = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024

        VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
        imageInfo.imageType = VK_IMAGE_TYPE_2D;
        imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
        imageInfo.extent.width = imageSizeX;
        imageInfo.extent.height = imageSizeY;
        imageInfo.extent.depth = 1;
        imageInfo.mipLevels = 1;
        imageInfo.arrayLayers = 1;
        imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
        imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
        data[i] = value++;

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
}

static void DestroyAllocation(const AllocInfo& allocation)
{
    if(allocation.m_Buffer)
        vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
    else
        vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
}

static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
{
    for(size_t i = allocations.size(); i--; )
        DestroyAllocation(allocations[i]);
    allocations.clear();
}

static void ValidateAllocationData(const AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = allocation.m_StartValue;
    bool ok = true;
    size_t i;
    TEST(allocInfo.size % 4 == 0);
    for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
    {
        if(data[i] != value++)
        {
            ok = false;
            break;
        }
    }
    TEST(ok);

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
}

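// Destroys the Vulkan buffer/image and recreates it bound to the allocation's current memory, e.g. after defragmentation has moved it.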
static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);

        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size == allocation.m_BufferInfo.size);

        res = vkBindBufferMemory(g_hDevice, allocation.m_Buffer, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vkBindImageMemory(g_hDevice, allocation.m_Image, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
}

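// Runs vmaDefragment() on the given allocations and recreates the resources of those that were moved.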
static void Defragment(AllocInfo* allocs, size_t allocCount,
    const VmaDefragmentationInfo* defragmentationInfo = nullptr,
    VmaDefragmentationStats* defragmentationStats = nullptr)
{
    std::vector<VmaAllocation> vmaAllocs(allocCount);
    for(size_t i = 0; i < allocCount; ++i)
        vmaAllocs[i] = allocs[i].m_Allocation;

    std::vector<VkBool32> allocChanged(allocCount);

    ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
        defragmentationInfo, defragmentationStats) );

    for(size_t i = 0; i < allocCount; ++i)
    {
        if(allocChanged[i])
        {
            RecreateAllocationResource(allocs[i]);
        }
    }
}

static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
{
    std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
        ValidateAllocationData(allocInfo);
    });
}

void TestDefragmentationSimple()
{
    wprintf(L"Test defragmentation simple\n");

    RandomNumberGenerator rand(667);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    const VkDeviceSize MIN_BUF_SIZE = 32;
    const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
    auto RandomBufSize = [&]() -> VkDeviceSize {
        return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
    };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaPool pool;
    ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

    std::vector<AllocInfo> allocations;

    // persistentlyMappedOption = 0 - not persistently mapped.
    // persistentlyMappedOption = 1 - persistently mapped.
    for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
    {
        wprintf(L"  Persistently mapped option = %u\n", persistentlyMappedOption);
        const bool persistentlyMapped = persistentlyMappedOption != 0;

        // # Test 1
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment everything.
        // Expected result: at least 1 block freed.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationStats defragStats;
            Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
            TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            TEST(defragStats.deviceMemoryBlocksFreed >= 1);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 2
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment one buffer at a time.
        // Expected result: Each of 4 iterations makes some progress.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationInfo defragInfo = {};
            defragInfo.maxAllocationsToMove = 1;
            defragInfo.maxBytesToMove = BUF_SIZE;

            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
            {
                VmaDefragmentationStats defragStats;
                Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
                TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            }

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 3
        // Buffers of variable size.
        // Create a number of buffers. Remove some percent of them.
        // Defragment while having some percent of them unmovable.
        // Expected result: Just simple validation.
        {
            for(size_t i = 0; i < 100; ++i)
            {
                VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
                localBufCreateInfo.size = RandomBufSize();

                AllocInfo allocInfo;
                CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            const uint32_t percentToDelete = 60;
            const size_t numberToDelete = allocations.size() * percentToDelete / 100;
            for(size_t i = 0; i < numberToDelete; ++i)
            {
                size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
                DestroyAllocation(allocations[indexToDelete]);
                allocations.erase(allocations.begin() + indexToDelete);
            }

            // Non-movable allocations will be at the beginning of allocations array.
            const uint32_t percentNonMovable = 20;
            const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
            for(size_t i = 0; i < numberNonMovable; ++i)
            {
                size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
                if(indexNonMovable != i)
                    std::swap(allocations[i], allocations[indexNonMovable]);
            }

            VmaDefragmentationStats defragStats;
            Defragment(
                allocations.data() + numberNonMovable,
                allocations.size() - numberNonMovable,
                nullptr, &defragStats);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }
    }

    vmaDestroyPool(g_hAllocator, pool);
}

void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    RecreateAllocationResource(allocations[i]);
                }
            }

            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}

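// Tests vmaDefragmentationBegin/vmaDefragmentationEnd with a command buffer: moves GPU-only buffers and validates their contents afterwards.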
static void TestDefragmentationGpu(uint32_t flags)
{
    const wchar_t* flagsName = L"0";
    switch(flags)
    {
    case VMA_DEFRAGMENTATION_FAST_ALGORITHM_BIT:
        flagsName = L"FAST";
        break;
    case VMA_DEFRAGMENTATION_OPTIMAL_ALGORITHM_BIT:
        flagsName = L"OPTIMAL";
        break;
    }

    wprintf(L"Test defragmentation GPU (%s)\n", flagsName);
    g_MemoryAliasingWarningEnabled = false;

    std::vector<AllocInfo> allocations;

    // Create enough allocations to be sure to fill 3 new blocks of 256 MB each.
    const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
    const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
    const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
    const size_t bufCount = (size_t)(totalSize / bufSizeMin);
    const size_t percentToLeave = 30;
    const size_t percentNonMovable = 3;
    RandomNumberGenerator rand = { 234522 };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = 0;

    // Create all intended buffers.
    for(size_t i = 0; i < bufCount; ++i)
    {
        bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);

        if(rand.Generate() % 100 < percentNonMovable)
        {
            bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
                VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            allocCreateInfo.pUserData = (void*)(uintptr_t)2;
        }
        else
        {
            // Different usage just to see different color in output from VmaDumpVis.
            bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
                VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            // And in JSON dump.
            allocCreateInfo.pUserData = (void*)(uintptr_t)1;
        }

        AllocInfo alloc;
        alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
        alloc.m_StartValue = rand.Generate();
        allocations.push_back(alloc);
    }

    // Destroy some percentage of them.
    {
        const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
        for(size_t i = 0; i < buffersToDestroy; ++i)
        {
            const size_t index = rand.Generate() % allocations.size();
            allocations[index].Destroy();
            allocations.erase(allocations.begin() + index);
        }
    }

    // Fill them with meaningful data.
    UploadGpuData(allocations.data(), allocations.size());

    wchar_t fileName[MAX_PATH];
    swprintf_s(fileName, L"GPU_defragmentation_%s_A_before.json", flagsName);
    SaveAllocatorStatsToFile(fileName);

    // Defragment using GPU only.
    {
        const size_t allocCount = allocations.size();

        std::vector<VmaAllocation> allocationPtrs;
        std::vector<VkBool32> allocationChanged;
        std::vector<size_t> allocationOriginalIndex;

        for(size_t i = 0; i < allocCount; ++i)
        {
            VmaAllocationInfo allocInfo = {};
            vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
            if((uintptr_t)allocInfo.pUserData == 1) // Movable
            {
                allocationPtrs.push_back(allocations[i].m_Allocation);
                allocationChanged.push_back(VK_FALSE);
                allocationOriginalIndex.push_back(i);
            }
        }

        const size_t movableAllocCount = allocationPtrs.size();

        BeginSingleTimeCommands();

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.flags = flags;
        defragInfo.allocationCount = (uint32_t)movableAllocCount;
        defragInfo.pAllocations = allocationPtrs.data();
        defragInfo.pAllocationsChanged = allocationChanged.data();
        defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
        defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
        defragInfo.commandBuffer = g_hTemporaryCommandBuffer;

        VmaDefragmentationStats stats = {};
        VmaDefragmentationContext ctx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
        TEST(res >= VK_SUCCESS);

        EndSingleTimeCommands();

        vmaDefragmentationEnd(g_hAllocator, ctx);

        for(size_t i = 0; i < movableAllocCount; ++i)
        {
            if(allocationChanged[i])
            {
                const size_t origAllocIndex = allocationOriginalIndex[i];
                RecreateAllocationResource(allocations[origAllocIndex]);
            }
        }

        TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
        TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
        TEST(stats.allocationsLost == 0);
    }

    ValidateGpuData(allocations.data(), allocations.size());

    swprintf_s(fileName, L"GPU_defragmentation_%s_B_after.json", flagsName);
    SaveAllocatorStatsToFile(fileName);

    // Destroy all remaining buffers.
    for(size_t i = allocations.size(); i--; )
    {
        allocations[i].Destroy();
    }

    g_MemoryAliasingWarningEnabled = true;
}

static void TestUserData()
{
    VkResult res;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
    bufCreateInfo.size = 0x10000;

    for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
    {
        // Opaque pointer
        {

            void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
            void* pointerToSomething = &res;

            VmaAllocationCreateInfo allocCreateInfo = {};
            allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
            allocCreateInfo.pUserData = numberAsPointer;
            if(testIndex == 1)
                allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

            VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
            TEST(res == VK_SUCCESS);
1622            TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001623
1624 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001625 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001626
1627 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1628 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001629 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001630
1631 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1632 }
1633
1634 // String
1635 {
1636 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1637 const char* name2 = "2";
1638 const size_t name1Len = strlen(name1);
1639
1640 char* name1Buf = new char[name1Len + 1];
1641 strcpy_s(name1Buf, name1Len + 1, name1);
1642
1643 VmaAllocationCreateInfo allocCreateInfo = {};
1644 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1645 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1646 allocCreateInfo.pUserData = name1Buf;
1647 if(testIndex == 1)
1648 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1649
1650 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1651 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001652 TEST(res == VK_SUCCESS);
1653 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1654 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001655
1656 delete[] name1Buf;
1657
1658 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001659 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001660
1661 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1662 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001663 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001664
1665 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1666 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001667 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001668
1669 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1670 }
1671 }
1672}
1673
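// Negative tests: a zero-size allocation, a zero-size buffer, and an image with a zero
// extent should all be rejected with VK_ERROR_VALIDATION_FAILED_EXT and must not
// return a valid handle.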
Adam Sawicki370ab182018-11-08 16:31:00 +01001674static void TestInvalidAllocations()
1675{
1676 VkResult res;
1677
1678 VmaAllocationCreateInfo allocCreateInfo = {};
1679 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1680
1681 // Try to allocate 0 bytes.
1682 {
1683 VkMemoryRequirements memReq = {};
1684 memReq.size = 0; // !!!
1685 memReq.alignment = 4;
1686 memReq.memoryTypeBits = UINT32_MAX;
1687 VmaAllocation alloc = VK_NULL_HANDLE;
1688 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1689 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1690 }
1691
1692 // Try to create buffer with size = 0.
1693 {
1694 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1695 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1696 bufCreateInfo.size = 0; // !!!
1697 VkBuffer buf = VK_NULL_HANDLE;
1698 VmaAllocation alloc = VK_NULL_HANDLE;
1699 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1700 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1701 }
1702
1703 // Try to create image with one dimension = 0.
1704 {
1705        VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1706 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1707 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1708 imageCreateInfo.extent.width = 128;
1709 imageCreateInfo.extent.height = 0; // !!!
1710 imageCreateInfo.extent.depth = 1;
1711 imageCreateInfo.mipLevels = 1;
1712 imageCreateInfo.arrayLayers = 1;
1713 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1714 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1715 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1716 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1717 VkImage image = VK_NULL_HANDLE;
1718 VmaAllocation alloc = VK_NULL_HANDLE;
1719 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1720 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1721 }
1722}
1723
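// Exercises the different ways of steering memory type selection in
// VmaAllocationCreateInfo: no requirements at all, usage only, requiredFlags plus
// preferredFlags, and an explicit memoryTypeBits mask.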
Adam Sawickib8333fb2018-03-13 16:15:53 +01001724static void TestMemoryRequirements()
1725{
1726 VkResult res;
1727 VkBuffer buf;
1728 VmaAllocation alloc;
1729 VmaAllocationInfo allocInfo;
1730
1731 const VkPhysicalDeviceMemoryProperties* memProps;
1732 vmaGetMemoryProperties(g_hAllocator, &memProps);
1733
1734 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1735 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1736 bufInfo.size = 128;
1737
1738 VmaAllocationCreateInfo allocCreateInfo = {};
1739
1740 // No requirements.
1741 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001742 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001743 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1744
1745 // Usage.
1746 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1747 allocCreateInfo.requiredFlags = 0;
1748 allocCreateInfo.preferredFlags = 0;
1749 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1750
1751 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001752 TEST(res == VK_SUCCESS);
1753 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001754 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1755
1756 // Required flags, preferred flags.
1757 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1758 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1759 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1760 allocCreateInfo.memoryTypeBits = 0;
1761
1762 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001763 TEST(res == VK_SUCCESS);
1764 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1765 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001766 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1767
1768 // memoryTypeBits.
1769 const uint32_t memType = allocInfo.memoryType;
1770 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1771 allocCreateInfo.requiredFlags = 0;
1772 allocCreateInfo.preferredFlags = 0;
1773 allocCreateInfo.memoryTypeBits = 1u << memType;
1774
1775 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001776 TEST(res == VK_SUCCESS);
1777 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001778 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1779
1780}
1781
1782static void TestBasics()
1783{
1784 VkResult res;
1785
1786 TestMemoryRequirements();
1787
1788 // Lost allocation
1789 {
1790 VmaAllocation alloc = VK_NULL_HANDLE;
1791 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001792 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001793
1794 VmaAllocationInfo allocInfo;
1795 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001796 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1797 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001798
1799 vmaFreeMemory(g_hAllocator, alloc);
1800 }
1801
1802 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1803 {
1804 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1805 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1806 bufCreateInfo.size = 128;
1807
1808 VmaAllocationCreateInfo allocCreateInfo = {};
1809 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1810 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1811
1812 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1813 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001814 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001815
1816 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1817
1818 // Same with OWN_MEMORY.
1819 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1820
1821 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001822 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001823
1824 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1825 }
1826
1827 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001828
1829 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001830}
1831
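// Creates a separate allocator with pHeapSizeLimit set to 1 GB for every heap,
// fills that budget with dedicated and pool allocations, and expects the next
// allocation to fail with VK_ERROR_OUT_OF_DEVICE_MEMORY.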
1832void TestHeapSizeLimit()
1833{
1834 const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
1835 const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB
1836
1837 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1838 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1839 {
1840 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1841 }
1842
1843 VmaAllocatorCreateInfo allocatorCreateInfo = {};
1844 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
1845 allocatorCreateInfo.device = g_hDevice;
1846 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
1847
1848 VmaAllocator hAllocator;
1849 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001850 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001851
1852 struct Item
1853 {
1854 VkBuffer hBuf;
1855 VmaAllocation hAlloc;
1856 };
1857 std::vector<Item> items;
1858
1859 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1860 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
1861
1862 // 1. Allocate two blocks of Own Memory, half the size of BLOCK_SIZE.
1863 VmaAllocationInfo ownAllocInfo;
1864 {
1865 VmaAllocationCreateInfo allocCreateInfo = {};
1866 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1867 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1868
1869 bufCreateInfo.size = BLOCK_SIZE / 2;
1870
1871 for(size_t i = 0; i < 2; ++i)
1872 {
1873 Item item;
1874 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001875 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001876 items.push_back(item);
1877 }
1878 }
1879
1880    // Create a pool to make sure further allocations are made from this same memory type.
1881 VmaPoolCreateInfo poolCreateInfo = {};
1882 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
1883 poolCreateInfo.blockSize = BLOCK_SIZE;
1884
1885 VmaPool hPool;
1886 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001887 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001888
1889 // 2. Allocate normal buffers from all the remaining memory.
1890 {
1891 VmaAllocationCreateInfo allocCreateInfo = {};
1892 allocCreateInfo.pool = hPool;
1893
1894 bufCreateInfo.size = BLOCK_SIZE / 2;
1895
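        // The two dedicated allocations above already consume one block's worth
        // (2 * 64 MB = 128 MB), so (HEAP_SIZE_LIMIT / BLOCK_SIZE - 1) = 7 blocks of
        // 128 MB still fit under the 1 GB limit; each block holds two half-block
        // buffers, hence the "* 2" below (14 buffers in total).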
1896 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
1897 for(size_t i = 0; i < bufCount; ++i)
1898 {
1899 Item item;
1900 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001901 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001902 items.push_back(item);
1903 }
1904 }
1905
1906 // 3. Allocation of one more (even small) buffer should fail.
1907 {
1908 VmaAllocationCreateInfo allocCreateInfo = {};
1909 allocCreateInfo.pool = hPool;
1910
1911 bufCreateInfo.size = 128;
1912
1913 VkBuffer hBuf;
1914 VmaAllocation hAlloc;
1915 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001916 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001917 }
1918
1919 // Destroy everything.
1920 for(size_t i = items.size(); i--; )
1921 {
1922 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
1923 }
1924
1925 vmaDestroyPool(hAllocator, hPool);
1926
1927 vmaDestroyAllocator(hAllocator);
1928}
1929
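// Only meaningful when the library is compiled with a non-zero VMA_DEBUG_MARGIN:
// every allocation should then be separated from its neighbors (and from the start
// of the block) by at least VMA_DEBUG_MARGIN bytes, and vmaCheckCorruption() is
// expected to return VK_SUCCESS as long as nothing writes into those margins
// (corruption detection must also be enabled in this build).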
Adam Sawicki212a4a62018-06-14 15:44:45 +02001930#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02001931static void TestDebugMargin()
1932{
1933 if(VMA_DEBUG_MARGIN == 0)
1934 {
1935 return;
1936 }
1937
1938 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02001939 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02001940
1941 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02001942 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02001943
1944    // Create a few buffers of different sizes.
1945 const size_t BUF_COUNT = 10;
1946 BufferInfo buffers[BUF_COUNT];
1947 VmaAllocationInfo allocInfo[BUF_COUNT];
1948    for(size_t i = 0; i < BUF_COUNT; ++i)
1949 {
1950 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02001951 // Last one will be mapped.
1952 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02001953
1954 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001955 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02001956        // Margin is also preserved at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02001957 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02001958
1959 if(i == BUF_COUNT - 1)
1960 {
1961 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02001962 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02001963 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
1964 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
1965 }
Adam Sawicki73b16652018-06-11 16:39:25 +02001966 }
1967
1968 // Check if their offsets preserve margin between them.
1969 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
1970 {
1971 if(lhs.deviceMemory != rhs.deviceMemory)
1972 {
1973 return lhs.deviceMemory < rhs.deviceMemory;
1974 }
1975 return lhs.offset < rhs.offset;
1976 });
1977 for(size_t i = 1; i < BUF_COUNT; ++i)
1978 {
1979 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
1980 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02001981 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02001982 }
1983 }
1984
Adam Sawicki212a4a62018-06-14 15:44:45 +02001985 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001986 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02001987
Adam Sawicki73b16652018-06-11 16:39:25 +02001988 // Destroy all buffers.
1989 for(size_t i = BUF_COUNT; i--; )
1990 {
1991 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
1992 }
1993}
Adam Sawicki212a4a62018-06-14 15:44:45 +02001994#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02001995
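// A pool created with VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT acts, depending on the
// allocation/free pattern, as a free-at-once arena, a stack, a double stack
// (UPPER_ADDRESS allocations grow down from the end of the block), or a ring buffer
// (optionally with lost allocations). The sub-tests below exercise each mode in turn.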
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001996static void TestLinearAllocator()
1997{
1998 wprintf(L"Test linear allocator\n");
1999
2000 RandomNumberGenerator rand{645332};
2001
2002 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2003 sampleBufCreateInfo.size = 1024; // Whatever.
2004 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2005
2006 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2007 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2008
2009 VmaPoolCreateInfo poolCreateInfo = {};
2010 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002011 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002012
Adam Sawickiee082772018-06-20 17:45:49 +02002013 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002014 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2015 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2016
2017 VmaPool pool = nullptr;
2018 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002019 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002020
2021 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2022
2023 VmaAllocationCreateInfo allocCreateInfo = {};
2024 allocCreateInfo.pool = pool;
2025
2026 constexpr size_t maxBufCount = 100;
2027 std::vector<BufferInfo> bufInfo;
2028
2029 constexpr VkDeviceSize bufSizeMin = 16;
2030 constexpr VkDeviceSize bufSizeMax = 1024;
2031 VmaAllocationInfo allocInfo;
2032 VkDeviceSize prevOffset = 0;
2033
2034 // Test one-time free.
2035 for(size_t i = 0; i < 2; ++i)
2036 {
2037        // Allocate a number of buffers of varying size that surely fit into this block.
2038 VkDeviceSize bufSumSize = 0;
2039 for(size_t i = 0; i < maxBufCount; ++i)
2040 {
2041 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2042 BufferInfo newBufInfo;
2043 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2044 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002045 TEST(res == VK_SUCCESS);
2046 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002047 bufInfo.push_back(newBufInfo);
2048 prevOffset = allocInfo.offset;
2049 bufSumSize += bufCreateInfo.size;
2050 }
2051
2052 // Validate pool stats.
2053 VmaPoolStats stats;
2054 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002055 TEST(stats.size == poolCreateInfo.blockSize);
2056        TEST(stats.unusedSize == poolCreateInfo.blockSize - bufSumSize);
2057 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002058
2059 // Destroy the buffers in random order.
2060 while(!bufInfo.empty())
2061 {
2062 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2063 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2064 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2065 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2066 }
2067 }
2068
2069 // Test stack.
2070 {
2071        // Allocate a number of buffers of varying size that surely fit into this block.
2072 for(size_t i = 0; i < maxBufCount; ++i)
2073 {
2074 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2075 BufferInfo newBufInfo;
2076 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2077 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002078 TEST(res == VK_SUCCESS);
2079 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002080 bufInfo.push_back(newBufInfo);
2081 prevOffset = allocInfo.offset;
2082 }
2083
2084        // Destroy a few buffers from the top of the stack.
2085 for(size_t i = 0; i < maxBufCount / 5; ++i)
2086 {
2087 const BufferInfo& currBufInfo = bufInfo.back();
2088 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2089 bufInfo.pop_back();
2090 }
2091
2092        // Create some more buffers.
2093 for(size_t i = 0; i < maxBufCount / 5; ++i)
2094 {
2095 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2096 BufferInfo newBufInfo;
2097 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2098 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002099 TEST(res == VK_SUCCESS);
2100 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002101 bufInfo.push_back(newBufInfo);
2102 prevOffset = allocInfo.offset;
2103 }
2104
2105 // Destroy the buffers in reverse order.
2106 while(!bufInfo.empty())
2107 {
2108 const BufferInfo& currBufInfo = bufInfo.back();
2109 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2110 bufInfo.pop_back();
2111 }
2112 }
2113
Adam Sawickiee082772018-06-20 17:45:49 +02002114 // Test ring buffer.
2115 {
2116        // Allocate a number of buffers that surely fit into this block.
2117 bufCreateInfo.size = bufSizeMax;
2118 for(size_t i = 0; i < maxBufCount; ++i)
2119 {
2120 BufferInfo newBufInfo;
2121 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2122 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002123 TEST(res == VK_SUCCESS);
2124 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002125 bufInfo.push_back(newBufInfo);
2126 prevOffset = allocInfo.offset;
2127 }
2128
2129        // Free and allocate new buffers enough times to make sure we wrap around at least once.
2130 const size_t buffersPerIter = maxBufCount / 10 - 1;
2131 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
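        // With the constants above: buffersPerIter = 100 / 10 - 1 = 9, and one block holds
        // blockSize / bufSize = 300 buffers, so iterCount = 300 / 9 * 2 = 66 (integer division).
        // Cycling 66 * 9 buffers through a 300-buffer block guarantees at least one wrap-around.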
2132 for(size_t iter = 0; iter < iterCount; ++iter)
2133 {
2134 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2135 {
2136 const BufferInfo& currBufInfo = bufInfo.front();
2137 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2138 bufInfo.erase(bufInfo.begin());
2139 }
2140 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2141 {
2142 BufferInfo newBufInfo;
2143 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2144 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002145 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002146 bufInfo.push_back(newBufInfo);
2147 }
2148 }
2149
2150 // Allocate buffers until we reach out-of-memory.
2151 uint32_t debugIndex = 0;
2152 while(res == VK_SUCCESS)
2153 {
2154 BufferInfo newBufInfo;
2155 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2156 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2157 if(res == VK_SUCCESS)
2158 {
2159 bufInfo.push_back(newBufInfo);
2160 }
2161 else
2162 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002163 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002164 }
2165 ++debugIndex;
2166 }
2167
2168 // Destroy the buffers in random order.
2169 while(!bufInfo.empty())
2170 {
2171 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2172 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2173 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2174 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2175 }
2176 }
2177
Adam Sawicki680b2252018-08-22 14:47:32 +02002178 // Test double stack.
2179 {
2180        // Allocate a number of buffers of varying size that surely fit into this block, alternating between bottom and top.
2181 VkDeviceSize prevOffsetLower = 0;
2182 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2183 for(size_t i = 0; i < maxBufCount; ++i)
2184 {
2185 const bool upperAddress = (i % 2) != 0;
2186 if(upperAddress)
2187 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2188 else
2189 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2190 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2191 BufferInfo newBufInfo;
2192 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2193 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002194 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002195 if(upperAddress)
2196 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002197 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002198 prevOffsetUpper = allocInfo.offset;
2199 }
2200 else
2201 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002202 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002203 prevOffsetLower = allocInfo.offset;
2204 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002205 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002206 bufInfo.push_back(newBufInfo);
2207 }
2208
2209        // Destroy a few buffers from the top of the stack.
2210 for(size_t i = 0; i < maxBufCount / 5; ++i)
2211 {
2212 const BufferInfo& currBufInfo = bufInfo.back();
2213 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2214 bufInfo.pop_back();
2215 }
2216
2217        // Create some more buffers.
2218 for(size_t i = 0; i < maxBufCount / 5; ++i)
2219 {
2220 const bool upperAddress = (i % 2) != 0;
2221 if(upperAddress)
2222 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2223 else
2224 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2225 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2226 BufferInfo newBufInfo;
2227 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2228 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002229 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002230 bufInfo.push_back(newBufInfo);
2231 }
2232
2233 // Destroy the buffers in reverse order.
2234 while(!bufInfo.empty())
2235 {
2236 const BufferInfo& currBufInfo = bufInfo.back();
2237 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2238 bufInfo.pop_back();
2239 }
2240
2241 // Create buffers on both sides until we reach out of memory.
2242 prevOffsetLower = 0;
2243 prevOffsetUpper = poolCreateInfo.blockSize;
2244 res = VK_SUCCESS;
2245 for(size_t i = 0; res == VK_SUCCESS; ++i)
2246 {
2247 const bool upperAddress = (i % 2) != 0;
2248 if(upperAddress)
2249 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2250 else
2251 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2252 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2253 BufferInfo newBufInfo;
2254 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2255 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2256 if(res == VK_SUCCESS)
2257 {
2258 if(upperAddress)
2259 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002260 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002261 prevOffsetUpper = allocInfo.offset;
2262 }
2263 else
2264 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002265 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002266 prevOffsetLower = allocInfo.offset;
2267 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002268 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002269 bufInfo.push_back(newBufInfo);
2270 }
2271 }
2272
2273 // Destroy the buffers in random order.
2274 while(!bufInfo.empty())
2275 {
2276 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2277 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2278 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2279 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2280 }
2281
2282        // Create constant-size buffers on the upper side only, until we reach out of memory.
2283 prevOffsetUpper = poolCreateInfo.blockSize;
2284 res = VK_SUCCESS;
2285 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2286 bufCreateInfo.size = bufSizeMax;
2287 for(size_t i = 0; res == VK_SUCCESS; ++i)
2288 {
2289 BufferInfo newBufInfo;
2290 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2291 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2292 if(res == VK_SUCCESS)
2293 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002294 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002295 prevOffsetUpper = allocInfo.offset;
2296 bufInfo.push_back(newBufInfo);
2297 }
2298 }
2299
2300 // Destroy the buffers in reverse order.
2301 while(!bufInfo.empty())
2302 {
2303 const BufferInfo& currBufInfo = bufInfo.back();
2304 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2305 bufInfo.pop_back();
2306 }
2307 }
2308
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002309 // Test ring buffer with lost allocations.
2310 {
2311        // Allocate a number of buffers until the pool is full.
2312        // Note the CAN_BECOME_LOST flag and the call to vmaSetCurrentFrameIndex.
2313 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2314 res = VK_SUCCESS;
2315 for(size_t i = 0; res == VK_SUCCESS; ++i)
2316 {
2317 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2318
2319 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2320
2321 BufferInfo newBufInfo;
2322 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2323 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2324 if(res == VK_SUCCESS)
2325 bufInfo.push_back(newBufInfo);
2326 }
2327
2328 // Free first half of it.
2329 {
2330 const size_t buffersToDelete = bufInfo.size() / 2;
2331 for(size_t i = 0; i < buffersToDelete; ++i)
2332 {
2333 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2334 }
2335 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2336 }
2337
2338        // Allocate a number of buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002339        // This way we make sure the ring buffer wraps around, with its front now in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002340 res = VK_SUCCESS;
2341 for(size_t i = 0; res == VK_SUCCESS; ++i)
2342 {
2343 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2344
2345 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2346
2347 BufferInfo newBufInfo;
2348 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2349 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2350 if(res == VK_SUCCESS)
2351 bufInfo.push_back(newBufInfo);
2352 }
2353
2354 VkDeviceSize firstNewOffset;
2355 {
2356 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2357
2358 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2359 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2360 bufCreateInfo.size = bufSizeMax;
2361
2362 BufferInfo newBufInfo;
2363 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2364 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002365 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002366 bufInfo.push_back(newBufInfo);
2367 firstNewOffset = allocInfo.offset;
2368
2369 // Make sure at least one buffer from the beginning became lost.
2370 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002371 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002372 }
2373
2374        // Allocate more buffers with CAN_MAKE_OTHER_LOST until we wrap around with these.
2375 size_t newCount = 1;
2376 for(;;)
2377 {
2378 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2379
2380 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2381
2382 BufferInfo newBufInfo;
2383 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2384 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002385 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002386 bufInfo.push_back(newBufInfo);
2387 ++newCount;
2388 if(allocInfo.offset < firstNewOffset)
2389 break;
2390 }
2391
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002392 // Delete buffers that are lost.
2393 for(size_t i = bufInfo.size(); i--; )
2394 {
2395 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2396 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2397 {
2398 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2399 bufInfo.erase(bufInfo.begin() + i);
2400 }
2401 }
2402
2403 // Test vmaMakePoolAllocationsLost
2404 {
2405 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2406
2407 size_t lostAllocCount = SIZE_MAX;
2408 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002409 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002410
2411 size_t realLostAllocCount = 0;
2412 for(size_t i = 0; i < bufInfo.size(); ++i)
2413 {
2414 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2415 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2416 ++realLostAllocCount;
2417 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002418 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002419 }
2420
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002421 // Destroy all the buffers in forward order.
2422 for(size_t i = 0; i < bufInfo.size(); ++i)
2423 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2424 bufInfo.clear();
2425 }
2426
Adam Sawicki70a683e2018-08-24 15:36:32 +02002427 vmaDestroyPool(g_hAllocator, pool);
2428}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002429
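// Same linear algorithm, but without limiting maxBlockCount: the pool is expected to
// grow to a second block when the first one fills up and to shrink back to at most
// one block once the buffers are freed.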
Adam Sawicki70a683e2018-08-24 15:36:32 +02002430static void TestLinearAllocatorMultiBlock()
2431{
2432 wprintf(L"Test linear allocator multi block\n");
2433
2434 RandomNumberGenerator rand{345673};
2435
2436 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2437 sampleBufCreateInfo.size = 1024 * 1024;
2438 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2439
2440 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2441 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2442
2443 VmaPoolCreateInfo poolCreateInfo = {};
2444 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2445 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002446 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002447
2448 VmaPool pool = nullptr;
2449 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002450 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002451
2452 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2453
2454 VmaAllocationCreateInfo allocCreateInfo = {};
2455 allocCreateInfo.pool = pool;
2456
2457 std::vector<BufferInfo> bufInfo;
2458 VmaAllocationInfo allocInfo;
2459
2460 // Test one-time free.
2461 {
2462 // Allocate buffers until we move to a second block.
2463 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2464 for(uint32_t i = 0; ; ++i)
2465 {
2466 BufferInfo newBufInfo;
2467 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2468 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002469 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002470 bufInfo.push_back(newBufInfo);
2471 if(lastMem && allocInfo.deviceMemory != lastMem)
2472 {
2473 break;
2474 }
2475 lastMem = allocInfo.deviceMemory;
2476 }
2477
Adam Sawickib8d34d52018-10-03 17:41:20 +02002478 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002479
2480        // Make sure that the pool now has two blocks.
2481 VmaPoolStats poolStats = {};
2482 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002483 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002484
2485 // Destroy all the buffers in random order.
2486 while(!bufInfo.empty())
2487 {
2488 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2489 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2490 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2491 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2492 }
2493
2494        // Make sure that the pool now has at most one block.
2495 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002496 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002497 }
2498
2499 // Test stack.
2500 {
2501 // Allocate buffers until we move to a second block.
2502 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2503 for(uint32_t i = 0; ; ++i)
2504 {
2505 BufferInfo newBufInfo;
2506 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2507 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002508 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002509 bufInfo.push_back(newBufInfo);
2510 if(lastMem && allocInfo.deviceMemory != lastMem)
2511 {
2512 break;
2513 }
2514 lastMem = allocInfo.deviceMemory;
2515 }
2516
Adam Sawickib8d34d52018-10-03 17:41:20 +02002517 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002518
2519        // Add a few more buffers.
2520 for(uint32_t i = 0; i < 5; ++i)
2521 {
2522 BufferInfo newBufInfo;
2523 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2524 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002525 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002526 bufInfo.push_back(newBufInfo);
2527 }
2528
2529        // Make sure that the pool now has two blocks.
2530 VmaPoolStats poolStats = {};
2531 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002532 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002533
2534        // Delete half of the buffers, LIFO.
2535 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2536 {
2537 const BufferInfo& currBufInfo = bufInfo.back();
2538 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2539 bufInfo.pop_back();
2540 }
2541
2542 // Add one more buffer.
2543 BufferInfo newBufInfo;
2544 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2545 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002546 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002547 bufInfo.push_back(newBufInfo);
2548
2549        // Make sure that the pool now has one block.
2550 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002551 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002552
2553 // Delete all the remaining buffers, LIFO.
2554 while(!bufInfo.empty())
2555 {
2556 const BufferInfo& currBufInfo = bufInfo.back();
2557 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2558 bufInfo.pop_back();
2559 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002560 }
2561
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002562 vmaDestroyPool(g_hAllocator, pool);
2563}
2564
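// Not an automated check: builds a small, known double-stack layout and gathers
// stats so they can be inspected in the debugger at the breakpoint marked below.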
Adam Sawickifd11d752018-08-22 15:02:10 +02002565static void ManuallyTestLinearAllocator()
2566{
2567 VmaStats origStats;
2568 vmaCalculateStats(g_hAllocator, &origStats);
2569
2570 wprintf(L"Manually test linear allocator\n");
2571
2572 RandomNumberGenerator rand{645332};
2573
2574 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2575 sampleBufCreateInfo.size = 1024; // Whatever.
2576 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2577
2578 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2579 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2580
2581 VmaPoolCreateInfo poolCreateInfo = {};
2582 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002583 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002584
2585 poolCreateInfo.blockSize = 10 * 1024;
2586 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2587 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2588
2589 VmaPool pool = nullptr;
2590 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002591 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002592
2593 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2594
2595 VmaAllocationCreateInfo allocCreateInfo = {};
2596 allocCreateInfo.pool = pool;
2597
2598 std::vector<BufferInfo> bufInfo;
2599 VmaAllocationInfo allocInfo;
2600 BufferInfo newBufInfo;
2601
2602 // Test double stack.
2603 {
2604 /*
2605 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2606 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2607
2608        In total:
2609 1 block allocated
2610 10240 Vulkan bytes
2611 6 new allocations
2612 2256 bytes in allocations
2613 */
2614
2615 bufCreateInfo.size = 32;
2616 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2617 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002618 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002619 bufInfo.push_back(newBufInfo);
2620
2621 bufCreateInfo.size = 1024;
2622 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2623 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002624 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002625 bufInfo.push_back(newBufInfo);
2626
2627 bufCreateInfo.size = 32;
2628 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2629 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002630 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002631 bufInfo.push_back(newBufInfo);
2632
2633 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2634
2635 bufCreateInfo.size = 128;
2636 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2637 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002638 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002639 bufInfo.push_back(newBufInfo);
2640
2641 bufCreateInfo.size = 1024;
2642 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2643 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002644 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002645 bufInfo.push_back(newBufInfo);
2646
2647 bufCreateInfo.size = 16;
2648 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2649 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002650 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002651 bufInfo.push_back(newBufInfo);
2652
2653 VmaStats currStats;
2654 vmaCalculateStats(g_hAllocator, &currStats);
2655 VmaPoolStats poolStats;
2656 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2657
2658 char* statsStr = nullptr;
2659 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2660
2661 // PUT BREAKPOINT HERE TO CHECK.
2662 // Inspect: currStats versus origStats, poolStats, statsStr.
2663 int I = 0;
2664
2665 vmaFreeStatsString(g_hAllocator, statsStr);
2666
2667 // Destroy the buffers in reverse order.
2668 while(!bufInfo.empty())
2669 {
2670 const BufferInfo& currBufInfo = bufInfo.back();
2671 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2672 bufInfo.pop_back();
2673 }
2674 }
2675
2676 vmaDestroyPool(g_hAllocator, pool);
2677}
2678
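// Measures allocation and deallocation time for one combination of pool algorithm
// (Default/Buddy/Linear), empty versus pre-fragmented pool, allocation strategy, and
// free order. Results are printed and, when a file is given, appended as a CSV row.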
Adam Sawicki80927152018-09-07 17:27:23 +02002679static void BenchmarkAlgorithmsCase(FILE* file,
2680 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002681 bool empty,
2682 VmaAllocationCreateFlags allocStrategy,
2683 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002684{
2685 RandomNumberGenerator rand{16223};
2686
2687 const VkDeviceSize bufSizeMin = 32;
2688 const VkDeviceSize bufSizeMax = 1024;
2689 const size_t maxBufCapacity = 10000;
2690 const uint32_t iterationCount = 10;
2691
2692 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2693 sampleBufCreateInfo.size = bufSizeMax;
2694 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2695
2696 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2697 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2698
2699 VmaPoolCreateInfo poolCreateInfo = {};
2700 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002701 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002702
2703 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002704 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002705 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2706
2707 VmaPool pool = nullptr;
2708 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002709 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002710
2711 // Buffer created just to get memory requirements. Never bound to any memory.
2712 VkBuffer dummyBuffer = VK_NULL_HANDLE;
2713 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002714 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002715
2716 VkMemoryRequirements memReq = {};
2717 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2718
2719 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2720
2721 VmaAllocationCreateInfo allocCreateInfo = {};
2722 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002723 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002724
2725 VmaAllocation alloc;
2726 std::vector<VmaAllocation> baseAllocations;
2727
2728 if(!empty)
2729 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002730 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002731 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002732 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002733 {
2734 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2735 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002736 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002737 baseAllocations.push_back(alloc);
2738 totalSize += memReq.size;
2739 }
2740
2741 // Delete half of them, choose randomly.
2742 size_t allocsToDelete = baseAllocations.size() / 2;
2743 for(size_t i = 0; i < allocsToDelete; ++i)
2744 {
2745 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2746 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2747 baseAllocations.erase(baseAllocations.begin() + index);
2748 }
2749 }
2750
2751 // BENCHMARK
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002752 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002753 std::vector<VmaAllocation> testAllocations;
2754 testAllocations.reserve(allocCount);
2755 duration allocTotalDuration = duration::zero();
2756 duration freeTotalDuration = duration::zero();
2757 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2758 {
2759 // Allocations
2760 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2761 for(size_t i = 0; i < allocCount; ++i)
2762 {
2763 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2764 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002765 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002766 testAllocations.push_back(alloc);
2767 }
2768 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2769
2770 // Deallocations
2771 switch(freeOrder)
2772 {
2773 case FREE_ORDER::FORWARD:
2774 // Leave testAllocations unchanged.
2775 break;
2776 case FREE_ORDER::BACKWARD:
2777 std::reverse(testAllocations.begin(), testAllocations.end());
2778 break;
2779 case FREE_ORDER::RANDOM:
2780 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2781 break;
2782 default: assert(0);
2783 }
2784
2785 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2786 for(size_t i = 0; i < allocCount; ++i)
2787 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2788 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2789
2790 testAllocations.clear();
2791 }
2792
2793 // Delete baseAllocations
2794 while(!baseAllocations.empty())
2795 {
2796 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2797 baseAllocations.pop_back();
2798 }
2799
2800 vmaDestroyPool(g_hAllocator, pool);
2801
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002802 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2803 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2804
Adam Sawicki80927152018-09-07 17:27:23 +02002805 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2806 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002807 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002808 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002809 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002810 allocTotalSeconds,
2811 freeTotalSeconds);
2812
2813 if(file)
2814 {
2815 std::string currTime;
2816 CurrentTimeToStr(currTime);
2817
Adam Sawicki80927152018-09-07 17:27:23 +02002818 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002819 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002820 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002821 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002822 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002823 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2824 allocTotalSeconds,
2825 freeTotalSeconds);
2826 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002827}
2828
Adam Sawicki80927152018-09-07 17:27:23 +02002829static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002830{
Adam Sawicki80927152018-09-07 17:27:23 +02002831 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002832
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002833 if(file)
2834 {
2835 fprintf(file,
2836 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002837 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002838 "Allocation time (s),Deallocation time (s)\n");
2839 }
2840
Adam Sawicki0a607132018-08-24 11:18:41 +02002841 uint32_t freeOrderCount = 1;
2842 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2843 freeOrderCount = 3;
2844 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2845 freeOrderCount = 2;
2846
2847 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002848 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02002849
2850 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
2851 {
2852 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
2853 switch(freeOrderIndex)
2854 {
2855 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
2856 case 1: freeOrder = FREE_ORDER::FORWARD; break;
2857 case 2: freeOrder = FREE_ORDER::RANDOM; break;
2858 default: assert(0);
2859 }
2860
2861 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
2862 {
Adam Sawicki80927152018-09-07 17:27:23 +02002863 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02002864 {
Adam Sawicki80927152018-09-07 17:27:23 +02002865 uint32_t algorithm = 0;
2866 switch(algorithmIndex)
2867 {
2868 case 0:
2869 break;
2870 case 1:
2871 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
2872 break;
2873 case 2:
2874 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2875 break;
2876 default:
2877 assert(0);
2878 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002879
Adam Sawicki80927152018-09-07 17:27:23 +02002880 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002881 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
2882 {
2883                    VmaAllocationCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02002884 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002885 {
2886 switch(allocStrategyIndex)
2887 {
2888 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
2889 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
2890 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
2891 default: assert(0);
2892 }
2893 }
2894
Adam Sawicki80927152018-09-07 17:27:23 +02002895 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002896 file,
Adam Sawicki80927152018-09-07 17:27:23 +02002897 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002898 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002899 strategy,
2900 freeOrder); // freeOrder
2901 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002902 }
2903 }
2904 }
2905}
2906
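// Churn test for a fixed-size custom pool (up to 4 blocks, 25 one-megabyte buffers each)
// using CAN_BECOME_LOST / CAN_MAKE_OTHER_LOST: fills the pool, frees and reallocates
// random items, and verifies that buffers are not lost prematurely with frameInUseCount = 0.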
Adam Sawickib8333fb2018-03-13 16:15:53 +01002907static void TestPool_SameSize()
2908{
2909 const VkDeviceSize BUF_SIZE = 1024 * 1024;
2910 const size_t BUF_COUNT = 100;
2911 VkResult res;
2912
2913 RandomNumberGenerator rand{123};
2914
2915 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2916 bufferInfo.size = BUF_SIZE;
2917 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
2918
2919 uint32_t memoryTypeBits = UINT32_MAX;
2920 {
2921 VkBuffer dummyBuffer;
2922 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002923 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002924
2925 VkMemoryRequirements memReq;
2926 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2927 memoryTypeBits = memReq.memoryTypeBits;
2928
2929 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2930 }
2931
2932 VmaAllocationCreateInfo poolAllocInfo = {};
2933 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2934 uint32_t memTypeIndex;
2935 res = vmaFindMemoryTypeIndex(
2936 g_hAllocator,
2937 memoryTypeBits,
2938 &poolAllocInfo,
2939 &memTypeIndex);
2940
2941 VmaPoolCreateInfo poolCreateInfo = {};
2942 poolCreateInfo.memoryTypeIndex = memTypeIndex;
2943 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
2944 poolCreateInfo.minBlockCount = 1;
2945 poolCreateInfo.maxBlockCount = 4;
2946 poolCreateInfo.frameInUseCount = 0;
2947
2948 VmaPool pool;
2949 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002950 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002951
2952 vmaSetCurrentFrameIndex(g_hAllocator, 1);
2953
2954 VmaAllocationCreateInfo allocInfo = {};
2955 allocInfo.pool = pool;
2956 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
2957 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2958
2959 struct BufItem
2960 {
2961 VkBuffer Buf;
2962 VmaAllocation Alloc;
2963 };
2964 std::vector<BufItem> items;
2965
2966 // Fill entire pool.
2967 for(size_t i = 0; i < BUF_COUNT; ++i)
2968 {
2969 BufItem item;
2970 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002971 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002972 items.push_back(item);
2973 }
2974
2975 // Make sure that another allocation would fail.
2976 {
2977 BufItem item;
2978 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002979 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002980 }
2981
2982 // Validate that no buffer is lost. Also check that they are not mapped.
2983 for(size_t i = 0; i < items.size(); ++i)
2984 {
2985 VmaAllocationInfo allocInfo;
2986 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002987 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
2988 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002989 }
2990
2991 // Free some percent of random items.
2992 {
2993 const size_t PERCENT_TO_FREE = 10;
2994 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
2995 for(size_t i = 0; i < itemsToFree; ++i)
2996 {
2997 size_t index = (size_t)rand.Generate() % items.size();
2998 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
2999 items.erase(items.begin() + index);
3000 }
3001 }
3002
3003 // Randomly allocate and free items.
3004 {
3005 const size_t OPERATION_COUNT = BUF_COUNT;
3006 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3007 {
3008 bool allocate = rand.Generate() % 2 != 0;
3009 if(allocate)
3010 {
3011 if(items.size() < BUF_COUNT)
3012 {
3013 BufItem item;
3014 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003015 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003016 items.push_back(item);
3017 }
3018 }
3019 else // Free
3020 {
3021 if(!items.empty())
3022 {
3023 size_t index = (size_t)rand.Generate() % items.size();
3024 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3025 items.erase(items.begin() + index);
3026 }
3027 }
3028 }
3029 }
3030
3031 // Allocate up to maximum.
3032 while(items.size() < BUF_COUNT)
3033 {
3034 BufItem item;
3035 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003036 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003037 items.push_back(item);
3038 }
3039
3040 // Validate that no buffer is lost.
3041 for(size_t i = 0; i < items.size(); ++i)
3042 {
3043 VmaAllocationInfo allocInfo;
3044 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003045 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003046 }
3047
3048 // Next frame.
3049 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3050
3051 // Allocate another BUF_COUNT buffers.
3052 for(size_t i = 0; i < BUF_COUNT; ++i)
3053 {
3054 BufItem item;
3055 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003056 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003057 items.push_back(item);
3058 }
3059
3060    // Make sure the first BUF_COUNT allocations are now lost, then delete them.
3061 for(size_t i = 0; i < BUF_COUNT; ++i)
3062 {
3063 VmaAllocationInfo allocInfo;
3064 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003065 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003066 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3067 }
3068 items.erase(items.begin(), items.begin() + BUF_COUNT);
3069
3070 // Validate that no buffer is lost.
3071 for(size_t i = 0; i < items.size(); ++i)
3072 {
3073 VmaAllocationInfo allocInfo;
3074 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003075 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003076 }
3077
3078 // Free one item.
3079 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3080 items.pop_back();
3081
3082 // Validate statistics.
3083 {
3084 VmaPoolStats poolStats = {};
3085 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003086 TEST(poolStats.allocationCount == items.size());
3087        TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
3088 TEST(poolStats.unusedRangeCount == 1);
3089 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3090 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003091 }
3092
3093 // Free all remaining items.
3094 for(size_t i = items.size(); i--; )
3095 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3096 items.clear();
3097
3098 // Allocate maximum items again.
3099 for(size_t i = 0; i < BUF_COUNT; ++i)
3100 {
3101 BufItem item;
3102 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003103 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003104 items.push_back(item);
3105 }
3106
3107 // Delete every other item.
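    // (Erasing at index i and then incrementing i skips one surviving element per
    // iteration, which is what removes every other buffer.)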
3108 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3109 {
3110 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3111 items.erase(items.begin() + i);
3112 }
3113
3114 // Defragment!
3115 {
3116 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3117 for(size_t i = 0; i < items.size(); ++i)
3118 allocationsToDefragment[i] = items[i].Alloc;
3119
3120 VmaDefragmentationStats defragmentationStats;
3121 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003122 TEST(res == VK_SUCCESS);
3123 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003124 }
3125
3126 // Free all remaining items.
3127 for(size_t i = items.size(); i--; )
3128 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3129 items.clear();
3130
3131 ////////////////////////////////////////////////////////////////////////////////
3132 // Test for vmaMakePoolAllocationsLost
3133
3134 // Allocate 4 buffers on frame 10.
3135 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3136 for(size_t i = 0; i < 4; ++i)
3137 {
3138 BufItem item;
3139 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003140 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003141 items.push_back(item);
3142 }
3143
3144 // Touch first 2 of them on frame 11.
3145 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3146 for(size_t i = 0; i < 2; ++i)
3147 {
3148 VmaAllocationInfo allocInfo;
3149 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3150 }
3151
3152    // vmaMakePoolAllocationsLost. Only the 2 allocations not touched in frame 11 should become lost.
3153 size_t lostCount = 0xDEADC0DE;
3154 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003155 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003156
3157 // Make another call. Now 0 should be lost.
3158 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003159 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003160
3161 // Make another call, with null count. Should not crash.
3162 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3163
3164 // END: Free all remaining items.
3165 for(size_t i = items.size(); i--; )
3166 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3167
3168 items.clear();
3169
Adam Sawickid2924172018-06-11 12:48:46 +02003170 ////////////////////////////////////////////////////////////////////////////////
3171 // Test for allocation too large for pool
3172
3173 {
3174 VmaAllocationCreateInfo allocCreateInfo = {};
3175 allocCreateInfo.pool = pool;
3176
3177 VkMemoryRequirements memReq;
3178 memReq.memoryTypeBits = UINT32_MAX;
3179 memReq.alignment = 1;
3180 memReq.size = poolCreateInfo.blockSize + 4;
3181
3182 VmaAllocation alloc = nullptr;
3183 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003184 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003185 }
3186
Adam Sawickib8333fb2018-03-13 16:15:53 +01003187 vmaDestroyPool(g_hAllocator, pool);
3188}
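
// Illustrative sketch only (not called by the test suite): the minimal sequence needed
// to observe a "lost" allocation, distilled from TestPool_SameSize above. It assumes a
// pool created with frameInUseCount = 0 and a bufferInfo compatible with that pool's
// memory type; the function name and parameters are hypothetical.
static void ExampleLostAllocation(VmaPool pool, const VkBufferCreateInfo& bufferInfo)
{
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;

    // Allocate in frame 1.
    vmaSetCurrentFrameIndex(g_hAllocator, 1);
    VkBuffer buf = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    TEST(vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &buf, &alloc, nullptr) == VK_SUCCESS);

    // Advance past frameInUseCount frames and force eligible allocations to become lost.
    vmaSetCurrentFrameIndex(g_hAllocator, 2);
    size_t lostCount = 0;
    vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
    TEST(lostCount == 1);

    // A lost allocation reports null device memory, but it still has to be destroyed.
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
    TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
    vmaDestroyBuffer(g_hAllocator, buf, alloc);
}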
3189
Adam Sawickib0c36362018-11-13 16:17:38 +01003190static void TestResize()
3191{
3192 wprintf(L"Testing vmaResizeAllocation...\n");
3193
3194 const VkDeviceSize KILOBYTE = 1024ull;
3195 const VkDeviceSize MEGABYTE = KILOBYTE * 1024;
3196
3197 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3198 bufCreateInfo.size = 2 * MEGABYTE;
3199 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3200
3201 VmaAllocationCreateInfo allocCreateInfo = {};
3202 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3203
3204 uint32_t memTypeIndex = UINT32_MAX;
3205 TEST( vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex) == VK_SUCCESS );
3206
3207 VmaPoolCreateInfo poolCreateInfo = {};
3208 poolCreateInfo.flags = VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT;
3209 poolCreateInfo.blockSize = 8 * MEGABYTE;
3210 poolCreateInfo.minBlockCount = 1;
3211 poolCreateInfo.maxBlockCount = 1;
3212 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3213
3214 VmaPool pool;
3215 TEST( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS );
3216
3217 allocCreateInfo.pool = pool;
3218
3219 // Fill 8 MB pool with 4 * 2 MB allocations.
3220 VmaAllocation allocs[4] = {};
3221
3222 VkMemoryRequirements memReq = {};
3223 memReq.memoryTypeBits = UINT32_MAX;
3224 memReq.alignment = 4;
3225 memReq.size = bufCreateInfo.size;
3226
3227 VmaAllocationInfo allocInfo = {};
3228
3229 for(uint32_t i = 0; i < 4; ++i)
3230 {
3231 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &allocs[i], nullptr) == VK_SUCCESS );
3232 }
3233
3234 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 2MB
3235
3236 // Case: Resize to the same size always succeeds.
3237 {
3238 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS);
3239        vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3240 TEST(allocInfo.size == 2ull * 1024 * 1024);
3241 }
3242
3243 // Case: Shrink allocation at the end.
3244 {
3245 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3246 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3247 TEST(allocInfo.size == 1ull * 1024 * 1024);
3248 }
3249
3250 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3251
3252 // Case: Shrink allocation before free space.
3253 {
3254 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 512 * KILOBYTE) == VK_SUCCESS );
3255 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3256 TEST(allocInfo.size == 512 * KILOBYTE);
3257 }
3258
3259 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3260
3261 // Case: Shrink allocation before next allocation.
3262 {
3263 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 1 * MEGABYTE) == VK_SUCCESS );
3264 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3265 TEST(allocInfo.size == 1 * MEGABYTE);
3266 }
3267
3268 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3269
3270 // Case: Grow allocation while there is even more space available.
3271 {
3272 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3273 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3274 TEST(allocInfo.size == 1 * MEGABYTE);
3275 }
3276
3277 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3278
3279 // Case: Grow allocation while there is exact amount of free space available.
3280 {
3281 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS );
3282 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3283 TEST(allocInfo.size == 2 * MEGABYTE);
3284 }
3285
3286 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3287
3288 // Case: Fail to grow when there is not enough free space due to next allocation.
3289 {
3290 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3291 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3292 TEST(allocInfo.size == 2 * MEGABYTE);
3293 }
3294
3295 // Case: Fail to grow when there is not enough free space due to end of memory block.
3296 {
3297 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3298 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3299 TEST(allocInfo.size == 1 * MEGABYTE);
3300 }
3301
3302 for(uint32_t i = 4; i--; )
3303 {
3304 vmaFreeMemory(g_hAllocator, allocs[i]);
3305 }
3306
3307 vmaDestroyPool(g_hAllocator, pool);
3308
3309 // Test dedicated allocation
3310 {
3311 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
3312 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3313 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3314
3315 VmaAllocation dedicatedAlloc = VK_NULL_HANDLE;
3316 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, &dedicatedAlloc, nullptr) == VK_SUCCESS );
3317
3318 // Case: Resize to the same size always succeeds.
3319 {
3320 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 2 * MEGABYTE) == VK_SUCCESS);
3321 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3322 TEST(allocInfo.size == 2ull * 1024 * 1024);
3323 }
3324
3325 // Case: Shrinking fails.
3326 {
3327 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 1 * MEGABYTE) < VK_SUCCESS);
3328 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3329 TEST(allocInfo.size == 2ull * 1024 * 1024);
3330 }
3331
3332 // Case: Growing fails.
3333 {
3334 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 3 * MEGABYTE) < VK_SUCCESS);
3335 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3336 TEST(allocInfo.size == 2ull * 1024 * 1024);
3337 }
3338
3339 vmaFreeMemory(g_hAllocator, dedicatedAlloc);
3340 }
3341}
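
// What TestResize demonstrates: for block-based allocations, vmaResizeAllocation can
// always shrink, and can grow only into free space that immediately follows the
// allocation inside the same memory block; growing past the next allocation or past the
// end of the block fails with VK_ERROR_OUT_OF_POOL_MEMORY. A dedicated allocation can
// only be "resized" to its current size.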
3342
Adam Sawickie44c6262018-06-15 14:30:39 +02003343static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3344{
3345 const uint8_t* pBytes = (const uint8_t*)pMemory;
3346 for(size_t i = 0; i < size; ++i)
3347 {
3348 if(pBytes[i] != pattern)
3349 {
3350 return false;
3351 }
3352 }
3353 return true;
3354}
3355
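// Note: this test presumes the library was built with VMA_DEBUG_INITIALIZE_ALLOCATIONS
// enabled, so that newly created allocations are filled with 0xDC and freed ones with
// 0xEF - the patterns validated below.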
3356static void TestAllocationsInitialization()
3357{
3358 VkResult res;
3359
3360 const size_t BUF_SIZE = 1024;
3361
3362 // Create pool.
3363
3364 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3365 bufInfo.size = BUF_SIZE;
3366 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3367
3368 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3369 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3370
3371 VmaPoolCreateInfo poolCreateInfo = {};
3372 poolCreateInfo.blockSize = BUF_SIZE * 10;
3373 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3374 poolCreateInfo.maxBlockCount = 1;
3375 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003376 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003377
3378 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3379 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003380 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003381
3382    // Create one persistently mapped buffer to keep this block's memory mapped,
3383    // so that the pointer to the mapped data remains (more or less...) valid even
3384    // after other allocations are destroyed.
3385
3386 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3387 VkBuffer firstBuf;
3388 VmaAllocation firstAlloc;
3389 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003390 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003391
3392 // Test buffers.
3393
3394 for(uint32_t i = 0; i < 2; ++i)
3395 {
3396 const bool persistentlyMapped = i == 0;
3397 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3398 VkBuffer buf;
3399 VmaAllocation alloc;
3400 VmaAllocationInfo allocInfo;
3401 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003402 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003403
3404 void* pMappedData;
3405 if(!persistentlyMapped)
3406 {
3407 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003408 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003409 }
3410 else
3411 {
3412 pMappedData = allocInfo.pMappedData;
3413 }
3414
3415 // Validate initialized content
3416 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003417 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003418
3419 if(!persistentlyMapped)
3420 {
3421 vmaUnmapMemory(g_hAllocator, alloc);
3422 }
3423
3424 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3425
3426 // Validate freed content
3427 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003428 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003429 }
3430
3431 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3432 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3433}
3434
Adam Sawickib8333fb2018-03-13 16:15:53 +01003435static void TestPool_Benchmark(
3436 PoolTestResult& outResult,
3437 const PoolTestConfig& config)
3438{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003439 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003440
3441 RandomNumberGenerator mainRand{config.RandSeed};
3442
3443 uint32_t allocationSizeProbabilitySum = std::accumulate(
3444 config.AllocationSizes.begin(),
3445 config.AllocationSizes.end(),
3446 0u,
3447 [](uint32_t sum, const AllocationSize& allocSize) {
3448 return sum + allocSize.Probability;
3449 });
3450
3451 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3452 bufferInfo.size = 256; // Whatever.
3453 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3454
3455 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3456 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3457 imageInfo.extent.width = 256; // Whatever.
3458 imageInfo.extent.height = 256; // Whatever.
3459 imageInfo.extent.depth = 1;
3460 imageInfo.mipLevels = 1;
3461 imageInfo.arrayLayers = 1;
3462 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3463 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3464 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3465 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3466 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3467
3468 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3469 {
3470 VkBuffer dummyBuffer;
3471 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003472 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003473
3474 VkMemoryRequirements memReq;
3475 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3476 bufferMemoryTypeBits = memReq.memoryTypeBits;
3477
3478 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3479 }
3480
3481 uint32_t imageMemoryTypeBits = UINT32_MAX;
3482 {
3483 VkImage dummyImage;
3484 VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003485 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003486
3487 VkMemoryRequirements memReq;
3488 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3489 imageMemoryTypeBits = memReq.memoryTypeBits;
3490
3491 vkDestroyImage(g_hDevice, dummyImage, nullptr);
3492 }
3493
3494 uint32_t memoryTypeBits = 0;
3495 if(config.UsesBuffers() && config.UsesImages())
3496 {
3497 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3498 if(memoryTypeBits == 0)
3499 {
3500 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3501 return;
3502 }
3503 }
3504 else if(config.UsesBuffers())
3505 memoryTypeBits = bufferMemoryTypeBits;
3506 else if(config.UsesImages())
3507 memoryTypeBits = imageMemoryTypeBits;
3508 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003509 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003510
3511 VmaPoolCreateInfo poolCreateInfo = {};
3512 poolCreateInfo.memoryTypeIndex = 0;
3513 poolCreateInfo.minBlockCount = 1;
3514 poolCreateInfo.maxBlockCount = 1;
3515 poolCreateInfo.blockSize = config.PoolSize;
3516 poolCreateInfo.frameInUseCount = 1;
3517
3518 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3519 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3520 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3521
3522 VmaPool pool;
3523 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003524 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003525
3526 // Start time measurement - after creating pool and initializing data structures.
3527 time_point timeBeg = std::chrono::high_resolution_clock::now();
3528
3529 ////////////////////////////////////////////////////////////////////////////////
3530 // ThreadProc
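    // Each worker simulates frames in lockstep with the main thread: it blocks on its
    // frameStartEvent, performs one frame's worth of allocation/deallocation work, then
    // signals its frameEndEvent (see the frame loop after this lambda).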
3531 auto ThreadProc = [&](
3532 PoolTestThreadResult* outThreadResult,
3533 uint32_t randSeed,
3534 HANDLE frameStartEvent,
3535 HANDLE frameEndEvent) -> void
3536 {
3537 RandomNumberGenerator threadRand{randSeed};
3538
3539 outThreadResult->AllocationTimeMin = duration::max();
3540 outThreadResult->AllocationTimeSum = duration::zero();
3541 outThreadResult->AllocationTimeMax = duration::min();
3542 outThreadResult->DeallocationTimeMin = duration::max();
3543 outThreadResult->DeallocationTimeSum = duration::zero();
3544 outThreadResult->DeallocationTimeMax = duration::min();
3545 outThreadResult->AllocationCount = 0;
3546 outThreadResult->DeallocationCount = 0;
3547 outThreadResult->LostAllocationCount = 0;
3548 outThreadResult->LostAllocationTotalSize = 0;
3549 outThreadResult->FailedAllocationCount = 0;
3550 outThreadResult->FailedAllocationTotalSize = 0;
3551
3552 struct Item
3553 {
3554 VkDeviceSize BufferSize;
3555 VkExtent2D ImageSize;
3556 VkBuffer Buf;
3557 VkImage Image;
3558 VmaAllocation Alloc;
3559
3560 VkDeviceSize CalcSizeBytes() const
3561 {
3562 return BufferSize +
3563 ImageSize.width * ImageSize.height * 4;
3564 }
3565 };
3566 std::vector<Item> unusedItems, usedItems;
3567
3568 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3569
3570 // Create all items - all unused, not yet allocated.
3571 for(size_t i = 0; i < threadTotalItemCount; ++i)
3572 {
3573 Item item = {};
3574
3575 uint32_t allocSizeIndex = 0;
3576 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3577 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3578 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3579
3580 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3581 if(allocSize.BufferSizeMax > 0)
3582 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003583 TEST(allocSize.BufferSizeMin > 0);
3584 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003585 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3586 item.BufferSize = allocSize.BufferSizeMin;
3587 else
3588 {
3589 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3590 item.BufferSize = item.BufferSize / 16 * 16;
3591 }
3592 }
3593 else
3594 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003595 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003596 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3597 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3598 else
3599 {
3600 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3601 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3602 }
3603 }
3604
3605 unusedItems.push_back(item);
3606 }
3607
3608 auto Allocate = [&](Item& item) -> VkResult
3609 {
3610 VmaAllocationCreateInfo allocCreateInfo = {};
3611 allocCreateInfo.pool = pool;
3612 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3613 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3614
3615 if(item.BufferSize)
3616 {
3617 bufferInfo.size = item.BufferSize;
3618 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3619 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3620 }
3621 else
3622 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003623 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003624
3625 imageInfo.extent.width = item.ImageSize.width;
3626 imageInfo.extent.height = item.ImageSize.height;
3627 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3628 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3629 }
3630 };
3631
3632 ////////////////////////////////////////////////////////////////////////////////
3633 // Frames
3634 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3635 {
3636 WaitForSingleObject(frameStartEvent, INFINITE);
3637
3638 // Always make some percent of used bufs unused, to choose different used ones.
3639 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3640 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3641 {
3642 size_t index = threadRand.Generate() % usedItems.size();
3643 unusedItems.push_back(usedItems[index]);
3644 usedItems.erase(usedItems.begin() + index);
3645 }
3646
3647 // Determine which bufs we want to use in this frame.
3648 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3649 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003650 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003651 // Move some used to unused.
3652 while(usedBufCount < usedItems.size())
3653 {
3654 size_t index = threadRand.Generate() % usedItems.size();
3655 unusedItems.push_back(usedItems[index]);
3656 usedItems.erase(usedItems.begin() + index);
3657 }
3658 // Move some unused to used.
3659 while(usedBufCount > usedItems.size())
3660 {
3661 size_t index = threadRand.Generate() % unusedItems.size();
3662 usedItems.push_back(unusedItems[index]);
3663 unusedItems.erase(unusedItems.begin() + index);
3664 }
3665
3666 uint32_t touchExistingCount = 0;
3667 uint32_t touchLostCount = 0;
3668 uint32_t createSucceededCount = 0;
3669 uint32_t createFailedCount = 0;
3670
3671 // Touch all used bufs. If not created or lost, allocate.
3672 for(size_t i = 0; i < usedItems.size(); ++i)
3673 {
3674 Item& item = usedItems[i];
3675 // Not yet created.
3676 if(item.Alloc == VK_NULL_HANDLE)
3677 {
3678 res = Allocate(item);
3679 ++outThreadResult->AllocationCount;
3680 if(res != VK_SUCCESS)
3681 {
3682 item.Alloc = VK_NULL_HANDLE;
3683 item.Buf = VK_NULL_HANDLE;
3684 ++outThreadResult->FailedAllocationCount;
3685 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3686 ++createFailedCount;
3687 }
3688 else
3689 ++createSucceededCount;
3690 }
3691 else
3692 {
3693 // Touch.
3694 VmaAllocationInfo allocInfo;
3695 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3696 // Lost.
3697 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3698 {
3699 ++touchLostCount;
3700
3701 // Destroy.
3702 {
3703 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3704 if(item.Buf)
3705 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3706 else
3707 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3708 ++outThreadResult->DeallocationCount;
3709 }
3710 item.Alloc = VK_NULL_HANDLE;
3711 item.Buf = VK_NULL_HANDLE;
3712
3713 ++outThreadResult->LostAllocationCount;
3714 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3715
3716 // Recreate.
3717 res = Allocate(item);
3718 ++outThreadResult->AllocationCount;
3719 // Creation failed.
3720 if(res != VK_SUCCESS)
3721 {
3722 ++outThreadResult->FailedAllocationCount;
3723 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3724 ++createFailedCount;
3725 }
3726 else
3727 ++createSucceededCount;
3728 }
3729 else
3730 ++touchExistingCount;
3731 }
3732 }
3733
3734 /*
3735 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3736 randSeed, frameIndex,
3737 touchExistingCount, touchLostCount,
3738 createSucceededCount, createFailedCount);
3739 */
3740
3741 SetEvent(frameEndEvent);
3742 }
3743
3744 // Free all remaining items.
3745 for(size_t i = usedItems.size(); i--; )
3746 {
3747 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3748 if(usedItems[i].Buf)
3749 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3750 else
3751 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3752 ++outThreadResult->DeallocationCount;
3753 }
3754 for(size_t i = unusedItems.size(); i--; )
3755 {
3756        PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3757 if(unusedItems[i].Buf)
3758 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3759 else
3760 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3761 ++outThreadResult->DeallocationCount;
3762 }
3763 };
3764
3765 // Launch threads.
3766 uint32_t threadRandSeed = mainRand.Generate();
3767 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3768 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3769 std::vector<std::thread> bkgThreads;
3770 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3771 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3772 {
3773 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3774 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3775 bkgThreads.emplace_back(std::bind(
3776 ThreadProc,
3777 &threadResults[threadIndex],
3778 threadRandSeed + threadIndex,
3779 frameStartEvents[threadIndex],
3780 frameEndEvents[threadIndex]));
3781 }
3782
3783 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003784 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003785 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3786 {
3787 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3788 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3789 SetEvent(frameStartEvents[threadIndex]);
3790 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3791 }
3792
3793 // Wait for threads finished
3794 for(size_t i = 0; i < bkgThreads.size(); ++i)
3795 {
3796 bkgThreads[i].join();
3797 CloseHandle(frameEndEvents[i]);
3798 CloseHandle(frameStartEvents[i]);
3799 }
3800 bkgThreads.clear();
3801
3802 // Finish time measurement - before destroying pool.
3803 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3804
3805 vmaDestroyPool(g_hAllocator, pool);
3806
3807 outResult.AllocationTimeMin = duration::max();
3808 outResult.AllocationTimeAvg = duration::zero();
3809 outResult.AllocationTimeMax = duration::min();
3810 outResult.DeallocationTimeMin = duration::max();
3811 outResult.DeallocationTimeAvg = duration::zero();
3812 outResult.DeallocationTimeMax = duration::min();
3813 outResult.LostAllocationCount = 0;
3814 outResult.LostAllocationTotalSize = 0;
3815 outResult.FailedAllocationCount = 0;
3816 outResult.FailedAllocationTotalSize = 0;
3817 size_t allocationCount = 0;
3818 size_t deallocationCount = 0;
3819 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3820 {
3821 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3822 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3823 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3824 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3825 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3826 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3827 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3828 allocationCount += threadResult.AllocationCount;
3829 deallocationCount += threadResult.DeallocationCount;
3830 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3831 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3832 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3833 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3834 }
3835 if(allocationCount)
3836 outResult.AllocationTimeAvg /= allocationCount;
3837 if(deallocationCount)
3838 outResult.DeallocationTimeAvg /= deallocationCount;
3839}
3840
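// Quick sanity examples for the helper below: MemoryRegionsOverlap(p, 16, p + 16, 16)
// is false (adjacent ranges do not overlap), while MemoryRegionsOverlap(p, 32, p + 16, 16)
// is true.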
3841static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3842{
3843 if(ptr1 < ptr2)
3844 return ptr1 + size1 > ptr2;
3845 else if(ptr2 < ptr1)
3846 return ptr2 + size2 > ptr1;
3847 else
3848 return true;
3849}
3850
3851static void TestMapping()
3852{
3853 wprintf(L"Testing mapping...\n");
3854
3855 VkResult res;
3856 uint32_t memTypeIndex = UINT32_MAX;
3857
3858 enum TEST
3859 {
3860 TEST_NORMAL,
3861 TEST_POOL,
3862 TEST_DEDICATED,
3863 TEST_COUNT
3864 };
3865 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
3866 {
3867 VmaPool pool = nullptr;
3868 if(testIndex == TEST_POOL)
3869 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003870 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003871 VmaPoolCreateInfo poolInfo = {};
3872 poolInfo.memoryTypeIndex = memTypeIndex;
3873 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003874 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003875 }
3876
3877 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3878 bufInfo.size = 0x10000;
3879 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3880
3881 VmaAllocationCreateInfo allocCreateInfo = {};
3882 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3883 allocCreateInfo.pool = pool;
3884 if(testIndex == TEST_DEDICATED)
3885 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3886
3887 VmaAllocationInfo allocInfo;
3888
3889 // Mapped manually
3890
3891 // Create 2 buffers.
3892 BufferInfo bufferInfos[3];
3893 for(size_t i = 0; i < 2; ++i)
3894 {
3895 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3896 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003897 TEST(res == VK_SUCCESS);
3898 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003899 memTypeIndex = allocInfo.memoryType;
3900 }
3901
3902 // Map buffer 0.
3903 char* data00 = nullptr;
3904 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003905 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003906 data00[0xFFFF] = data00[0];
3907
3908 // Map buffer 0 second time.
3909 char* data01 = nullptr;
3910 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003911 TEST(res == VK_SUCCESS && data01 == data00);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003912
3913 // Map buffer 1.
3914 char* data1 = nullptr;
3915 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003916 TEST(res == VK_SUCCESS && data1 != nullptr);
3917 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01003918 data1[0xFFFF] = data1[0];
3919
3920 // Unmap buffer 0 two times.
3921 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
3922 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
3923 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003924 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003925
3926 // Unmap buffer 1.
3927 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
3928 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003929 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003930
3931 // Create 3rd buffer - persistently mapped.
3932 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
3933 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3934 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003935 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003936
3937 // Map buffer 2.
3938 char* data2 = nullptr;
3939 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003940 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003941 data2[0xFFFF] = data2[0];
3942
3943 // Unmap buffer 2.
3944 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
3945 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003946 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003947
3948 // Destroy all buffers.
3949 for(size_t i = 3; i--; )
3950 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
3951
3952 vmaDestroyPool(g_hAllocator, pool);
3953 }
3954}
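
// Illustrative sketch only (not called by the test suite): vmaMapMemory is reference-
// counted, as TestMapping above demonstrates - a second map returns the same pointer and
// each map must be balanced by an unmap. Assumes 'alloc' lives in host-visible memory;
// the function name and parameter are hypothetical.
static void ExampleNestedMapping(VmaAllocation alloc)
{
    char* p1 = nullptr;
    char* p2 = nullptr;
    TEST(vmaMapMemory(g_hAllocator, alloc, (void**)&p1) == VK_SUCCESS && p1 != nullptr);
    TEST(vmaMapMemory(g_hAllocator, alloc, (void**)&p2) == VK_SUCCESS && p2 == p1);
    vmaUnmapMemory(g_hAllocator, alloc);
    vmaUnmapMemory(g_hAllocator, alloc); // Mapping count is back to zero here.
}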
3955
3956static void TestMappingMultithreaded()
3957{
3958 wprintf(L"Testing mapping multithreaded...\n");
3959
3960 static const uint32_t threadCount = 16;
3961 static const uint32_t bufferCount = 1024;
3962 static const uint32_t threadBufferCount = bufferCount / threadCount;
3963
3964 VkResult res;
3965 volatile uint32_t memTypeIndex = UINT32_MAX;
3966
3967 enum TEST
3968 {
3969 TEST_NORMAL,
3970 TEST_POOL,
3971 TEST_DEDICATED,
3972 TEST_COUNT
3973 };
3974 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
3975 {
3976 VmaPool pool = nullptr;
3977 if(testIndex == TEST_POOL)
3978 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003979 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003980 VmaPoolCreateInfo poolInfo = {};
3981 poolInfo.memoryTypeIndex = memTypeIndex;
3982 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003983 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003984 }
3985
3986 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3987 bufCreateInfo.size = 0x10000;
3988 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3989
3990 VmaAllocationCreateInfo allocCreateInfo = {};
3991 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3992 allocCreateInfo.pool = pool;
3993 if(testIndex == TEST_DEDICATED)
3994 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3995
3996 std::thread threads[threadCount];
3997 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
3998 {
3999 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4000 // ======== THREAD FUNCTION ========
4001
4002 RandomNumberGenerator rand{threadIndex};
4003
4004 enum class MODE
4005 {
4006 // Don't map this buffer at all.
4007 DONT_MAP,
4008 // Map and quickly unmap.
4009 MAP_FOR_MOMENT,
4010 // Map and unmap before destruction.
4011 MAP_FOR_LONGER,
4012 // Map two times. Quickly unmap, second unmap before destruction.
4013 MAP_TWO_TIMES,
4014 // Create this buffer as persistently mapped.
4015 PERSISTENTLY_MAPPED,
4016 COUNT
4017 };
4018 std::vector<BufferInfo> bufInfos{threadBufferCount};
4019 std::vector<MODE> bufModes{threadBufferCount};
4020
4021 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4022 {
4023 BufferInfo& bufInfo = bufInfos[bufferIndex];
4024 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4025 bufModes[bufferIndex] = mode;
4026
4027 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4028 if(mode == MODE::PERSISTENTLY_MAPPED)
4029 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4030
4031 VmaAllocationInfo allocInfo;
4032 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4033 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004034 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004035
4036 if(memTypeIndex == UINT32_MAX)
4037 memTypeIndex = allocInfo.memoryType;
4038
4039 char* data = nullptr;
4040
4041 if(mode == MODE::PERSISTENTLY_MAPPED)
4042 {
4043 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004044 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004045 }
4046 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4047 mode == MODE::MAP_TWO_TIMES)
4048 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004049 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004050 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004051 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004052
4053 if(mode == MODE::MAP_TWO_TIMES)
4054 {
4055 char* data2 = nullptr;
4056 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004057 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004058 }
4059 }
4060 else if(mode == MODE::DONT_MAP)
4061 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004062 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004063 }
4064 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004065 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004066
4067                // Test that touching the first and last byte of the mapped memory doesn't crash.
4068 if(data)
4069 data[0xFFFF] = data[0];
4070
4071 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4072 {
4073 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4074
4075 VmaAllocationInfo allocInfo;
4076 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4077 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004078 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004079 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004080 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004081 }
4082
4083 switch(rand.Generate() % 3)
4084 {
4085 case 0: Sleep(0); break; // Yield.
4086 case 1: Sleep(10); break; // 10 ms
4087 // default: No sleep.
4088 }
4089
4090                // Test that touching the first and last byte of the mapped memory doesn't crash.
4091 if(data)
4092 data[0xFFFF] = data[0];
4093 }
4094
4095 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4096 {
4097 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4098 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4099 {
4100 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4101
4102 VmaAllocationInfo allocInfo;
4103 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004104 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004105 }
4106
4107 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4108 }
4109 });
4110 }
4111
4112 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4113 threads[threadIndex].join();
4114
4115 vmaDestroyPool(g_hAllocator, pool);
4116 }
4117}
4118
4119static void WriteMainTestResultHeader(FILE* file)
4120{
4121 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02004122 "Code,Time,"
4123 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004124 "Total Time (us),"
4125 "Allocation Time Min (us),"
4126 "Allocation Time Avg (us),"
4127 "Allocation Time Max (us),"
4128 "Deallocation Time Min (us),"
4129 "Deallocation Time Avg (us),"
4130 "Deallocation Time Max (us),"
4131 "Total Memory Allocated (B),"
4132 "Free Range Size Avg (B),"
4133 "Free Range Size Max (B)\n");
4134}
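
// Note: the %I64u conversions in the result writers below are MSVC-specific, which is
// acceptable here because this test suite is Windows-only (it already relies on Win32
// events and Sleep).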
4135
4136static void WriteMainTestResult(
4137 FILE* file,
4138 const char* codeDescription,
4139 const char* testDescription,
4140 const Config& config, const Result& result)
4141{
4142 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4143 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4144 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4145 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4146 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4147 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4148 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4149
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004150 std::string currTime;
4151 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004152
4153 fprintf(file,
4154 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004155 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4156 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004157 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004158 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004159 totalTimeSeconds * 1e6f,
4160 allocationTimeMinSeconds * 1e6f,
4161 allocationTimeAvgSeconds * 1e6f,
4162 allocationTimeMaxSeconds * 1e6f,
4163 deallocationTimeMinSeconds * 1e6f,
4164 deallocationTimeAvgSeconds * 1e6f,
4165 deallocationTimeMaxSeconds * 1e6f,
4166 result.TotalMemoryAllocated,
4167 result.FreeRangeSizeAvg,
4168 result.FreeRangeSizeMax);
4169}
4170
4171static void WritePoolTestResultHeader(FILE* file)
4172{
4173 fprintf(file,
4174 "Code,Test,Time,"
4175 "Config,"
4176 "Total Time (us),"
4177 "Allocation Time Min (us),"
4178 "Allocation Time Avg (us),"
4179 "Allocation Time Max (us),"
4180 "Deallocation Time Min (us),"
4181 "Deallocation Time Avg (us),"
4182 "Deallocation Time Max (us),"
4183 "Lost Allocation Count,"
4184 "Lost Allocation Total Size (B),"
4185 "Failed Allocation Count,"
4186 "Failed Allocation Total Size (B)\n");
4187}
4188
4189static void WritePoolTestResult(
4190 FILE* file,
4191 const char* codeDescription,
4192 const char* testDescription,
4193 const PoolTestConfig& config,
4194 const PoolTestResult& result)
4195{
4196 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4197 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4198 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4199 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4200 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4201 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4202 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4203
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004204 std::string currTime;
4205 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004206
4207 fprintf(file,
4208 "%s,%s,%s,"
4209 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4210 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4211 // General
4212 codeDescription,
4213 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004214 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004215 // Config
4216 config.ThreadCount,
4217 (unsigned long long)config.PoolSize,
4218 config.FrameCount,
4219 config.TotalItemCount,
4220 config.UsedItemCountMin,
4221 config.UsedItemCountMax,
4222 config.ItemsToMakeUnusedPercent,
4223 // Results
4224 totalTimeSeconds * 1e6f,
4225 allocationTimeMinSeconds * 1e6f,
4226 allocationTimeAvgSeconds * 1e6f,
4227 allocationTimeMaxSeconds * 1e6f,
4228 deallocationTimeMinSeconds * 1e6f,
4229 deallocationTimeAvgSeconds * 1e6f,
4230 deallocationTimeMaxSeconds * 1e6f,
4231 result.LostAllocationCount,
4232 result.LostAllocationTotalSize,
4233 result.FailedAllocationCount,
4234 result.FailedAllocationTotalSize);
4235}
4236
4237static void PerformCustomMainTest(FILE* file)
4238{
4239 Config config{};
4240 config.RandSeed = 65735476;
4241 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4242 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4243 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4244 config.FreeOrder = FREE_ORDER::FORWARD;
4245 config.ThreadCount = 16;
4246 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004247 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004248
4249 // Buffers
4250 //config.AllocationSizes.push_back({4, 16, 1024});
4251 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4252
4253 // Images
4254 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4255 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4256
4257 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4258 config.AdditionalOperationCount = 1024;
4259
4260 Result result{};
4261 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004262 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004263    WriteMainTestResult(file, CODE_DESCRIPTION, "CustomTest", config, result);
4264}
4265
4266static void PerformCustomPoolTest(FILE* file)
4267{
4268 PoolTestConfig config;
4269 config.PoolSize = 100 * 1024 * 1024;
4270 config.RandSeed = 2345764;
4271 config.ThreadCount = 1;
4272 config.FrameCount = 200;
4273 config.ItemsToMakeUnusedPercent = 2;
4274
4275 AllocationSize allocSize = {};
4276 allocSize.BufferSizeMin = 1024;
4277 allocSize.BufferSizeMax = 1024 * 1024;
4278 allocSize.Probability = 1;
4279 config.AllocationSizes.push_back(allocSize);
4280
4281 allocSize.BufferSizeMin = 0;
4282 allocSize.BufferSizeMax = 0;
4283 allocSize.ImageSizeMin = 128;
4284 allocSize.ImageSizeMax = 1024;
4285 allocSize.Probability = 1;
4286 config.AllocationSizes.push_back(allocSize);
4287
4288 config.PoolSize = config.CalcAvgResourceSize() * 200;
4289 config.UsedItemCountMax = 160;
4290 config.TotalItemCount = config.UsedItemCountMax * 10;
4291 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4292
4293 g_MemoryAliasingWarningEnabled = false;
4294 PoolTestResult result = {};
4295 TestPool_Benchmark(result, config);
4296 g_MemoryAliasingWarningEnabled = true;
4297
4298 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4299}
4300
Adam Sawickib8333fb2018-03-13 16:15:53 +01004301static void PerformMainTests(FILE* file)
4302{
4303 uint32_t repeatCount = 1;
4304 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4305
4306 Config config{};
4307 config.RandSeed = 65735476;
4308 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4309 config.FreeOrder = FREE_ORDER::FORWARD;
4310
4311 size_t threadCountCount = 1;
4312 switch(ConfigType)
4313 {
4314 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4315 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4316 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4317 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4318 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4319 default: assert(0);
4320 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004321
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004322 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004323
Adam Sawickib8333fb2018-03-13 16:15:53 +01004324 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4325 {
4326 std::string desc1;
4327
4328 switch(threadCountIndex)
4329 {
4330 case 0:
4331 desc1 += "1_thread";
4332 config.ThreadCount = 1;
4333 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4334 break;
4335 case 1:
4336 desc1 += "16_threads+0%_common";
4337 config.ThreadCount = 16;
4338 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4339 break;
4340 case 2:
4341 desc1 += "16_threads+50%_common";
4342 config.ThreadCount = 16;
4343 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4344 break;
4345 case 3:
4346 desc1 += "16_threads+100%_common";
4347 config.ThreadCount = 16;
4348 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4349 break;
4350 case 4:
4351 desc1 += "2_threads+0%_common";
4352 config.ThreadCount = 2;
4353 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4354 break;
4355 case 5:
4356 desc1 += "2_threads+50%_common";
4357 config.ThreadCount = 2;
4358 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4359 break;
4360 case 6:
4361 desc1 += "2_threads+100%_common";
4362 config.ThreadCount = 2;
4363 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4364 break;
4365 default:
4366 assert(0);
4367 }
4368
4369 // 0 = buffers, 1 = images, 2 = buffers and images
4370 size_t buffersVsImagesCount = 2;
4371 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4372 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4373 {
4374 std::string desc2 = desc1;
4375 switch(buffersVsImagesIndex)
4376 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004377 case 0: desc2 += ",Buffers"; break;
4378 case 1: desc2 += ",Images"; break;
4379 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004380 default: assert(0);
4381 }
4382
4383 // 0 = small, 1 = large, 2 = small and large
4384 size_t smallVsLargeCount = 2;
4385 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4386 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4387 {
4388 std::string desc3 = desc2;
4389 switch(smallVsLargeIndex)
4390 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004391 case 0: desc3 += ",Small"; break;
4392 case 1: desc3 += ",Large"; break;
4393 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004394 default: assert(0);
4395 }
4396
4397 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4398 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4399 else
4400 config.MaxBytesToAllocate = 4ull * 1024 * 1024;
4401
4402 // 0 = varying sizes min...max, 1 = set of constant sizes
4403 size_t constantSizesCount = 1;
4404 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4405 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4406 {
4407 std::string desc4 = desc3;
4408 switch(constantSizesIndex)
4409 {
4410 case 0: desc4 += ",Varying_sizes"; break;
4411 case 1: desc4 += ",Constant_sizes"; break;
4412 default: assert(0);
4413 }
4414
4415 config.AllocationSizes.clear();
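                // Initializer order below: { Probability, BufferSizeMin, BufferSizeMax, ImageSizeMin, ImageSizeMax }.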
4416 // Buffers present
4417 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4418 {
4419 // Small
4420 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4421 {
4422 // Varying size
4423 if(constantSizesIndex == 0)
4424 config.AllocationSizes.push_back({4, 16, 1024});
4425 // Constant sizes
4426 else
4427 {
4428 config.AllocationSizes.push_back({1, 16, 16});
4429 config.AllocationSizes.push_back({1, 64, 64});
4430 config.AllocationSizes.push_back({1, 256, 256});
4431 config.AllocationSizes.push_back({1, 1024, 1024});
4432 }
4433 }
4434 // Large
4435 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4436 {
4437 // Varying size
4438 if(constantSizesIndex == 0)
4439 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4440 // Constant sizes
4441 else
4442 {
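                        // Constant sizes: 64 KB, 512 KB, 2 MB, 10 MB.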
4443 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4444 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4445 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4446 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4447 }
4448 }
4449 }
4450 // Images present
4451 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4452 {
4453 // Small
4454 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4455 {
4456 // Varying size
4457 if(constantSizesIndex == 0)
4458 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4459 // Constant sizes
4460 else
4461 {
4462 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4463 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4464 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4465 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4466 }
4467 }
4468 // Large
4469 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4470 {
4471 // Varying size
4472 if(constantSizesIndex == 0)
4473 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4474 // Constant sizes
4475 else
4476 {
4477 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4478 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4479 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4480 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4481 }
4482 }
4483 }
4484
4485 // 0 = allocate 100% up front with no additional operations; 1 = 50%, 2 = 5%, 3 = 95%, each followed by many additional operations
4486 size_t beginBytesToAllocateCount = 1;
4487 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4488 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4489 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4490 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4491 {
4492 std::string desc5 = desc4;
4493
4494 switch(beginBytesToAllocateIndex)
4495 {
4496 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004497 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004498 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4499 config.AdditionalOperationCount = 0;
4500 break;
4501 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004502 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004503 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4504 config.AdditionalOperationCount = 1024;
4505 break;
4506 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004507 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004508 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4509 config.AdditionalOperationCount = 1024;
4510 break;
4511 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004512 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004513 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4514 config.AdditionalOperationCount = 1024;
4515 break;
4516 default:
4517 assert(0);
4518 }
4519
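                // Strategy indices 0..2 map to BestFit, WorstFit and FirstFit respectively.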
Adam Sawicki0667e332018-08-24 17:26:44 +02004520 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004521 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004522 std::string desc6 = desc5;
4523 switch(strategyIndex)
4524 {
4525 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004526 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004527 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4528 break;
4529 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004530 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004531 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4532 break;
4533 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004534 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004535 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4536 break;
4537 default:
4538 assert(0);
4539 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004540
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004541 desc6 += ',';
4542 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02004543
4544 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004545
4546 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4547 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004548 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004549
4550 Result result{};
4551 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004552 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004553 if(file)
4554 {
4555 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4556 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004557 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004558 }
4559 }
4560 }
4561 }
4562 }
4563 }
4564}
4565
4566static void PerformPoolTests(FILE* file)
4567{
4568 const size_t AVG_RESOURCES_PER_POOL = 300;
4569
4570 uint32_t repeatCount = 1;
4571 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4572
4573 PoolTestConfig config{};
4574 config.RandSeed = 2346343;
4575 config.FrameCount = 200;
4576 config.ItemsToMakeUnusedPercent = 2;
4577
4578 size_t threadCountCount = 1;
4579 switch(ConfigType)
4580 {
4581 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4582 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4583 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4584 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4585 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4586 default: assert(0);
4587 }
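    // Each combination of thread count, buffers/images, small/large sizes, varying/constant sizes and subscription level is benchmarked repeatCount times and written to the CSV file.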
4588 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4589 {
4590 std::string desc1;
4591
4592 switch(threadCountIndex)
4593 {
4594 case 0:
4595 desc1 += "1_thread";
4596 config.ThreadCount = 1;
4597 break;
4598 case 1:
4599 desc1 += "16_threads";
4600 config.ThreadCount = 16;
4601 break;
4602 case 2:
4603 desc1 += "2_threads";
4604 config.ThreadCount = 2;
4605 break;
4606 default:
4607 assert(0);
4608 }
4609
4610 // 0 = buffers, 1 = images, 2 = buffers and images
4611 size_t buffersVsImagesCount = 2;
4612 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4613 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4614 {
4615 std::string desc2 = desc1;
4616 switch(buffersVsImagesIndex)
4617 {
4618 case 0: desc2 += " Buffers"; break;
4619 case 1: desc2 += " Images"; break;
4620 case 2: desc2 += " Buffers+Images"; break;
4621 default: assert(0);
4622 }
4623
4624 // 0 = small, 1 = large, 2 = small and large
4625 size_t smallVsLargeCount = 2;
4626 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4627 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4628 {
4629 std::string desc3 = desc2;
4630 switch(smallVsLargeIndex)
4631 {
4632 case 0: desc3 += " Small"; break;
4633 case 1: desc3 += " Large"; break;
4634 case 2: desc3 += " Small+Large"; break;
4635 default: assert(0);
4636 }
4637
4638 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4639 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
4640 else
4641 config.PoolSize = 4ull * 1024 * 1024;
4642
4643 // 0 = varying sizes min...max, 1 = set of constant sizes
4644 size_t constantSizesCount = 1;
4645 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4646 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4647 {
4648 std::string desc4 = desc3;
4649 switch(constantSizesIndex)
4650 {
4651 case 0: desc4 += " Varying_sizes"; break;
4652 case 1: desc4 += " Constant_sizes"; break;
4653 default: assert(0);
4654 }
4655
4656 config.AllocationSizes.clear();
4657 // Buffers present
4658 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4659 {
4660 // Small
4661 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4662 {
4663 // Varying size
4664 if(constantSizesIndex == 0)
4665 config.AllocationSizes.push_back({4, 16, 1024});
4666 // Constant sizes
4667 else
4668 {
4669 config.AllocationSizes.push_back({1, 16, 16});
4670 config.AllocationSizes.push_back({1, 64, 64});
4671 config.AllocationSizes.push_back({1, 256, 256});
4672 config.AllocationSizes.push_back({1, 1024, 1024});
4673 }
4674 }
4675 // Large
4676 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4677 {
4678 // Varying size
4679 if(constantSizesIndex == 0)
4680 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4681 // Constant sizes
4682 else
4683 {
4684 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4685 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4686 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4687 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4688 }
4689 }
4690 }
4691 // Images present
4692 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4693 {
4694 // Small
4695 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4696 {
4697 // Varying size
4698 if(constantSizesIndex == 0)
4699 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4700 // Constant sizes
4701 else
4702 {
4703 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4704 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4705 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4706 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4707 }
4708 }
4709 // Large
4710 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4711 {
4712 // Varying size
4713 if(constantSizesIndex == 0)
4714 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4715 // Constant sizes
4716 else
4717 {
4718 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4719 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4720 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4721 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4722 }
4723 }
4724 }
4725
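                // Size the pool to hold roughly AVG_RESOURCES_PER_POOL average-sized resources; this overrides the PoolSize assigned above.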
4726 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
4727 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
4728
4729 // Subscription = UsedItemCountMax as a percentage of AVG_RESOURCES_PER_POOL: 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
4730 size_t subscriptionModeCount;
4731 switch(ConfigType)
4732 {
4733 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
4734 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
4735 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
4736 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
4737 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
4738 default: assert(0);
4739 }
4740 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
4741 {
4742 std::string desc5 = desc4;
4743
4744 switch(subscriptionModeIndex)
4745 {
4746 case 0:
4747 desc5 += " Subscription_66%";
4748 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
4749 break;
4750 case 1:
4751 desc5 += " Subscription_133%";
4752 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
4753 break;
4754 case 2:
4755 desc5 += " Subscription_100%";
4756 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
4757 break;
4758 case 3:
4759 desc5 += " Subscription_33%";
4760 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
4761 break;
4762 case 4:
4763 desc5 += " Subscription_166%";
4764 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
4765 break;
4766 default:
4767 assert(0);
4768 }
4769
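                // Keep 5x more candidate items than can be in use at once; each frame uses between 80% and 100% of the maximum.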
4770 config.TotalItemCount = config.UsedItemCountMax * 5;
4771 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4772
4773 const char* testDescription = desc5.c_str();
4774
4775 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4776 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004777 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004778
4779 PoolTestResult result{};
4780 g_MemoryAliasingWarningEnabled = false;
4781 TestPool_Benchmark(result, config);
4782 g_MemoryAliasingWarningEnabled = true;
4783 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4784 }
4785 }
4786 }
4787 }
4788 }
4789 }
4790}
4791
Adam Sawickia83793a2018-09-03 13:40:42 +02004792static void BasicTestBuddyAllocator()
4793{
4794 wprintf(L"Basic test buddy allocator\n");
4795
4796 RandomNumberGenerator rand{76543};
4797
4798 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4799 sampleBufCreateInfo.size = 1024; // Arbitrary size - not important for this test.
4800 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4801
4802 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4803 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4804
4805 VmaPoolCreateInfo poolCreateInfo = {};
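 // Query a memory type index compatible with the sample buffer; the custom pool will allocate from that memory type.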
4806 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004807 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004808
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02004809 // Deliberately adding 1023 to test usable size smaller than memory block size.
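 // (The buddy algorithm is expected to round the usable block size down to a power of two, so the extra 1023 bytes should remain unused.)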
4810 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02004811 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02004812 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02004813
4814 VmaPool pool = nullptr;
4815 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004816 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004817
4818 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
4819
4820 VmaAllocationCreateInfo allocCreateInfo = {};
4821 allocCreateInfo.pool = pool;
4822
4823 std::vector<BufferInfo> bufInfo;
4824 BufferInfo newBufInfo;
4825 VmaAllocationInfo allocInfo;
4826
4827 bufCreateInfo.size = 1024 * 256;
4828 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4829 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004830 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004831 bufInfo.push_back(newBufInfo);
4832
4833 bufCreateInfo.size = 1024 * 512;
4834 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4835 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004836 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004837 bufInfo.push_back(newBufInfo);
4838
4839 bufCreateInfo.size = 1024 * 128;
4840 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4841 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004842 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004843 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02004844
4845 // Test very small allocation, smaller than minimum node size.
4846 bufCreateInfo.size = 1;
4847 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4848 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004849 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02004850 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02004851
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004852 // Test a small allocation with an alignment requirement.
4853 {
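 // Allocate raw memory (no buffer) from the pool using hand-filled VkMemoryRequirements; the returned offset must honor the requested alignment.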
4854 VkMemoryRequirements memReq;
4855 memReq.alignment = 256;
4856 memReq.memoryTypeBits = UINT32_MAX;
4857 memReq.size = 32;
4858
4859 newBufInfo.Buffer = VK_NULL_HANDLE;
4860 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
4861 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004862 TEST(res == VK_SUCCESS);
4863 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004864 bufInfo.push_back(newBufInfo);
4865 }
4866
4867 //SaveAllocatorStatsToFile(L"TEST.json");
4868
Adam Sawicki21017c62018-09-07 15:26:59 +02004869 VmaPoolStats stats = {};
4870 vmaGetPoolStats(g_hAllocator, pool, &stats);
4871 int DBG = 0; // Set breakpoint here to inspect `stats`.
4872
Adam Sawicki80927152018-09-07 17:27:23 +02004873 // Allocate enough new buffers to be sure that some of them land in a second memory block.
4874 for(uint32_t i = 0; i < 32; ++i)
4875 {
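 // Random size in the range 1..32 KB.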
4876 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
4877 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4878 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004879 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02004880 bufInfo.push_back(newBufInfo);
4881 }
4882
4883 SaveAllocatorStatsToFile(L"BuddyTest01.json");
4884
Adam Sawickia83793a2018-09-03 13:40:42 +02004885 // Destroy the buffers in random order.
4886 while(!bufInfo.empty())
4887 {
4888 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
4889 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
4890 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
4891 bufInfo.erase(bufInfo.begin() + indexToDestroy);
4892 }
4893
4894 vmaDestroyPool(g_hAllocator, pool);
4895}
4896
Adam Sawickif2975342018-10-16 13:49:02 +02004897// Test the testing environment.
4898static void TestGpuData()
4899{
4900 RandomNumberGenerator rand = { 53434 };
4901
4902 std::vector<AllocInfo> allocInfo;
4903
4904 for(size_t i = 0; i < 100; ++i)
4905 {
4906 AllocInfo info = {};
4907
4908 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4909 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
4910 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
4911 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
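 // Random size in the range 1..9 MB.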
4912 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
4913
4914 VmaAllocationCreateInfo allocCreateInfo = {};
4915 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4916
4917 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
4918 TEST(res == VK_SUCCESS);
4919
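 // m_StartValue seeds the data pattern that UploadGpuData writes and ValidateGpuData later verifies.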
4920 info.m_StartValue = rand.Generate();
4921
4922 allocInfo.push_back(std::move(info));
4923 }
4924
4925 UploadGpuData(allocInfo.data(), allocInfo.size());
4926
4927 ValidateGpuData(allocInfo.data(), allocInfo.size());
4928
4929 DestroyAllAllocations(allocInfo);
4930}
4931
Adam Sawickib8333fb2018-03-13 16:15:53 +01004932void Test()
4933{
4934 wprintf(L"TESTING:\n");
4935
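 // When this block is enabled, only the tests listed inside it run - the early return below skips the rest of the suite.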
Adam Sawickif2975342018-10-16 13:49:02 +02004936 if(true)
Adam Sawicki70a683e2018-08-24 15:36:32 +02004937 {
4938 // # Temporarily insert custom tests here
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004939 // ########################################
4940 // ########################################
Adam Sawicki80927152018-09-07 17:27:23 +02004941
Adam Sawickic6ede152018-11-16 17:04:14 +01004942 TestDefragmentationGpu(0);
4943 TestDefragmentationGpu(VMA_DEFRAGMENTATION_FAST_ALGORITHM_BIT);
4944 TestDefragmentationGpu(VMA_DEFRAGMENTATION_OPTIMAL_ALGORITHM_BIT);
Adam Sawicki05704002018-11-08 16:07:29 +01004945 TestDefragmentationSimple();
4946 TestDefragmentationFull();
Adam Sawicki70a683e2018-08-24 15:36:32 +02004947 return;
4948 }
4949
Adam Sawickib8333fb2018-03-13 16:15:53 +01004950 // # Simple tests
4951
4952 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02004953 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02004954#if VMA_DEBUG_MARGIN
4955 TestDebugMargin();
4956#else
4957 TestPool_SameSize();
4958 TestHeapSizeLimit();
Adam Sawickib0c36362018-11-13 16:17:38 +01004959 TestResize();
Adam Sawicki212a4a62018-06-14 15:44:45 +02004960#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02004961#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
4962 TestAllocationsInitialization();
4963#endif
Adam Sawickib8333fb2018-03-13 16:15:53 +01004964 TestMapping();
4965 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02004966 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02004967 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02004968 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004969
Adam Sawicki4338f662018-09-07 14:12:37 +02004970 BasicTestBuddyAllocator();
4971
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004972 {
4973 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02004974 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004975 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02004976 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004977 fclose(file);
4978 }
4979
Adam Sawickib8333fb2018-03-13 16:15:53 +01004980 TestDefragmentationSimple();
4981 TestDefragmentationFull();
Adam Sawickic6ede152018-11-16 17:04:14 +01004982 TestDefragmentationGpu(0);
4983 TestDefragmentationGpu(VMA_DEFRAGMENTATION_FAST_ALGORITHM_BIT);
4984 TestDefragmentationGpu(VMA_DEFRAGMENTATION_OPTIMAL_ALGORITHM_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004985
4986 // # Detailed tests
4987 FILE* file;
4988 fopen_s(&file, "Results.csv", "w");
4989 assert(file != NULL);
4990
4991 WriteMainTestResultHeader(file);
4992 PerformMainTests(file);
4993 //PerformCustomMainTest(file);
4994
4995 WritePoolTestResultHeader(file);
4996 PerformPoolTests(file);
4997 //PerformCustomPoolTest(file);
4998
4999 fclose(file);
5000
5001 wprintf(L"Done.\n");
5002}
5003
Adam Sawickif1a793c2018-03-13 15:42:22 +01005004#endif // #ifdef _WIN32