#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <thread>
#include <mutex>

#ifdef _WIN32

static const char* CODE_DESCRIPTION = "Foo";

extern VkCommandBuffer g_hTemporaryCommandBuffer;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;

enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};

// Copy of internal VmaAlgorithmToStr.
static const char* AlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        assert(0);
        return "";
    }
}

struct AllocationSize
{
    uint32_t Probability;
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};

struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate;
    uint32_t AdditionalOperationCount;
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};

struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated;
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};

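// A minimal sketch of how a Config could be filled to drive MainTest(), which is
// defined later in this file. The values below are illustrative assumptions only,
// not the benchmark defaults used by the tests:
//
//     Config cfg = {};
//     cfg.RandSeed = 123;
//     cfg.BeginBytesToAllocate = 64ull * 1024 * 1024;
//     cfg.MaxBytesToAllocate = 256ull * 1024 * 1024;
//     cfg.AdditionalOperationCount = 1024;
//     cfg.MemUsageProbability[0] = 1; // index 0 maps to VMA_MEMORY_USAGE_GPU_ONLY
//     cfg.AllocationSizes.push_back({1, 0x1000, 0x10000, 0, 0}); // buffers 4 KB..64 KB
//     cfg.ThreadCount = 1;
//     cfg.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
//     cfg.FreeOrder = FREE_ORDER::RANDOM;
//     Result result;
//     MainTest(result, cfg);
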
void TestDefragmentationSimple();
void TestDefragmentationFull();

struct PoolTestConfig
{
    uint32_t RandSeed;
    uint32_t ThreadCount;
    VkDeviceSize PoolSize;
    uint32_t FrameCount;
    uint32_t TotalItemCount;
    // Range for number of items used in each frame.
    uint32_t UsedItemCountMin, UsedItemCountMax;
    // Percent of items to make unused, and possibly make some others used in each frame.
    uint32_t ItemsToMakeUnusedPercent;
    std::vector<AllocationSize> AllocationSizes;

    VkDeviceSize CalcAvgResourceSize() const
    {
        uint32_t probabilitySum = 0;
        VkDeviceSize sizeSum = 0;
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
        {
            const AllocationSize& allocSize = AllocationSizes[i];
            if(allocSize.BufferSizeMax > 0)
                sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
            else
            {
                const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
                sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
            }
            probabilitySum += allocSize.Probability;
        }
        return sizeSum / probabilitySum;
    }

    bool UsesBuffers() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].BufferSizeMax > 0)
                return true;
        return false;
    }

    bool UsesImages() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].ImageSizeMax > 0)
                return true;
        return false;
    }
};

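// Worked example of the probability-weighted average above (illustrative numbers,
// not test data): with AllocationSizes = { {3, 0, 0, 256, 256}, {1, 1024, 2048, 0, 0} },
// CalcAvgResourceSize() = (256*256*4*3 + ((1024+2048)/2)*1) / (3+1) = 196992 bytes,
// because images are estimated at 4 bytes per pixel of their average dimension squared.
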
struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

static uint32_t g_FrameIndex = 0;

struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};

static uint32_t GetAllocationStrategyCount()
{
    uint32_t strategyCount = 0;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
    case CONFIG_TYPE_SMALL:   strategyCount = 1; break;
    case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
    case CONFIG_TYPE_LARGE:   strategyCount = 2; break;
    case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
    default: assert(0);
    }
    return strategyCount;
}

static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
{
    switch(allocStrategy)
    {
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
    case 0: return "Default"; break;
    default: assert(0); return "";
    }
}

static void InitResult(Result& outResult)
{
    outResult.TotalTime = duration::zero();
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.TotalMemoryAllocated = 0;
    outResult.FreeRangeSizeAvg = 0;
    outResult.FreeRangeSizeMax = 0;
}

class TimeRegisterObj
{
public:
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    ~TimeRegisterObj()
    {
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg;
};

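// TimeRegisterObj is a scope-based (RAII) timer: construction records the start time,
// destruction adds the elapsed duration to `sum` and updates `min`/`max`.
// A hedged usage sketch (variable names here are only illustrative):
//
//     duration mn = duration::max(), sum = duration::zero(), mx = duration::min();
//     {
//         TimeRegisterObj timer(mn, sum, mx);
//         // ... timed work ...
//     } // elapsed time is accumulated when `timer` goes out of scope
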
struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};

class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};

class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};

class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};

static void CurrentTimeToStr(std::string& out)
{
    time_t rawTime; time(&rawTime);
    struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
    char timeStr[128];
    strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
    out = timeStr;
}

VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            TEST(0);
        }
        return res;
    };

    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait until all threads have reached their maximum number of allocations.
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate.
    SetEvent(threadsFinishEvent);

    // Wait for threads to finish.
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources.
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}

static void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}

struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    uint32_t m_StartValue = 0;
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void Destroy();
};

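// A short sketch of the AllocInfo lifecycle as used by the GPU defragmentation test
// further below (buffer parameters here are illustrative assumptions):
//
//     VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
//     bufCreateInfo.size = 0x10000;
//     bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
//     VmaAllocationCreateInfo allocCreateInfo = {};
//     allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
//     AllocInfo info;
//     info.CreateBuffer(bufCreateInfo, allocCreateInfo);
//     // ... use info.m_Buffer ...
//     info.Destroy();
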
void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}

void AllocInfo::Destroy()
{
    if(m_Image)
    {
        vkDestroyImage(g_hDevice, m_Image, nullptr);
    }
    if(m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, m_Buffer, nullptr);
    }
    if(m_Allocation)
    {
        vmaFreeMemory(g_hAllocator, m_Allocation);
    }
}

class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr;
        bool Used = false;
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};

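// Typical usage pattern of StagingBufferCollection, as in UploadGpuData()/ValidateGpuData()
// below (a simplified sketch; `dataSize` is an assumed caller-provided value):
//
//     StagingBufferCollection stagingBufs;
//     VkBuffer stagingBuf = VK_NULL_HANDLE; void* mapped = nullptr;
//     if(stagingBufs.AcquireBuffer(dataSize, stagingBuf, mapped))
//     {
//         // memcpy into `mapped`, record vkCmdCopyBuffer from stagingBuf, submit the commands...
//         stagingBufs.ReleaseAllBuffers(); // only after the copies have completed
//     }
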
StagingBufferCollection::~StagingBufferCollection()
{
    for(size_t i = m_Bufs.size(); i--; )
    {
        vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
    }
}

bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // There are some unused but smaller buffers: Free them and try again.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}

void StagingBufferCollection::ReleaseAllBuffers()
{
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        m_Bufs[i].Used = false;
    }
}

static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}

static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    size_t validateAllocIndexOffset = 0;
    std::vector<void*> validateStagingBuffers;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                cmdBufferStarted = false;

                for(size_t validateIndex = 0;
                    validateIndex < validateStagingBuffers.size();
                    ++validateIndex)
                {
                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
                    TEST(validateSize % sizeof(uint32_t) == 0);
                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
                    bool valid = true;
                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
                    {
                        if(*stagingValPtr != val)
                        {
                            valid = false;
                            break;
                        }
                        ++stagingValPtr;
                        ++val;
                    }
                    TEST(valid);
                }

                stagingBufs.ReleaseAllBuffers();

                validateAllocIndexOffset = allocInfoIndex;
                validateStagingBuffers.clear();

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Issue copy command from destination buffer to staging buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);

            // Save mapped pointer for later validation.
            validateStagingBuffers.push_back(stagingBufMappedPtr);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();

        for(size_t validateIndex = 0;
            validateIndex < validateStagingBuffers.size();
            ++validateIndex)
        {
            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
            TEST(validateSize % sizeof(uint32_t) == 0);
            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
            bool valid = true;
            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
            {
                if(*stagingValPtr != val)
                {
                    valid = false;
                    break;
                }
                ++stagingValPtr;
                ++val;
            }
            TEST(valid);
        }

        stagingBufs.ReleaseAllBuffers();
    }
}

static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
{
    outMemReq = {};
    outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
    //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
}

static void CreateBuffer(
    VmaPool pool,
    const VkBufferCreateInfo& bufCreateInfo,
    bool persistentlyMapped,
    AllocInfo& outAllocInfo)
{
    outAllocInfo = {};
    outAllocInfo.m_BufferInfo = bufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    if(persistentlyMapped)
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VmaAllocationInfo vmaAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );

    // Setup StartValue and fill.
    {
        outAllocInfo.m_StartValue = (uint32_t)rand();
        uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
        TEST((data != nullptr) == persistentlyMapped);
        if(!persistentlyMapped)
        {
            ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
        }

        uint32_t value = outAllocInfo.m_StartValue;
        TEST(bufCreateInfo.size % 4 == 0);
        for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
            data[i] = value++;

        if(!persistentlyMapped)
            vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
    }
}

static void CreateAllocation(AllocInfo& outAllocation)
{
    outAllocation.m_Allocation = nullptr;
    outAllocation.m_Buffer = nullptr;
    outAllocation.m_Image = nullptr;
    outAllocation.m_StartValue = (uint32_t)rand();

    VmaAllocationCreateInfo vmaMemReq;
    GetMemReq(vmaMemReq);

    VmaAllocationInfo allocInfo;

    const bool isBuffer = true;//(rand() & 0x1) != 0;
    const bool isLarge = (rand() % 16) == 0;
    if(isBuffer)
    {
        const uint32_t bufferSize = isLarge ?
            (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
            (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB

        VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufferInfo.size = bufferSize;
        bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
    else
    {
        const uint32_t imageSizeX = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024
        const uint32_t imageSizeY = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024

        VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
        imageInfo.imageType = VK_IMAGE_TYPE_2D;
        imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
        imageInfo.extent.width = imageSizeX;
        imageInfo.extent.height = imageSizeY;
        imageInfo.extent.depth = 1;
        imageInfo.mipLevels = 1;
        imageInfo.arrayLayers = 1;
        imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
        imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
        data[i] = value++;

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
}

static void DestroyAllocation(const AllocInfo& allocation)
{
    if(allocation.m_Buffer)
        vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
    else
        vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
}

static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
{
    for(size_t i = allocations.size(); i--; )
        DestroyAllocation(allocations[i]);
    allocations.clear();
}

static void ValidateAllocationData(const AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = allocation.m_StartValue;
    bool ok = true;
    size_t i;
    TEST(allocInfo.size % 4 == 0);
    for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
    {
        if(data[i] != value++)
        {
            ok = false;
            break;
        }
    }
    TEST(ok);

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
}

static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);

        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size == allocation.m_BufferInfo.size);

        res = vkBindBufferMemory(g_hDevice, allocation.m_Buffer, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vkBindImageMemory(g_hDevice, allocation.m_Image, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
}

static void Defragment(AllocInfo* allocs, size_t allocCount,
    const VmaDefragmentationInfo* defragmentationInfo = nullptr,
    VmaDefragmentationStats* defragmentationStats = nullptr)
{
    std::vector<VmaAllocation> vmaAllocs(allocCount);
    for(size_t i = 0; i < allocCount; ++i)
        vmaAllocs[i] = allocs[i].m_Allocation;

    std::vector<VkBool32> allocChanged(allocCount);

    ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
        defragmentationInfo, defragmentationStats) );

    for(size_t i = 0; i < allocCount; ++i)
    {
        if(allocChanged[i])
        {
            RecreateAllocationResource(allocs[i]);
        }
    }
}

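// Defragment() above wraps the CPU-side vmaDefragment() path: it gathers the VmaAllocation
// handles, lets VMA move them, and then recreates and rebinds the Vulkan buffer/image of
// every allocation reported as changed, since the old handles still point at the old memory.
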
static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
{
    std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
        ValidateAllocationData(allocInfo);
    });
}

void TestDefragmentationSimple()
{
    wprintf(L"Test defragmentation simple\n");

    RandomNumberGenerator rand(667);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    const VkDeviceSize MIN_BUF_SIZE = 32;
    const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
    auto RandomBufSize = [&]() -> VkDeviceSize {
        return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
    };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaPool pool;
    ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

    std::vector<AllocInfo> allocations;

    // persistentlyMappedOption = 0 - not persistently mapped.
    // persistentlyMappedOption = 1 - persistently mapped.
    for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
    {
        wprintf(L"  Persistently mapped option = %u\n", persistentlyMappedOption);
        const bool persistentlyMapped = persistentlyMappedOption != 0;

        // # Test 1
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment everything.
        // Expected result: at least 1 block freed.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationStats defragStats;
            Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
            TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            TEST(defragStats.deviceMemoryBlocksFreed >= 1);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 2
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment one buffer at a time.
        // Expected result: Each of 4 iterations makes some progress.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationInfo defragInfo = {};
            defragInfo.maxAllocationsToMove = 1;
            defragInfo.maxBytesToMove = BUF_SIZE;

            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
            {
                VmaDefragmentationStats defragStats;
                Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
                TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            }

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 3
        // Buffers of variable size.
        // Create a number of buffers. Remove some percent of them.
        // Defragment while having some percent of them unmovable.
        // Expected result: Just simple validation.
        {
            for(size_t i = 0; i < 100; ++i)
            {
                VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
                localBufCreateInfo.size = RandomBufSize();

                AllocInfo allocInfo;
                CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            const uint32_t percentToDelete = 60;
            const size_t numberToDelete = allocations.size() * percentToDelete / 100;
            for(size_t i = 0; i < numberToDelete; ++i)
            {
                size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
                DestroyAllocation(allocations[indexToDelete]);
                allocations.erase(allocations.begin() + indexToDelete);
            }

            // Non-movable allocations will be at the beginning of allocations array.
            const uint32_t percentNonMovable = 20;
            const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
            for(size_t i = 0; i < numberNonMovable; ++i)
            {
                size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
                if(indexNonMovable != i)
                    std::swap(allocations[i], allocations[indexNonMovable]);
            }

            VmaDefragmentationStats defragStats;
            Defragment(
                allocations.data() + numberNonMovable,
                allocations.size() - numberNonMovable,
                nullptr, &defragStats);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }
    }

    vmaDestroyPool(g_hAllocator, pool);
}

void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations.
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    RecreateAllocationResource(allocations[i]);
                }
            }

            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}

static void TestDefragmentationGpu()
{
    wprintf(L"Test defragmentation GPU\n");
    g_MemoryAliasingWarningEnabled = false;

    std::vector<AllocInfo> allocations;

    // Create that many allocations to surely fill 3 new blocks of 256 MB.
    const VkDeviceSize bufSize = 10ull * 1024 * 1024;
    const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
    const size_t bufCount = (size_t)(totalSize / bufSize);
    const size_t percentToLeave = 20;
    RandomNumberGenerator rand = { 234522 };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = bufSize;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
        VK_BUFFER_USAGE_TRANSFER_DST_BIT |
        VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
    allocCreateInfo.pUserData = "TestDefragmentationGpu";

    // Create all intended buffers.
    for(size_t i = 0; i < bufCount; ++i)
    {
        AllocInfo alloc;
        alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
        alloc.m_StartValue = rand.Generate();
        allocations.push_back(alloc);
    }

    // Destroy some percentage of them.
    {
        const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
        for(size_t i = 0; i < buffersToDestroy; ++i)
        {
            const size_t index = rand.Generate() % allocations.size();
            allocations[index].Destroy();
            allocations.erase(allocations.begin() + index);
        }
    }

    // Fill them with meaningful data.
    UploadGpuData(allocations.data(), allocations.size());

    SaveAllocatorStatsToFile(L"GPU_defragmentation_A_before.json");

    // Defragment using GPU only.
    {
        const size_t allocCount = allocations.size();

        std::vector<VmaAllocation> allocationPtrs(allocCount);
        std::vector<VkBool32> allocationChanged(allocCount);
        for(size_t i = 0; i < allocCount; ++i)
        {
            allocationPtrs[i] = allocations[i].m_Allocation;
        }
        memset(allocationChanged.data(), 0, allocCount * sizeof(VkBool32));

        BeginSingleTimeCommands();

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.allocationCount = (uint32_t)allocCount;
        defragInfo.pAllocations = allocationPtrs.data();
        defragInfo.pAllocationsChanged = allocationChanged.data();
        defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
        defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
        defragInfo.commandBuffer = g_hTemporaryCommandBuffer;

        VmaDefragmentationStats stats = {};
        VmaDefragmentationContext ctx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
        TEST(res >= VK_SUCCESS);

        EndSingleTimeCommands();

        vmaDefragmentationEnd(g_hAllocator, ctx);

        for(size_t i = 0; i < allocCount; ++i)
        {
            if(allocationChanged[i])
            {
                RecreateAllocationResource(allocations[i]);
            }
        }

        TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
        TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
        TEST(stats.allocationsLost == 0);
    }

    ValidateGpuData(allocations.data(), allocations.size());

    SaveAllocatorStatsToFile(L"GPU_defragmentation_B_after.json");

    // Destroy all remaining buffers.
    for(size_t i = allocations.size(); i--; )
    {
        allocations[i].Destroy();
    }

    g_MemoryAliasingWarningEnabled = true;
}

Adam Sawickib8333fb2018-03-13 16:15:53 +01001555static void TestUserData()
1556{
1557 VkResult res;
1558
1559 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1560 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1561 bufCreateInfo.size = 0x10000;
1562
1563 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1564 {
1565 // Opaque pointer
1566 {
1567
1568 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1569 void* pointerToSomething = &res;
1570
1571 VmaAllocationCreateInfo allocCreateInfo = {};
1572 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1573 allocCreateInfo.pUserData = numberAsPointer;
1574 if(testIndex == 1)
1575 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1576
1577 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1578 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001579 TEST(res == VK_SUCCESS);
1580 TEST(allocInfo.pUserData = numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001581
1582 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001583 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001584
1585 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1586 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001587 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001588
1589 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1590 }
1591
1592 // String
1593 {
1594 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1595 const char* name2 = "2";
1596 const size_t name1Len = strlen(name1);
1597
1598 char* name1Buf = new char[name1Len + 1];
1599 strcpy_s(name1Buf, name1Len + 1, name1);
1600
1601 VmaAllocationCreateInfo allocCreateInfo = {};
1602 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1603 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1604 allocCreateInfo.pUserData = name1Buf;
1605 if(testIndex == 1)
1606 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1607
1608 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1609 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001610 TEST(res == VK_SUCCESS);
1611 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1612 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001613
1614 delete[] name1Buf;
1615
1616 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001617 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001618
1619 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1620 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001621 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001622
1623 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1624 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001625 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001626
1627 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1628 }
1629 }
1630}
1631
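// Tests that clearly invalid requests (zero-size allocation, zero-size buffer, image with a zero extent) fail with VK_ERROR_VALIDATION_FAILED_EXT and return null handles.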
Adam Sawicki370ab182018-11-08 16:31:00 +01001632static void TestInvalidAllocations()
1633{
1634 VkResult res;
1635
1636 VmaAllocationCreateInfo allocCreateInfo = {};
1637 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1638
1639 // Try to allocate 0 bytes.
1640 {
1641 VkMemoryRequirements memReq = {};
1642 memReq.size = 0; // !!!
1643 memReq.alignment = 4;
1644 memReq.memoryTypeBits = UINT32_MAX;
1645 VmaAllocation alloc = VK_NULL_HANDLE;
1646 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1647 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1648 }
1649
1650 // Try to create buffer with size = 0.
1651 {
1652 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1653 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1654 bufCreateInfo.size = 0; // !!!
1655 VkBuffer buf = VK_NULL_HANDLE;
1656 VmaAllocation alloc = VK_NULL_HANDLE;
1657 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1658 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1659 }
1660
1661 // Try to create image with one dimension = 0.
1662 {
1663 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1664 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1665 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1666 imageCreateInfo.extent.width = 128;
1667 imageCreateInfo.extent.height = 0; // !!!
1668 imageCreateInfo.extent.depth = 1;
1669 imageCreateInfo.mipLevels = 1;
1670 imageCreateInfo.arrayLayers = 1;
1671 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1672 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1673 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1674 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1675 VkImage image = VK_NULL_HANDLE;
1676 VmaAllocation alloc = VK_NULL_HANDLE;
1677 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1678 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1679 }
1680}
1681
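// Tests memory type selection based on usage, requiredFlags/preferredFlags, and memoryTypeBits.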
Adam Sawickib8333fb2018-03-13 16:15:53 +01001682static void TestMemoryRequirements()
1683{
1684 VkResult res;
1685 VkBuffer buf;
1686 VmaAllocation alloc;
1687 VmaAllocationInfo allocInfo;
1688
1689 const VkPhysicalDeviceMemoryProperties* memProps;
1690 vmaGetMemoryProperties(g_hAllocator, &memProps);
1691
1692 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1693 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1694 bufInfo.size = 128;
1695
1696 VmaAllocationCreateInfo allocCreateInfo = {};
1697
1698 // No requirements.
1699 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001700 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001701 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1702
1703 // Usage.
1704 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1705 allocCreateInfo.requiredFlags = 0;
1706 allocCreateInfo.preferredFlags = 0;
1707 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1708
1709 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001710 TEST(res == VK_SUCCESS);
1711 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001712 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1713
1714 // Required flags, preferred flags.
1715 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1716 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1717 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1718 allocCreateInfo.memoryTypeBits = 0;
1719
1720 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001721 TEST(res == VK_SUCCESS);
1722 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1723 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001724 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1725
1726 // memoryTypeBits.
1727 const uint32_t memType = allocInfo.memoryType;
1728 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1729 allocCreateInfo.requiredFlags = 0;
1730 allocCreateInfo.preferredFlags = 0;
1731 allocCreateInfo.memoryTypeBits = 1u << memType;
1732
1733 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001734 TEST(res == VK_SUCCESS);
1735 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001736 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1737
1738}
1739
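// Basic smoke test: memory requirements, a lost allocation created with vmaCreateLostAllocation,
// and MAPPED allocations that are not necessarily HOST_VISIBLE. Also runs TestUserData and TestInvalidAllocations.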
1740static void TestBasics()
1741{
1742 VkResult res;
1743
1744 TestMemoryRequirements();
1745
1746 // Lost allocation
1747 {
1748 VmaAllocation alloc = VK_NULL_HANDLE;
1749 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001750 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001751
1752 VmaAllocationInfo allocInfo;
1753 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001754 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1755 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001756
1757 vmaFreeMemory(g_hAllocator, alloc);
1758 }
1759
1760 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1761 {
1762 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1763 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1764 bufCreateInfo.size = 128;
1765
1766 VmaAllocationCreateInfo allocCreateInfo = {};
1767 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1768 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1769
1770 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1771 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001772 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001773
1774 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1775
1776 // Same with DEDICATED_MEMORY.
1777 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1778
1779 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001780 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001781
1782 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1783 }
1784
1785 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001786
1787 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001788}
1789
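// Tests VmaAllocatorCreateInfo::pHeapSizeLimit: once the artificial 1 GB heap limit is reached, one more allocation must fail with VK_ERROR_OUT_OF_DEVICE_MEMORY.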
1790void TestHeapSizeLimit()
1791{
1792 const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
1793 const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB
1794
1795 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1796 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1797 {
1798 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1799 }
1800
1801 VmaAllocatorCreateInfo allocatorCreateInfo = {};
1802 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
1803 allocatorCreateInfo.device = g_hDevice;
1804 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
1805
1806 VmaAllocator hAllocator;
1807 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001808 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001809
1810 struct Item
1811 {
1812 VkBuffer hBuf;
1813 VmaAllocation hAlloc;
1814 };
1815 std::vector<Item> items;
1816
1817 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1818 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
1819
1820 // 1. Allocate two blocks of dedicated memory, each half the size of BLOCK_SIZE.
1821 VmaAllocationInfo ownAllocInfo;
1822 {
1823 VmaAllocationCreateInfo allocCreateInfo = {};
1824 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1825 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1826
1827 bufCreateInfo.size = BLOCK_SIZE / 2;
1828
1829 for(size_t i = 0; i < 2; ++i)
1830 {
1831 Item item;
1832 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001833 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001834 items.push_back(item);
1835 }
1836 }
1837
1838 // Create a pool to make sure further allocations must come from this memory type.
1839 VmaPoolCreateInfo poolCreateInfo = {};
1840 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
1841 poolCreateInfo.blockSize = BLOCK_SIZE;
1842
1843 VmaPool hPool;
1844 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001845 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001846
1847 // 2. Allocate normal buffers from all the remaining memory.
1848 {
1849 VmaAllocationCreateInfo allocCreateInfo = {};
1850 allocCreateInfo.pool = hPool;
1851
1852 bufCreateInfo.size = BLOCK_SIZE / 2;
1853
1854 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
1855 for(size_t i = 0; i < bufCount; ++i)
1856 {
1857 Item item;
1858 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001859 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001860 items.push_back(item);
1861 }
1862 }
1863
1864 // 3. Allocation of one more (even small) buffer should fail.
1865 {
1866 VmaAllocationCreateInfo allocCreateInfo = {};
1867 allocCreateInfo.pool = hPool;
1868
1869 bufCreateInfo.size = 128;
1870
1871 VkBuffer hBuf;
1872 VmaAllocation hAlloc;
1873 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001874 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001875 }
1876
1877 // Destroy everything.
1878 for(size_t i = items.size(); i--; )
1879 {
1880 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
1881 }
1882
1883 vmaDestroyPool(hAllocator, hPool);
1884
1885 vmaDestroyAllocator(hAllocator);
1886}
1887
Adam Sawicki212a4a62018-06-14 15:44:45 +02001888#if VMA_DEBUG_MARGIN
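// Tests that allocations are separated by at least VMA_DEBUG_MARGIN bytes and that vmaCheckCorruption succeeds.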
Adam Sawicki73b16652018-06-11 16:39:25 +02001889static void TestDebugMargin()
1890{
1891 if(VMA_DEBUG_MARGIN == 0)
1892 {
1893 return;
1894 }
1895
1896 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02001897 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02001898
1899 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02001900 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02001901
1902 // Create a few buffers of different sizes.
1903 const size_t BUF_COUNT = 10;
1904 BufferInfo buffers[BUF_COUNT];
1905 VmaAllocationInfo allocInfo[BUF_COUNT];
1906 for(size_t i = 0; i < BUF_COUNT; ++i)
1907 {
1908 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02001909 // Last one will be mapped.
1910 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02001911
1912 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001913 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02001914 // Margin is preserved also at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02001915 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02001916
1917 if(i == BUF_COUNT - 1)
1918 {
1919 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02001920 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02001921 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
1922 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
1923 }
Adam Sawicki73b16652018-06-11 16:39:25 +02001924 }
1925
1926 // Check if their offsets preserve margin between them.
1927 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
1928 {
1929 if(lhs.deviceMemory != rhs.deviceMemory)
1930 {
1931 return lhs.deviceMemory < rhs.deviceMemory;
1932 }
1933 return lhs.offset < rhs.offset;
1934 });
1935 for(size_t i = 1; i < BUF_COUNT; ++i)
1936 {
1937 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
1938 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02001939 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02001940 }
1941 }
1942
Adam Sawicki212a4a62018-06-14 15:44:45 +02001943 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001944 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02001945
Adam Sawicki73b16652018-06-11 16:39:25 +02001946 // Destroy all buffers.
1947 for(size_t i = BUF_COUNT; i--; )
1948 {
1949 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
1950 }
1951}
Adam Sawicki212a4a62018-06-14 15:44:45 +02001952#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02001953
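// Tests a pool using VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT as: one-time-free arena, stack, ring buffer, double stack, and ring buffer with lost allocations.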
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001954static void TestLinearAllocator()
1955{
1956 wprintf(L"Test linear allocator\n");
1957
1958 RandomNumberGenerator rand{645332};
1959
1960 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1961 sampleBufCreateInfo.size = 1024; // Whatever.
1962 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
1963
1964 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
1965 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1966
1967 VmaPoolCreateInfo poolCreateInfo = {};
1968 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001969 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001970
Adam Sawickiee082772018-06-20 17:45:49 +02001971 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001972 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
1973 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
1974
1975 VmaPool pool = nullptr;
1976 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001977 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001978
1979 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
1980
1981 VmaAllocationCreateInfo allocCreateInfo = {};
1982 allocCreateInfo.pool = pool;
1983
1984 constexpr size_t maxBufCount = 100;
1985 std::vector<BufferInfo> bufInfo;
1986
1987 constexpr VkDeviceSize bufSizeMin = 16;
1988 constexpr VkDeviceSize bufSizeMax = 1024;
1989 VmaAllocationInfo allocInfo;
1990 VkDeviceSize prevOffset = 0;
1991
1992 // Test one-time free.
1993 for(size_t i = 0; i < 2; ++i)
1994 {
1995 // Allocate a number of buffers of varying sizes that surely fit into this block.
1996 VkDeviceSize bufSumSize = 0;
1997 for(size_t i = 0; i < maxBufCount; ++i)
1998 {
1999 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2000 BufferInfo newBufInfo;
2001 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2002 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002003 TEST(res == VK_SUCCESS);
2004 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002005 bufInfo.push_back(newBufInfo);
2006 prevOffset = allocInfo.offset;
2007 bufSumSize += bufCreateInfo.size;
2008 }
2009
2010 // Validate pool stats.
2011 VmaPoolStats stats;
2012 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002013 TEST(stats.size == poolCreateInfo.blockSize);
2014 TEST(stats.unusedSize == poolCreateInfo.blockSize - bufSumSize);
2015 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002016
2017 // Destroy the buffers in random order.
2018 while(!bufInfo.empty())
2019 {
2020 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2021 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2022 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2023 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2024 }
2025 }
2026
2027 // Test stack.
2028 {
2029 // Allocate a number of buffers of varying sizes that surely fit into this block.
2030 for(size_t i = 0; i < maxBufCount; ++i)
2031 {
2032 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2033 BufferInfo newBufInfo;
2034 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2035 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002036 TEST(res == VK_SUCCESS);
2037 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002038 bufInfo.push_back(newBufInfo);
2039 prevOffset = allocInfo.offset;
2040 }
2041
2042 // Destroy a few buffers from the top of the stack.
2043 for(size_t i = 0; i < maxBufCount / 5; ++i)
2044 {
2045 const BufferInfo& currBufInfo = bufInfo.back();
2046 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2047 bufInfo.pop_back();
2048 }
2049
2050 // Create some more
2051 for(size_t i = 0; i < maxBufCount / 5; ++i)
2052 {
2053 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2054 BufferInfo newBufInfo;
2055 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2056 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002057 TEST(res == VK_SUCCESS);
2058 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002059 bufInfo.push_back(newBufInfo);
2060 prevOffset = allocInfo.offset;
2061 }
2062
2063 // Destroy the buffers in reverse order.
2064 while(!bufInfo.empty())
2065 {
2066 const BufferInfo& currBufInfo = bufInfo.back();
2067 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2068 bufInfo.pop_back();
2069 }
2070 }
2071
Adam Sawickiee082772018-06-20 17:45:49 +02002072 // Test ring buffer.
2073 {
2074 // Allocate a number of buffers that surely fit into this block.
2075 bufCreateInfo.size = bufSizeMax;
2076 for(size_t i = 0; i < maxBufCount; ++i)
2077 {
2078 BufferInfo newBufInfo;
2079 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2080 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002081 TEST(res == VK_SUCCESS);
2082 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002083 bufInfo.push_back(newBufInfo);
2084 prevOffset = allocInfo.offset;
2085 }
2086
2087 // Free and allocate new buffers enough times to make sure we wrap around at least once.
2088 const size_t buffersPerIter = maxBufCount / 10 - 1;
2089 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2090 for(size_t iter = 0; iter < iterCount; ++iter)
2091 {
2092 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2093 {
2094 const BufferInfo& currBufInfo = bufInfo.front();
2095 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2096 bufInfo.erase(bufInfo.begin());
2097 }
2098 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2099 {
2100 BufferInfo newBufInfo;
2101 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2102 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002103 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002104 bufInfo.push_back(newBufInfo);
2105 }
2106 }
2107
2108 // Allocate buffers until we reach out-of-memory.
2109 uint32_t debugIndex = 0;
2110 while(res == VK_SUCCESS)
2111 {
2112 BufferInfo newBufInfo;
2113 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2114 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2115 if(res == VK_SUCCESS)
2116 {
2117 bufInfo.push_back(newBufInfo);
2118 }
2119 else
2120 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002121 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002122 }
2123 ++debugIndex;
2124 }
2125
2126 // Destroy the buffers in random order.
2127 while(!bufInfo.empty())
2128 {
2129 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2130 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2131 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2132 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2133 }
2134 }
2135
Adam Sawicki680b2252018-08-22 14:47:32 +02002136 // Test double stack.
2137 {
2138 // Allocate a number of buffers of varying sizes that surely fit into this block, alternating between bottom and top.
2139 VkDeviceSize prevOffsetLower = 0;
2140 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2141 for(size_t i = 0; i < maxBufCount; ++i)
2142 {
2143 const bool upperAddress = (i % 2) != 0;
2144 if(upperAddress)
2145 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2146 else
2147 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2148 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2149 BufferInfo newBufInfo;
2150 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2151 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002152 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002153 if(upperAddress)
2154 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002155 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002156 prevOffsetUpper = allocInfo.offset;
2157 }
2158 else
2159 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002160 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002161 prevOffsetLower = allocInfo.offset;
2162 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002163 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002164 bufInfo.push_back(newBufInfo);
2165 }
2166
2167 // Destroy a few buffers from the top of the stack.
2168 for(size_t i = 0; i < maxBufCount / 5; ++i)
2169 {
2170 const BufferInfo& currBufInfo = bufInfo.back();
2171 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2172 bufInfo.pop_back();
2173 }
2174
2175 // Create some more
2176 for(size_t i = 0; i < maxBufCount / 5; ++i)
2177 {
2178 const bool upperAddress = (i % 2) != 0;
2179 if(upperAddress)
2180 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2181 else
2182 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2183 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2184 BufferInfo newBufInfo;
2185 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2186 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002187 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002188 bufInfo.push_back(newBufInfo);
2189 }
2190
2191 // Destroy the buffers in reverse order.
2192 while(!bufInfo.empty())
2193 {
2194 const BufferInfo& currBufInfo = bufInfo.back();
2195 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2196 bufInfo.pop_back();
2197 }
2198
2199 // Create buffers on both sides until we reach out of memory.
2200 prevOffsetLower = 0;
2201 prevOffsetUpper = poolCreateInfo.blockSize;
2202 res = VK_SUCCESS;
2203 for(size_t i = 0; res == VK_SUCCESS; ++i)
2204 {
2205 const bool upperAddress = (i % 2) != 0;
2206 if(upperAddress)
2207 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2208 else
2209 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2210 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2211 BufferInfo newBufInfo;
2212 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2213 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2214 if(res == VK_SUCCESS)
2215 {
2216 if(upperAddress)
2217 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002218 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002219 prevOffsetUpper = allocInfo.offset;
2220 }
2221 else
2222 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002223 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002224 prevOffsetLower = allocInfo.offset;
2225 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002226 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002227 bufInfo.push_back(newBufInfo);
2228 }
2229 }
2230
2231 // Destroy the buffers in random order.
2232 while(!bufInfo.empty())
2233 {
2234 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2235 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2236 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2237 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2238 }
2239
2240 // Create buffers on upper side only, constant size, until we reach out of memory.
2241 prevOffsetUpper = poolCreateInfo.blockSize;
2242 res = VK_SUCCESS;
2243 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2244 bufCreateInfo.size = bufSizeMax;
2245 for(size_t i = 0; res == VK_SUCCESS; ++i)
2246 {
2247 BufferInfo newBufInfo;
2248 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2249 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2250 if(res == VK_SUCCESS)
2251 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002252 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002253 prevOffsetUpper = allocInfo.offset;
2254 bufInfo.push_back(newBufInfo);
2255 }
2256 }
2257
2258 // Destroy the buffers in reverse order.
2259 while(!bufInfo.empty())
2260 {
2261 const BufferInfo& currBufInfo = bufInfo.back();
2262 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2263 bufInfo.pop_back();
2264 }
2265 }
2266
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002267 // Test ring buffer with lost allocations.
2268 {
2269 // Allocate buffers until the pool is full.
2270 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
2271 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2272 res = VK_SUCCESS;
2273 for(size_t i = 0; res == VK_SUCCESS; ++i)
2274 {
2275 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2276
2277 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2278
2279 BufferInfo newBufInfo;
2280 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2281 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2282 if(res == VK_SUCCESS)
2283 bufInfo.push_back(newBufInfo);
2284 }
2285
2286 // Free first half of it.
2287 {
2288 const size_t buffersToDelete = bufInfo.size() / 2;
2289 for(size_t i = 0; i < buffersToDelete; ++i)
2290 {
2291 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2292 }
2293 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2294 }
2295
2296 // Allocate buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002297 // This way we make sure the ring buffer wraps around, with its front in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002298 res = VK_SUCCESS;
2299 for(size_t i = 0; res == VK_SUCCESS; ++i)
2300 {
2301 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2302
2303 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2304
2305 BufferInfo newBufInfo;
2306 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2307 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2308 if(res == VK_SUCCESS)
2309 bufInfo.push_back(newBufInfo);
2310 }
2311
2312 VkDeviceSize firstNewOffset;
2313 {
2314 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2315
2316 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2317 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2318 bufCreateInfo.size = bufSizeMax;
2319
2320 BufferInfo newBufInfo;
2321 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2322 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002323 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002324 bufInfo.push_back(newBufInfo);
2325 firstNewOffset = allocInfo.offset;
2326
2327 // Make sure at least one buffer from the beginning became lost.
2328 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002329 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002330 }
2331
2332 // Allocate more buffers with CAN_MAKE_OTHER_LOST until these wrap around as well.
2333 size_t newCount = 1;
2334 for(;;)
2335 {
2336 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2337
2338 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2339
2340 BufferInfo newBufInfo;
2341 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2342 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002343 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002344 bufInfo.push_back(newBufInfo);
2345 ++newCount;
2346 if(allocInfo.offset < firstNewOffset)
2347 break;
2348 }
2349
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002350 // Delete buffers that are lost.
2351 for(size_t i = bufInfo.size(); i--; )
2352 {
2353 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2354 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2355 {
2356 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2357 bufInfo.erase(bufInfo.begin() + i);
2358 }
2359 }
2360
2361 // Test vmaMakePoolAllocationsLost
2362 {
2363 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2364
2365 size_t lostAllocCount = SIZE_MAX;
2366 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002367 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002368
2369 size_t realLostAllocCount = 0;
2370 for(size_t i = 0; i < bufInfo.size(); ++i)
2371 {
2372 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2373 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2374 ++realLostAllocCount;
2375 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002376 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002377 }
2378
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002379 // Destroy all the buffers in forward order.
2380 for(size_t i = 0; i < bufInfo.size(); ++i)
2381 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2382 bufInfo.clear();
2383 }
2384
Adam Sawicki70a683e2018-08-24 15:36:32 +02002385 vmaDestroyPool(g_hAllocator, pool);
2386}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002387
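// Tests a linear-algorithm pool without a block count limit: allocations spill over to a second block and empty blocks are freed.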
Adam Sawicki70a683e2018-08-24 15:36:32 +02002388static void TestLinearAllocatorMultiBlock()
2389{
2390 wprintf(L"Test linear allocator multi block\n");
2391
2392 RandomNumberGenerator rand{345673};
2393
2394 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2395 sampleBufCreateInfo.size = 1024 * 1024;
2396 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2397
2398 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2399 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2400
2401 VmaPoolCreateInfo poolCreateInfo = {};
2402 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2403 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002404 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002405
2406 VmaPool pool = nullptr;
2407 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002408 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002409
2410 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2411
2412 VmaAllocationCreateInfo allocCreateInfo = {};
2413 allocCreateInfo.pool = pool;
2414
2415 std::vector<BufferInfo> bufInfo;
2416 VmaAllocationInfo allocInfo;
2417
2418 // Test one-time free.
2419 {
2420 // Allocate buffers until we move to a second block.
2421 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2422 for(uint32_t i = 0; ; ++i)
2423 {
2424 BufferInfo newBufInfo;
2425 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2426 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002427 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002428 bufInfo.push_back(newBufInfo);
2429 if(lastMem && allocInfo.deviceMemory != lastMem)
2430 {
2431 break;
2432 }
2433 lastMem = allocInfo.deviceMemory;
2434 }
2435
Adam Sawickib8d34d52018-10-03 17:41:20 +02002436 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002437
2438 // Make sure that pool has now two blocks.
2439 VmaPoolStats poolStats = {};
2440 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002441 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002442
2443 // Destroy all the buffers in random order.
2444 while(!bufInfo.empty())
2445 {
2446 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2447 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2448 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2449 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2450 }
2451
2452 // Make sure that pool has now at most one block.
2453 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002454 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002455 }
2456
2457 // Test stack.
2458 {
2459 // Allocate buffers until we move to a second block.
2460 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2461 for(uint32_t i = 0; ; ++i)
2462 {
2463 BufferInfo newBufInfo;
2464 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2465 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002466 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002467 bufInfo.push_back(newBufInfo);
2468 if(lastMem && allocInfo.deviceMemory != lastMem)
2469 {
2470 break;
2471 }
2472 lastMem = allocInfo.deviceMemory;
2473 }
2474
Adam Sawickib8d34d52018-10-03 17:41:20 +02002475 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002476
2477 // Add a few more buffers.
2478 for(uint32_t i = 0; i < 5; ++i)
2479 {
2480 BufferInfo newBufInfo;
2481 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2482 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002483 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002484 bufInfo.push_back(newBufInfo);
2485 }
2486
2487 // Make sure that pool has now two blocks.
2488 VmaPoolStats poolStats = {};
2489 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002490 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002491
2492 // Delete half of buffers, LIFO.
2493 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2494 {
2495 const BufferInfo& currBufInfo = bufInfo.back();
2496 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2497 bufInfo.pop_back();
2498 }
2499
2500 // Add one more buffer.
2501 BufferInfo newBufInfo;
2502 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2503 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002504 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002505 bufInfo.push_back(newBufInfo);
2506
2507 // Make sure that pool has now one block.
2508 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002509 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002510
2511 // Delete all the remaining buffers, LIFO.
2512 while(!bufInfo.empty())
2513 {
2514 const BufferInfo& currBufInfo = bufInfo.back();
2515 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2516 bufInfo.pop_back();
2517 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002518 }
2519
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002520 vmaDestroyPool(g_hAllocator, pool);
2521}
2522
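// Builds a small, fixed set of allocations in a linear pool so statistics and the JSON dump can be inspected manually under a debugger (see the breakpoint comment below).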
Adam Sawickifd11d752018-08-22 15:02:10 +02002523static void ManuallyTestLinearAllocator()
2524{
2525 VmaStats origStats;
2526 vmaCalculateStats(g_hAllocator, &origStats);
2527
2528 wprintf(L"Manually test linear allocator\n");
2529
2530 RandomNumberGenerator rand{645332};
2531
2532 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2533 sampleBufCreateInfo.size = 1024; // Whatever.
2534 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2535
2536 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2537 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2538
2539 VmaPoolCreateInfo poolCreateInfo = {};
2540 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002541 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002542
2543 poolCreateInfo.blockSize = 10 * 1024;
2544 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2545 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2546
2547 VmaPool pool = nullptr;
2548 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002549 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002550
2551 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2552
2553 VmaAllocationCreateInfo allocCreateInfo = {};
2554 allocCreateInfo.pool = pool;
2555
2556 std::vector<BufferInfo> bufInfo;
2557 VmaAllocationInfo allocInfo;
2558 BufferInfo newBufInfo;
2559
2560 // Test double stack.
2561 {
2562 /*
2563 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2564 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2565
2566 In total:
2567 1 block allocated
2568 10240 Vulkan bytes
2569 6 new allocations
2570 2256 bytes in allocations
2571 */
2572
2573 bufCreateInfo.size = 32;
2574 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2575 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002576 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002577 bufInfo.push_back(newBufInfo);
2578
2579 bufCreateInfo.size = 1024;
2580 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2581 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002582 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002583 bufInfo.push_back(newBufInfo);
2584
2585 bufCreateInfo.size = 32;
2586 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2587 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002588 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002589 bufInfo.push_back(newBufInfo);
2590
2591 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2592
2593 bufCreateInfo.size = 128;
2594 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2595 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002596 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002597 bufInfo.push_back(newBufInfo);
2598
2599 bufCreateInfo.size = 1024;
2600 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2601 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002602 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002603 bufInfo.push_back(newBufInfo);
2604
2605 bufCreateInfo.size = 16;
2606 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2607 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002608 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002609 bufInfo.push_back(newBufInfo);
2610
2611 VmaStats currStats;
2612 vmaCalculateStats(g_hAllocator, &currStats);
2613 VmaPoolStats poolStats;
2614 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2615
2616 char* statsStr = nullptr;
2617 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2618
2619 // PUT BREAKPOINT HERE TO CHECK.
2620 // Inspect: currStats versus origStats, poolStats, statsStr.
2621 int I = 0;
2622
2623 vmaFreeStatsString(g_hAllocator, statsStr);
2624
2625 // Destroy the buffers in reverse order.
2626 while(!bufInfo.empty())
2627 {
2628 const BufferInfo& currBufInfo = bufInfo.back();
2629 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2630 bufInfo.pop_back();
2631 }
2632 }
2633
2634 vmaDestroyPool(g_hAllocator, pool);
2635}
2636
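// Benchmarks allocation and deallocation time for one combination of algorithm, initial pool state, allocation strategy, and free order. Appends a CSV row to file if provided.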
Adam Sawicki80927152018-09-07 17:27:23 +02002637static void BenchmarkAlgorithmsCase(FILE* file,
2638 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002639 bool empty,
2640 VmaAllocationCreateFlags allocStrategy,
2641 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002642{
2643 RandomNumberGenerator rand{16223};
2644
2645 const VkDeviceSize bufSizeMin = 32;
2646 const VkDeviceSize bufSizeMax = 1024;
2647 const size_t maxBufCapacity = 10000;
2648 const uint32_t iterationCount = 10;
2649
2650 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2651 sampleBufCreateInfo.size = bufSizeMax;
2652 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2653
2654 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2655 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2656
2657 VmaPoolCreateInfo poolCreateInfo = {};
2658 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002659 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002660
2661 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002662 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002663 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2664
2665 VmaPool pool = nullptr;
2666 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002667 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002668
2669 // Buffer created just to get memory requirements. Never bound to any memory.
2670 VkBuffer dummyBuffer = VK_NULL_HANDLE;
2671 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002672 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002673
2674 VkMemoryRequirements memReq = {};
2675 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2676
2677 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2678
2679 VmaAllocationCreateInfo allocCreateInfo = {};
2680 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002681 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002682
2683 VmaAllocation alloc;
2684 std::vector<VmaAllocation> baseAllocations;
2685
2686 if(!empty)
2687 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002688 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002689 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002690 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002691 {
2692 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2693 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002694 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002695 baseAllocations.push_back(alloc);
2696 totalSize += memReq.size;
2697 }
2698
2699 // Delete half of them, choose randomly.
2700 size_t allocsToDelete = baseAllocations.size() / 2;
2701 for(size_t i = 0; i < allocsToDelete; ++i)
2702 {
2703 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2704 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2705 baseAllocations.erase(baseAllocations.begin() + index);
2706 }
2707 }
2708
2709 // BENCHMARK
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002710 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002711 std::vector<VmaAllocation> testAllocations;
2712 testAllocations.reserve(allocCount);
2713 duration allocTotalDuration = duration::zero();
2714 duration freeTotalDuration = duration::zero();
2715 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2716 {
2717 // Allocations
2718 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2719 for(size_t i = 0; i < allocCount; ++i)
2720 {
2721 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2722 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002723 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002724 testAllocations.push_back(alloc);
2725 }
2726 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2727
2728 // Deallocations
2729 switch(freeOrder)
2730 {
2731 case FREE_ORDER::FORWARD:
2732 // Leave testAllocations unchanged.
2733 break;
2734 case FREE_ORDER::BACKWARD:
2735 std::reverse(testAllocations.begin(), testAllocations.end());
2736 break;
2737 case FREE_ORDER::RANDOM:
2738 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2739 break;
2740 default: assert(0);
2741 }
2742
2743 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2744 for(size_t i = 0; i < allocCount; ++i)
2745 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2746 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2747
2748 testAllocations.clear();
2749 }
2750
2751 // Delete baseAllocations
2752 while(!baseAllocations.empty())
2753 {
2754 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2755 baseAllocations.pop_back();
2756 }
2757
2758 vmaDestroyPool(g_hAllocator, pool);
2759
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002760 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2761 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2762
Adam Sawicki80927152018-09-07 17:27:23 +02002763 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2764 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002765 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002766 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002767 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002768 allocTotalSeconds,
2769 freeTotalSeconds);
2770
2771 if(file)
2772 {
2773 std::string currTime;
2774 CurrentTimeToStr(currTime);
2775
Adam Sawicki80927152018-09-07 17:27:23 +02002776 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002777 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002778 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002779 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002780 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002781 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2782 allocTotalSeconds,
2783 freeTotalSeconds);
2784 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002785}
2786
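// Runs BenchmarkAlgorithmsCase for every combination of algorithm (Default, Buddy, Linear), empty/non-empty pool, allocation strategy, and free order allowed by the current ConfigType.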
Adam Sawicki80927152018-09-07 17:27:23 +02002787static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002788{
Adam Sawicki80927152018-09-07 17:27:23 +02002789 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002790
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002791 if(file)
2792 {
2793 fprintf(file,
2794 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002795 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002796 "Allocation time (s),Deallocation time (s)\n");
2797 }
2798
Adam Sawicki0a607132018-08-24 11:18:41 +02002799 uint32_t freeOrderCount = 1;
2800 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2801 freeOrderCount = 3;
2802 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2803 freeOrderCount = 2;
2804
2805 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002806 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02002807
2808 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
2809 {
2810 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
2811 switch(freeOrderIndex)
2812 {
2813 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
2814 case 1: freeOrder = FREE_ORDER::FORWARD; break;
2815 case 2: freeOrder = FREE_ORDER::RANDOM; break;
2816 default: assert(0);
2817 }
2818
2819 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
2820 {
Adam Sawicki80927152018-09-07 17:27:23 +02002821 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02002822 {
Adam Sawicki80927152018-09-07 17:27:23 +02002823 uint32_t algorithm = 0;
2824 switch(algorithmIndex)
2825 {
2826 case 0:
2827 break;
2828 case 1:
2829 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
2830 break;
2831 case 2:
2832 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2833 break;
2834 default:
2835 assert(0);
2836 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002837
Adam Sawicki80927152018-09-07 17:27:23 +02002838 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002839 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
2840 {
2841 VmaAllocationCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02002842 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002843 {
2844 switch(allocStrategyIndex)
2845 {
2846 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
2847 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
2848 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
2849 default: assert(0);
2850 }
2851 }
2852
Adam Sawicki80927152018-09-07 17:27:23 +02002853 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002854 file,
Adam Sawicki80927152018-09-07 17:27:23 +02002855 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002856 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002857 strategy,
2858 freeOrder); // freeOrder
2859 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002860 }
2861 }
2862 }
2863}
2864
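// Tests a custom pool filled with equally-sized buffers: filling it completely, lost allocations across frames (CAN_BECOME_LOST + CAN_MAKE_OTHER_LOST), and pool statistics.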
Adam Sawickib8333fb2018-03-13 16:15:53 +01002865static void TestPool_SameSize()
2866{
2867 const VkDeviceSize BUF_SIZE = 1024 * 1024;
2868 const size_t BUF_COUNT = 100;
2869 VkResult res;
2870
2871 RandomNumberGenerator rand{123};
2872
2873 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2874 bufferInfo.size = BUF_SIZE;
2875 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
2876
2877 uint32_t memoryTypeBits = UINT32_MAX;
2878 {
2879 VkBuffer dummyBuffer;
2880 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002881 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002882
2883 VkMemoryRequirements memReq;
2884 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2885 memoryTypeBits = memReq.memoryTypeBits;
2886
2887 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2888 }
2889
2890 VmaAllocationCreateInfo poolAllocInfo = {};
2891 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2892 uint32_t memTypeIndex;
2893 res = vmaFindMemoryTypeIndex(
2894 g_hAllocator,
2895 memoryTypeBits,
2896 &poolAllocInfo,
2897 &memTypeIndex);
2898
2899 VmaPoolCreateInfo poolCreateInfo = {};
2900 poolCreateInfo.memoryTypeIndex = memTypeIndex;
2901 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
2902 poolCreateInfo.minBlockCount = 1;
2903 poolCreateInfo.maxBlockCount = 4;
2904 poolCreateInfo.frameInUseCount = 0;
2905
2906 VmaPool pool;
2907 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002908 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002909
2910 vmaSetCurrentFrameIndex(g_hAllocator, 1);
2911
2912 VmaAllocationCreateInfo allocInfo = {};
2913 allocInfo.pool = pool;
2914 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
2915 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2916
2917 struct BufItem
2918 {
2919 VkBuffer Buf;
2920 VmaAllocation Alloc;
2921 };
2922 std::vector<BufItem> items;
2923
2924 // Fill entire pool.
2925 for(size_t i = 0; i < BUF_COUNT; ++i)
2926 {
2927 BufItem item;
2928 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002929 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002930 items.push_back(item);
2931 }
2932
2933 // Make sure that another allocation would fail.
2934 {
2935 BufItem item;
2936 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002937 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002938 }
2939
2940 // Validate that no buffer is lost. Also check that they are not mapped.
2941 for(size_t i = 0; i < items.size(); ++i)
2942 {
2943 VmaAllocationInfo allocInfo;
2944 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002945 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
2946 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002947 }
2948
2949 // Free some percent of random items.
2950 {
2951 const size_t PERCENT_TO_FREE = 10;
2952 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
2953 for(size_t i = 0; i < itemsToFree; ++i)
2954 {
2955 size_t index = (size_t)rand.Generate() % items.size();
2956 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
2957 items.erase(items.begin() + index);
2958 }
2959 }
2960
2961 // Randomly allocate and free items.
2962 {
2963 const size_t OPERATION_COUNT = BUF_COUNT;
2964 for(size_t i = 0; i < OPERATION_COUNT; ++i)
2965 {
2966 bool allocate = rand.Generate() % 2 != 0;
2967 if(allocate)
2968 {
2969 if(items.size() < BUF_COUNT)
2970 {
2971 BufItem item;
2972 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002973 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002974 items.push_back(item);
2975 }
2976 }
2977 else // Free
2978 {
2979 if(!items.empty())
2980 {
2981 size_t index = (size_t)rand.Generate() % items.size();
2982 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
2983 items.erase(items.begin() + index);
2984 }
2985 }
2986 }
2987 }
2988
2989 // Allocate up to maximum.
2990 while(items.size() < BUF_COUNT)
2991 {
2992 BufItem item;
2993 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002994 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002995 items.push_back(item);
2996 }
2997
2998 // Validate that no buffer is lost.
2999 for(size_t i = 0; i < items.size(); ++i)
3000 {
3001 VmaAllocationInfo allocInfo;
3002 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003003 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003004 }
3005
3006 // Next frame.
3007 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3008
3009 // Allocate another BUF_COUNT buffers.
3010 for(size_t i = 0; i < BUF_COUNT; ++i)
3011 {
3012 BufItem item;
3013 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003014 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003015 items.push_back(item);
3016 }
3017
3018 // Make sure the first BUF_COUNT buffers are lost. Delete them.
3019 for(size_t i = 0; i < BUF_COUNT; ++i)
3020 {
3021 VmaAllocationInfo allocInfo;
3022 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003023 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003024 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3025 }
3026 items.erase(items.begin(), items.begin() + BUF_COUNT);
3027
3028 // Validate that no buffer is lost.
3029 for(size_t i = 0; i < items.size(); ++i)
3030 {
3031 VmaAllocationInfo allocInfo;
3032 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003033 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003034 }
3035
3036 // Free one item.
3037 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3038 items.pop_back();
3039
3040 // Validate statistics.
3041 {
3042 VmaPoolStats poolStats = {};
3043 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003044 TEST(poolStats.allocationCount == items.size());
3045 TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
3046 TEST(poolStats.unusedRangeCount == 1);
3047 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3048 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003049 }
3050
3051 // Free all remaining items.
3052 for(size_t i = items.size(); i--; )
3053 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3054 items.clear();
3055
3056 // Allocate maximum items again.
3057 for(size_t i = 0; i < BUF_COUNT; ++i)
3058 {
3059 BufItem item;
3060 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003061 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003062 items.push_back(item);
3063 }
3064
3065 // Delete every other item.
3066 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3067 {
3068 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3069 items.erase(items.begin() + i);
3070 }
3071
3072 // Defragment!
3073 {
3074 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3075 for(size_t i = 0; i < items.size(); ++i)
3076 allocationsToDefragment[i] = items[i].Alloc;
3077
3078 VmaDefragmentationStats defragmentationStats;
3079 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003080 TEST(res == VK_SUCCESS);
3081 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003082 }
3083
3084 // Free all remaining items.
3085 for(size_t i = items.size(); i--; )
3086 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3087 items.clear();
3088
3089 ////////////////////////////////////////////////////////////////////////////////
3090 // Test for vmaMakePoolAllocationsLost
3091
3092 // Allocate 4 buffers on frame 10.
3093 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3094 for(size_t i = 0; i < 4; ++i)
3095 {
3096 BufItem item;
3097 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003098 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003099 items.push_back(item);
3100 }
3101
3102 // Touch first 2 of them on frame 11.
3103 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3104 for(size_t i = 0; i < 2; ++i)
3105 {
3106 VmaAllocationInfo allocInfo;
3107 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3108 }
3109
3110 // vmaMakePoolAllocationsLost. Only the remaining 2 should be lost.
3111 size_t lostCount = 0xDEADC0DE;
3112 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003113 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003114
3115 // Make another call. Now 0 should be lost.
3116 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003117 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003118
3119 // Make another call, with null count. Should not crash.
3120 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3121
3122 // END: Free all remaining items.
3123 for(size_t i = items.size(); i--; )
3124 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3125
3126 items.clear();
3127
Adam Sawickid2924172018-06-11 12:48:46 +02003128 ////////////////////////////////////////////////////////////////////////////////
3129 // Test for allocation too large for pool
3130
3131 {
3132 VmaAllocationCreateInfo allocCreateInfo = {};
3133 allocCreateInfo.pool = pool;
3134
3135 VkMemoryRequirements memReq;
3136 memReq.memoryTypeBits = UINT32_MAX;
3137 memReq.alignment = 1;
3138 memReq.size = poolCreateInfo.blockSize + 4;
3139
3140 VmaAllocation alloc = nullptr;
3141 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003142 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003143 }
3144
Adam Sawickib8333fb2018-03-13 16:15:53 +01003145 vmaDestroyPool(g_hAllocator, pool);
3146}
3147
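// Exercises vmaResizeAllocation: shrinking and growing sub-allocations inside a single
// 8 MB block of a custom pool, the expected out-of-pool-memory failures, and a dedicated
// allocation that can only be "resized" to its original size.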
Adam Sawickib0c36362018-11-13 16:17:38 +01003148static void TestResize()
3149{
3150 wprintf(L"Testing vmaResizeAllocation...\n");
3151
3152 const VkDeviceSize KILOBYTE = 1024ull;
3153 const VkDeviceSize MEGABYTE = KILOBYTE * 1024;
3154
3155 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3156 bufCreateInfo.size = 2 * MEGABYTE;
3157 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3158
3159 VmaAllocationCreateInfo allocCreateInfo = {};
3160 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3161
3162 uint32_t memTypeIndex = UINT32_MAX;
3163 TEST( vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex) == VK_SUCCESS );
3164
3165 VmaPoolCreateInfo poolCreateInfo = {};
3166 poolCreateInfo.flags = VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT;
3167 poolCreateInfo.blockSize = 8 * MEGABYTE;
3168 poolCreateInfo.minBlockCount = 1;
3169 poolCreateInfo.maxBlockCount = 1;
3170 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3171
3172 VmaPool pool;
3173 TEST( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS );
3174
3175 allocCreateInfo.pool = pool;
3176
3177 // Fill 8 MB pool with 4 * 2 MB allocations.
3178 VmaAllocation allocs[4] = {};
3179
3180 VkMemoryRequirements memReq = {};
3181 memReq.memoryTypeBits = UINT32_MAX;
3182 memReq.alignment = 4;
3183 memReq.size = bufCreateInfo.size;
3184
3185 VmaAllocationInfo allocInfo = {};
3186
3187 for(uint32_t i = 0; i < 4; ++i)
3188 {
3189 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &allocs[i], nullptr) == VK_SUCCESS );
3190 }
3191
3192 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 2MB
3193
3194 // Case: Resize to the same size always succeeds.
3195 {
3196 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS);
3197 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3198 TEST(allocInfo.size == 2ull * 1024 * 1024);
3199 }
3200
3201 // Case: Shrink allocation at the end.
3202 {
3203 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3204 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3205 TEST(allocInfo.size == 1ull * 1024 * 1024);
3206 }
3207
3208 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3209
3210 // Case: Shrink allocation before free space.
3211 {
3212 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 512 * KILOBYTE) == VK_SUCCESS );
3213 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3214 TEST(allocInfo.size == 512 * KILOBYTE);
3215 }
3216
3217 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3218
3219 // Case: Shrink allocation before next allocation.
3220 {
3221 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 1 * MEGABYTE) == VK_SUCCESS );
3222 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3223 TEST(allocInfo.size == 1 * MEGABYTE);
3224 }
3225
3226 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3227
3228 // Case: Grow allocation while there is even more space available.
3229 {
3230 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3231 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3232 TEST(allocInfo.size == 1 * MEGABYTE);
3233 }
3234
3235 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3236
3237 // Case: Grow allocation when exactly the required amount of free space is available.
3238 {
3239 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS );
3240 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3241 TEST(allocInfo.size == 2 * MEGABYTE);
3242 }
3243
3244 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3245
3246 // Case: Fail to grow when there is not enough free space due to next allocation.
3247 {
3248 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3249 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3250 TEST(allocInfo.size == 2 * MEGABYTE);
3251 }
3252
3253 // Case: Fail to grow when there is not enough free space due to end of memory block.
3254 {
3255 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3256 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3257 TEST(allocInfo.size == 1 * MEGABYTE);
3258 }
3259
3260 for(uint32_t i = 4; i--; )
3261 {
3262 vmaFreeMemory(g_hAllocator, allocs[i]);
3263 }
3264
3265 vmaDestroyPool(g_hAllocator, pool);
3266
3267 // Test dedicated allocation
3268 {
3269 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
3270 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3271 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3272
3273 VmaAllocation dedicatedAlloc = VK_NULL_HANDLE;
3274 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, &dedicatedAlloc, nullptr) == VK_SUCCESS );
3275
3276 // Case: Resize to the same size always succeeds.
3277 {
3278 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 2 * MEGABYTE) == VK_SUCCESS);
3279 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3280 TEST(allocInfo.size == 2ull * 1024 * 1024);
3281 }
3282
3283 // Case: Shrinking fails.
3284 {
3285 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 1 * MEGABYTE) < VK_SUCCESS);
3286 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3287 TEST(allocInfo.size == 2ull * 1024 * 1024);
3288 }
3289
3290 // Case: Growing fails.
3291 {
3292 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 3 * MEGABYTE) < VK_SUCCESS);
3293 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3294 TEST(allocInfo.size == 2ull * 1024 * 1024);
3295 }
3296
3297 vmaFreeMemory(g_hAllocator, dedicatedAlloc);
3298 }
3299}
3300
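// Returns true if every byte of the given memory region equals the expected fill pattern.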
Adam Sawickie44c6262018-06-15 14:30:39 +02003301static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3302{
3303 const uint8_t* pBytes = (const uint8_t*)pMemory;
3304 for(size_t i = 0; i < size; ++i)
3305 {
3306 if(pBytes[i] != pattern)
3307 {
3308 return false;
3309 }
3310 }
3311 return true;
3312}
3313
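// Checks the debug fill patterns written by the allocator: 0xDC on creation and 0xEF on
// destruction of an allocation (assumes the library is built with
// VMA_DEBUG_INITIALIZE_ALLOCATIONS enabled; otherwise the content is undefined).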
3314static void TestAllocationsInitialization()
3315{
3316 VkResult res;
3317
3318 const size_t BUF_SIZE = 1024;
3319
3320 // Create pool.
3321
3322 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3323 bufInfo.size = BUF_SIZE;
3324 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3325
3326 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3327 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3328
3329 VmaPoolCreateInfo poolCreateInfo = {};
3330 poolCreateInfo.blockSize = BUF_SIZE * 10;
3331 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3332 poolCreateInfo.maxBlockCount = 1;
3333 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003334 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003335
3336 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3337 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003338 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003339
3340 // Create one persistently mapped buffer to keep this block's memory mapped,
3341 // so that the pointer to the mapped data remains (more or less...) valid even
3342 // after other allocations are destroyed.
3343
3344 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3345 VkBuffer firstBuf;
3346 VmaAllocation firstAlloc;
3347 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003348 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003349
3350 // Test buffers.
3351
3352 for(uint32_t i = 0; i < 2; ++i)
3353 {
3354 const bool persistentlyMapped = i == 0;
3355 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3356 VkBuffer buf;
3357 VmaAllocation alloc;
3358 VmaAllocationInfo allocInfo;
3359 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003360 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003361
3362 void* pMappedData;
3363 if(!persistentlyMapped)
3364 {
3365 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003366 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003367 }
3368 else
3369 {
3370 pMappedData = allocInfo.pMappedData;
3371 }
3372
3373 // Validate initialized content
3374 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003375 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003376
3377 if(!persistentlyMapped)
3378 {
3379 vmaUnmapMemory(g_hAllocator, alloc);
3380 }
3381
3382 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3383
3384 // Validate freed content
3385 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003386 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003387 }
3388
3389 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3390 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3391}
3392
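// Multithreaded benchmark of a single custom pool: each thread allocates, touches, and
// frees buffers/images across simulated frames (allocations may become lost), and the
// per-thread timing statistics are aggregated into outResult.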
Adam Sawickib8333fb2018-03-13 16:15:53 +01003393static void TestPool_Benchmark(
3394 PoolTestResult& outResult,
3395 const PoolTestConfig& config)
3396{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003397 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003398
3399 RandomNumberGenerator mainRand{config.RandSeed};
3400
3401 uint32_t allocationSizeProbabilitySum = std::accumulate(
3402 config.AllocationSizes.begin(),
3403 config.AllocationSizes.end(),
3404 0u,
3405 [](uint32_t sum, const AllocationSize& allocSize) {
3406 return sum + allocSize.Probability;
3407 });
3408
3409 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3410 bufferInfo.size = 256; // Whatever.
3411 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3412
3413 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3414 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3415 imageInfo.extent.width = 256; // Whatever.
3416 imageInfo.extent.height = 256; // Whatever.
3417 imageInfo.extent.depth = 1;
3418 imageInfo.mipLevels = 1;
3419 imageInfo.arrayLayers = 1;
3420 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3421 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3422 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3423 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3424 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3425
3426 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3427 {
3428 VkBuffer dummyBuffer;
3429 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003430 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003431
3432 VkMemoryRequirements memReq;
3433 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3434 bufferMemoryTypeBits = memReq.memoryTypeBits;
3435
3436 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3437 }
3438
3439 uint32_t imageMemoryTypeBits = UINT32_MAX;
3440 {
3441 VkImage dummyImage;
3442 VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003443 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003444
3445 VkMemoryRequirements memReq;
3446 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3447 imageMemoryTypeBits = memReq.memoryTypeBits;
3448
3449 vkDestroyImage(g_hDevice, dummyImage, nullptr);
3450 }
3451
3452 uint32_t memoryTypeBits = 0;
3453 if(config.UsesBuffers() && config.UsesImages())
3454 {
3455 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3456 if(memoryTypeBits == 0)
3457 {
3458 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3459 return;
3460 }
3461 }
3462 else if(config.UsesBuffers())
3463 memoryTypeBits = bufferMemoryTypeBits;
3464 else if(config.UsesImages())
3465 memoryTypeBits = imageMemoryTypeBits;
3466 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003467 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003468
3469 VmaPoolCreateInfo poolCreateInfo = {};
3470 poolCreateInfo.memoryTypeIndex = 0;
3471 poolCreateInfo.minBlockCount = 1;
3472 poolCreateInfo.maxBlockCount = 1;
3473 poolCreateInfo.blockSize = config.PoolSize;
3474 poolCreateInfo.frameInUseCount = 1;
3475
3476 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3477 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3478 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3479
3480 VmaPool pool;
3481 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003482 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003483
3484 // Start time measurement - after creating pool and initializing data structures.
3485 time_point timeBeg = std::chrono::high_resolution_clock::now();
3486
3487 ////////////////////////////////////////////////////////////////////////////////
3488 // ThreadProc
3489 auto ThreadProc = [&](
3490 PoolTestThreadResult* outThreadResult,
3491 uint32_t randSeed,
3492 HANDLE frameStartEvent,
3493 HANDLE frameEndEvent) -> void
3494 {
3495 RandomNumberGenerator threadRand{randSeed};
3496
3497 outThreadResult->AllocationTimeMin = duration::max();
3498 outThreadResult->AllocationTimeSum = duration::zero();
3499 outThreadResult->AllocationTimeMax = duration::min();
3500 outThreadResult->DeallocationTimeMin = duration::max();
3501 outThreadResult->DeallocationTimeSum = duration::zero();
3502 outThreadResult->DeallocationTimeMax = duration::min();
3503 outThreadResult->AllocationCount = 0;
3504 outThreadResult->DeallocationCount = 0;
3505 outThreadResult->LostAllocationCount = 0;
3506 outThreadResult->LostAllocationTotalSize = 0;
3507 outThreadResult->FailedAllocationCount = 0;
3508 outThreadResult->FailedAllocationTotalSize = 0;
3509
3510 struct Item
3511 {
3512 VkDeviceSize BufferSize;
3513 VkExtent2D ImageSize;
3514 VkBuffer Buf;
3515 VkImage Image;
3516 VmaAllocation Alloc;
3517
3518 VkDeviceSize CalcSizeBytes() const
3519 {
3520 return BufferSize +
3521 ImageSize.width * ImageSize.height * 4;
3522 }
3523 };
3524 std::vector<Item> unusedItems, usedItems;
3525
3526 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3527
3528 // Create all items - all unused, not yet allocated.
3529 for(size_t i = 0; i < threadTotalItemCount; ++i)
3530 {
3531 Item item = {};
3532
3533 uint32_t allocSizeIndex = 0;
3534 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3535 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3536 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3537
3538 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3539 if(allocSize.BufferSizeMax > 0)
3540 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003541 TEST(allocSize.BufferSizeMin > 0);
3542 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003543 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3544 item.BufferSize = allocSize.BufferSizeMin;
3545 else
3546 {
3547 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3548 item.BufferSize = item.BufferSize / 16 * 16;
3549 }
3550 }
3551 else
3552 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003553 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003554 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3555 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3556 else
3557 {
3558 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3559 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3560 }
3561 }
3562
3563 unusedItems.push_back(item);
3564 }
3565
3566 auto Allocate = [&](Item& item) -> VkResult
3567 {
3568 VmaAllocationCreateInfo allocCreateInfo = {};
3569 allocCreateInfo.pool = pool;
3570 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3571 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3572
3573 if(item.BufferSize)
3574 {
3575 bufferInfo.size = item.BufferSize;
3576 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3577 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3578 }
3579 else
3580 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003581 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003582
3583 imageInfo.extent.width = item.ImageSize.width;
3584 imageInfo.extent.height = item.ImageSize.height;
3585 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3586 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3587 }
3588 };
3589
3590 ////////////////////////////////////////////////////////////////////////////////
3591 // Frames
3592 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3593 {
3594 WaitForSingleObject(frameStartEvent, INFINITE);
3595
3596 // Always make some percent of used bufs unused, to choose different used ones.
3597 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3598 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3599 {
3600 size_t index = threadRand.Generate() % usedItems.size();
3601 unusedItems.push_back(usedItems[index]);
3602 usedItems.erase(usedItems.begin() + index);
3603 }
3604
3605 // Determine which bufs we want to use in this frame.
3606 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3607 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003608 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003609 // Move some used to unused.
3610 while(usedBufCount < usedItems.size())
3611 {
3612 size_t index = threadRand.Generate() % usedItems.size();
3613 unusedItems.push_back(usedItems[index]);
3614 usedItems.erase(usedItems.begin() + index);
3615 }
3616 // Move some unused to used.
3617 while(usedBufCount > usedItems.size())
3618 {
3619 size_t index = threadRand.Generate() % unusedItems.size();
3620 usedItems.push_back(unusedItems[index]);
3621 unusedItems.erase(unusedItems.begin() + index);
3622 }
3623
3624 uint32_t touchExistingCount = 0;
3625 uint32_t touchLostCount = 0;
3626 uint32_t createSucceededCount = 0;
3627 uint32_t createFailedCount = 0;
3628
3629 // Touch all used bufs. If not created or lost, allocate.
3630 for(size_t i = 0; i < usedItems.size(); ++i)
3631 {
3632 Item& item = usedItems[i];
3633 // Not yet created.
3634 if(item.Alloc == VK_NULL_HANDLE)
3635 {
3636 res = Allocate(item);
3637 ++outThreadResult->AllocationCount;
3638 if(res != VK_SUCCESS)
3639 {
3640 item.Alloc = VK_NULL_HANDLE;
3641 item.Buf = VK_NULL_HANDLE;
3642 ++outThreadResult->FailedAllocationCount;
3643 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3644 ++createFailedCount;
3645 }
3646 else
3647 ++createSucceededCount;
3648 }
3649 else
3650 {
3651 // Touch.
3652 VmaAllocationInfo allocInfo;
3653 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3654 // Lost.
3655 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3656 {
3657 ++touchLostCount;
3658
3659 // Destroy.
3660 {
3661 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3662 if(item.Buf)
3663 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3664 else
3665 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3666 ++outThreadResult->DeallocationCount;
3667 }
3668 item.Alloc = VK_NULL_HANDLE;
3669 item.Buf = VK_NULL_HANDLE;
3670
3671 ++outThreadResult->LostAllocationCount;
3672 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3673
3674 // Recreate.
3675 res = Allocate(item);
3676 ++outThreadResult->AllocationCount;
3677 // Creation failed.
3678 if(res != VK_SUCCESS)
3679 {
3680 ++outThreadResult->FailedAllocationCount;
3681 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3682 ++createFailedCount;
3683 }
3684 else
3685 ++createSucceededCount;
3686 }
3687 else
3688 ++touchExistingCount;
3689 }
3690 }
3691
3692 /*
3693 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3694 randSeed, frameIndex,
3695 touchExistingCount, touchLostCount,
3696 createSucceededCount, createFailedCount);
3697 */
3698
3699 SetEvent(frameEndEvent);
3700 }
3701
3702 // Free all remaining items.
3703 for(size_t i = usedItems.size(); i--; )
3704 {
3705 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3706 if(usedItems[i].Buf)
3707 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3708 else
3709 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3710 ++outThreadResult->DeallocationCount;
3711 }
3712 for(size_t i = unusedItems.size(); i--; )
3713 {
3714 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3715 if(unusedItems[i].Buf)
3716 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3717 else
3718 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3719 ++outThreadResult->DeallocationCount;
3720 }
3721 };
3722
3723 // Launch threads.
3724 uint32_t threadRandSeed = mainRand.Generate();
3725 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3726 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3727 std::vector<std::thread> bkgThreads;
3728 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3729 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3730 {
3731 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3732 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3733 bkgThreads.emplace_back(std::bind(
3734 ThreadProc,
3735 &threadResults[threadIndex],
3736 threadRandSeed + threadIndex,
3737 frameStartEvents[threadIndex],
3738 frameEndEvents[threadIndex]));
3739 }
3740
3741 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003742 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003743 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3744 {
3745 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3746 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3747 SetEvent(frameStartEvents[threadIndex]);
3748 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3749 }
3750
3751 // Wait for threads to finish.
3752 for(size_t i = 0; i < bkgThreads.size(); ++i)
3753 {
3754 bkgThreads[i].join();
3755 CloseHandle(frameEndEvents[i]);
3756 CloseHandle(frameStartEvents[i]);
3757 }
3758 bkgThreads.clear();
3759
3760 // Finish time measurement - before destroying pool.
3761 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3762
3763 vmaDestroyPool(g_hAllocator, pool);
3764
3765 outResult.AllocationTimeMin = duration::max();
3766 outResult.AllocationTimeAvg = duration::zero();
3767 outResult.AllocationTimeMax = duration::min();
3768 outResult.DeallocationTimeMin = duration::max();
3769 outResult.DeallocationTimeAvg = duration::zero();
3770 outResult.DeallocationTimeMax = duration::min();
3771 outResult.LostAllocationCount = 0;
3772 outResult.LostAllocationTotalSize = 0;
3773 outResult.FailedAllocationCount = 0;
3774 outResult.FailedAllocationTotalSize = 0;
3775 size_t allocationCount = 0;
3776 size_t deallocationCount = 0;
3777 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3778 {
3779 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3780 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3781 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3782 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3783 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3784 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3785 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3786 allocationCount += threadResult.AllocationCount;
3787 deallocationCount += threadResult.DeallocationCount;
3788 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3789 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3790 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3791 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3792 }
3793 if(allocationCount)
3794 outResult.AllocationTimeAvg /= allocationCount;
3795 if(deallocationCount)
3796 outResult.DeallocationTimeAvg /= deallocationCount;
3797}
3798
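// Returns true if the half-open ranges [ptr1, ptr1 + size1) and [ptr2, ptr2 + size2) overlap.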
3799static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3800{
3801 if(ptr1 < ptr2)
3802 return ptr1 + size1 > ptr2;
3803 else if(ptr2 < ptr1)
3804 return ptr2 + size2 > ptr1;
3805 else
3806 return true;
3807}
3808
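// Covers vmaMapMemory/vmaUnmapMemory reference counting and VMA_ALLOCATION_CREATE_MAPPED_BIT
// for allocations made from the default heaps, a custom pool, and dedicated memory.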
3809static void TestMapping()
3810{
3811 wprintf(L"Testing mapping...\n");
3812
3813 VkResult res;
3814 uint32_t memTypeIndex = UINT32_MAX;
3815
3816 enum TEST
3817 {
3818 TEST_NORMAL,
3819 TEST_POOL,
3820 TEST_DEDICATED,
3821 TEST_COUNT
3822 };
3823 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
3824 {
3825 VmaPool pool = nullptr;
3826 if(testIndex == TEST_POOL)
3827 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003828 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003829 VmaPoolCreateInfo poolInfo = {};
3830 poolInfo.memoryTypeIndex = memTypeIndex;
3831 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003832 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003833 }
3834
3835 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3836 bufInfo.size = 0x10000;
3837 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3838
3839 VmaAllocationCreateInfo allocCreateInfo = {};
3840 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3841 allocCreateInfo.pool = pool;
3842 if(testIndex == TEST_DEDICATED)
3843 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3844
3845 VmaAllocationInfo allocInfo;
3846
3847 // Mapped manually
3848
3849 // Create 2 buffers.
3850 BufferInfo bufferInfos[3];
3851 for(size_t i = 0; i < 2; ++i)
3852 {
3853 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3854 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003855 TEST(res == VK_SUCCESS);
3856 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003857 memTypeIndex = allocInfo.memoryType;
3858 }
3859
3860 // Map buffer 0.
3861 char* data00 = nullptr;
3862 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003863 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003864 data00[0xFFFF] = data00[0];
3865
3866 // Map buffer 0 second time.
3867 char* data01 = nullptr;
3868 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003869 TEST(res == VK_SUCCESS && data01 == data00);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003870
3871 // Map buffer 1.
3872 char* data1 = nullptr;
3873 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003874 TEST(res == VK_SUCCESS && data1 != nullptr);
3875 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01003876 data1[0xFFFF] = data1[0];
3877
3878 // Unmap buffer 0 two times.
3879 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
3880 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
3881 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003882 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003883
3884 // Unmap buffer 1.
3885 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
3886 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003887 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003888
3889 // Create 3rd buffer - persistently mapped.
3890 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
3891 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3892 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003893 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003894
3895 // Map buffer 2.
3896 char* data2 = nullptr;
3897 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003898 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003899 data2[0xFFFF] = data2[0];
3900
3901 // Unmap buffer 2.
3902 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
3903 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003904 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003905
3906 // Destroy all buffers.
3907 for(size_t i = 3; i--; )
3908 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
3909
3910 vmaDestroyPool(g_hAllocator, pool);
3911 }
3912}
3913
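// Same mapping scenarios as TestMapping, but performed concurrently by 16 threads, each
// creating, mapping, touching, and destroying its own set of buffers.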
3914static void TestMappingMultithreaded()
3915{
3916 wprintf(L"Testing mapping multithreaded...\n");
3917
3918 static const uint32_t threadCount = 16;
3919 static const uint32_t bufferCount = 1024;
3920 static const uint32_t threadBufferCount = bufferCount / threadCount;
3921
3922 VkResult res;
3923 volatile uint32_t memTypeIndex = UINT32_MAX;
3924
3925 enum TEST
3926 {
3927 TEST_NORMAL,
3928 TEST_POOL,
3929 TEST_DEDICATED,
3930 TEST_COUNT
3931 };
3932 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
3933 {
3934 VmaPool pool = nullptr;
3935 if(testIndex == TEST_POOL)
3936 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003937 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003938 VmaPoolCreateInfo poolInfo = {};
3939 poolInfo.memoryTypeIndex = memTypeIndex;
3940 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003941 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003942 }
3943
3944 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3945 bufCreateInfo.size = 0x10000;
3946 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3947
3948 VmaAllocationCreateInfo allocCreateInfo = {};
3949 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3950 allocCreateInfo.pool = pool;
3951 if(testIndex == TEST_DEDICATED)
3952 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3953
3954 std::thread threads[threadCount];
3955 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
3956 {
3957 threads[threadIndex] = std::thread([=, &memTypeIndex](){
3958 // ======== THREAD FUNCTION ========
3959
3960 RandomNumberGenerator rand{threadIndex};
3961
3962 enum class MODE
3963 {
3964 // Don't map this buffer at all.
3965 DONT_MAP,
3966 // Map and quickly unmap.
3967 MAP_FOR_MOMENT,
3968 // Map and unmap before destruction.
3969 MAP_FOR_LONGER,
3970 // Map two times. Quickly unmap, second unmap before destruction.
3971 MAP_TWO_TIMES,
3972 // Create this buffer as persistently mapped.
3973 PERSISTENTLY_MAPPED,
3974 COUNT
3975 };
3976 std::vector<BufferInfo> bufInfos{threadBufferCount};
3977 std::vector<MODE> bufModes{threadBufferCount};
3978
3979 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
3980 {
3981 BufferInfo& bufInfo = bufInfos[bufferIndex];
3982 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
3983 bufModes[bufferIndex] = mode;
3984
3985 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
3986 if(mode == MODE::PERSISTENTLY_MAPPED)
3987 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
3988
3989 VmaAllocationInfo allocInfo;
3990 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
3991 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003992 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003993
3994 if(memTypeIndex == UINT32_MAX)
3995 memTypeIndex = allocInfo.memoryType;
3996
3997 char* data = nullptr;
3998
3999 if(mode == MODE::PERSISTENTLY_MAPPED)
4000 {
4001 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004002 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004003 }
4004 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4005 mode == MODE::MAP_TWO_TIMES)
4006 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004007 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004008 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004009 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004010
4011 if(mode == MODE::MAP_TWO_TIMES)
4012 {
4013 char* data2 = nullptr;
4014 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004015 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004016 }
4017 }
4018 else if(mode == MODE::DONT_MAP)
4019 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004020 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004021 }
4022 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004023 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004024
4025 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4026 if(data)
4027 data[0xFFFF] = data[0];
4028
4029 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4030 {
4031 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4032
4033 VmaAllocationInfo allocInfo;
4034 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4035 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004036 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004037 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004038 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004039 }
4040
4041 switch(rand.Generate() % 3)
4042 {
4043 case 0: Sleep(0); break; // Yield.
4044 case 1: Sleep(10); break; // 10 ms
4045 // default: No sleep.
4046 }
4047
4048 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4049 if(data)
4050 data[0xFFFF] = data[0];
4051 }
4052
4053 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4054 {
4055 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4056 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4057 {
4058 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4059
4060 VmaAllocationInfo allocInfo;
4061 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004062 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004063 }
4064
4065 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4066 }
4067 });
4068 }
4069
4070 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4071 threads[threadIndex].join();
4072
4073 vmaDestroyPool(g_hAllocator, pool);
4074 }
4075}
4076
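// Benchmark results are appended as CSV rows (times in microseconds, sizes in bytes),
// intended for import into a spreadsheet; the headers below define the columns.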
4077static void WriteMainTestResultHeader(FILE* file)
4078{
4079 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02004080 "Code,Time,"
4081 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004082 "Total Time (us),"
4083 "Allocation Time Min (us),"
4084 "Allocation Time Avg (us),"
4085 "Allocation Time Max (us),"
4086 "Deallocation Time Min (us),"
4087 "Deallocation Time Avg (us),"
4088 "Deallocation Time Max (us),"
4089 "Total Memory Allocated (B),"
4090 "Free Range Size Avg (B),"
4091 "Free Range Size Max (B)\n");
4092}
4093
4094static void WriteMainTestResult(
4095 FILE* file,
4096 const char* codeDescription,
4097 const char* testDescription,
4098 const Config& config, const Result& result)
4099{
4100 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4101 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4102 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4103 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4104 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4105 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4106 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4107
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004108 std::string currTime;
4109 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004110
4111 fprintf(file,
4112 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004113 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4114 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004115 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004116 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004117 totalTimeSeconds * 1e6f,
4118 allocationTimeMinSeconds * 1e6f,
4119 allocationTimeAvgSeconds * 1e6f,
4120 allocationTimeMaxSeconds * 1e6f,
4121 deallocationTimeMinSeconds * 1e6f,
4122 deallocationTimeAvgSeconds * 1e6f,
4123 deallocationTimeMaxSeconds * 1e6f,
4124 result.TotalMemoryAllocated,
4125 result.FreeRangeSizeAvg,
4126 result.FreeRangeSizeMax);
4127}
4128
4129static void WritePoolTestResultHeader(FILE* file)
4130{
4131 fprintf(file,
4132 "Code,Test,Time,"
4133 "Config,"
4134 "Total Time (us),"
4135 "Allocation Time Min (us),"
4136 "Allocation Time Avg (us),"
4137 "Allocation Time Max (us),"
4138 "Deallocation Time Min (us),"
4139 "Deallocation Time Avg (us),"
4140 "Deallocation Time Max (us),"
4141 "Lost Allocation Count,"
4142 "Lost Allocation Total Size (B),"
4143 "Failed Allocation Count,"
4144 "Failed Allocation Total Size (B)\n");
4145}
4146
4147static void WritePoolTestResult(
4148 FILE* file,
4149 const char* codeDescription,
4150 const char* testDescription,
4151 const PoolTestConfig& config,
4152 const PoolTestResult& result)
4153{
4154 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4155 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4156 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4157 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4158 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4159 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4160 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4161
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004162 std::string currTime;
4163 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004164
4165 fprintf(file,
4166 "%s,%s,%s,"
4167 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4168 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4169 // General
4170 codeDescription,
4171 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004172 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004173 // Config
4174 config.ThreadCount,
4175 (unsigned long long)config.PoolSize,
4176 config.FrameCount,
4177 config.TotalItemCount,
4178 config.UsedItemCountMin,
4179 config.UsedItemCountMax,
4180 config.ItemsToMakeUnusedPercent,
4181 // Results
4182 totalTimeSeconds * 1e6f,
4183 allocationTimeMinSeconds * 1e6f,
4184 allocationTimeAvgSeconds * 1e6f,
4185 allocationTimeMaxSeconds * 1e6f,
4186 deallocationTimeMinSeconds * 1e6f,
4187 deallocationTimeAvgSeconds * 1e6f,
4188 deallocationTimeMaxSeconds * 1e6f,
4189 result.LostAllocationCount,
4190 result.LostAllocationTotalSize,
4191 result.FailedAllocationCount,
4192 result.FailedAllocationTotalSize);
4193}
4194
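// Runs MainTest once with a single hand-tuned configuration and writes one CSV row.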
4195static void PerformCustomMainTest(FILE* file)
4196{
4197 Config config{};
4198 config.RandSeed = 65735476;
4199 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4200 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4201 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4202 config.FreeOrder = FREE_ORDER::FORWARD;
4203 config.ThreadCount = 16;
4204 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004205 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004206
4207 // Buffers
4208 //config.AllocationSizes.push_back({4, 16, 1024});
4209 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4210
4211 // Images
4212 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4213 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4214
4215 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4216 config.AdditionalOperationCount = 1024;
4217
4218 Result result{};
4219 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004220 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004221 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4222}
4223
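// Runs TestPool_Benchmark once with a single hand-tuned configuration and writes one CSV row.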
4224static void PerformCustomPoolTest(FILE* file)
4225{
4226 PoolTestConfig config;
4227 config.PoolSize = 100 * 1024 * 1024;
4228 config.RandSeed = 2345764;
4229 config.ThreadCount = 1;
4230 config.FrameCount = 200;
4231 config.ItemsToMakeUnusedPercent = 2;
4232
4233 AllocationSize allocSize = {};
4234 allocSize.BufferSizeMin = 1024;
4235 allocSize.BufferSizeMax = 1024 * 1024;
4236 allocSize.Probability = 1;
4237 config.AllocationSizes.push_back(allocSize);
4238
4239 allocSize.BufferSizeMin = 0;
4240 allocSize.BufferSizeMax = 0;
4241 allocSize.ImageSizeMin = 128;
4242 allocSize.ImageSizeMax = 1024;
4243 allocSize.Probability = 1;
4244 config.AllocationSizes.push_back(allocSize);
4245
4246 config.PoolSize = config.CalcAvgResourceSize() * 200;
4247 config.UsedItemCountMax = 160;
4248 config.TotalItemCount = config.UsedItemCountMax * 10;
4249 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4250
4251 g_MemoryAliasingWarningEnabled = false;
4252 PoolTestResult result = {};
4253 TestPool_Benchmark(result, config);
4254 g_MemoryAliasingWarningEnabled = true;
4255
4256 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4257}
4258
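// Sweeps the full benchmark matrix: thread count, buffers vs. images, small vs. large sizes,
// varying vs. constant sizes, initial allocation percentage, and allocation strategy.
// The number of combinations (and the repeat count) grows with ConfigType.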
Adam Sawickib8333fb2018-03-13 16:15:53 +01004259static void PerformMainTests(FILE* file)
4260{
4261 uint32_t repeatCount = 1;
4262 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4263
4264 Config config{};
4265 config.RandSeed = 65735476;
4266 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4267 config.FreeOrder = FREE_ORDER::FORWARD;
4268
4269 size_t threadCountCount = 1;
4270 switch(ConfigType)
4271 {
4272 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4273 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4274 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4275 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4276 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4277 default: assert(0);
4278 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004279
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004280 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004281
Adam Sawickib8333fb2018-03-13 16:15:53 +01004282 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4283 {
4284 std::string desc1;
4285
4286 switch(threadCountIndex)
4287 {
4288 case 0:
4289 desc1 += "1_thread";
4290 config.ThreadCount = 1;
4291 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4292 break;
4293 case 1:
4294 desc1 += "16_threads+0%_common";
4295 config.ThreadCount = 16;
4296 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4297 break;
4298 case 2:
4299 desc1 += "16_threads+50%_common";
4300 config.ThreadCount = 16;
4301 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4302 break;
4303 case 3:
4304 desc1 += "16_threads+100%_common";
4305 config.ThreadCount = 16;
4306 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4307 break;
4308 case 4:
4309 desc1 += "2_threads+0%_common";
4310 config.ThreadCount = 2;
4311 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4312 break;
4313 case 5:
4314 desc1 += "2_threads+50%_common";
4315 config.ThreadCount = 2;
4316 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4317 break;
4318 case 6:
4319 desc1 += "2_threads+100%_common";
4320 config.ThreadCount = 2;
4321 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4322 break;
4323 default:
4324 assert(0);
4325 }
4326
4327 // 0 = buffers, 1 = images, 2 = buffers and images
4328 size_t buffersVsImagesCount = 2;
4329 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4330 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4331 {
4332 std::string desc2 = desc1;
4333 switch(buffersVsImagesIndex)
4334 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004335 case 0: desc2 += ",Buffers"; break;
4336 case 1: desc2 += ",Images"; break;
4337 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004338 default: assert(0);
4339 }
4340
4341 // 0 = small, 1 = large, 2 = small and large
4342 size_t smallVsLargeCount = 2;
4343 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4344 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4345 {
4346 std::string desc3 = desc2;
4347 switch(smallVsLargeIndex)
4348 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004349 case 0: desc3 += ",Small"; break;
4350 case 1: desc3 += ",Large"; break;
4351 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004352 default: assert(0);
4353 }
4354
4355 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4356 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4357 else
4358 config.MaxBytesToAllocate = 4ull * 1024 * 1024;
4359
4360 // 0 = varying sizes min...max, 1 = set of constant sizes
4361 size_t constantSizesCount = 1;
4362 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4363 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4364 {
4365 std::string desc4 = desc3;
4366 switch(constantSizesIndex)
4367 {
4368 case 0: desc4 += " Varying_sizes"; break;
4369 case 1: desc4 += " Constant_sizes"; break;
4370 default: assert(0);
4371 }
4372
4373 config.AllocationSizes.clear();
4374 // Buffers present
4375 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4376 {
4377 // Small
4378 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4379 {
4380 // Varying size
4381 if(constantSizesIndex == 0)
4382 config.AllocationSizes.push_back({4, 16, 1024});
4383 // Constant sizes
4384 else
4385 {
4386 config.AllocationSizes.push_back({1, 16, 16});
4387 config.AllocationSizes.push_back({1, 64, 64});
4388 config.AllocationSizes.push_back({1, 256, 256});
4389 config.AllocationSizes.push_back({1, 1024, 1024});
4390 }
4391 }
4392 // Large
4393 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4394 {
4395 // Varying size
4396 if(constantSizesIndex == 0)
4397 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4398 // Constant sizes
4399 else
4400 {
4401 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4402 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4403 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4404 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4405 }
4406 }
4407 }
4408 // Images present
4409 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4410 {
4411 // Small
4412 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4413 {
4414 // Varying size
4415 if(constantSizesIndex == 0)
4416 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4417 // Constant sizes
4418 else
4419 {
4420 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4421 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4422 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4423 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4424 }
4425 }
4426 // Large
4427 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4428 {
4429 // Varying size
4430 if(constantSizesIndex == 0)
4431 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4432 // Constant sizes
4433 else
4434 {
4435 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4436 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4437 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4438 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4439 }
4440 }
4441 }

                    // 0 = allocate 100% up front with no additional operations; 1 = 50%, 2 = 5%,
                    // 3 = 95% up front, each followed by a large number of additional operations.
                    size_t beginBytesToAllocateCount = 1;
                    if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
                    for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
                    {
                        std::string desc5 = desc4;

                        switch(beginBytesToAllocateIndex)
                        {
                        case 0:
                            desc5 += ",Allocate_100%";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate;
                            config.AdditionalOperationCount = 0;
                            break;
                        case 1:
                            desc5 += ",Allocate_50%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 2:
                            desc5 += ",Allocate_5%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 3:
                            desc5 += ",Allocate_95%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        default:
                            assert(0);
                        }

                        for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
                        {
                            std::string desc6 = desc5;
                            switch(strategyIndex)
                            {
                            case 0:
                                desc6 += ",BestFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
                                break;
                            case 1:
                                desc6 += ",WorstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
                                break;
                            case 2:
                                desc6 += ",FirstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
                                break;
                            default:
                                assert(0);
                            }

                            desc6 += ',';
                            desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];

                            const char* testDescription = desc6.c_str();

                            for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                            {
                                printf("%s #%u\n", testDescription, (uint32_t)repeat);

                                Result result{};
                                VkResult res = MainTest(result, config);
                                TEST(res == VK_SUCCESS);
                                if(file)
                                {
                                    WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}

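// Illustrative sketch only (not called from Test()): the allocation strategy flags that
// PerformMainTests() benchmarks above can also be requested for a single ad-hoc allocation.
// The function name below is local to this sketch.
static void SketchAllocationStrategyUsage()
{
    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 1024 * 64;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    // Prefer the smallest suitable free range for this allocation.
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;

    VkBuffer buf = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
    TEST(res == VK_SUCCESS);

    vmaDestroyBuffer(g_hAllocator, buf, alloc);
}

// Benchmarks custom pool (VmaPool) usage over a matrix of configurations: thread count,
// buffers vs. images, small vs. large sizes, varying vs. constant sizes, and several
// pool subscription levels. Each combination is written out via WritePoolTestResult().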
static void PerformPoolTests(FILE* file)
{
    const size_t AVG_RESOURCES_PER_POOL = 300;

    uint32_t repeatCount = 1;
    if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;

    PoolTestConfig config{};
    config.RandSeed = 2346343;
    config.FrameCount = 200;
    config.ItemsToMakeUnusedPercent = 2;

    size_t threadCountCount = 1;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
    case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
    case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
    case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
    case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
    default: assert(0);
    }
    for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
    {
        std::string desc1;

        switch(threadCountIndex)
        {
        case 0:
            desc1 += "1_thread";
            config.ThreadCount = 1;
            break;
        case 1:
            desc1 += "16_threads";
            config.ThreadCount = 16;
            break;
        case 2:
            desc1 += "2_threads";
            config.ThreadCount = 2;
            break;
        default:
            assert(0);
        }

        // 0 = buffers, 1 = images, 2 = buffers and images
        size_t buffersVsImagesCount = 2;
        if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
        for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
        {
            std::string desc2 = desc1;
            switch(buffersVsImagesIndex)
            {
            case 0: desc2 += " Buffers"; break;
            case 1: desc2 += " Images"; break;
            case 2: desc2 += " Buffers+Images"; break;
            default: assert(0);
            }

            // 0 = small, 1 = large, 2 = small and large
            size_t smallVsLargeCount = 2;
            if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
            for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
            {
                std::string desc3 = desc2;
                switch(smallVsLargeIndex)
                {
                case 0: desc3 += " Small"; break;
                case 1: desc3 += " Large"; break;
                case 2: desc3 += " Small+Large"; break;
                default: assert(0);
                }

                if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                    config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
                else
                    config.PoolSize = 4ull * 1024 * 1024;

                // 0 = varying sizes min...max, 1 = set of constant sizes
                size_t constantSizesCount = 1;
                if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
                for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
                {
                    std::string desc4 = desc3;
                    switch(constantSizesIndex)
                    {
                    case 0: desc4 += " Varying_sizes"; break;
                    case 1: desc4 += " Constant_sizes"; break;
                    default: assert(0);
                    }

                    config.AllocationSizes.clear();
                    // Buffers present
                    if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 16, 1024});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 16, 16});
                                config.AllocationSizes.push_back({1, 64, 64});
                                config.AllocationSizes.push_back({1, 256, 256});
                                config.AllocationSizes.push_back({1, 1024, 1024});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0x10000, 0x10000});
                                config.AllocationSizes.push_back({1, 0x80000, 0x80000});
                                config.AllocationSizes.push_back({1, 0x200000, 0x200000});
                                config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
                            }
                        }
                    }
                    // Images present
                    if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 4, 32});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 4, 4});
                                config.AllocationSizes.push_back({1, 0, 0, 8, 8});
                                config.AllocationSizes.push_back({1, 0, 0, 16, 16});
                                config.AllocationSizes.push_back({1, 0, 0, 32, 32});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 256, 256});
                                config.AllocationSizes.push_back({1, 0, 0, 512, 512});
                                config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
                                config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
                            }
                        }
                    }

                    const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
                    config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;

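                    // The subscription percentages below are relative to AVG_RESOURCES_PER_POOL, i.e.
                    // to the capacity the pool size was just derived from, so modes above 100%
                    // intentionally over-subscribe the pool.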
                    // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
                    size_t subscriptionModeCount;
                    switch(ConfigType)
                    {
                    case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
                    case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
                    case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
                    default: assert(0);
                    }
                    for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
                    {
                        std::string desc5 = desc4;

                        switch(subscriptionModeIndex)
                        {
                        case 0:
                            desc5 += " Subscription_66%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
                            break;
                        case 1:
                            desc5 += " Subscription_133%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
                            break;
                        case 2:
                            desc5 += " Subscription_100%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
                            break;
                        case 3:
                            desc5 += " Subscription_33%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
                            break;
                        case 4:
                            desc5 += " Subscription_166%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
                            break;
                        default:
                            assert(0);
                        }

                        config.TotalItemCount = config.UsedItemCountMax * 5;
                        config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;

                        const char* testDescription = desc5.c_str();

                        for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                        {
                            printf("%s #%u\n", testDescription, (uint32_t)repeat);

                            PoolTestResult result{};
                            g_MemoryAliasingWarningEnabled = false;
                            TestPool_Benchmark(result, config);
                            g_MemoryAliasingWarningEnabled = true;
                            WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                        }
                    }
                }
            }
        }
    }
}

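// Smoke test of a custom pool using the buddy algorithm: creates a pool with
// VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT, makes buffer allocations of assorted sizes
// (including a 1-byte buffer and a raw aligned allocation), dumps statistics to JSON,
// then frees everything in random order.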
static void BasicTestBuddyAllocator()
{
    wprintf(L"Basic test buddy allocator\n");

    RandomNumberGenerator rand{76543};

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024; // Size doesn't matter here; the sample is only used to pick a memory type.
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Deliberately adding 1023 to test usable size smaller than memory block size.
    poolCreateInfo.blockSize = 1024 * 1024 + 1023;
    poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
    //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

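    // (The buddy metadata is expected to round the usable block size down to a power of two,
    // so the extra 1023 bytes above should simply stay unused.)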
    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    std::vector<BufferInfo> bufInfo;
    BufferInfo newBufInfo;
    VmaAllocationInfo allocInfo;

    bufCreateInfo.size = 1024 * 256;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    bufCreateInfo.size = 1024 * 512;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    bufCreateInfo.size = 1024 * 128;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    // Test very small allocation, smaller than minimum node size.
    bufCreateInfo.size = 1;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    // Test some small allocation with alignment requirement.
    {
        VkMemoryRequirements memReq;
        memReq.alignment = 256;
        memReq.memoryTypeBits = UINT32_MAX;
        memReq.size = 32;

        newBufInfo.Buffer = VK_NULL_HANDLE;
        res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
            &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        TEST(allocInfo.offset % memReq.alignment == 0);
        bufInfo.push_back(newBufInfo);
    }

    //SaveAllocatorStatsToFile(L"TEST.json");

    VmaPoolStats stats = {};
    vmaGetPoolStats(g_hAllocator, pool, &stats);
    int DBG = 0; // Set breakpoint here to inspect `stats`.
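    // (Fields worth inspecting here are e.g. stats.allocationCount and stats.unusedSize;
    // at this point all allocations made above should still live in a single memory block.)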

    // Allocate enough new buffers to be sure some of them end up in a second memory block.
    for(uint32_t i = 0; i < 32; ++i)
    {
        bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);
    }

    SaveAllocatorStatsToFile(L"BuddyTest01.json");

    // Destroy the buffers in random order.
    while(!bufInfo.empty())
    {
        const size_t indexToDestroy = rand.Generate() % bufInfo.size();
        const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
        vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
        bufInfo.erase(bufInfo.begin() + indexToDestroy);
    }

    vmaDestroyPool(g_hAllocator, pool);
}

// Test the testing environment itself: upload pseudo-random data to GPU buffers and validate that it reads back correctly.
static void TestGpuData()
{
    RandomNumberGenerator rand = { 53434 };

    std::vector<AllocInfo> allocInfo;

    for(size_t i = 0; i < 100; ++i)
    {
        AllocInfo info = {};

        info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
            VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
            VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
        info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

        VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
        TEST(res == VK_SUCCESS);

        info.m_StartValue = rand.Generate();

        allocInfo.push_back(std::move(info));
    }

    UploadGpuData(allocInfo.data(), allocInfo.size());

    ValidateGpuData(allocInfo.data(), allocInfo.size());

    DestroyAllAllocations(allocInfo);
}

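// Main entry point of the test suite. Note that the if(true) block below currently
// short-circuits it to run only the defragmentation tests and return early.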
void Test()
{
    wprintf(L"TESTING:\n");

    if(true)
    {
        // # Temporarily insert custom tests here
        // ########################################
        // ########################################

        TestDefragmentationGpu();
        TestDefragmentationSimple();
        TestDefragmentationFull();
        return;
    }

    // # Simple tests

    TestBasics();
    //TestGpuData(); // Not calling this because it's just testing the testing environment.
#if VMA_DEBUG_MARGIN
    TestDebugMargin();
#else
    TestPool_SameSize();
    TestHeapSizeLimit();
    TestResize();
#endif
#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
    TestAllocationsInitialization();
#endif
    TestMapping();
    TestMappingMultithreaded();
    TestLinearAllocator();
    ManuallyTestLinearAllocator();
    TestLinearAllocatorMultiBlock();

    BasicTestBuddyAllocator();

    {
        FILE* file;
        fopen_s(&file, "Algorithms.csv", "w");
        assert(file != NULL);
        BenchmarkAlgorithms(file);
        fclose(file);
    }

    TestDefragmentationSimple();
    TestDefragmentationFull();
    TestDefragmentationGpu();

    // # Detailed tests
    FILE* file;
    fopen_s(&file, "Results.csv", "w");
    assert(file != NULL);

    WriteMainTestResultHeader(file);
    PerformMainTests(file);
    //PerformCustomMainTest(file);

    WritePoolTestResultHeader(file);
    PerformPoolTests(file);
    //PerformCustomPoolTest(file);

    fclose(file);

    wprintf(L"Done.\n");
}

#endif // #ifdef _WIN32